class MixinDef(activetags.ReplayBase):
    """A definition for a resource mixin.

    Resource mixins are resource descriptor fragments typically rooted in
    tables (though it's conceivable that other structures could grow mixin
    attributes as well).

    They are used to define and implement certain behaviours components of
    the DC software want to see:

    - products want to be added into their table, and certain fields are
      required within tables describing products
    - tables containing positions need some basic machinery to support scs.
    - siap needs quite a bunch of fields

    Mixins consist of events that are played back on the structure mixing
    in before anything else happens (much like original) and two procedure
    definitions, viz, processEarly and processLate.  These can access the
    structure that has the mixin as substrate.

    processEarly is called as part of the substrate's completeElement
    method.  processLate is executed just before the parser exits.  This is
    the place to fix up anything that uses the table mixed in.  Note,
    however, that you should be as conservative as possible here -- you
    should think of DC structures as immutable as long as possible.

    Programmatically, you can check if a certain table mixes in something
    by calling its mixesIn method.

    Recursive application of mixins, even to separate objects, will
    deadlock (see the applicationLock created in completeElement).
    """
    name_ = "mixinDef"

    # Free-form documentation for RD authors; strip=False keeps whitespace.
    _doc = base.UnicodeAttribute("doc", description="Documentation for"
        " this mixin", strip=False)
    # Events replayed into the substrate at mixin (parse) time.
    _events = base.StructAttribute("events",
        childFactory=activetags.EmbeddedStream,
        description="Events to be played back into the structure mixing"
        " this in at mixin time.", copyable=True, default=base.NotGiven)
    # Events replayed when the substrate's completeElement runs (see the
    # completeElement monkey-patching in applyTo below).
    _lateEvents = base.StructAttribute("lateEvents",
        childFactory=LateEvents,
        description="Events to be played back into the structure mixing"
        " this in at completion time.", copyable=True, default=base.NotGiven)
    _processEarly = base.StructAttribute("processEarly",
        default=None,
        childFactory=ProcessEarly,
        description="Code executed at element fixup.",
        copyable=True)
    _processLate = base.StructAttribute("processLate",
        default=None,
        childFactory=ProcessLate,
        description="Code executed resource fixup.",
        copyable=True)
    # Mixin parameters; keys must be unique, values become macro expansions.
    _pars = base.UniquedStructListAttribute("pars",
        childFactory=MixinPar,
        uniqueAttribute="key",
        description="Parameters available for this mixin.",
        copyable=True)
    _original = base.OriginalAttribute()

    def completeElement(self, ctx):
        """finishes the mixin definition after parsing.

        Resets macro expansion in the stored event streams and creates
        the lock serialising applyTo calls.
        """
        # we want to double-expand macros in mixins.  Thus, reset all
        # value/expanded events to plain values
        if self.events:
            self.events.unexpandMacros()
        if self.lateEvents:
            self.lateEvents.unexpandMacros()

        # This lock protects against multiple uses of applyTo.  This is
        # necessary because during replay, we have macroExpansions and
        # macroParent reflect a concrete application's context.
        self.applicationLock = threading.Lock()
        self._completeElementNext(MixinDef, ctx)

    def _defineMacros(self, fillers, destination):
        """creates attributes macroExpansions and parentMacroPackage
        used by execMacros.

        Within mixins, you can use macros filled by mixin parameters or
        expanded by the substrate.  This information is local to a concrete
        mixin application.  Hence, applyTo calls this method, and the
        attributes created are invalid for any subsequent or parallel
        applyTo calls.

        Therefore, applyTo acquires the applicationLock before calling
        this.

        fillers is a dict mapping parameter keys to literal expansions;
        it is consumed (popped) here, so callers should pass a copy.
        Raises StructureError for missing mandatory or unknown parameters.
        """
        self.parentMacroPackage = None
        # if the substrate can expand macros itself, fall back to it for
        # macros this mixin does not define (see execMacro)
        if hasattr(destination, "execMacro"):
            self.parentMacroPackage = destination

        self.macroExpansions = {}
        for p in self.pars:
            if p.key in fillers:
                self.macroExpansions[p.key] = fillers.pop(p.key)
            elif p.isDefaulted():
                self.macroExpansions[p.key] = p.content_
            else:
                raise base.StructureError("Mixin parameter %s mandatory"%p.key)
        # anything left over was not declared as a par on this mixin
        if fillers:
            raise base.StructureError("The attribute(s) %s is/are not allowed"
                " on this mixin"%(",".join(fillers)))

    def execMacro(self, macName, args):
        """expands macName, looking at mixin pars first, then the substrate.

        Only valid between _defineMacros and the end of the corresponding
        applyTo (the attributes used here are per-application state).
        """
        if macName in self.macroExpansions:
            return self.macroExpansions[macName]
        try:
            if self.parentMacroPackage:
                return self.parentMacroPackage.execMacro(macName, args)
        except base.MacroError:
            raise base.MacroError(
                "No macro \\%s available in this mixin or substrate."%(macName),
                macName)
        # NOTE(review): if there is no parentMacroPackage and macName is not
        # a mixin par, this falls through and implicitly returns None --
        # confirm that is intended rather than raising a MacroError here.

    def applyTo(self, destination, ctx, fillers={}):
        """replays the stored events on destination and arranges for
        processEarly and processLate to be run.

        fillers maps mixin parameter keys to their concrete values; it is
        copied before use, so the (mutable) default is safe here.
        """
        with self.applicationLock:
            self._defineMacros(fillers.copy(), destination)
            if self.events:
                self.replay(self.events.events_, destination, ctx)
            if self.processEarly is not None:
                self.processEarly.compile(destination)(ctx, destination,
                    self.macroExpansions)
            if self.processLate is not None:
                # defer processLate until the whole parse is done
                def procLate(rootStruct, parseContext):
                    self.processLate.compile(destination)(
                        destination, rootStruct, parseContext)
                ctx.addExitFunc(procLate)
            if self.lateEvents:
                # wrap the substrate's completeElement so our late events
                # get replayed just before it completes
                origComplete = destination.completeElement
                def newComplete(ctx):
                    with self.applicationLock:
                        self._defineMacros(fillers.copy(), destination)
                        self.replay(self.lateEvents.events_, destination, ctx)
                    origComplete(ctx)
                destination.completeElement = newComplete

    def applyToFinished(self, destination):
        """applies the mixin to an object already parsed.

        Late callbacks will only be executed if destination has an rd
        attribute; if that is the case, this rd's idmap will be amended
        with anything the mixin comes up with.
        """
        rd = None
        if hasattr(destination, "rd"):
            rd = destination.rd

        ctx = base.ParseContext()
        if rd is not None:
            ctx.idmap = destination.rd.idmap
        self.applyTo(destination, ctx)

        # we don't keep the application lock for this; applyToFinished
        # is more of a debugging thing, so we don't worry too much.
        if self.lateEvents:
            self.replay(self.lateEvents.events_, destination, ctx)

        if rd is not None:
            ctx.runExitFuncs(rd)
class EmbeddedGrammar(common.Grammar, base.RestrictionMixin):
    """A Grammar defined by a code application.

    To define this grammar, write a ProcApp iterator leading to code
    yielding row dictionaries.  The grammar input is available as
    self.sourceToken; for normal grammars within data elements, that
    would be a fully qualified file name.

    Grammars can also return one "parameter" dictionary per source (the
    input to a make's parmaker).  In an embedded grammar, you can define
    a pargetter to do that.  It works like the iterator, except that it
    returns a single dictionary rather than yielding several of them.

    This could look like this, when the grammar input is some iterable::

        <embeddedGrammar>
          <iterator>
            <setup>
              <code>
                testData = "a"*1024
              </code>
            </setup>
            <code>
              for i in self.sourceToken:
                yield {'index': i, 'data': testData}
            </code>
          </iterator>
        </embeddedGrammar>
    """
    name_ = "embeddedGrammar"

    # The row iterator proper; compiled into RowIterator._iterRows below.
    _iterator = base.StructAttribute("iterator",
        default=base.Undefined,
        childFactory=EmbeddedIterator,
        description="Code yielding row dictionaries",
        copyable=True)
    # Optional per-source parameter getter (see class docstring).
    _pargetter = base.StructAttribute("pargetter",
        default=None,
        childFactory=EmbeddedPargetter,
        description="Code returning a parameter dictionary",
        copyable=True)
    _isDispatching = base.BooleanAttribute("isDispatching",
        default=False,
        description="Is this a dispatching grammar (i.e., does the row iterator"
        " return pairs of role, row rather than only rows)?",
        copyable=True)
    _notify = base.BooleanAttribute("notify",
        default=False,
        description="Enable notification of begin/end of processing (as"
        " for other grammars; embedded grammars often have odd source"
        " tokens for which you don't want that).",
        copyable=True)

    def onElementComplete(self):
        """builds this grammar's rowIterator class from the user code.
        """
        self._onElementCompleteNext(EmbeddedGrammar)

        # A fresh RowIterator class per grammar instance; the class body
        # deliberately reads self from the enclosing method scope to pull
        # in the compiled user code and the notify flag.
        class RowIterator(common.RowIterator):
            _iterRows = self.iterator.compile()
            notify = self.notify

        if self.pargetter:
            RowIterator.getParameters = self.pargetter.compile()
        self.rowIterator = RowIterator
class FITSProdGrammar(Grammar):
    r"""A grammar that returns FITS-headers as dictionaries.

    This is the grammar you want when one FITS file corresponds to one
    row in the destination table.

    The keywords of the grammar record are the cards in the primary
    header (or some other hdu using the same-named attribute).  "-" in
    keywords is replaced with an underscore for easier @-referencing.
    You can use a mapKeys element to effect further name cosmetics.

    This grammar should handle compressed FITS images transparently if
    you set qnd="False".  This means that you will essentially get the
    headers from the second extension for those even if you left hdu="0".

    The original header is preserved as the value of the header\_ key.
    This is mainly intended for WCS use, as in ``pywcs.WCS(@header_)``.

    If you have more complex structures in your FITS files, you can get
    access to the pyfits HDU using the hdusField attribute.  With
    ``hdusField="_H"``, you could say things like
    ``@_H[1].data[10][0]`` to get the first data item in the tenth row
    in the second HDU.
    """
    name_ = "fitsProdGrammar"

    # qnd ("quick and dirty") short-circuits pyfits' full HDU parsing;
    # only valid for the primary HDU.
    _qnd = base.BooleanAttribute("qnd",
        default=True,
        description="Use a hack to read the FITS header more quickly.  This only"
        " works for the primary HDU",
        copyable=True)
    _hduIndex = base.IntAttribute("hdu",
        default=0,
        description="Take the header from this HDU.  You must say qnd='False'"
        " for this to take effect.",
        copyable=True)
    # defaulted to an empty MapKeys in onElementComplete if not given
    _mapKeys = base.StructAttribute("mapKeys",
        childFactory=MapKeys,
        default=None,
        copyable=True,
        description="Prescription for how to"
        " map header keys to grammar dictionary keys")
    _hdusAttr = base.UnicodeAttribute("hdusField",
        default=None,
        description="If set, the complete pyfits HDU list for the FITS"
        " file is returned in this grammar field.",
        copyable=True)
    # safety valve against headers with no (findable) END card
    _maxHeaderBlocks = base.IntAttribute("maxHeaderBlocks",
        default=40,
        copyable=True,
        description="Stop looking for"
        " FITS END cards and raise an error after this many blocks."
        " You may need to raise this for people dumping obscene amounts"
        " of data or history into headers.")

    rowIterator = FITSProdIterator

    def onElementComplete(self):
        """fills in an empty mapKeys if none was given in the RD.
        """
        if self.mapKeys is None:
            self.mapKeys = base.makeStruct(MapKeys)
        self._onElementCompleteNext(FITSProdGrammar)
class Service(base.Structure, base.ComputedMetaMixin,
        base.StandardMacroMixin, rscdef.IVOMetaMixin):
    """A service definition.

    A service is a combination of a core and one or more renderers.  They
    can be published, and they carry the metadata published into the VO.

    You can set the defaultSort property on the service to a name of an
    output column to preselect a sort order.  Note again that this will
    slow down responses for all but the smallest tables unless there is
    an index on the corresponding column.

    Properties evaluated:

    * defaultSort -- a key to sort on by default with the form renderer.
      This differs from the dbCore's sortKey in that this does not
      suppress the widget itself, it just sets a default for its value.
      Don't use this unless you have to; the combination of sort and
      limit can have disastrous effects on the run time of queries.
    * votableRespectsOutputTable -- usually, VOTable output puts in
      all columns from the underlying database table with low enough
      verbLevel (essentially).  When this property is "True"
      (case-sensitive), that's not done and only the service's output
      table is evaluated.  [Note that column selection is such a mess
      it needs to be fixed before version 1.0 anyway]
    """
    name_ = "service"

    _core = CoreAttribute()
    _templates = base.DictAttribute("templates", description="Custom"
        ' nevow templates for this service; use key "form" to replace the Form'
        " renderer's standard template. Start the path with two slashes to"
        " access system templates.",
        itemAttD=rscdef.ResdirRelativeAttribute("template",
            description="resdir-relative path to a nevow template"
            " used for the function given in key."),
        copyable=True)
    _publications = base.StructListAttribute("publications",
        childFactory=Publication,
        description="Sets and renderers this service"
        " is published with.")
    # NOTE(review): copyable="True" (a string) on the next two attributes is
    # truthy and thus works, but is inconsistent with copyable=True elsewhere.
    _limitTo = base.UnicodeAttribute("limitTo", default=None,
        description="Limit access to the group given; the empty default disables"
        " access control.", copyable="True")
    _customPage = rscdef.ResdirRelativeAttribute("customPage", default=None,
        description="resdir-relative path to custom page code. It is used"
        " by the 'custom' renderer", copyable="True")
    _allowedRenderers = base.StringSetAttribute("allowed",
        description="Names of renderers allowed on this service; leave emtpy"
        " to allow the form renderer only.", copyable=True)
    _customRF = base.StructListAttribute("customRFs",
        description="Custom render functions for use in custom templates.",
        childFactory=CustomRF, copyable=True)
    _customDF = base.StructListAttribute("customDFs",
        description="Custom data functions for use in custom templates.",
        childFactory=CustomDF, copyable=True)
    _inputData = base.StructAttribute("inputDD", default=base.NotGiven,
        childFactory=inputdef.InputDescriptor, description="A data descriptor"
        " for obtaining the core's input, usually based on a contextGrammar."
        " For many cores (e.g., DBCores), you do not want to give this"
        " but rather want to let service figure this out from the core.",
        copyable=True)
    _outputTable = base.StructAttribute("outputTable", default=base.NotGiven,
        childFactory=outputdef.OutputTableDef, copyable=True,
        description="The output fields of this service.")
    _serviceKeys = base.StructListAttribute("serviceKeys",
        childFactory=inputdef.InputKey,
        description="Input widgets for"
        " processing by the service, e.g. output sets.", copyable=True)
    _defaultRenderer = base.UnicodeAttribute("defaultRenderer",
        default=None, description="A name of a renderer used when"
        " none is provided in the URL (lets you have shorter URLs).")
    _rd = rscdef.RDAttribute()
    _props = base.PropertyAttribute()
    _original = base.OriginalAttribute()

    metaModel = ("title(1), creationDate(1), description(1),"
        "subject, referenceURL(1), shortName(!)")

    # formats that should query the same fields as HTML (the others behave
    # like VOTables and offer a "verbosity" widget in forms).
    htmlLikeFormats = ["HTML", "tar"]

    ####################### Housekeeping methods

    def __repr__(self):
        return "<Service at %x>" % id(self)

    def completeElement(self, ctx):
        """fills in defaults (allowed renderers, core, output table) and
        schedules the automatic capability additions for RD completion.
        """
        self._completeElementNext(Service, ctx)
        if not self.allowed:
            self.allowed.add("form")

        if self.core is base.Undefined:
            # undefined cores are only allowed with custom pages
            # (Deprecated)
            if self.customPage:
                self.core = core.getCore("nullCore")(
                    self.rd).finishElement(None)
                base.ui.notifyWarning("Custom page service %s without nullCore."
                    " This is deprecated, please fix" % self.id)
            else:
                raise base.StructureError(
                    "Services must have cores (add <nullCore/>"
                    " if you really do not want a core, e.g., with fixed renderers)."
                    )

        # if there's only one renderer on this service, make it the default
        if self.defaultRenderer is None and len(self.allowed) == 1:
            self.defaultRenderer = list(self.allowed)[0]

        # empty output tables are filled from the core
        if self.outputTable is base.NotGiven:
            self.outputTable = self.core.outputTable

        # cache all kinds of things expensive to create and parse
        self._coresCache = {}
        self._inputDDCache = {}
        self._loadedTemplates = {}

        # Schedule the capabilities to be added when the parse is
        # done (i.e., the RD is complete)
        ctx.addExitFunc(lambda rd, ctx: self._addAutomaticCapabilities())

    def onElementComplete(self):
        """indexes custom render/data functions and compiles the custom page.
        """
        self._onElementCompleteNext(Service)

        # Index custom render/data functions
        self.nevowRenderers = {}
        for customRF in self.customRFs:
            self.nevowRenderers[customRF.name] = customRF.func
        self.nevowDataFunctions = {}
        for customDF in self.customDFs:
            self.nevowDataFunctions[customDF.name] = customDF.func

        self._compileCustomPage()
        self._computeResourceType()

    def _compileCustomPage(self):
        """loads the module in customPage and keeps its MainPage around.

        Only does anything if customPage is set; module import errors are
        turned into LiteralParseErrors.
        """
        if self.customPage:
            try:
                modNs, moddesc = utils.loadPythonModule(self.customPage)
                modNs.RD = self.rd
                # optional module-level hook; no-op if absent
                getattr(modNs, "initModule", lambda: None)()
                page = modNs.MainPage
            except ImportError:
                raise base.ui.logOldExc(base.LiteralParseError("customPage",
                    self.customPage,
                    hint="This means that an exception was raised while DaCHS"
                    " tried to import the renderer module.  If DaCHS ran"
                    " with --debug, the original traceback is available"
                    " in the logs."))
            self.customPageCode = page, (os.path.basename(
                self.customPage),)+moddesc

    def getTemplate(self, key):
        """returns the nevow template for the function key on this service.
        """
        if key not in self._loadedTemplates:
            from nevow import loaders
            tp = self.templates[key]
            if tp.startswith("//"):
                # double slash means: system template
                self._loadedTemplates[key] = common.loadSystemTemplate(tp[2:])
            else:
                self._loadedTemplates[key] = loaders.xmlfile(
                    os.path.join(self.rd.resdir, tp))
        return self._loadedTemplates[key]

    def getUWS(self):
        """returns a user UWS instance for this service.

        This is a service for the UWSAsyncRenderer.  The instance is
        created lazily and cached on the service.
        """
        if not hasattr(self, "uws"):
            from gavo.protocols import useruws
            self.uws = useruws.makeUWSForService(self)
        return self.uws

    ################### Registry and related methods.

    @property
    def isVOPublished(self, renderer=None):
        """is true if there is any ivo_managed publication on this service.

        If renderer is non-None, only publications with this renderer
        name count.

        NOTE(review): as a property, this can never actually be called
        with renderer, so the renderer branch appears unreachable --
        confirm before relying on it.
        """
        for pub in self.publications:
            if "ivo_managed" in pub.sets:
                if renderer:
                    if pub.render == renderer:
                        return True
                else:
                    return True
        return False

    def _computeResourceType(self):
        """sets the resType attribute.

        Services are resources, and the registry code wants to know what
        kind.  This method ventures a guess.  You can override this
        decision by setting the resType meta item.
        """
        if (self.outputTable.columns
                or self.outputTable.verbLevel
                or "tap" in self.allowed):
            self.resType = "catalogService"
        else:
            # no output table defined, we're a plain service
            self.resType = "nonTabularService"

    def _addAutomaticCapabilities(self):
        """adds some publications that are automatic for certain types
        of services.

        For services with ivo_managed publications and with useful cores
        (this keeps out doc-like publications, which shouldn't have VOSI
        resources), artificial VOSI publications are added.

        If there is _example meta, an examples publication is added.

        If this service exposes a table (i.e., a DbCore with a
        queriedTable) and that table is adql-readable, also add an
        auxiliary TAP publication if going to the VO.

        This is being run as an exit function from the parse context as
        we want the RD to be complete at this point (e.g., _examples
        meta might come from it).  This also lets us liberally resolve
        references anywhere.
        """
        if not self.isVOPublished:
            return
        vosiSet = set(["ivo_managed"])

        # All actual services get VOSI caps
        if not isinstance(self.core, core.getCore("nullCore")):
            self._publications.feedObject(self,
                base.makeStruct(Publication,
                    render="availability", sets=vosiSet, parent_=self))
            self._publications.feedObject(self,
                base.makeStruct(Publication,
                    render="capabilities", sets=vosiSet, parent_=self))
            self._publications.feedObject(self,
                base.makeStruct(Publication,
                    render="tableMetadata", sets=vosiSet, parent_=self))

        # things querying tables get a TAP relationship if
        # their table is adql-queriable
        if isinstance(self.core, core.getCore("dbCore")):
            if self.core.queriedTable.adql:
                tapService = base.resolveCrossId("//tap#run")
                self._publications.feedObject(self,
                    base.makeStruct(Publication,
                        render="tap", sets=vosiSet, auxiliary=True,
                        service=tapService, parent_=self))
                # and they need a servedBy, too.
                # According to the "discovering dependent" note, we don't
                # do the reverse relationship lest the TAP service
                # gets too related...
                self.addMeta("servedBy",
                    base.getMetaText(tapService, "title"),
                    ivoId=base.getMetaText(tapService, "identifier"))

        # things with examples meta get an examples capability
        try:
            self.getMeta("_example", raiseOnFail=True)
            self._publications.feedObject(self,
                base.makeStruct(Publication,
                    render="examples", sets=utils.AllEncompassingSet(),
                    parent_=self))
        except base.NoMetaKey:
            pass

    def getPublicationsForSet(self, names):
        """returns publications for set names in names.

        names must be a set.
        """
        additionals = []
        # for ivo_managed, also return a datalink endpoints if they're
        # there; the specs imply that might be useful some day.
        if self.getProperty("datalink", None):
            dlSvc = self.rd.getById(self.getProperty("datalink"))
            if "dlget" in dlSvc.allowed:
                additionals.append(base.makeStruct(Publication,
                    render="dlget", sets="ivo_managed", service=dlSvc))
            if "dlasync" in dlSvc.allowed:
                additionals.append(base.makeStruct(Publication,
                    render="dlasync", sets="ivo_managed", service=dlSvc))
            if "dlmeta" in dlSvc.allowed:
                additionals.append(base.makeStruct(Publication,
                    render="dlmeta", sets="ivo_managed", service=dlSvc))
        return [pub for pub in self.publications
            if pub.sets & names]+additionals

    def getURL(self, rendName, absolute=True, **kwargs):
        """returns the full canonical access URL of this service together
        with renderer.

        rendName is the name of the intended renderer in the registry
        of renderers.  With absolute, a fully qualified URL is being
        returned.

        Further keyword arguments are translated into URL parameters in
        the query part.
        """
        basePath = "%s%s/%s" % (base.getConfig("web", "nevowRoot"),
            self.rd.sourceId, self.id)
        if absolute:
            basePath = base.getConfig("web", "serverURL") + basePath
        res = renderers.getRenderer(rendName).makeAccessURL(basePath)

        if kwargs:
            res = res + "?" + urllib.urlencode(kwargs)
        return res

    # used by getBrowserURL; keep external higher than form as long as
    # we have mess like Potsdam CdC.
    _browserScores = {
        "form": 10, "external": 12, "fixed": 15,
        "custom": 3, "img.jpeg": 2, "static": 1}

    def getBrowserURL(self, fq=True):
        """returns a published URL that's suitable for a web browser or
        None if no such URL can be guessed.

        If you pass fq=False, you will get a path rather than a URL.
        """
        # There can be multiple candidates for browser URLs (like when a service
        # has both form, static, and external renderers). If so, we select
        # by plain scores.
        browseables = []
        for rendName in self.allowed:
            if self.isBrowseableWith(rendName):
                browseables.append((self._browserScores.get(rendName, -1),
                    rendName))
        if browseables:
            return self.getURL(max(browseables)[1], absolute=fq)
        else:
            return None

    def isBrowseableWith(self, rendName):
        """returns true if rendering this service through rendName results
        in something pretty in a web browser.
        """
        try:
            return bool(renderers.getRenderer(rendName).isBrowseable(self))
        except base.NotFoundError:
            # renderer name not known
            return False

    def getTableSet(self):
        """returns a list of table definitions that have something to do
        with this service.

        This is for VOSI-type queries.  Usually, that's just the core's
        queried table or an output table, except when there is a TAP
        renderer on the service.

        All this is a bit heuristic; but then again, there's no rigorous
        definition for what's to be in a tables endpoint either.
        """
        tables = []

        # output our own outputTable if it sounds reasonable; if so,
        # add the core's queried table, too, if it has one.
        if self.outputTable and self.outputTable.columns:
            tables.append(self.outputTable)
            tables.append(getattr(self.core, "queriedTable", None))
        else:
            # if our outputTable is no good, just use the one of the core
            qt = getattr(self.core, "queriedTable", None)
            if qt is None:
                qt = getattr(self.core, "outputTable", None)
            if qt is not None:
                tables.append(qt)

        # XXX TODO: This stinks big time.  It's because we got TAP
        # factorization wrong.  Sync and async should be renderers, and
        # there should be a core that then could say this kind of thing.
        # That's not yet the case, so:
        if "tap" in self.allowed:
            # tap never has "native" tables, so start afresh
            tables = []
            mth = base.caches.getMTH(None)
            for tableName in mth.getTAPTables():
                try:
                    tables.append(mth.getTableDefForTable(tableName))
                except:
                    # NOTE(review): bare except keeps one broken RD from
                    # taking down the whole tables endpoint.
                    base.ui.notifyError(
                        "Failure trying to retrieve table definition"
                        " for table %s. Please fix the corresponding RD."
                        % tableName)
        return [t for t in tables if t is not None and t.rd is not None]

    def declareServes(self, data):
        """adds meta to self and data indicating that data is served by
        service.

        This is used by table/@adql and the publish element on data.
        """
        if data.registration:
            self.addMeta("serviceFor",
                base.getMetaText(data, "title", default="Anonymous"),
                ivoId=base.getMetaText(data, "identifier"))
            data.addMeta("servedBy",
                base.getMetaText(self, "title"),
                ivoId=base.getMetaText(self, "identifier"))

            # Since this is always initiated by the data, the dependency
            # must show up in its RD to be properly added on publication
            # and to be removed when the data is removed.
            data.rd.addDependency(self.rd, data.rd)

    ########################## Output field selection (ouch!)

    def _getVOTableOutputFields(self, queryMeta):
        """returns a list of OutputFields suitable for a VOTable response
        described by queryMeta.

        This is what's given for HTML when the columns verbLevel is low
        enough and there's no displayHint of noxml present.

        In addition, more columns are added from outputTable's parent
        (which usually will be the database table itself) if their
        verbLevel is low enough.  This may be suppressed by setting the
        votableRespectsOutputTable property to "True".
        """
        verbLevel = queryMeta.get("verbosity", 20)
        fields = [f for f in self.getHTMLOutputFields(queryMeta)
            if f.verbLevel <= verbLevel
                and f.displayHint.get("noxml") != "true"]

        # NOTE(review): verbLevel is usually an int here but is compared
        # against the string "HTML"; presumably queryMeta may carry the
        # literal "HTML" verbosity -- confirm against queryMeta's source.
        if (verbLevel != "HTML"
                and self.getProperty(
                    "votableRespectsOutputTable", None) != "True"):
            htmlNames = set(f.name for f in fields)
            for field in self.outputTable.parentTable:
                if field.name in htmlNames:
                    continue
                if (field.displayHint.get("type") == "suppress"
                        or field.displayHint.get("noxml") == "true"):
                    continue
                if field.verbLevel <= verbLevel:
                    fields.append(field)

        return rscdef.ColumnList(fields)

    _allSet = set(["ALL"])

    def getHTMLOutputFields(self, queryMeta, ignoreAdditionals=False,
            raiseOnUnknown=True):
        """returns a list of OutputFields suitable for an HTML response
        described by queryMeta.

        raiseOnUnknown is used by customwidgets to avoid exceptions
        because of bad additional fields during form construction (when
        they aren't properly caught).
        """
        requireSet = queryMeta["columnSet"]
        res = rscdef.ColumnList()

        # add "normal" output fields
        if requireSet:
            res.extend([f for f in self.outputTable
                if f.sets == self._allSet or requireSet & f.sets])
        else:
            res.extend([f for f in self.outputTable
                if f.displayHint.get("type") != "suppress"])

        # add user-selected fields
        if not ignoreAdditionals and queryMeta["additionalFields"]:
            cofs = self.core.outputTable.columns
            try:
                for fieldName in queryMeta["additionalFields"]:
                    col = cofs.getColumnByName(fieldName)
                    if isinstance(col, outputdef.OutputField):
                        res.append(col)
                    else:
                        res.append(outputdef.OutputField.fromColumn(col))
            except base.NotFoundError, msg:
                if raiseOnUnknown:
                    raise base.ValidationError(
                        "The additional field %s you requested"
                        " does not exist" % repr(msg.lookedFor),
                        colName="_OUTPUT")
        return res
class ComputedCore(core.Core):
    """A core wrapping external applications.

    ComputedCores wrap command line tools taking command line arguments,
    reading from stdin, and outputting to stdout.

    The command line arguments are taken from the inputTable's
    parameters, stdin is created by serializing the inputTable's rows
    like they are serialized for with the TSV output, except only
    whitespace is entered between the values.

    The output is the primary table of parsing the program's output with
    the data child.

    While in principle more declarative than PythonCores, these days I'd
    say rather use one of those.
    """
    name_ = "computedCore"

    _computer = rscdef.ResdirRelativeAttribute("computer",
        default=base.Undefined, description="Resdir-relative basename of"
        " the binary doing the computation. The standard rules for"
        " cross-platform binary name determination apply.",
        copyable=True)
    _resultParse = base.StructAttribute("resultParse",
        description="Data descriptor to parse the computer's output.",
        childFactory=rscdef.DataDescriptor, copyable=True)

    def start_(self, ctx, name, value):
        """rejects outputTable children; the output is always defined by
        resultParse's primary table (see completeElement).
        """
        if name=="outputTable":
            raise base.StructureError("Cannot define a computed core's"
                " output table.", hint="Computed cores have their output"
                " defined by the primary table of resultParse.")
        return core.Core.start_(self, ctx, name, value)

    def completeElement(self, ctx):
        """derives this core's output table from resultParse's primary
        table.
        """
        if self.resultParse:
            self._outputTable.feedObject(self,
                outputdef.OutputTableDef.fromTableDef(
                    self.resultParse.getPrimary(), ctx))
        self._completeElementNext(ComputedCore, ctx)

    def _feedInto(self, data, destFile):
        """writes data into destFile from a thread.

        This is done to cheaply avoid deadlocks.  Ok, I'll to a select
        loop piping directly into the grammar one of these days.

        Returns the (daemonic, already started) writer thread.
        """
        def writeFile():
            destFile.write(data)
            destFile.close()
        writeThread = threading.Thread(target=writeFile)
        # daemonize so a stuck child process cannot keep the server alive
        writeThread.setDaemon(True)
        writeThread.start()
        return writeThread

    def _getArgs(self, inputTable):
        """returns the argv list for the external binary, built from
        inputTable's parameters.

        Raises ValidationError for parameters without a value.
        """
        args = [base.getBinaryName(self.computer)]
        for par in inputTable.iterParams():
            if par.content_ is base.NotGiven:
                raise base.ValidationError("Command line argument %s must not"
                    " be undefined"%par.name, par.name, base.NotGiven)
            args.append(par.content_)
        return args

    def _getInput(self, inputTable):
        """serializes inputTable's rows into the whitespace-separated
        stdin format described in the class docstring.
        """
        t = inputTable
        names = [c.name for c in t.tableDef]
        res = []
        for row in base.SerManager(t,
                mfRegistry=argMFRegistry).getMappedValues():
            res.append(" ".join([row[name] for name in names]))
        return str("\n".join(res))

    def _runAndCapture(self, inputTable):
        """runs the external binary on inputTable and returns its stdout
        as a string.

        Raises ValidationError on a non-zero exit status.
        """
        # if we wanted to get really fancy, it shouldn't be hard to pipe that
        # stuff directly into the grammar.
        pipe = subprocess.Popen(self._getArgs(inputTable), 2**16,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True,
            cwd=os.path.dirname(self.computer))
        # stdin is fed from a separate thread to avoid pipe deadlocks
        writeThread = self._feedInto(self._getInput(inputTable), pipe.stdin)
        data = pipe.stdout.read()
        pipe.stdout.close()
        # NOTE(review): join with a 0.1s timeout may leave the (daemonic)
        # writer thread running; harmless since the child has exited.
        writeThread.join(0.1)
        retcode = pipe.wait()
        if retcode!=0:
            raise base.ValidationError("The subprocess %s returned %s. This"
                " indicates an external executable could not be run or failed"
                " with your parameters. You should probably report this to the"
                " operators."%(os.path.basename(self.computer), retcode),
                "query")
        return data

    def run(self, service, inputTable, queryMeta):
        """starts the computing process if this is a computed data set.

        Returns the primary table parsed from the child's output via
        resultParse.
        """
        res = rsc.makeData(self.resultParse,
            forceSource=StringIO(self._runAndCapture(inputTable)))
        return res.getPrimaryTable()
class Core(base.Structure):
    """A definition of the "active" part of a service.

    Cores receive their input in tables the structure of which is
    defined by their inputTable attribute.

    The abstract core element will never occur in resource descriptors.
    See `Cores Available`_ for concrete cores.  Use the names of the
    concrete cores in RDs.

    You can specify an input table in an inputTableXML and an output
    table in an outputTableXML class attribute.
    """
    name_ = "core"

    # Subclasses may define literal XML here to get default input/output
    # tables without repeating them in every RD.
    inputTableXML = None
    outputTableXML = None

    # the cached prototype of the output table, filled in by
    # _OutputTableFactory
    _ot_prototype = None

    _rd = rscdef.RDAttribute()
    _inputTable = base.StructAttribute("inputTable",
        default=base.NotGiven,
        childFactory=inputdef.InputTable,
        description="Description of the input data",
        copyable=True)
    _outputTable = base.StructAttribute("outputTable",
        default=base.NotGiven,
        childFactory=_OutputTableFactory(),
        description="Table describing what fields are available from this core",
        copyable=True)
    _original = base.OriginalAttribute()
    _properties = base.PropertyAttribute()

    def __init__(self, parent, **kwargs):
        # An inputTableXML on the class supplies the input table unless
        # the constructor caller already provided one explicitly.
        if self.inputTableXML is not None and "inputTable" not in kwargs:
            kwargs["inputTable"] = base.parseFromString(
                inputdef.InputTable, self.inputTableXML)
        base.Structure.__init__(self, parent, **kwargs)

    def __repr__(self):
        clsName = self.__class__.__name__
        return "<%s at %s>" % (clsName, id(self))

    def __str__(self):
        return repr(self)

    def completeElement(self, ctx):
        """fills in defaults for input and output tables after parsing.
        """
        self._completeElementNext(Core, ctx)
        # no input table given: fall back to an empty one
        if self.inputTable is base.NotGiven:
            self.inputTable = base.makeStruct(inputdef.InputTable)
        # no output table given: let the factory construct the default
        if self.outputTable is base.NotGiven:
            self.outputTable = self._outputTable.childFactory(self)

    def adaptForRenderer(self, renderer):
        """returns a core object tailored for renderer.
        """
        adaptedIT = self.inputTable.adaptForRenderer(renderer)
        # only clone the core when the input table actually changed
        return (self if adaptedIT is self.inputTable
            else self.change(inputTable=adaptedIT))

    def run(self, service, inputData, queryMeta):
        # abstract; concrete cores must override this
        raise NotImplementedError("%s cores are missing the run method"
            % self.__class__.__name__)

    def makeUserDoc(self):
        return ("Polymorphous core element. May contain any of the cores"
            " mentioned in `Cores Available`_ .")
class Execute(base.Structure, base.ExpansionDelegator):
	"""a container for calling code.

	This is a cron-like functionality. The jobs are run in separate
	threads, so they need to be thread-safe with respect to the rest of
	DaCHS. DaCHS serializes calls, though, so that your code should never
	run twice at the same time.

	At least on CPython, you must make sure your code does not block with
	the GIL held; this is still in the server process. If you do daring
	things, fork off (note that you must not use any database connections
	you may have after forking, which means you can't safely use the RD
	passed in). See the docs on `Element job`_.

	When testing/debugging such code, use ``gavo admin execute rd#id`` to
	immediately run the jobs.
	"""
	name_ = "execute"

	_title = base.UnicodeAttribute(
		"title",
		default=base.Undefined,
		description="Some descriptive title for the job; this is used"
			" in diagnostics.",
		copyable=False,)

	_at = base.StringListAttribute(
		"at",
		description="One or more hour:minute pairs at which to run"
			" the code each day. This conflicts with every. Optionally,"
			" you can prefix each time by one of m<dom> or w<dow> for"
			# fixed typo: "exectued"
			" jobs only to be executed at some day of the month or week, both"
			" counted from 1. So, 'm22 7:30, w3 15:02' would execute on"
			" the 22nd of each month at 7:30 UTC and on every wednesday at 15:02.",
		default=base.NotGiven,
		copyable=True,)

	_every = base.IntAttribute(
		"every",
		default=base.NotGiven,
		description="Run the job roughly every this many seconds."
			" This conflicts with at. Note that the first execution of"
			" such a job is after every/10 seconds, and that the timers"
			" start anew at every server restart. So, if you restart"
			" often, these jobs may run much more frequently or not at all"
			" if the interval is large. If every is smaller than zero, the"
			" job will be executed immediately when the RD is being loaded and is"
			" then run every abs(every) seconds",
		copyable=True,)

	_job = base.StructAttribute(
		"job",
		childFactory=CronJob,
		default=base.Undefined,
		description="The code to run.",
		copyable=True,)

	_debug = base.BooleanAttribute(
		"debug",
		# fixed typo: "span or spawnPython"
		description="If true, on execution of external processes (spawn or"
			" spawnPython), the output will be accumulated and mailed to"
			" the administrator. Note that output of the actual cron job"
			" itself is not caught (it might turn up in serverStderr)."
			" You could use execDef.outputAccum.append(<stuff>) to have"
			" information from within the code included.",
		default=False)

	_properties = base.PropertyAttribute()

	_rd = common.RDAttribute()

	def spawn(self, cliList):
		"""spawns an external command, capturing the output and mailing
		it to the admin if it failed.

		Output is buffered and mailed, so it shouldn't be too large.

		This does not raise an exception if it failed (in normal usage,
		this would cause two mails to be sent). Instead, it returns the
		returncode of the spawned process; if that's 0, you're ok. But
		in general, you wouldn't want to check it.
		"""
		p = subprocess.Popen(cliList,
			stdin=subprocess.PIPE,
			stdout=subprocess.PIPE,
			stderr=subprocess.STDOUT,
			close_fds=True)
		childOutput, _ = p.communicate()

		if p.returncode:
			cron.sendMailToAdmin(
				"A process spawned by %s failed with %s" % (
					self.title, p.returncode),
				"Output of %s:\n\n%s" % (cliList, childOutput))

		elif self.debug:
			# in debug mode, keep the output around so it is included in
			# the mail the cron machinery sends after the job
			if childOutput:
				self.outputAccum.append("\n\n%s -> %s\n" % (
					cliList, p.returncode))
				self.outputAccum.append(childOutput)

		return p.returncode

	def spawnPython(self, pythonFile):
		"""spawns a new python interpreter executing pythonFile.

		pythonFile may be resdir-relative.

		As with spawn, the child's returncode is returned (this used to
		be silently dropped).
		"""
		return self.spawn(["python", os.path.join(self.rd.resdir, pythonFile)])

	def _parseAt(self, atSpec, ctx):
		"""returns a tuple ready for cron.repeatAt from atSpec.

		see the at StringListAttribute for how it would look like; this
		parses one element of that string list.
		"""
		mat = re.match(
			r"(?P<dow>w\d\s+)?"
			r"(?P<dom>m\d\d?\s+)?"
			r"(?P<hr>\d+):(?P<min>\d+)",
			atSpec)
		if not mat:
			raise base.LiteralParseError("at", atSpec, pos=ctx.pos, hint=
				"This is hour:minute optionally prefixed by either w<weekday> or"
				" m<day of month>, each counted from 1.")

		hour, minute = int(mat.group("hr")), int(mat.group("min"))
		if not (0 <= hour <= 23 and 0 <= minute <= 59):
			# fixed: the hint used to say "or", contradicting the conjunction
			# actually enforced above
			raise base.LiteralParseError(
				"at", atSpec, pos=ctx.pos, hint=
				"This must be hour:minute with 0<=hour<=23 and 0<=minute<=59")

		dom = None
		if mat.group("dom"):
			dom = int(mat.group("dom")[1:])
			if not 1 <= dom <= 28:
				raise base.LiteralParseError(
					"at", atSpec, pos=ctx.pos,
					hint="day-of-month in at must be between 1 and 28.")

		dow = None
		if mat.group("dow"):
			dow = int(mat.group("dow")[1:])
			if not 1 <= dow <= 7:
				raise base.LiteralParseError(
					"at", atSpec, pos=ctx.pos,
					hint="day-of-week in at must be between 1 and 7.")

		return (dom, dow, hour, minute)

	def completeElement(self, ctx):
		self._completeElementNext(Execute, ctx)
		# at and every are mutually exclusive, and one of them is required
		if len([s for s in [self.at, self.every] if s is base.NotGiven]) != 1:
			raise base.StructureError("Exactly one of at and every required"
				" for Execute", pos=ctx.pos)

		if self.at is not base.NotGiven:
			self.parsedAt = []
			for literal in self.at:
				self.parsedAt.append(self._parseAt(literal, ctx))

	def onElementComplete(self):
		self._onElementCompleteNext(Execute)

		self.jobName = "%s#%s" % (self.rd.sourceId, self.title)

		self.callable = _guardedFunctionFactory.makeGuardedThreaded(
			self.job.compile(), self)

		if self.at is not base.NotGiven:
			cron.repeatAt(self.parsedAt, self.jobName, self.callable)
		else:
			cron.runEvery(self.every, self.jobName, self.callable)
class DatalinkCoreBase(svcs.Core, base.ExpansionDelegator): """Basic functionality for datalink cores. This is pulled out of the datalink core proper as it is used without the complicated service interface sometimes, e.g., by SSAP. """ _descriptorGenerator = base.StructAttribute( "descriptorGenerator", default=base.NotGiven, childFactory=DescriptorGenerator, description="Code that takes a PUBDID and turns it into a" " product descriptor instance. If not given," " //soda#fromStandardPubDID will be used.", copyable=True) _metaMakers = base.StructListAttribute( "metaMakers", childFactory=MetaMaker, description="Code that takes a data descriptor and either" " updates input key options or yields related data.", copyable=True) _dataFunctions = base.StructListAttribute( "dataFunctions", childFactory=DataFunction, description="Code that generates of processes data for this" " core. The first of these plays a special role in that it" " must set descriptor.data, the others need not do anything" " at all.", copyable=True) _dataFormatter = base.StructAttribute( "dataFormatter", default=base.NotGiven, childFactory=DataFormatter, description="Code that turns descriptor.data into a nevow resource" " or a mime, content pair. If not given, the renderer will be" " returned descriptor.data itself (which will probably not usually" " work).", copyable=True) _inputKeys = rscdef.ColumnListAttribute( "inputKeys", childFactory=svcs.InputKey, description="A parameter to one of the proc apps (data functions," " formatters) active in this datalink core; no specific relation" " between input keys and procApps is supposed; all procApps are passed" " all argments. Conventionally, you will write the input keys in" " front of the proc apps that interpret them.", copyable=True) # The following is a hack complemented in inputdef.makeAutoInputDD. 
# We probably want some other way to do this (if we want to do it # at all) rejectExtras = True def completeElement(self, ctx): if self.descriptorGenerator is base.NotGiven: self.descriptorGenerator = MS( DescriptorGenerator, procDef=base.resolveCrossId("//soda#fromStandardPubDID")) if self.dataFormatter is base.NotGiven: self.dataFormatter = MS( DataFormatter, procDef=base.caches.getRD("//datalink").getById( "trivialFormatter")) self.inputKeys.append( MS(svcs.InputKey, name="ID", type="text", ucd="meta.id;meta.main", multiplicity="multiple", std=True, description="The pubisher DID of the dataset of interest")) if self.inputTable is base.NotGiven: self.inputTable = MS(svcs.InputTable, params=self.inputKeys) # this is a cheat for service.getTableSet to pick up the datalink # table. If we fix this for TAP, we should fix it here, too. self.queriedTable = base.caches.getRD("//datalink").getById( "dlresponse") self._completeElementNext(DatalinkCoreBase, ctx) def getMetaForDescriptor(self, descriptor): """returns a pair of linkDefs, inputKeys for a datalink desriptor and this core. """ linkDefs, inputKeys, errors = [], self.inputKeys[:], [] for metaMaker in self.metaMakers: try: for item in metaMaker.compile(self)(self, descriptor): if isinstance(item, LinkDef): linkDefs.append(item) elif isinstance(item, DatalinkFault): errors.append(item) else: inputKeys.append(item) except Exception, ex: if base.DEBUG: base.ui.notifyError( "Error in datalink meta generator %s: %s" % (metaMaker, repr(ex))) base.ui.notifyError("Failing source: \n%s" % metaMaker.getFuncCode()) errors.append( DatalinkFault.Fault( descriptor.pubDID, "Unexpected failure while creating" " datalink: %s" % utils.safe_str(ex))) return linkDefs, inputKeys, errors
class CondDesc(base.Structure): """A query specification for cores talking to the database. CondDescs define inputs as a sequence of InputKeys (see `Element InputKey`_). Internally, the values in the InputKeys can be translated to SQL. """ name_ = "condDesc" _inputKeys = rscdef.ColumnListAttribute("inputKeys", childFactory=inputdef.InputKey, description="One or more InputKeys defining the condition's input.", copyable=True) _silent = base.BooleanAttribute("silent", default=False, description="Do not produce SQL from this CondDesc. This" " can be used to convey meta information to the core. However," " in general, a service is a more appropriate place to deal with" " such information, and thus you should prefer service InputKeys" " to silent CondDescs.", copyable=True) _required = base.BooleanAttribute("required", default=False, description="Reject queries not filling the InputKeys of this CondDesc", copyable=True) _fixedSQL = base.UnicodeAttribute("fixedSQL", default=None, description="Always insert this SQL statement into the query. Deprecated.", copyable=True) _buildFrom = base.ReferenceAttribute("buildFrom", description="A reference to a column or an InputKey to define" " this CondDesc", default=None) _phraseMaker = base.StructAttribute("phraseMaker", default=None, description="Code to generate custom SQL from the input keys", childFactory=PhraseMaker, copyable=True) _combining = base.BooleanAttribute("combining", default=False, description="Allow some input keys to be missing when others are given?" 
" (you want this for pseudo-condDescs just collecting random input" " keys)", # (and I wish I had a better idea) copyable="True") _group = base.StructAttribute("group", default=None, childFactory=rscdef.Group, description="Group child input keys in the input table (primarily" " interesting for web forms, where this grouping is shown graphically;" " Set the style property to compact to have a one-line group there)") _joiner = base.UnicodeAttribute("joiner", default="OR", description="When yielding multiple fragments, join them" " using this operator (probably the only thing besides OR is" " AND).", copyable=True) _original = base.OriginalAttribute() def __init__(self, parent, **kwargs): base.Structure.__init__(self, parent, **kwargs) # copy parent's resolveName if present for buildFrom resolution if hasattr(self.parent, "resolveName"): self.resolveName = self.parent.resolveName def __repr__(self): return "<CondDesc %s>"%",".join(ik.name for ik in self.inputKeys) @classmethod def fromInputKey(cls, ik, **kwargs): return base.makeStruct(CondDesc, inputKeys=[ik], **kwargs) @classmethod def fromColumn(cls, col, **kwargs): return base.makeStruct(cls, buildFrom=col, **kwargs) @property def name(self): """returns some key for uniqueness of condDescs. """ # This is necessary for ColumnLists that are used # for CondDescs as well. Ideally, we'd do this on an # InputKeys basis and yield their names (because that's what # formal counts on), but it's probably not worth the effort. return "+".join([f.name for f in self.inputKeys]) def completeElement(self, ctx): if self.buildFrom and not self.inputKeys: # use the column as input key; special renderers may want # to do type mapping, but the default is to have plain input self.inputKeys = [inputdef.InputKey.fromColumn(self.buildFrom)] self._completeElementNext(CondDesc, ctx) def expand(self, *args, **kwargs): """hands macro expansion requests (from phraseMakers) upwards. 
This is to the queried table if the parent has one (i.e., we're part of a core), or to the RD if not (i.e., we're defined within an rd). """ if hasattr(self.parent, "queriedTable"): return self.parent.queriedTable.expand(*args, **kwargs) else: return self.parent.rd.expand(*args, **kwargs) def _makePhraseDefault(self, ignored, inputKeys, inPars, outPars, core): # the default phrase maker uses whatever the individual input keys # come up with. for ik in self.inputKeys: yield base.getSQLForField(ik, inPars, outPars) # We only want to compile the phraseMaker if actually necessary. # condDescs may be defined within resource descriptors (e.g., in # scs.rd), and they can't be compiled there (since macros may # be missing); thus, we dispatch on the first call. def _getPhraseMaker(self): try: return self.__compiledPhraseMaker except AttributeError: if self.phraseMaker is not None: val = self.phraseMaker.compile() else: val = self._makePhraseDefault self.__compiledPhraseMaker = val return self.__compiledPhraseMaker makePhrase = property(_getPhraseMaker) def _isActive(self, inPars): """returns True if the dict inPars contains input to all our input keys. """ for f in self.inputKeys: if f.name not in inPars: return False return True def inputReceived(self, inPars, queryMeta): """returns True if all inputKeys can be filled from inPars. As a side effect, inPars will receive defaults form the input keys if there are any. """ if not self._isActive(inPars): return False keysFound, keysMissing = [], [] for f in self.inputKeys: if inPars.get(f.name) is None: keysMissing.append(f) else: if f.value!=inPars.get(f.name): # non-defaulted keysFound.append(f) if not keysMissing: return True # keys are missing. 
That's ok if none were found and we're not required if not self.required and not keysFound: return False if self.required: raise base.ValidationError("is mandatory but was not provided.", colName=keysMissing[0].name) # we're optional, but a value was given and others are missing if not self.combining: raise base.ValidationError("When you give a value for %s," " you must give value(s) for %s, too"%(keysFound[0].getLabel(), ", ".join(k.name for k in keysMissing)), colName=keysMissing[0].name) return True def asSQL(self, inPars, sqlPars, queryMeta): if self.silent or not self.inputReceived(inPars, queryMeta): return "" res = list(self.makePhrase( self, self.inputKeys, inPars, sqlPars, self.parent)) sql = base.joinOperatorExpr(self.joiner, res) if self.fixedSQL: sql = base.joinOperatorExpr(self.joiner, [sql, self.fixedSQL]) return sql def adaptForRenderer(self, renderer): """returns a changed version of self if renderer suggests such a change. This only happens if buildFrom is non-None. The method must return a "defused" version that has buildFrom None (or self, which will do because core.adaptForRenderer stops adapting if the condDescs are stable). The adaptors may also raise a Replace exception and return a full CondDesc; this is done, e.g., for spoints for the form renderer, since they need two input keys and a completely modified phrase. """ if not self.buildFrom: return self adaptor = inputdef.getRendererAdaptor(renderer) if adaptor is None: return self try: newInputKeys = [] for ik in self.inputKeys: newInputKeys.append(adaptor(ik)) if self.inputKeys==newInputKeys: return self else: return self.change(inputKeys=newInputKeys, buildFrom=None) except base.Replace, ex: return ex.newOb
class RD(base.Structure, base.ComputedMetaMixin, scripting.ScriptingMixin, base.StandardMacroMixin, common.PrivilegesMixin, registry.DateUpdatedMixin): """A resource descriptor (RD); the root for all elements described here. RDs collect all information about how to parse a particular source (like a collection of FITS images, a catalogue, or whatever), about the database tables the data ends up in, and the services used to access them. """ name_ = "resource" # this is set somewhere below once parsing has proceeded far enough # such that caching the RD make sense cacheable = False _resdir = base.FunctionRelativePathAttribute( "resdir", default=None, baseFunction=lambda instance: base.getConfig("inputsDir"), description="Base directory for source files and everything else" " belonging to the resource.", copyable=True) _schema = base.UnicodeAttribute( "schema", default=base.Undefined, description="Database schema for tables defined here. Follow the rule" " 'one schema, one RD' if at all possible. If two RDs share the same" " schema, the must generate exactly the same permissions for that" " schema; this means, in particular, that if one has an ADQL-published" " table, so must the other. In a nutshell: one schema, one RD.", copyable=True, callbacks=["_inferResdir"]) _dds = base.StructListAttribute( "dds", childFactory=rscdef.DataDescriptor, description="Descriptors for the data generated and/or published" " within this resource.", copyable=True, before="outputTables") _tables = base.StructListAttribute( "tables", childFactory=rscdef.TableDef, description="A table used or created by this resource", copyable=True, before="dds") _outputTables = base.StructListAttribute( "outputTables", childFactory=svcs.OutputTableDef, description="Canned output tables for later reference.", copyable=True) _rowmakers = base.StructListAttribute( "rowmakers", childFactory=rscdef.RowmakerDef, description="Transformations for going from grammars to tables." 
" If specified in the RD, they must be referenced from make" " elements to become active.", copyable=True, before="dds") _procDefs = base.StructListAttribute( "procDefs", childFactory=rscdef.ProcDef, description="Procedure definintions (rowgens, rowmaker applys)", copyable=True, before="rowmakers") _condDescs = base.StructListAttribute( "condDescs", childFactory=svcs.CondDesc, description="Global condition descriptors for later reference", copyable=True, before="cores") _resRecs = base.StructListAttribute( "resRecs", childFactory=registry.ResRec, description="Non-service resources for the IVOA registry. They will" " be published when gavo publish is run on the RD.") _services = base.StructListAttribute( "services", childFactory=svcs.Service, description="Services exposing data from this resource.", copyable=True) _macDefs = base.MacDefAttribute( before="tables", description="User-defined macros available on this RD") _mixinDefs = base.StructListAttribute( "mixdefs", childFactory=rscdef.MixinDef, description="Mixin definitions (usually not for users)") _require = base.ActionAttribute( "require", methodName="importModule", description="Import the named gavo module (for when you need something" " registred)") _cores = base.MultiStructListAttribute( "cores", childFactory=svcs.getCore, childNames=svcs.CORE_REGISTRY.keys(), description="Cores available in this resource.", copyable=True, before="services") _jobs = base.StructListAttribute( "jobs", childFactory=executing.Execute, description="Jobs to be run while this RD is active.") _tests = base.StructListAttribute( "tests", childFactory=regtest.RegTestSuite, description="Suites of regression tests connected to this RD.") # These replace themselves with expanded tables _viewDefs = base.StructAttribute( "simpleView", childFactory=rscdef.SimpleView, description="Definitions of views created from natural joins", default=None) _properties = base.PropertyAttribute() def __init__(self, srcId, **kwargs): # RDs never have 
parents, so contrary to all other structures they # are constructed with with a srcId instead of a parent. You # *can* have that None, but such RDs cannot be used to create # non-temporary tables, services, etc, since the srcId is used # in the construction of identifiers and such. self.sourceId = srcId base.Structure.__init__(self, None, **kwargs) # The rd attribute is a weakref on self. Always. So, this is the class # that roots common.RDAttributes self.rd = weakref.proxy(self) # real dateUpdated is set by getRD, this is just for RDs created # on the fly. self.dateUpdated = datetime.datetime.utcnow() # if an RD is parsed from a disk file, this gets set to its path # by getRD below self.srcPath = None # this is for modified-since and friends. self.loadedAt = time.time() # keep track of RDs depending on us for the registry code # (only read this) self.rdDependencies = set() def __iter__(self): return iter(self.dds) def __repr__(self): return "<resource descriptor for %s>" % self.sourceId def validate(self): if not utils.identifierPattern.match(self.schema): raise base.StructureError("DaCHS schema attributes must be valid" " python identifiers") def isDirty(self): """returns true if the RD on disk has a timestamp newer than loadedAt. """ if isinstance(self.srcPath, PkgResourcePath): # stuff from the resource package should not change underneath us. 
return False try: if self.srcPath is not None: return os.path.getmtime(self.srcPath) > self.loadedAt except os.error: # this will ususally mean the file went away return True return False def importModule(self, ctx): # this is a callback for the require attribute utils.loadInternalObject(self.require, "__doc__") def onElementComplete(self): for table in self.tables: self.readProfiles = self.readProfiles | table.readProfiles table.setMetaParent(self) self.serviceIndex = {} for svc in self.services: self.serviceIndex[svc.id] = svc svc.setMetaParent(self) for dd in self.dds: dd.setMetaParent(self) if self.resdir and not os.path.isdir(self.resdir): base.ui.notifyWarning( "RD %s: resource directory '%s' does not exist" % (self.sourceId, self.resdir)) self._onElementCompleteNext(RD) def _inferResdir(self, value): if self.resdir is None: self._resdir.feedObject(self, value) def iterDDs(self): return iter(self.dds) def getService(self, id): return self.serviceIndex.get(id, None) def getTableDefById(self, id): return self.getById(id, rscdef.TableDef) def getDataDescById(self, id): return self.getById(id, rscdef.DataDescriptor) def getById(self, id, forceType=None): try: res = self.idmap[id] except KeyError: raise base.NotFoundError(id, "Element with id", "RD %s" % (self.sourceId)) if forceType: if not isinstance(res, forceType): raise base.StructureError("Element with id '%s' is not a %s" % (id, forceType.__name__)) return res def getAbsPath(self, relPath): """returns the absolute path for a resdir-relative relPath. """ return os.path.join(self.resdir, relPath) def openRes(self, relPath, mode="r"): """returns a file object for relPath within self's resdir. Deprecated. This is going to go away, use getAbsPath and a context manager. """ return open(self.getAbsPath(relPath), mode) def getTimestampPath(self): """returns a path to a file that's accessed by Resource each time a bit of the described resource is written to the db. 
""" return os.path.join(base.getConfig("stateDir"), "updated_" + self.sourceId.replace("/", "+")) def touchTimestamp(self): """updates the timestamp on the rd's state file. """ fn = self.getTimestampPath() try: try: os.unlink(fn) except os.error: pass f = open(fn, "w") f.close() os.chmod(fn, 0664) try: os.chown(fn, -1, grp.getgrnam(base.getConfig("GavoGroup")[2])) except (KeyError, os.error): pass except (os.error, IOError): base.ui.notifyWarning("Could not update timestamp on RD %s" % self.sourceId) def _computeIdmap(self): res = {} for child in self.iterChildren(): if hasattr(child, "id"): res[child.id] = child return res def addDependency(self, rd, prereq): """declares that rd needs the RD prereq to properly work. This is used in the generation of resource records to ensure that, e.g. registred data have added their served-bys to the service resources. """ if rd.sourceId != prereq.sourceId: self.rdDependencies.add((rd.sourceId, prereq.sourceId)) def copy(self, parent): base.ui.notifyWarning("Copying an RD -- this may not be a good idea") new = base.Structure.copy(self, parent) new.idmap = new._computeIdmap() new.sourceId = self.sourceId return new def invalidate(self): """make the RD fail on every attribute read. See rscdesc._loadRDIntoCache for why we want this. """ errMsg = ("Loading of %s failed in another thread; this RD cannot" " be used here") % self.sourceId class BrokenClass(object): """A class that reacts to all attribute requests with a some exception. """ def __getattribute__(self, attributeName): if attributeName == "__class__": return BrokenClass raise base.ReportableError(errMsg) self.__class__ = BrokenClass def macro_RSTccby(self, stuffDesignation): """expands to a declaration that stuffDesignation is available under CC-BY. This only works in reStructured text (though it's still almost readable as source). """ return ("%s is licensed under the `Creative Commons Attribution 3.0" " License <http://creativecommons.org/licenses/by/3.0/>`_\n\n" ".. 
image:: /static/img/ccby.png\n\n") % stuffDesignation def macro_RSTcc0(self, stuffDesignation): """expands to a declaration that stuffDesignation is available under CC-0. This only works in reStructured text (though it's still almost readable as source). """ return ( "To the extent possible under law, the publisher has" " waived all copyright and related or neighboring rights to %s." " For details, see the `Creative Commons CC0 1.0" " Public Domain dedication" " <http://creativecommons.org/publicdomain/zero/1.0/>`_. Of course," " you should still give proper credit when using this data as" " required by good scientific practice.\n\n" ".. image:: /static/img/cc0.png\n\n") % stuffDesignation