Code example #1
File: processing.py  Project: GMBarra/Docker
	def _createAuxiliaries(self, dd):
		self.previewDir = dd.rd.getAbsPath(
			dd.getProperty("previewDir"))
		if not os.path.isdir(self.previewDir):
			os.makedirs(self.previewDir)
		self.conn = base.getDBConnection("trustedquery")
		FileProcessor._createAuxiliaries(self, dd)
Code example #2
    def run(self, service, inputTable, queryMeta):
        conn = base.getDBConnection(base.getDBProfile("admin"))
        table = rsc.TableForDef(self.queriedTable, connection=conn)
        table.addRow(inputTable.getParamDict())
        conn.commit()
        conn.close()
        return rsc.TableForDef(self.outputTable, rows=[{"nAffected": 1}])
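As a note on the design: the method above opens a dedicated admin connection and closes it only after a successful commit. A more defensive variant, sketched here under the assumption that the same base and rsc calls are available, would release the connection even when the insert fails:

    def run(self, service, inputTable, queryMeta):
        # Sketch only: wrap the write in try/finally so the admin
        # connection is released even if addRow or commit raises.
        conn = base.getDBConnection(base.getDBProfile("admin"))
        try:
            table = rsc.TableForDef(self.queriedTable, connection=conn)
            table.addRow(inputTable.getParamDict())
            conn.commit()
        finally:
            conn.close()
        return rsc.TableForDef(self.outputTable, rows=[{"nAffected": 1}])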
Code example #3
    def _makeConnection(self, kwargs):
        self.ownedConnection = False
        connection = kwargs.pop("connection", None)
        if connection is None:
            self.connection = base.getDBConnection("trustedquery")
            self.ownedConnection = True
        else:
            self.connection = connection
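The ownedConnection flag above marks whether the object opened the connection itself and is therefore responsible for closing it. A minimal sketch of the matching cleanup step follows; the method name is illustrative and not taken from the listing:

    def _closeConnection(self):
        # Hypothetical counterpart to _makeConnection: only close a
        # connection this object opened itself; a connection passed in
        # by the caller stays under the caller's control.
        if self.ownedConnection:
            self.connection.close()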
Code example #4
def _getProductsTable():
    """returns an instance of the products table.

    Clients should use the getProductsTable below to save the cost of
    constructing the table.
    """
    td = base.caches.getRD("//products").getById("products")
    conn = base.getDBConnection("admin", autocommitted=True)
    return rsc.TableForDef(td, connection=conn)
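The docstring refers to a getProductsTable accessor that caches the table, but that function is not part of the excerpt. The following is only a plausible sketch of such a module-level cache, with the variable name assumed:

_productsTable = None

def getProductsTable():
    # Assumed caching wrapper around _getProductsTable(): build the
    # table once and hand out the same instance on later calls.
    global _productsTable
    if _productsTable is None:
        _productsTable = _getProductsTable()
    return _productsTable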
Code example #5
def getQTableFromJob(parameters, jobId, queryProfile, timeout):
	"""returns a QueryTable for a TAP job.
	"""
	query, maxrec = _parseTAPParameters(jobId, parameters)
	connectionForQuery = base.getDBConnection(queryProfile)

	try:
		_noteWorkerPID(connectionForQuery)
	except: # Don't fail just because we can't kill workers
		base.ui.notifyError(
			"Could not obtain PID for the worker, job %s"%jobId)
	tdsForUploads = _ingestUploads(parameters.get("upload", ""), 
		connectionForQuery)

	base.ui.notifyInfo("taprunner executing %s"%query)
	return runTAPQuery(query, timeout, connectionForQuery,
		tdsForUploads, maxrec)
Code example #6
def updateServiceList(rds, metaToo=False, connection=None, onlyWarn=True,
		keepTimestamp=False):
	"""updates the services defined in rds in the services table in the database.
	"""
	recordsWritten = 0
	parseOptions = rsc.getParseOptions(validateRows=True, batchSize=20)
	if connection is None:
		connection = base.getDBConnection("admin")
	dd = common.getServicesRD().getById("tables")
	dd.grammar = _rdRscRecGrammar
	dd.grammar.keepTimestamp = keepTimestamp
	depDD = common.getServicesRD().getById("deptable")
	msg = None
	for rd in rds:
		if rd.sourceId.startswith("/"):
			raise base.Error("Resource descriptor ID must not be absolute, but"
				" '%s' seems to be."%rd.sourceId)

		deletedUpdater = getDeletedIdentifiersUpdater(connection, rd)

		try:
			data = rsc.makeData(dd, forceSource=rd, parseOptions=parseOptions,
				connection=connection)
			recordsWritten += data.nAffected
			rsc.makeData(depDD, forceSource=rd, connection=connection)

			if metaToo:
				from gavo.protocols import tap
				tap.unpublishFromTAP(rd, connection)
				for dependentDD in rd:
					rsc.Data.create(dependentDD, connection=connection).updateMeta()
				tap.publishToTAP(rd, connection)

			deletedUpdater()

		except base.MetaValidationError, ex:
			msg = ("Aborting publication of rd '%s' since meta structure of"
				" %s (id='%s') is invalid:\n * %s")%(
				rd.sourceId, repr(ex.carrier), ex.carrier.id, "\n * ".join(ex.failures))
		except base.NoMetaKey, ex:
			msg = ("Aborting publication of '%s' at service '%s': Resource"
				" record generation failed: %s"%(
				rd.sourceId, ex.carrier.id, str(ex)))
Code example #7
def makeData(dd,
             parseOptions=common.parseNonValidating,
             forceSource=None,
             connection=None,
             data=None,
             runCommit=True):
    """returns a data instance built from dd.

    It will arrange for the parsing of all tables generated from dd's grammar.
    If connection is passed in, the entire operation will run within a
    single transaction within this connection.  The connection will be
    rolled back or committed depending on the success of the operation
    (unless you pass runCommit=False, in which case a successful
    import will not be committed).

    You can pass in a data instance created by yourself in data.  This
    makes sense if you want to, e.g., add some meta information up front.
    """
    # Some proc setup does expensive things like actually building data.
    # We don't want that when validating and return some empty data thing.
    if getattr(base, "VALIDATING", False):
        return Data(dd, _TableCornucopeia())

    if connection is None:
        connection = base.getDBConnection("admin")

    if data is None:
        res = Data.create(dd, parseOptions, connection=connection)
    else:
        res = data
    res.recreateTables(connection)

    feederOpts = {
        "batchSize": parseOptions.batchSize,
        "runCommit": runCommit,
        "dumpIngestees": parseOptions.dumpIngestees
    }
    if dd.grammar and dd.grammar.isDispatching:
        feederOpts["dispatched"] = True

    with res.getFeeder(connection=connection, **feederOpts) as feeder:
        if forceSource is None:
            for source in dd.iterSources(connection):
                try:
                    processSource(res, source, feeder, parseOptions,
                                  connection)
                except _EnoughRows:
                    base.ui.notifyWarning(
                        "Source hit import limit, import aborted.")
                    break
                except base.SkipThis:
                    continue
        else:
            processSource(res, forceSource, feeder, parseOptions, connection)

    res.validateParams()

    if runCommit:
        res.commitAll()
    res.nAffected = feeder.getAffected()

    if parseOptions.buildDependencies:
        makeDependentsFor([dd], parseOptions, connection)

    return res
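A hedged usage sketch of makeData under an explicitly managed connection; the RD path and data descriptor id below are placeholders, not taken from the listing:

# Sketch only: run one import inside a connection the caller controls.
conn = base.getDBConnection("admin")
dd = base.caches.getRD("myres/q").getById("import")  # hypothetical RD/DD ids
data = makeData(dd, connection=conn)
nImported = data.nAffected  # number of rows ingested, as set above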
Code example #8
	def _createObjects(self):
		self.readerConnection = base.getDBConnection(
			"trustedquery", autocommitted=True)