def _add_doc(self, doc_type, doc_title, doc_xml):
    """
    Create a document on this tier which exists only on PROD.

    Pass:
      doc_type - string naming the CDR document type
      doc_title - string for the document's title
      doc_xml - serialized XML for the document to be created
    """
    self._logger.info("adding %r document %r", doc_type, doc_title)

    # Wrap the XML in the CdrDoc wrapper and ask the server to store it.
    wrapper = cdr.makeCdrDoc(doc_xml, doc_type, ctrl={"DocTitle": doc_title})
    doc_id = cdr.addDoc(self._session, doc=wrapper, checkIn="N",
                        comment=Job.COMMENT, reason=Job.COMMENT)
    problem = cdr.checkErr(doc_id)
    if problem:
        self._logger.error("failure creating document: %s", problem)
        return

    # A newly created document must be versioned and unlocked separately.
    locked = self._lock_doc(doc_id)
    if not locked:
        return
    response = cdr.repDoc(self._session, doc=str(locked), checkIn="Y",
                          val="Y", ver="Y", reason=Job.COMMENT,
                          comment=Job.COMMENT)
    problem = cdr.checkErr(response)
    if problem:
        self._logger.error("failure unlocking %s: %s", doc_id, problem)
def main():
    """
    Create a stub CDR Filter document on the PROD server.

    Logs in (prompting for a password when no session ID was supplied),
    stores a skeleton XSL/T filter document with the requested title,
    unlocks the new document, and writes the stub to <CDR-ID>.xml in
    the current working directory.
    """
    parser = create_parser()
    opts = parser.parse_args()
    title = opts.title
    if not title:
        parser.error("empty title argument")

    # BUG FIX: the original code called the Python 2 `unicode` built-in,
    # which does not exist in Python 3 (the script otherwise uses the
    # Python 3 print() function). argparse hands us `str` values; only
    # raw bytes need decoding.
    if isinstance(title, bytes):
        title = title.strip().decode("latin-1")
    if "--" in title:
        parser.error("filter title cannot contain --")

    # Re-use an existing session or log into PROD.
    if not opts.session:
        password = getpass.getpass()
        session = cdr.login(opts.user, password, tier="PROD")
        error = cdr.checkErr(session)
        if error:
            parser.error(error)
    else:
        session = opts.session

    # Skeleton filter document, with the title embedded in a comment.
    stub = u"""\
<?xml version="1.0" ?>
<!-- Filter title: {} -->
<xsl:transform xmlns:xsl = "http://www.w3.org/1999/XSL/Transform"
               xmlns:cdr = "cips.nci.nih.gov/cdr"
                   version = "1.0">

 <xsl:output method = "xml" encoding = "utf-8"/>

 <xsl:param name = "sample-param" select = "'default-value'"/>

 <!-- Sample template -->
 <xsl:template match = "@*|node()">
  <xsl:copy>
   <xsl:apply-templates select = "@*|node()"/>
  </xsl:copy>
 </xsl:template>

</xsl:transform>
""".format(escape(title)).encode("utf-8")

    # Store the stub document on PROD and unlock it.
    title = title.encode("utf-8")
    ctrl = dict(DocTitle=title)
    doc_opts = dict(doctype="Filter", ctrl=ctrl, encoding="utf-8")
    doc = cdr.Doc(stub, **doc_opts)
    cdr_id = cdr.addDoc(session, doc=str(doc), tier="PROD")
    error = cdr.checkErr(cdr_id)
    if error:
        parser.error(error)
    response = cdr.unlock(session, cdr_id, tier="PROD")
    error = cdr.checkErr(response)
    if error:
        parser.error(error)

    # Save a local copy of the stub, named after the new CDR ID.
    name = cdr_id + ".xml"
    with open(name, "wb") as fp:
        fp.write(stub)
    print("Created {}".format(name))

    # Only drop the session if we created it ourselves.
    if not opts.session:
        cdr.logout(session, tier="PROD")
def main():
    """
    Top-level entry point.

    Installs a filter document (created earlier on PROD) on the
    development or test tier and prints the new CDR ID on success.
    """

    # Process the command-line arguments.
    parser = create_parser()
    opts = parser.parse_args()

    # Refuse to run against the production server.
    if (not opts.tier and cdr.isProdHost()) or opts.tier == "PROD":
        parser.error("""
This program can only be used to install a filter on the development
or test server, not production. Use CreateFilter.py to create the
filter in the production database, then use InstallFilter.py to
install it in test or development with the same title/name and
(almost certainly) a different local CDR ID.
""")

    # Re-use the caller's session ID, or log in to create one.
    if opts.session:
        session = opts.session
    else:
        password = getpass.getpass()
        session = cdr.login(opts.user, password, tier=opts.tier)
        problem = cdr.checkErr(session)
        if problem:
            parser.error(problem)

    # Load the document and make sure the filter isn't already installed.
    info = DocInfo(opts.filename, parser)
    info.check_unique_title(opts.tier, parser)

    # All checks passed: add the document.
    ctrl = dict(DocTitle=info.title.encode("utf-8"))
    doc = cdr.Doc(info.xml, doctype="Filter", ctrl=ctrl, encoding="utf-8")
    add_opts = dict(doc=str(doc), comment="New filter install",
                    tier=opts.tier)
    cdr_id = cdr.addDoc(session, **add_opts)
    problem = cdr.checkErr(cdr_id)
    if problem:
        parser.error(problem)

    # Unlock the document and display its ID.
    response = cdr.unlock(session, cdr_id, tier=opts.tier)
    problem = cdr.checkErr(response)
    if problem:
        parser.error(problem)
    print(cdr_id)
def _mod_doc(self, doc_type, doc_title, doc_xml, doc_id):
    """
    Store a new version for a document which differed between the tiers.

    Pass:
      doc_type - string naming the CDR document type
      doc_title - string for the document's title
      doc_xml - serialized XML preserved from PROD
      doc_id - integer for the document's CDR ID on this tier
    """
    self._logger.info("updating %r document %r (CDR%d)",
                      doc_type, doc_title, doc_id)

    # Lock the document, breaking any existing locks if necessary.
    locked = self._lock_doc(doc_id)
    if not locked:
        return

    # Plug in the preserved XML from PROD and create the new version.
    locked.xml = doc_xml.encode("utf-8")
    locked.ctrl["DocTitle"] = doc_title.encode("utf-8")
    response = cdr.repDoc(self._session, doc=str(locked), checkIn="Y",
                          val="Y", ver="Y", reason=Job.COMMENT,
                          comment=Job.COMMENT)
    problem = cdr.checkErr(response)
    if problem:
        self._logger.error("failure saving %s: %s",
                           cdr.normalize(doc_id), problem)
def replace(self):
    """
    Store a new version of an existing CDR document (unless testing).

    Return:
      True, which is bubbled up to the main loop in `run()`
    """
    if self.control.opts.test:
        self.control.logger.info("%s is changed", self.name)
        return True

    # Break any existing lock so we can store the replacement.
    cdr.checkOutDoc(self.control.session, self.id, force="Y")

    # Build the CdrDoc wrapper for the replacement document.
    doc_opts = dict(
        type=self.doctype,
        encoding="utf-8",
        ctrl={"DocTitle": self.title},
        id=cdr.normalize(self.id),
    )
    wrapper = cdr.Doc(self.xml, **doc_opts)

    # Save a new (possibly publishable) version and check it in.
    save_opts = dict(
        doc=str(wrapper),
        checkIn="Y",
        ver="Y",
        comment="Updated by install-docset.py",
        publishable=self.control.PUBLISHABLE,
    )
    cdr_id = cdr.repDoc(self.control.session, **save_opts)
    problem = cdr.checkErr(cdr_id)
    if problem:
        self.control.logger.error(problem)
        sys.exit(1)
    self.control.logger.info("replaced %s (%s)", self.name, cdr_id)
    return True
def login(self):
    """
    Establish a CDR login session for adding/updating the documents.

    Return:
      session identifier (or None when running in test mode)
    """
    if self.opts.test:
        return None
    password = cdr.getpw(self.ACCOUNT)
    if not password:
        self.logger.error("account password not found")
        sys.exit(1)
    session = cdr.login(self.ACCOUNT, password)
    problem = cdr.checkErr(session)
    if not problem:
        return session
    self.logger.error(problem)
    sys.exit(1)
def _lock_doc(self, doc_id):
    """
    Check out an existing CDR document, stealing the lock if necessary.

    Pass:
      doc_id - identifier for the document to be checked out

    Return:
      locked document object on success, otherwise None
    """
    # If another account holds the lock, break it first.
    locker = self._find_locker(doc_id)
    if locker:
        if locker.lower() != self._uid.lower():
            if not self._unlock_doc(doc_id):
                return None

    # Fetch the document with a lock.
    doc = cdr.getDoc(self._session, doc_id, checkout="Y", getObject=True)
    problem = cdr.checkErr(doc)
    if problem:
        self._logger.error("failure locking %s: %r",
                           cdr.normalize(doc_id), problem)
        return None
    return doc
def versionChanges(session, docId):
    """
    Create a checked-in version capturing a document's unversioned changes.

    Pass:
      session - CDR session identifier
      docId - integer for the document's CDR ID

    Return:
      True on success, otherwise False
    """
    LOGGER.info("saving unversioned changes for CDR%d", docId)
    doc = cdr.getDoc(session, docId, 'Y')
    problem = cdr.checkErr(doc)
    if problem:
        LOGGER.error("failure for CDR%d: %s", docId, problem)
        return False
    docId, errors = cdr.repDoc(session, doc=doc, comment=COMMENT,
                               reason=COMMENT, val='Y', ver='Y',
                               showWarnings='Y', checkIn='Y',
                               verPublishable='N')
    if errors:
        for error in cdr.getErrors(errors, asSequence=True):
            LOGGER.error(error)
    return bool(docId)
def add(self):
    """
    Create the document in the CDR repository (unless testing).

    Return:
      True, which is bubbled up to the main loop in `run()`
    """
    if self.control.opts.test:
        self.control.logger.info("%s is new", self.name)
        return True

    # Build the CdrDoc wrapper for the new document.
    doc_opts = dict(
        type=self.doctype,
        encoding="utf-8",
        ctrl={"DocTitle": self.title},
    )
    wrapper = cdr.Doc(self.xml, **doc_opts)

    # Save a first (possibly publishable) version and check it in.
    save_opts = dict(
        doc=str(wrapper),
        checkIn="Y",
        ver="Y",
        comment="Added by install-docset.py",
        publishable=self.control.PUBLISHABLE,
    )
    cdr_id = cdr.addDoc(self.control.session, **save_opts)
    problem = cdr.checkErr(cdr_id)
    if problem:
        self.control.logger.error(problem)
        sys.exit(1)
    self.control.logger.info("added %s as %s", self.name, cdr_id)
    return True
def main():
    """
    Store the new version of the filter.

    Processing steps:

      1. Parse the command-line options and arguments.
      2. Load the new version of the filter from the file system.
      3. Log into the CDR on the target server.
      4. Find the CDR ID which matches the filter title.
      5. Check out the document from the target CDR server.
      6. Store the new version on the target CDR server.
      7. Report the number of the new version.
      8. Clean up.
    """

    #------------------------------------------------------------------
    # 1. Parse the command-line options and arguments.
    #------------------------------------------------------------------
    parser = create_parser()
    opts = parser.parse_args()
    pub = "Y" if opts.publishable else "N"

    # If no comment is specified the last comment used (from the
    # all_docs table) would be stored.
    # Setting the comment to something to overwrite the last comment.
    comment = opts.comment or "Replaced w/o user comment"

    #------------------------------------------------------------------
    # 2. Load the new version of the filter from the file system.
    #------------------------------------------------------------------
    with open(opts.filename) as fp:
        xml = fp.read()
    if "]]>" in xml:
        parser.error("CdrDoc wrapper must be stripped from the file")

    #------------------------------------------------------------------
    # 3. Log into the CDR on the target server.
    #------------------------------------------------------------------
    if opts.session:
        session = opts.session
    else:
        password = getpass.getpass()
        session = cdr.login(opts.user, password, tier=opts.tier)
        error_message = cdr.checkErr(session)
        if error_message:
            parser.error(error_message)

    #------------------------------------------------------------------
    # 4. Find out what the filter's document ID is.
    #------------------------------------------------------------------
    doc_id = get_doc_id(xml, opts.tier, session)

    #------------------------------------------------------------------
    # 5. Check out the document from the target CDR server.
    #------------------------------------------------------------------
    args = dict(checkout="Y", getObject=True, tier=opts.tier)
    doc = cdr.getDoc(session, doc_id, **args)
    error_message = cdr.checkErr(doc)
    if error_message:
        parser.error(error_message)

    #------------------------------------------------------------------
    # 6. Store the new version on the target CDR server.
    #------------------------------------------------------------------
    doc.xml = xml.encode("utf-8")
    args = dict(
        doc=str(doc),
        checkIn="Y",
        setLinks="N",
        reason=comment,
        comment=comment,
        ver="Y",
        verPublishable=pub,
        tier=opts.tier
    )
    doc_id = cdr.repDoc(session, **args)
    if not doc_id.startswith("CDR"):
        # BUG FIX: on failure repDoc returns the error string itself.
        # The old code reported the stale `error_message` left over from
        # step 5 (necessarily falsy if we got this far), which hid the
        # real reason for the failure.
        parser.error(doc_id)

    #------------------------------------------------------------------
    # 7. Report the number of the latest version.
    #------------------------------------------------------------------
    versions = cdr.lastVersions(session, doc_id, tier=opts.tier)
    print("Saved {} as version {}".format(doc_id, versions[0]))

    #------------------------------------------------------------------
    # 8. Clean up (only drop a session we created ourselves).
    #------------------------------------------------------------------
    if not opts.session:
        cdr.logout(session, tier=opts.tier)
# NOTE(review): the first statement below is the tail of a cdr.repDoc(...)
# call (and the end of its enclosing function) whose opening lines fall
# outside this chunk; only the top-level setup code after it is complete.
                               checkIn='Y', verPublishable='N')
    # Log every error returned with the save attempt.
    if errors:
        for e in cdr.getErrors(errors, asSequence=True):
            LOGGER.error(e)
    # Truthy docId means the save succeeded.
    return docId and True or False

#----------------------------------------------------------------------
# Processing starts here with setup.
#----------------------------------------------------------------------
if len(sys.argv) < 4:
    sys.stderr.write("usage: %s uid pwd doctype\n" % sys.argv[0])
    sys.exit(1)

# Log into the CDR server with the command-line credentials.
session = cdr.login(sys.argv[1], sys.argv[2])
errors = cdr.checkErr(session)
if errors:
    sys.stderr.write("login failure: %s" % errors)
    sys.exit(1)
docType = sys.argv[3]

# Read-only database cursor; generous timeout for the big query below.
cursor = db.connect(user='******', timeout=300).cursor()

#----------------------------------------------------------------------
# Determine the last version number for each versioned document of
# the specified document type.  Be sure to use the document type
# for the current working document instead of the version table,
# so we do the right thing for documents whose last version was
# saved as a different document type than the current working
# document has.  The other effect of joining on the document table
# (or rather, view) is to avoid doing anything with deleted documents.
#----------------------------------------------------------------------
# NOTE(review): the first line below closes a call (or save-options dict)
# whose opening falls outside this chunk; `save_opts` and `xml` are also
# defined in that unseen portion.
                 tier=opts.tier, showWarnings=True)

# See if we already have the document installed.
doctype = "SweepSpecifications"
query = "CdrCtl/Title contains %"
result = cdr.search("guest", query, doctypes=[doctype], tier=opts.tier)
if len(result) > 1:
    raise Exception("Can't have more than one sweep spec document")

# If the document already exists, create a new version.
if result:
    doc_id = result[0].docId
    # Fetch the existing document with a lock so we can replace it.
    args = dict(checkout="Y", getObject=True, tier=opts.tier)
    doc = cdr.getDoc(opts.session, doc_id, **args)
    error_message = cdr.checkErr(doc)
    if error_message:
        parser.error(error_message)
    doc.xml = xml
    save_opts["doc"] = str(doc)
    doc_id, warnings = cdr.repDoc(opts.session, **save_opts)

# Otherwise, create the document (with a first version).
else:
    doc = cdr.Doc(xml, doctype, encoding="utf-8")
    save_opts["doc"] = str(doc)
    doc_id, warnings = cdr.addDoc(opts.session, **save_opts)

# Let the user know how things went; a falsy doc_id means the save failed.
if warnings:
    print((doc_id and "WARNINGS" or "ERRORS"))
def expireMeetingRecordings(self, testMode):
    """
    This is a "Custom" routine that sweeps away MP3 format meeting
    recordings that have passed their useful life.

    Implemented for JIRA Issue OCECDR-3886.

    Pass:
        testMode
            True  = Don't actually delete any blobs, just report.
            False = Update docs and delete blobs.
    """
    cursor = None
    session = None

    # Need a connection to the CDR Server.
    session = cdr.login('FileSweeper', cdr.getpw('FileSweeper'))
    if not session:
        FS_LOGGER.error("FileSweeper login to CdrServer failed")
        # But no reason not to do the rest of the sweep.
        return

    # And a read-only connection to the database.
    try:
        conn = db.connect()
        cursor = conn.cursor()
    except Exception as e:
        FS_LOGGER.exception("attempting DB connect")
        # But continue with the sweep.
        cleanSession(cursor, session)
        return

    # Today's SQL Server date.
    try:
        cursor.execute("SELECT GETDATE()")
        now = cursor.fetchone()[0]
    except Exception as e:
        FS_LOGGER.exception("getting DB date")
        cleanSession(cursor, session)
        return

    # Only want YYYY-MM-DD, not HMS.
    nowDate = str(now)[:10]

    # Locate all Media documents linked to meeting recordings that
    # are older than Oldest days.
    # This is done by checking for any ADD DOCUMENT transaction in the
    # audit trail for one of the qualifying documents.  If any ADD was
    # performed before the Oldest value, then there was a version of
    # the meeting recording from before that date.
    # The Media doc must also be found in one of the ...blob_usage tables.
    # If not, then any blob associated with it has already been deleted.
    isoFmt = "%Y-%m-%d"
    # NOTE(review): presumably self.oldSpec is a Unix timestamp for the
    # oldest allowed ADD date — confirm against the spec-parsing code.
    earlyDate = \
        datetime.datetime.fromtimestamp(self.oldSpec).strftime(isoFmt)

    # DEBUG
    msg = "Looking for meeting recordings older than %s"
    FS_LOGGER.debug(msg, earlyDate)

    qry = """
        SELECT d.id, d.title
          FROM document d
          JOIN query_term qt
            ON qt.doc_id = d.id
          JOIN audit_trail at
            ON at.document = d.id
          JOIN action act
            ON act.id = at.action
         WHERE qt.path = '/Media/MediaContent/Categories/Category'
           AND qt.value = 'meeting recording'
           AND act.name = 'ADD DOCUMENT'
           AND at.dt <= '%s'
           AND (
            d.id IN (
                SELECT doc_id
                  FROM doc_blob_usage
            )
            OR d.id IN (
                SELECT doc_id
                  FROM version_blob_usage
            )
           )
    """ % earlyDate

    # Read the info into memory.
    try:
        cursor.execute(qry)
        rows = cursor.fetchall()
    except Exception as e:
        FS_LOGGER.exception("attempting to locate old blobs")
        cleanSession(cursor, session)
        return

    # If there weren't any, that's normal and okay.
    if len(rows) == 0:
        FS_LOGGER.info("No meeting recordings needed to be deleted")
        cleanSession(cursor, session)
        return

    # Do we need to lock and load the docs for update?
    checkOut = 'Y'
    if testMode:
        checkOut = 'N'

    #-------------------------------------------------------------------
    # We've got some to delete.
    # For each Media document:
    #   Send a transaction to the CDR Server to do the following:
    #     Add a ProcessingStatus to the Media document to say what happened
    #     Delete all of the blobs.
    #-------------------------------------------------------------------
    for row in rows:
        docId, title = row

        # Fetch the original document.
        # We'll do this even in test mode to test the xml mods.
        try:
            docObj = cdr.getDoc(session, docId, checkout=checkOut,
                                getObject=True)
        except Exception as e:
            FS_LOGGER.exception("attempting to fetch doc %d", docId)
            cleanSession(cursor, session)
            return

        # Test for retrieval error, e.g., locked doc.
        err = cdr.checkErr(docObj)
        if err:
            message = "Failed getDoc for CDR ID %s: %s, continuing"
            FS_LOGGER.error(message, docId, err)
            continue

        # Parse the xml preparatory to modifying it.
        mediaRoot = et.fromstring(docObj.xml)

        # Create the new Comment field to record what we did.
        # Make it the last subelement of the Media document element.
        # It has to be there.
        comment = et.SubElement(mediaRoot, 'Comment',
                                audience='Internal',
                                user='******',
                                date=nowDate)
        comment.text = "Removed meeting recording object after expiration"

        # Back to serial XML.
        newXml = et.tostring(mediaRoot)

        # If we're testing, just log what we would have done.
        if testMode:
            # For log file.
            actionMsg = 'would delete'
        else:
            # Send the doc back to the database:
            #   Wrapped in CdrDoc wrapper
            #   With command to delete all blobs
            actionMsg = 'deleted'
            saveXml = cdr.makeCdrDoc(newXml, 'Media', docObj.id)
            # NOTE(review): other call sites in this codebase pass
            # checkIn="Y" to repDoc; confirm that `check_in=True` is an
            # accepted spelling of that keyword in the cdr API.
            response = cdr.repDoc(session, doc=saveXml,
                                  comment='Removed meeting recording blobs',
                                  delAllBlobVersions=True, check_in=True)

            # Check response.
            if not response[0]:
                errors = cdr.getErrors(response[1], errorsExpected=True,
                                       asSequence=False)
                message = "Saving Media xml for doc %s: %s"
                FS_LOGGER.error(message, docObj.id, errors)
                FS_LOGGER.info("Aborting expireMeetingRecords()")

                # Stop doing this, but continue rest of file sweeps.
                cleanSession(cursor, session)
                return

        # Log results for this media recording.
        args = actionMsg, docId, title
        msg = "FileSweeper %s blobs for cdrId: %s\n%s"
        FS_LOGGER.info(msg, *args)

    # Cleanup.
    cleanSession(cursor, session)
# -------------------------------------
logger = cdr.Logging.get_logger("RemoveProdGroups", console=True)
logger.info('RemoveProdGroups - Started')
logger.debug('Arguments: %s', opts)

# Live or test mode.
testMode = opts.runmode == "test"
tier = opts.tier

# Log into the CDR on the target server, re-using the caller's session
# when one was supplied on the command line.
if opts.session:
    session = opts.session
else:
    password = getpass.getpass()
    session = cdr.login(opts.user, password, tier=opts.tier)
    error_message = cdr.checkErr(session)
    if error_message:
        parser.error(error_message)

# Do the work and report the outcome.
error_count = updateGroups(session, testMode, tier)
logger.info('RemoveProdGroups - Finished')
logger.info('Missing groups: %d', error_count)
sys.exit(0)