def test_debug_and_info(self):
    """Exercise Ingest.debug/info/printinfo console output behavior."""
    handle = desdbi.DesDbi(self.sfile, 'db-test')
    ingester = Ingest.Ingest('cat_finalcut', 'test.junk', dbh=handle)

    def captured(func, text):
        # Run func(text) and return whatever it printed to stdout.
        with capture_output() as (out, err):
            func(text)
        return out.getvalue().strip()

    msg = "test message"
    # With debugging disabled, debug() must be silent.
    ingester._debug = False
    self.assertEqual(captured(ingester.debug, msg), "")
    # With debugging enabled, the message must appear.
    ingester._debug = True
    self.assertTrue(msg in captured(ingester.debug, msg))

    # info() and printinfo() always emit the message.
    msg = "info message"
    self.assertTrue(msg in captured(ingester.info, msg))
    self.assertTrue(msg in captured(ingester.printinfo, msg))
def test_ingest_datafile_contents(self):
    """End-to-end check of diu.ingest_datafile_contents row counts.

    Walks four scenarios against self.table: a full 1000-row ingest, an
    ingest with the last column absent, empty metadata + empty data
    (0 rows), and real metadata with empty data (1 row).
    """
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    cur = dbh.cursor()
    # Table starts with no rows for this filename.
    res = cur.execute(f"select count(*) from {self.table:s} where filename='test.fits'")
    self.assertEqual(res.fetchall()[0][0], 0)
    self.assertFalse(diu.is_ingested('test.fits', self.table, dbh))
    data = {'TESTER': make_fits().data}
    # Full ingest: reports 1000 rows and marks the file as ingested.
    res = diu.ingest_datafile_contents('test.fits', 'test-ingest', self.table, self.metadata, data, dbh)
    self.assertEqual(res, 1000)
    self.assertTrue(diu.is_ingested('test.fits', self.table, dbh))
    res = cur.execute(f"select count(*) from {self.table:s} where filename='test.fits'")
    self.assertEqual(res.fetchall()[0][0], 1000)
    # Ingest with the final column missing still loads all 1000 rows.
    data = make_fits(lastcol=False).data
    res = diu.ingest_datafile_contents('test2.fits', 'test-ingest', self.table, self.metadata, {'TESTER': data}, dbh)
    res = cur.execute(f"select count(*) from {self.table:s} where filename='test2.fits'")
    self.assertEqual(res.fetchall()[0][0], 1000)
    # Empty metadata and empty data: nothing is ingested.
    res = diu.ingest_datafile_contents('test3.fits', 'test-ingest', self.table, {'TESTER':{}}, {'TESTER': {}}, dbh)
    res = cur.execute(f"select count(*) from {self.table:s} where filename='test3.fits'")
    self.assertEqual(res.fetchall()[0][0], 0)
    # Real metadata with empty data yields a single row.
    res = diu.ingest_datafile_contents('test4.fits', 'test-ingest', self.table, self.metadata, {'TESTER': {}}, dbh)
    res = cur.execute(f"select count(*) from {self.table:s} where filename='test4.fits'")
    self.assertEqual(res.fetchall()[0][0], 1)
def test_blanks(self):
    """A freshly constructed Ingest reports an empty, unloaded state."""
    handle = desdbi.DesDbi(self.sfile, 'db-test')
    ingester = Ingest.Ingest('cat_finalcut', 'test.junk', dbh=handle)
    self.assertEqual(0, ingester.getNumObjects())
    self.assertIsNone(ingester.generateRows())
    self.assertEqual(0, ingester.numAlreadyIngested())
    self.assertFalse(ingester.isLoaded())
def main(argv):
    """Run the finalcut hung-job report, optionally teeing output to a file.

    Parameters
    ----------
    argv : list
        Command line arguments
    """
    args = parse_cmdline(argv)
    dbh = desdbi.DesDbi(args['des_services'], args['section'])
    pipeline = 'finalcut'
    stdp = None
    stde = None
    fh = None
    # Remember the real streams so they can be restored afterwards
    # (the original left sys.stdout/err pointing at closed wrappers).
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr
    try:
        if args['file'] is not None:
            # BUG FIX: buffering=0 is only valid for binary files in
            # Python 3; use line buffering so output still flushes promptly.
            fh = open(args['file'], 'w', buffering=1)
            fh.write('<html><body><pre>\n')
            stdp = Print(fh)
            stde = Err(fh)
            sys.stdout = stdp
            sys.stderr = stde
        attinfo = query_attempts(dbh, pipeline, args['days'])
        save_att_taskids(attinfo, dbh)
        taskinfo, trans_taskinfo = query_tasks(dbh)
        find_hung(attinfo, taskinfo)
        find_trans_hung(attinfo, trans_taskinfo)
    finally:
        # Always restore the real streams, even if a query raised.
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        if stdp is not None:
            fh.write('</pre></body></html>')
            fh.close()
            stdp.close()
            stde.close()
def __init__(self, filetype, datafile, hdu=None, order=None, dbh=None):
    """Set up an ingest for one datafile.

    Parameters
    ----------
    filetype : str
        Filetype; resolved to the destination table via ops_datafile_table.
    datafile : str
        Full path of the file to ingest.
    hdu : optional
        HDU holding the object data.
    order : optional
        Column ordering specification.
    dbh : optional
        Open desdbi handle; a new default connection is made when None.
    """
    self.objhdu = hdu
    if dbh is None:
        self.dbh = desdbi.DesDbi()
    else:
        self.dbh = dbh
    self.cursor = self.dbh.cursor()
    # get the table name that is being filled, based on the input data type
    # NOTE(review): filetype is interpolated directly into the SQL; assumed
    # to come from trusted config -- confirm, or switch to a bind parameter.
    self.cursor.execute(
        f"select table_name from ops_datafile_table where filetype='{filetype}'"
    )
    # Raises IndexError if the filetype has no ops_datafile_table entry.
    self.targettable = self.cursor.fetchall()[0][0]
    self.filetype = filetype
    self.idColumn = None
    self.order = order
    self.fileColumn = None
    self.constants = {}
    self.orderedColumns = []
    self.sqldata = []
    self.fullfilename = datafile
    self.shortfilename = ingestutils.getShortFilename(datafile)
    # 0 means "no error so far".
    self.status = 0
    # dictionary of table columns in db
    self.dbDict = self.getObjectColumns()
def test_basic_operation(self):
    """DBSemaphore lifecycle: acquiring logs request/grant, deletion releases.

    Verifies SEMINFO gains exactly one row (request/grant stamped, release
    null while held), that SEMLOCK shows one slot in use, and that deleting
    the semaphore frees the slot and stamps the release time.
    """
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    cur = dbh.cursor()
    semname = 'mock-in'
    # Baseline number of SEMINFO rows before acquiring.
    cur.execute("select count(*) from SEMINFO where name='%s'" % semname)
    count = cur.fetchall()[0][0]
    #self.assertEqual(count, 3)
    sem = semaphore.DBSemaphore('mock-in', 123456, self.sfile, 'db-test')
    res = dbh.query_simple(
        'SEMINFO', ['ID', 'REQUEST_TIME', 'GRANT_TIME', 'RELEASE_TIME'],
        orderby='REQUEST_TIME')
    # Exactly one new SEMINFO row for this acquisition, not yet released.
    self.assertEqual(len(res) - count, 1)
    self.assertIsNotNone(res[-1]['request_time'])
    self.assertIsNotNone(res[-1]['grant_time'])
    self.assertIsNone(res[-1]['release_time'])
    myid = res[-1]['id']
    # The lock slot is marked in use while the semaphore is held.
    cur.execute(
        "select count(*) from semlock where name='%s' and in_use!=0" % semname)
    self.assertEqual(cur.fetchall()[0][0], 1)
    del sem
    # Dropping the semaphore frees the slot...
    cur.execute(
        "select count(*) from semlock where name='%s' and in_use!=0" % semname)
    self.assertEqual(cur.fetchall()[0][0], 0)
    # ...and stamps the release time on our SEMINFO row.
    res = dbh.query_simple(
        'SEMINFO', ['ID', 'REQUEST_TIME', 'GRANT_TIME', 'RELEASE_TIME'],
        {'ID': myid})
    self.assertIsNotNone(res[-1]['request_time'])
    self.assertIsNotNone(res[-1]['grant_time'])
    self.assertIsNotNone(res[-1]['release_time'])
def test_ingest(self):
    """Run datafile_ingest.main and check its reported count matches the DB."""
    os.environ['DES_SERVICES'] = self.sfile
    os.environ['DES_DB_SECTION'] = 'db-test'
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    # BUG FIX: the original stashed argv in `temp` but reused `temp` inside
    # the output-parsing loop, so `sys.argv = temp` at the end "restored"
    # a string fragment instead of the original argv.
    saved_argv = copy.deepcopy(sys.argv)
    try:
        sys.argv = ['datafile_ingest.py',
                    '--filename',
                    '/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_hpix.fits',
                    '--filetype', 'red_hpix']
        with capture_output() as (out, err):
            dfi.main()
        output = out.getvalue().strip()
        count = 0
        table = 'SE_OBJECT_HPIX'
        for line in output.split('\n'):
            if 'ingest of' in line:
                # Line looks like "... ingest of <file>, NNN rows ...".
                count = int(line.split(',')[1].split()[0])
        curs = dbh.cursor()
        curs.execute('select count(*) from ' + table)
        res = curs.fetchall()[0][0]
        self.assertEqual(res, count)
    finally:
        sys.argv = saved_argv
def query_run(self, reqnum=None, user_db=None, attnum=None, help_txt=False):
    """Query PFW attempts for a reqnum/user, checking files exist on disk.

    Parameters
    ----------
    reqnum : int
    attnum : list of int, or the string "all"
    user_db : str
        User as listed in the DB whose attempts are queried.
    help_txt : bool
        When True, print help() for the DB handle.

    Returns
    -------
    numpy structured array with the query results, or None when no rows.
    """
    if (reqnum is None) or (user_db is None) or (attnum is None):
        # BUG FIX: these were Python 2 print statements (syntax errors
        # under Python 3, which the rest of the file requires).
        print("Not enough data for query")
        exit(1)
    if attnum == "all":
        aux_q1 = ""
    elif isinstance(attnum, list):
        aux_q1 = " and a.attnum in (" + ",".join(map(str, attnum)) + ")"
    else:
        # BUG FIX: any other attnum left aux_q1 undefined, producing a
        # NameError below; fail explicitly instead.
        raise ValueError('attnum must be a list of integers or "all"')
    print("\tQuery on req/user/attnums: {0}/{1}/{2}".format(
        reqnum, user_db.upper(), attnum))
    q1 = "select a.unitname,a.attnum,a.reqnum,a.id,a.user_created_by,"
    q1 += " a.data_state,fai.path"
    q1 += " from pfw_attempt a,file_archive_info fai,desfile d"
    q1 += " where a.reqnum={0}".format(reqnum)
    q1 += aux_q1
    q1 += " and a.user_created_by=\'{0}\'".format(user_db.upper())
    q1 += " and a.id=d.pfw_attempt_id"
    q1 += " and fai.filename=d.filename"
    q1 += " order by a.id"
    desfile = os.path.join(os.getenv("HOME"), ".desservices.ini")
    section = "db-desoper"
    dbi = desdbi.DesDbi(desfile, section)
    if help_txt:
        help(dbi)
    cursor1 = dbi.cursor()
    cursor1.execute(q1)
    key = [item[0].lower() for item in cursor1.description]
    rows = cursor1.fetchall()
    if len(rows) > 0:
        data1 = np.rec.array(rows,
                             dtype=[(key[0], "|S25"), (key[1], "i4"),
                                    (key[2], "i4"), (key[3], "i4"),
                                    (key[4], "|S25"), (key[5], "|S10"),
                                    (key[6], "|S200")])
        print("\t# PFW_IDs={0}".format(len(rows)))
        return data1
    print("\t# PFW_IDs={0} ==> NO elements on disk".format(len(rows)))
    return None
def test_numAlreadyIngested(self):
    """numAlreadyIngested: zero on a clean DB; retries ~40s before raising."""
    handle = desdbi.DesDbi(self.sfile, 'db-test')
    ingester = Ingest.Ingest('cat_finalcut', 'test.junk', dbh=handle)
    self.assertEqual(0, ingester.numAlreadyIngested())
    # Force every cursor() call to fail and confirm the retry loop runs
    # for at least ~39 seconds before the exception escapes.
    with mock.patch.object(ingester.dbh, 'cursor', side_effect=Exception('')):
        began = time.time()
        self.assertRaises(Exception, ingester.numAlreadyIngested)
        elapsed = time.time() - began
        self.assertTrue(elapsed >= 39.)
def test_init(self):
    """Ingest constructs cleanly via an explicit dbh and via env vars."""
    handle = desdbi.DesDbi(self.sfile, 'db-test')
    explicit = Ingest.Ingest('cat_finalcut', 'test.junk', dbh=handle)
    self.assertEqual(0, explicit.getstatus())
    handle.close()
    # Without a handle, the connection comes from the environment.
    os.environ['DES_SERVICES'] = self.sfile
    os.environ['DES_DB_SECTION'] = 'db-test'
    implicit = Ingest.Ingest('cat_finalcut', 'test.junk')
    self.assertEqual(0, implicit.getstatus())
def test_array_ingest(self):
    """Rows containing lists and ndarrays (including empty) ingest cleanly."""
    handle = desdbi.DesDbi(self.sfile, 'db-test')
    rows = [
        {'count': [0, 2, 3, 4, 5], 'ra': np.array([1.22345]), 'comment': 'None'},
        {'count': [554], 'ra': np.array([]), 'comment': 'hello'},
    ]
    res = diu.ingest_datafile_contents('test5.fits', 'fits', self.table,
                                       self.metadata, {'TESTER': rows}, handle)
    self.assertTrue(diu.is_ingested('test5.fits', self.table, handle))
def test_init(self):
    """FitsIngest construction exposes a fits handle."""
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    dbh.autocommit = True
    cur = dbh.cursor()
    try:
        cur.execute("insert into catalog (filename, filetype, band, tilename, pfw_attempt_id) values ('D00526157_r_c01_r3463p01_red-fullcat.fits', 'cat_firstcut', 'r', 'abc', 123)")
    except Exception:
        # Row may already exist from a prior run; best-effort insert.
        # FIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        pass
    obj = fin.FitsIngest('cat_firstcut', '/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcat.fits', {}, dbh=dbh)
    self.assertTrue(hasattr(obj, 'fits'))
def test_setCatalogInfo_corner(self):
    """CoaddCatalog exits when catalog metadata is missing or incomplete.

    Construction must raise SystemExit when the catalog row is absent, when
    band and tilename are both NULL, and when only band is set; once both
    band and tilename are present, construction succeeds.
    """
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    dbh.autocommit = True
    # No catalog row at all for this filename -> SystemExit.
    self.assertRaises(SystemExit, ccol.CoaddCatalog, ingesttype='band', filetype='cat_firstcut', datafile='/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcatx.fits', idDict={}, dbh=dbh)
    cur = dbh.cursor()
    # Row exists but band and tilename are NULL -> still SystemExit.
    cur.execute("insert into catalog (filename, filetype, band, tilename, pfw_attempt_id) values ('D00526157_r_c01_r3463p01_red-fullcatx.fits', 'cat_firstcut', NULL, NULL, 123)")
    self.assertRaises(SystemExit, ccol.CoaddCatalog, ingesttype='band', filetype='cat_firstcut', datafile='/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcatx.fits', idDict={}, dbh=dbh)
    # Band set but tilename still NULL -> SystemExit.
    cur.execute("update catalog set band='r' where pfw_attempt_id=123")
    self.assertRaises(SystemExit, ccol.CoaddCatalog, ingesttype='band', filetype='cat_firstcut', datafile='/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcatx.fits', idDict={}, dbh=dbh)
    # Both band and tilename present -> construction succeeds.
    cur.execute("update catalog set tilename='abc' where pfw_attempt_id=123")
    _ = ccol.CoaddCatalog(ingesttype='band', filetype='cat_firstcut', datafile='/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcatx.fits', idDict={}, dbh=dbh)
def test_generate_rows_corner(self):
    """generateRows raises when fitsio hands back a malformed result."""
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    dbh.autocommit = True
    cur = dbh.cursor()
    try:
        cur.execute("insert into catalog (filename, filetype, band, tilename, pfw_attempt_id) values ('D00526157_r_c01_r3463p01_red-fullcat.fits', 'cat_firstcut', 'r', 'abc', 123)")
    except Exception:
        # Row may already exist from a prior run; best-effort insert.
        # FIX: narrowed from a bare `except:` clause.
        pass
    obj = fin.FitsIngest('cat_firstcut', '/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcat.fits', {}, dbh=dbh)
    # Ragged array stands in for a malformed fitsio return value.
    retval = np.array([1, 2, [3, 4, 5]])
    with patch('databaseapps.FitsIngest.fitsio', return_value=retval):
        self.assertRaises(Exception, obj.generateRows)
def test_retrieveCoaddObjectIds(self):
    """retrieveCoaddObjectIds fills idDict from an alternate table."""
    os.environ['DES_SERVICES'] = self.sfile
    os.environ['DES_DB_SECTION'] = 'db-test'
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    cur = dbh.cursor()
    # Seed the catalog row, then give it a band and tilename.
    cur.execute("insert into catalog (filename, filetype, band, tilename, pfw_attempt_id) values ('D00526157_r_c01_r3463p01_red-fullcat.fits', 'cat_firstcut', NULL, NULL, 123)")
    cur.execute("update catalog set tilename='abc',band='r' where pfw_attempt_id=123")
    catalog = ccol.CoaddCatalog(ingesttype='band', filetype='cat_firstcut',
                                datafile='/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcat.fits',
                                idDict={}, dbh=dbh)
    # Nothing ingested yet, so the id map starts empty.
    self.assertFalse(catalog.idDict)
    cur.execute("insert into COADD_OBJECT_TEST (coadd_object_id, filename, object_number, band, tilename, pfw_attempt_id) values(123, 'D00526157_r_c01_r3463p01_red-fullcat.fits', 1234, 'r', 'acbd', 12345)")
    catalog.retrieveCoaddObjectIds(pfwid=12345, table='COADD_OBJECT_TEST')
    self.assertTrue(catalog.idDict)
def test_generateRows_errors(self):
    """generateRows flags a row-count mismatch, then passes when counts agree."""
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    # Deliberately oversized id map -> count check must fail (returns 1).
    bad_ids = dict(zip(range(1, 68392), range(68392, 1, -1)))
    mangler = mgl.Mangle(self.filename, 'mangle_csv_ccdgon', bad_ids,
                         checkcount=True, dbh=dbh)
    with capture_output() as (out, _):
        self.assertEqual(1, mangler.generateRows())
    text = out.getvalue().strip()
    self.assertTrue('Incorrect' in text)
    self.assertTrue('rows' in text)
    del mangler
    # Correctly sized id map -> count check passes (returns 0).
    good_ids = dict(zip(range(1, 302), range(302, 1, -1)))
    mangler = mgl.Mangle(self.filename, 'mangle_csv_ccdgon', good_ids,
                         checkcount=True, dbh=dbh)
    self.assertEqual(0, mangler.generateRows())
def test_ingest_main(self):
    """datafile_ingest_main ingests an XML file and validates its arguments."""
    try:
        dbh = desdbi.DesDbi(self.sfile, 'db-test')
        # FIX: write via a context manager so the handle is closed promptly
        # (the original leaked the file object from open(...).write(...)).
        with open('test.xml', 'w') as xfh:
            xfh.write(make_xml())
        numrows = diu.datafile_ingest_main(dbh, 'xml', 'test.xml', self.table, self.metadata)
        self.assertEqual(numrows, 1000)
        # Bad arguments must raise ValueError.
        self.assertRaises(ValueError, diu.datafile_ingest_main, dbh, '', '', '', {'a': 0, 'b': 1})
    finally:
        # FIX: cleanup narrowed from a bare `except:` to OSError.
        try:
            os.unlink('test.xml')
        except OSError:
            pass
def main(argv): # pragma: no cover
    """ Main entry point

    Builds the hung-jobs HTML status page for every known pipeline, writing
    to a scratch file first and moving the finished page into place so
    readers never see a half-written report.

    Parameters
    ----------
    argv : list
        Command line arguments
    """
    args = parse_cmdline(argv)
    #html = open(args['file'], 'w')
    # Scratch path; the completed page is moved to args['file'] at the end.
    html = open('/tmp/hungjobs.html', 'w')
    html.write("<html>\n<head>\n<title>Pipeline Status Monitor</title>\n")
    # Inline JS: setTable() toggles the visibility of a named element.
    html.write('<script type="text/javascript">//<![CDATA[\n')
    html.write('function setTable(what) {\n')
    html.write('if(document.getElementById(what).style.display=="none") {\n')
    html.write(' document.getElementById(what).style.display="block";\n')
    html.write('}\n')
    html.write(
        'else if(document.getElementById(what).style.display=="block") {\n')
    html.write(' document.getElementById(what).style.display="none";\n')
    html.write('}\n')
    html.write('}\n')
    html.write('//]]></script>\n')
    html.write("</head>\n<body>\n")
    html.write("<h2>Current Status as of " + str(datetime.datetime.now()) +
               "</h2>\n")
    html.write(
        '<b>Jobs highlighted in yellow have at least one task which is taking more than 4 hours to complete. <br>Some multiepoch tasks can take longer than this to complete, so be careful when evalutaing their status.</b>\n'
    )
    html.write(
        '<p><b>Suspected Hung Transfers are noted when more than 2 hours have elapsed for transfers to the job or 4 hours when transferring to the archive.</b>'
    )
    dbh = desdbi.DesDbi(args['des_services'], args['section'])
    curs = dbh.cursor()
    curs.execute(
        "select distinct(pipeline) from pfw_request where pipeline != 'hostname'"
    )
    results = curs.fetchall()
    # One report section per known pipeline.
    for res in results:
        pipeline = res[0]
        html.write("<h2>%s</h2>\n" % pipeline)
        attinfo = query_attempts(dbh, pipeline, args['days'], html)
        save_att_taskids(attinfo, dbh)
        taskinfo, trans_taskinfo = query_tasks(dbh)
        find_hung(attinfo, taskinfo, html)
        find_trans_hung(attinfo, trans_taskinfo, html)
    html.write("</body></html>\n")
    html.close()
    # Publish the finished page.
    shutil.move('/tmp/hungjobs.html', args['file'])
def dbquery(cls, toquery, outdtype, dbsection='db-desoper', help_txt=False):
    """Run a query via desdbi and return a numpy record array.

    Credentials come from the personal setup file ~/.desservices.ini; the
    DB section defaults to desoper.

    Parameters
    ----------
    toquery : str
        SQL to execute.
    outdtype : sequence
        numpy dtype codes, one per selected column.
    dbsection : str
        Section name in the services file.
    help_txt : bool
        When True, print help() for the DB handle.

    Returns
    -------
    numpy record array of the query results.
    """
    import despydb.desdbi as desdbi
    desfile = os.path.join(os.getenv("HOME"), ".desservices.ini")
    section = dbsection
    dbi = desdbi.DesDbi(desfile, section)
    if help_txt:
        help(dbi)
    cursor = dbi.cursor()
    cursor.execute(toquery)
    cols = [line[0].lower() for line in cursor.description]
    rows = cursor.fetchall()
    # BUG FIX: np.rec.array needs a list of (name, dtype) pairs; a bare
    # zip object (Python 3) is rejected.
    outtab = np.rec.array(rows, dtype=list(zip(cols, outdtype)))
    return outtab
def __init__(self, request, filetype, datafile, temptable, targettable,
             fitsheader, dumponly, services, section):
    """Set up a catalog ingest: open DB and FITS file, resolve tables.

    Parameters
    ----------
    request : int or str
        Request number; also used to derive the default temp table name.
    filetype : str
        Filetype of the catalog being ingested.
    datafile : str
        Path to the FITS catalog.
    temptable : str or None
        Staging table; defaults to DESSE_REQNUM<request> in the target schema.
    targettable : str
        Final destination table (possibly schema-qualified).
    fitsheader : int or str or None
        HDU holding the object data (index if numeric, else name).
    dumponly : bool
        When True, dump only; no constant columns are recorded.
    services, section : str
        DB connection settings (passed to desdbi.DesDbi).
    """
    self.debug("start CatalogIngest.init()")
    self.dbh = desdbi.DesDbi(services, section, retry=True)
    self.debug("opening fits file")
    self.fits = fitsio.FITS(datafile)
    self.debug("fits file opened")
    self.request = request
    self.filetype = filetype
    self.fullfilename = datafile
    self.shortfilename = ingestutils.getShortFilename(datafile)
    if fitsheader is not None:
        # Numeric-looking headers are HDU indexes; otherwise an HDU name.
        if ingestutils.isInteger(fitsheader):
            self.objhdu = int(fitsheader)
        else:
            self.objhdu = fitsheader
    if dumponly:
        self.dump = True
    else:
        self.dump = False
    self.consts = []
    self.debug(f"start resolveDbObject() for target: {targettable}")
    (self.targetschema, self.targettable) = ingestutils.resolveDbObject(targettable, self.dbh)
    if not temptable:
        # Default staging table lives in the target schema.
        self.temptable = f"DESSE_REQNUM{int(request):07d}"
        self.tempschema = self.targetschema
    else:
        self.debug(f"start resolveDbObject() for temp: {temptable}")
        (self.tempschema, self.temptable) = ingestutils.resolveDbObject(temptable, self.dbh)
    self.debug(f"target schema,table = {self.targetschema}, {self.targettable}; temp= {self.tempschema}, {self.temptable}")
    if self.dump:
        self.constDict = {}
    else:
        self.constDict = {"FILENAME": [self.shortfilename, True],
                          "REQNUM": [request, False]
                          }
        # NOTE(review): self.constlist is not initialized in this method --
        # presumably a class attribute; if it is a class-level list these
        # appends mutate state shared by all instances. Confirm.
        self.constlist.append("FILENAME")
        self.constlist.append("REQNUM")
    self.debug("start getObjectColumns()")
    self.dbDict = self.getObjectColumns()
    self.debug("CatalogIngest.init() done")
def db_query(self, to_query, outdtype=None):
    '''Query the DB, preferring easyaccess when available.

    Inputs
    - to_query: str, the SQL to run.
    - outdtype: sequence of numpy dtype codes (desdbi fallback path only).
    Returns
    - pandas DataFrame (easyaccess path) or numpy record array (fallback).
    '''
    if ea_import:
        connect = ea.connect('desoper')
        # FIX: dropped an unused cursor that was created on this path.
        try:
            df_obj = connect.query_to_pandas(to_query)
        except Exception:
            # FIX: narrowed from a bare `except:`; log query + error, bail.
            t_e = 'Error in querying\n\n\t{0}\n\n'.format(to_query)
            logging.error(t_e)
            e = sys.exc_info()[0]
            logging.error(e)
            exit(1)
        connect.close()
        return df_obj
    logging.warning('No easyaccess, will exit')
    desfile = os.path.join(os.getenv('HOME'), '.desservices.ini')
    section = 'db-desoper'
    dbi = desdbi.DesDbi(desfile, section)
    cursor = dbi.cursor()
    cursor.execute(to_query)
    cols = [line[0].lower() for line in cursor.description]
    rows = cursor.fetchall()
    t_w = 'DESDBI Not implemented!'
    logging.warning(t_w)
    # FIX: dtype must be a concrete list of pairs; a bare zip object
    # fails on Python 3.
    outtab = np.rec.array(rows, dtype=list(zip(cols, outdtype)))
    return outtab
def test_parseCSV_errors(self):
    """parseCSV error paths: bad dtype list, missing key, skipmissing mode."""
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    id_map = dict(zip(range(1, 68392), range(68392, 1, -1)))
    mangler = mgl.Mangle(self.filename, 'mangle_csv_ccdgon', id_map, dbh=dbh)
    # Too-short dtype list -> generic failure.
    self.assertRaises(Exception, mangler.parseCSV, self.filename, [int, str])
    del mangler
    # Empty id map with a coadd id -> KeyError on lookup.
    mangler = mgl.Mangle(self.filename, 'mangle_csv_ccdgon', {}, dbh=dbh)
    mangler.coadd_id = 2
    dtypes = [int, str, str, str, float, int, str]
    self.assertRaises(KeyError, mangler.parseCSV, self.filename, dtypes)
    # With skipmissing set, missing rows are reported rather than fatal.
    with capture_output() as (out, _):
        mangler.skipmissing = True
        mangler.parseCSV(self.filename, dtypes)
    text = out.getvalue().strip()
    self.assertTrue('301' in text)
    self.assertTrue('Skipped' in text)
def test_getObjectColumns(self):
    """getObjectColumns maps HDU keys to dicts of their columns."""
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    ing = Ingest.Ingest('cat_finalcut', 'test.junk', dbh=dbh)
    cols = ing.getObjectColumns()
    self.assertTrue('WCL' in cols.keys())
    self.assertTrue('FILENAME' in cols['WCL'].keys())
    try:
        # Feed the mock connection fabricated rows; fields appear to be
        # (hdu, column_name, position, attribute, datatype) -- confirm
        # against the mock's schema.
        dbh.con.fakeResults(((None,'NITE', 0, 'NITE', 'int'),
                             (0, 'EXPNUM', 0, 'EXPNUM', 'int'),
                             ('PRIMARY', 'BAND', 0, 'BAND', 'char'),
                             ('LDAC_OBJECTS', 'Y_IMAGE',0, 'Y_IMAGE', 'float')))
        cols = ing.getObjectColumns()
        # HDU keys pass through as supplied, including None and 0.
        self.assertTrue(None in cols.keys())
        self.assertTrue(0 in cols.keys())
        self.assertTrue('Y_IMAGE' in cols['LDAC_OBJECTS'].keys())
    finally:
        # Always clear the fabricated results so other tests see real data.
        dbh.con.clearResults()
def main(argv):
    """Load skymap data from sqlite3 or CSV into Oracle, verify, and commit.

    Parameters
    ----------
    argv : list
        Command line arguments.
    """
    argsd = parse_args(argv)
    odbh = desdbi.DesDbi(argsd['des_services'], argsd['section'])
    ocurs = odbh.cursor()
    if argsd['fnsqlite3'] is not None:
        # FIX: close the sqlite3 connection when done (it was leaked).
        sdbh = sqlite3.connect(argsd['fnsqlite3'])
        try:
            scurs = sdbh.cursor()
            check_sqlite3(scurs)
            process_sqlite3(scurs, argsd['version'], argsd['fnskymap'], ocurs)
        finally:
            sdbh.close()
    else:
        process_csv(argsd['fncsv'], argsd['delim'], argsd['version'],
                    argsd['fnskymap'], ocurs)
    # Validate the load before making it permanent.
    check_oracle(ocurs, argsd['version'])
    odbh.commit()
def test_ingest(self):
    """catalog_ingest.main: successful run matches DB count; bad args return 1."""
    os.environ['DES_SERVICES'] = self.sfile
    os.environ['DES_DB_SECTION'] = 'db-test'
    dbh = desdbi.DesDbi(self.sfile, 'db-test')
    # BUG FIX: the original stashed argv in `temp` but reused `temp` while
    # parsing the output, so the final `sys.argv = temp` restored a list of
    # string fragments instead of the original argv.
    saved_argv = copy.deepcopy(sys.argv)
    try:
        sys.argv = ['catalog_ingest.py', '-request', '3463',
                    '-filename',
                    '/var/lib/jenkins/test_data/D00526157_r_c01_r3463p01_red-fullcat.fits',
                    '-filetype', 'cat_firstcut',
                    '-targettable', 'MAIN.SE_OBJECT']
        with capture_output() as (out, err):
            self.assertEqual(cati.main(), 0)
        output = out.getvalue().strip()
        count = 0
        table = None
        for line in output.split('\n'):
            if 'LOAD' in line and 'finished' in line:
                # "LOAD NNN ... finished" -> reported row count.
                line = line[line.find('LOAD'):]
                count = int(line.split()[1])
            elif 'Creating tablespace' in line:
                # "... MAIN.<table> ..." -> table that was created.
                line = line[line.find('MAIN.'):]
                table = line.split()[0].split('.')[1]
        curs = dbh.cursor()
        curs.execute('select count(*) from ' + table)
        res = curs.fetchall()[0][0]
        self.assertEqual(res, count)
        # Missing required arguments -> exit status 1.
        sys.argv = ['catalog_ingest.py', '-request', '3463']
        self.assertEqual(cati.main(), 1)
    finally:
        sys.argv = saved_argv
def TestDB(dbsection='db-desoper', help_txt=False):
    """Smoke-test a DB query against flat_qa/miscfile, returning a rec array.

    Parameters
    ----------
    dbsection : str
        Section of ~/.desservices.ini to connect with.
    help_txt : bool
        When True, print help() for the DB handle.

    Returns
    -------
    numpy record array with path, pfw_attempt_id, band, nite, expnum.
    """
    datatype = ['a80', 'i4', 'a5', 'i4', 'f4']
    # FIX: the original built this literal with backslash continuations,
    # risking fused tokens ("and\\m.filetype"); use explicit concatenation.
    query = ("select i.path,m.pfw_attempt_id,m.band,m.nite,f.expnum "
             "from flat_qa f,file_archive_info i, miscfile m "
             "where m.nite=20160808 and m.filename=f.filename "
             "and i.filename=f.filename "
             "and m.filetype='compare_dflat_binned_fp'")
    desfile = os.path.join(os.getenv('HOME'), '.desservices.ini')
    section = dbsection
    # FIX: Python 2 print statements were syntax errors under Python 3.
    print('check 0')
    dbi = desdbi.DesDbi(desfile, section)
    print('check 1')
    if help_txt:
        help(dbi)
    cursor = dbi.cursor()
    cursor.execute(query)
    print('check 2')
    cols = [line[0].lower() for line in cursor.description]
    rows = cursor.fetchall()
    # FIX: dtype must be a concrete list; zip() alone fails on Python 3.
    outtab = np.rec.array(rows, dtype=list(zip(cols, datatype)))
    return outtab
def retrieveCoaddObjectIds(self, services=None, section=None, pfwid=None, table=None):
    """Populate self.idDict with coadd object ids already in the DB.

    When `table` is given, ids are read from that alternate table (keyed by
    pfw_attempt_id) over a temporary connection; otherwise the target table
    is queried by filename on the existing handle.

    Parameters
    ----------
    services, section : str, optional
        Credentials for the temporary connection (alternate-table path).
    pfwid : int, optional
        pfw_attempt_id to select in the alternate table.
    table : str, optional
        Alternate table name.
    """
    tdbh = None
    try:
        if table is not None:
            self.printinfo('Getting Coadd IDs from alternate table')
            sqlstr = f"select object_number, coadd_object_id from {table} where pfw_attempt_id={pfwid}"
            tdbh = desdbi.DesDbi(services, section, retry=True)
            cursor = tdbh.cursor()
        else:
            self.printinfo("Getting Coadd IDs from database\n")
            sqlstr = f"select object_number, id from {self.targettable} where filename='{self.shortfilename}'"
            cursor = self.dbh.cursor()
        cursor.execute(sqlstr)
        records = cursor.fetchall()
    finally:
        # FIX: the temporary connection was never closed (leaked per call).
        if tdbh is not None:
            tdbh.close()
    # First occurrence wins; never overwrite an existing id.
    for rec in records:
        if rec[0] not in self.idDict:
            self.idDict[rec[0]] = rec[1]
def main(argv): """ Main entry point Parameters ---------- argv : list Command line arguments """ args = parse_cmdline(argv) html = open(args['file'], 'w') html.write("<html>\n<head>\n<title>Pipeline Status Monitor</title>\n") html.write('<script type="text/javascript">//<![CDATA[\n') html.write('function setTable(what) {\n') html.write('if(document.getElementById(what).style.display=="none") {\n') html.write(' document.getElementById(what).style.display="block";\n') html.write('}\n') html.write( 'else if(document.getElementById(what).style.display=="block") {\n') html.write(' document.getElementById(what).style.display="none";\n') html.write('}\n') html.write('}\n') html.write('//]]></script>\n') html.write("</head>\n<body>\n") dbh = desdbi.DesDbi(args['des_services'], args['section']) curs = dbh.cursor() curs.execute( "select distinct(pipeline) from pfw_request where pipeline != 'hostname'" ) results = curs.fetchall() for res in results: pipeline = res[0] html.write("<h2>%s</h2>\n" % pipeline) attinfo = query_attempts(dbh, pipeline, args['days'], html) save_att_taskids(attinfo, dbh) taskinfo, trans_taskinfo = query_tasks(dbh) find_hung(attinfo, taskinfo, html) find_trans_hung(attinfo, trans_taskinfo, html) html.write("</body></html>\n") html.close()
sys.exit(1) targetschema, targettable = parseTableName(args['targettable']) temptable = args['temptable'] tempschema = None if temptable is None: temptable = f"DESSE_REQNUM{int(args['request']):07d}" tempschema = targetschema else: tempschema, temptable = parseTableName(args['temptable']) if tempschema: print( f"Merging {tempschema + '.' + temptable} into {args['targettable']}..." ) else: print(f"Merging {temptable} into {args['targettable']}...") dbh = desdbi.DesDbi() cursor = dbh.cursor() if targetschema is None: cursor.callproc("pMergeObjects", [temptable, targettable, tempschema, targetschema]) else: cursor.callproc(f"{targetschema}.pMergeObjects", [temptable, targettable, tempschema, targetschema]) cursor.close() print("Merge complete")
default= 'name,file_header_name,filetype,filetype/file_header_name,filetype,filetype/attribute_name', help= 'The primary key(s) to use, one for each table, there can be two primary keys for a table, separated by a /' ) args, unknown_args = parser.parse_known_args() args = vars(args) # get the list of tables tables = args['tables'].split(',') #get the list of keys keys = args['keys'].split(',') # connect to the database dbh1 = desdbi.DesDbi(args['des_services'], args['section1']) if args['section2']: dbh2 = desdbi.DesDbi(args['des_services'], args['section2']) else: dbh2 = dbh1 # go over each table for num, table in enumerate(tables): print(f"Checking table {table.upper()}") cur1 = dbh1.cursor() cur2 = dbh2.cursor() # get the column names cur1.execute( f"select column_name from all_tab_cols where table_name='{table.upper()}' and owner='{args['schema1'].upper()}'" ) results = cur1.fetchall()