Beispiel #1
0
 def test_0041_TIME(self):
     """Round-trip a TIME value through the database.

     Inserts DB2.TimeFromTicks(now) into a one-column TIME table and
     checks the fetched value equals its string representation.
     """
     import time
     self.cs.execute("""CREATE TABLE %s (P1 TIME) """ % self.tableName)
     now = time.time()
     self.cs.execute(
         """INSERT INTO %s
             VALUES (?)
         """ % self.tableName, DB2.TimeFromTicks(now))
     self.cs.execute("SELECT * FROM %s" % self.tableName)
     row = self.cs.fetchone()
     self.assertEqual(row[0], str(DB2.TimeFromTicks(now)))
Beispiel #2
0
 def __test_BLOB(self, auto_LOB_read):
     """Insert SIZE pairs of BLOB values, then fetch them back with the
     requested auto_LOB_read setting on a scrollable cursor."""
     SIZE = 10
     self.__creatSampleBLOBTable()
     for n in range(SIZE):
         small_blob = DB2.Binary(chr(n) * n)
         big_blob = DB2.Binary(chr(n) * (1024 - n))
         self.cs.execute(
             "INSERT INTO %s VALUES (?, ?)" % self.tableName,
             (small_blob, big_blob))
     self.cs.set_scrollable(1)
     self.cs.execute("SELECT * FROM %s" % self.tableName)
     self.cs.auto_LOB_read = auto_LOB_read
     return self.cs.fetchmany(SIZE)
Beispiel #3
0
def overlapping(target):
    """Return info on every non-free block sharing a base partition with *target*.

    target = blockid whose base partitions (bpids) define the overlap set.

    Returns a list of tuples:
        (blockid, owner, block_status, jobid, username, job_status)
    where jobless blocks report jobid -1, username 'nobody' and status ' '
    (from the COALESCE defaults in the query).
    """
    conn = DB2.connect('bgdb0', 'bglsysdb', 'db24bgls')
    # Parameter marker instead of concatenating target into the SQL text:
    # avoids SQL injection and breakage if target contains a quote.
    cmd = """select bglsysdb.tbglbpblockmap.blockid,
                    bglsysdb.tbglblock.owner,
                    bglsysdb.tbglblock.status,
                    coalesce(bglsysdb.tbgljob.jobid, -1),
                    coalesce(bglsysdb.tbgljob.username, 'nobody'),
                    coalesce(bglsysdb.tbgljob.status, ' ')
             from
              (bglsysdb.tbgljob right join
                (bglsysdb.tbglbpblockmap inner join bglsysdb.tbglblock
                on bglsysdb.tbglbpblockmap.blockid=bglsysdb.tbglblock.blockid)
               on bglsysdb.tbgljob.blockid=bglsysdb.tbglbpblockmap.blockid)
             where
              bglsysdb.tbglbpblockmap.bpid in
               (select bpid from bglsysdb.tbglbpblockmap where blockid=?)
              and bglsysdb.tbglblock.status <> 'F'
              group by bglsysdb.tbglbpblockmap.blockid,
                       bglsysdb.tbglblock.owner,
                       bglsysdb.tbglblock.status,
                       coalesce(bglsysdb.tbgljob.jobid, -1),
                       coalesce(bglsysdb.tbgljob.username, 'nobody'),
                       coalesce(bglsysdb.tbgljob.status, ' ')"""
    cur = conn.cursor()
    cur.execute(cmd, (target,))
    ret = []
    row = cur.fetchone()
    while row:
        ret.append((row[0], row[1], row[2], row[3], row[4], row[5]))
        row = cur.fetchone()
    return ret
Beispiel #4
0
    def __init__(self, xmlparser=None, lwtparser=None, ldb=None):
        """Connect to the database (when ldb is given) and create a cursor;
        initialize the unique id table for this LIGO lw document.

        ldb = LIGOMetadataDatabase object
        xmlparser = pyRXP XML to tuple parser object
        lwtparser = LIGOLwParser object (tuple parser)
        """
        self.ldb = ldb
        if self.ldb:
            self.dbcon = DB2.connect(dsn=self.ldb.database, uid='', pwd='')
            self.curs = self.dbcon.cursor()
        else:
            self.dbcon = None
            self.curs = None
        self.xmlparser = xmlparser
        self.lwtparser = lwtparser
        if lwtparser:
            self.lwtparser.unique = None
        self.table = {}
        # Translation table used to escape strings for XML output; see
        # https://www.gravity.phy.syr.edu/dokuwiki/doku.php?id=rpfisher:gluebughunt
        # if this is confusing -- the parser just cleanly handles the
        # conversion of everything.
        xml_escapes = {
            r'&': r'&amp;',
            r'>': r'&gt;',
            r'<': r'&lt;',
            '\\': '\\\\',
            '\"': '\\\"',
        }
        self.strtoxml = Xlator(xml_escapes)
Beispiel #5
0
 def __ilwdchar(self, istr):
     """Translate an ilwd:char field to a binary unique-id string.

     If the field contains octal escape data, it is translated to a
     binary string (wrapped in DB2.Binary when that module is loaded).
     Otherwise a lookup is done in the unique id dictionary and the
     resulting binary string is returned; if no unique id table was
     configured at all, the original input is returned unchanged.
     """
     istr_orig = istr
     # strip the left and right ilwd:char delimiters (licrx/ricrx are
     # compiled patterns set up elsewhere on this instance)
     istr = self.licrx.sub('', istr.encode('ascii'))
     istr = self.ricrx.sub('', istr)
     if self.octrx.match(istr):
         # HACK/SECURITY: Python 2 `exec` is used to interpret the octal
         # escape sequences. This evaluates data as code -- safe only if
         # octrx guarantees the string contains nothing but octal escapes.
         exec "istr = '" + istr + "'"
         # if the DB2 module is loaded, the string should be converted
         # to an instance of the DB2.Binary class. If not, leave it as
         # a string containing binary data.
         try:
             istr = DB2.Binary(istr)
         except:
             pass
     else:
         try:
             istr = self.unique.lookup(istr)
         except AttributeError:
             # self.unique is None/unset: fall back to the raw input only
             # when no unique id table was configured at all.
             if not self.unique:
                 istr = istr_orig
             else:
                 raise LIGOLwParseError(
                     'unique id table has not been initialized')
     return istr
Beispiel #6
0
 def __setstate__(self, state):
     """Restore pickled state and re-create the unpicklable attributes
     (failure tracker and, unless --nodb2 was given, the DB2 cursor)."""
     self.__dict__.update(state)
     self.qmconnect = FailureMode("QM Connection")
     if '--nodb2' in sys.argv:
         return
     import DB2
     self.db2 = DB2.connect(uid=self.config.get('db2uid'),
                            pwd=self.config.get('db2pwd'),
                            dsn=self.config.get('db2dsn')).cursor()
Beispiel #7
0
 def __init__(self, database):
     """Cache table names and ilwd:char column metadata for a LIGO database.

     database = the name of the LIGO database to initialize
     """
     self.database = database
     self.uniqueids = {}
     conn = DB2.connect(dsn=database, uid='', pwd='')
     curs = conn.cursor()
     try:
         # user tables (not SYSIBM's), parents first so dependent tables
         # can be processed in a valid order
         curs.execute(
             "SELECT tabname FROM syscat.tables WHERE definer<>'SYSIBM' "
             "AND TYPE='T' ORDER BY PARENTS ASC")
         self.tables = curs.fetchall()
         # CHAR(13) NOT NULL columns are the ilwd:char unique-id columns
         curs.execute(
             "SELECT tabname, colname FROM syscat.columns " +
             "WHERE typename = 'CHARACTER' AND length = 13 AND nulls = 'N'")
         for tab, col in curs.fetchall():
             # setdefault replaces the original try/except KeyError dance
             self.uniqueids.setdefault(tab.lower(), {})[col.lower()] = 'ilwd:char'
     finally:
         # close even if a query fails (the original leaked on error)
         curs.close()
         conn.close()
Beispiel #8
0
 def __setstate__(self, state):
     """Rebuild instance state after unpickling and reopen the DB2 cursor
     (skipped when --nodb2 is on the command line)."""
     self.__dict__.update(state)
     self.qmconnect = FailureMode("QM Connection")
     if '--nodb2' not in sys.argv:
         import DB2
         cfg = self.config.get
         conn = DB2.connect(uid=cfg('db2uid'), pwd=cfg('db2pwd'),
                            dsn=cfg('db2dsn'))
         self.db2 = conn.cursor()
Beispiel #9
0
def exeInfo(dbName, exeId):
    """Return (sys_id, ctp_id) for the system that produced an executable.

    dbName = name of the database to open
    exeId = experiment.executable primary key
    """
    conn = DB2.Connection(dbName)
    cur = conn.cursor()
    # Parameter marker instead of string concatenation: avoids SQL
    # injection and quoting mistakes.
    cmd = ("select sys_id, ctp_id from experiment.system_source "
           "where source_id in (select source_id from experiment.executable "
           "where executable_id = ?)")
    cur.execute(cmd, (exeId,))
    foo = cur.fetchone()
    # close the handles the original leaked
    cur.close()
    conn.close()
    return (foo[0], foo[1])
 def __init__(self, dbconfig, logging, dblock):
     """Open the DB2 connection described by dbconfig and ensure the
     backing table exists.

     dbconfig = object exposing database/user/password attributes
     logging = logger instance
     dblock = lock serializing database access
     """
     self.dbconfig = dbconfig
     self.log = logging
     self.dblock = dblock
     self.conn = DB2.connect(dsn=self.dbconfig.database,
                             uid=self.dbconfig.user,
                             pwd=self.dbconfig.password)
     self.create_table()
     self.cpt = 0
Beispiel #11
0
    def insertNet(self, id, idsession, ts, type, ret, pid, ppid, scontext,
                  nameSubject, ipsource, portsource, ipdest, portdest, state,
                  typeconnect, netsize, proto, data):
        """Build and execute an INSERT into the tracenet table.

        ts looks like a Windows FILETIME value (100-ns ticks since
        1601-01-01) -- it is converted to a timestamp via
        datetime(1601,1,1) + ts/10 microseconds; confirm with callers.
        Commits every 1000th call; returns 1 on (attempted) completion,
        or None if the request string could not be built.
        """
        try:
            tsp = datetime(1601, 1, 1) + timedelta(microseconds=int(ts) / 10)
            # NOTE(review): the SQL text is assembled by string
            # concatenation with only single quotes doubled -- fragile and
            # injection-prone; parameter markers would be safer.
            request = "INSERT INTO tracenet(id,idsession,tstamp,type,return,pid,ppid,scontext,nameSubject,ipSource,PortSource,ipDest,PortDest,state,typeconnect,netsize,proto,data) VALUES(" + str(
                id) + ","
            request = request + str(idsession) + ",'"
            request = request + DB2.Timestamp(
                tsp.year, tsp.month, tsp.day, tsp.hour, tsp.minute, tsp.second,
                tsp.microsecond).get_SQL_value() + "','"
            request = request + str(type) + "','" + str(ret) + "'," + str(
                pid) + "," + str(ppid) + ",'"
            request = request + scontext.replace("'", "''") + "','"
            request = request + nameSubject.replace("'", "''") + "','"
            request = request + ipsource + "'," + portsource + ",'"
            request = request + ipdest + "'," + portdest + ",'"
            request = request + state + "','" + typeconnect + "'," + netsize + ",'" + proto + "','"
            request = request + data.replace("'", "''") + "')"
        except:
            # building the statement failed (bad ts, None field, ...):
            # log whatever was assembled so far and give up on this row
            self.log.debug("db2interface:insertNet:" + request)
            return
        #self.log.debug("InsertLine: "+request)
        cursor = self.conn.cursor()
        try:
            # NOTE(review): "unicode" is not a codec name, so this decode
            # always raises LookupError and is swallowed by the except --
            # this branch is effectively a no-op.
            request = request.decode("unicode", "replace")
        except:
            try:
                request = request.encode('ascii', 'xmlcharrefreplace')
                #request.encode("utf8")
            except:
                request = "%s" % request
        try:
            try:
                request = request.encode("utf8")
            except:
                print("Error encoding UTF8")
            cursor.execute(request)
        except:
            # retry with non-ASCII characters replaced by XML char refs
            print("Error DB2: " + request)
            request = request.encode('ascii', 'xmlcharrefreplace')
            try:
                cursor.execute(request)
            except:
                print("Error DB2 ASCII: " + request)

        # commit in batches of 1000 inserts to limit transaction size
        if self.cpt == 1000:
            self.conn.commit()
            self.cpt = 0
        else:
            self.cpt = self.cpt + 1

        # self.conn.commit()
        cursor.close()
        return 1
Beispiel #12
0
 def __init__(self, dbname, schema, userid, password, root):
     """Connect to *dbname* as *userid* and cache its table/view names.

     schema is upper-cased for catalog lookups; root is kept as-is.
     """
     self.connection = DB2.connect(dsn=dbname, uid=userid, pwd=password)
     self.dbname = dbname
     self.schema = schema.upper()
     self.userid = userid
     self.password = password
     self.tables = []
     self.__getTableNames()
     self.views = []
     self.__getViewNames()
     self.root = root
Beispiel #13
0
def getConnection():
    """Return the process-wide DB2 connection, creating it on first use.

    Credentials are read from the [db2] section of
    $AFAP_HOME/conf/dbconnect.conf.
    """
    global connection

    if connection is None:
        config = ConfigParser.ConfigParser()
        configFileName = os.environ['AFAP_HOME'] + '/conf/dbconnect.conf'
        # close the config file -- the original open() leaked the handle
        configFile = open(configFileName)
        try:
            config.readfp(configFile)
        finally:
            configFile.close()
        connection = DB2.connect(config.get('db2', 'tnsentry'),
                                 config.get('db2', 'username'),
                                 config.get('db2', 'password'), 0, 1)
    return connection
Beispiel #14
0
    def __init__(self, dbconfig, logging, dblock):
        """Store configuration, open the DB2 connection, and make sure the
        backing table exists."""
        self.dbconfig = dbconfig
        self.log = logging
        self.dblock = dblock

        conn_args = dict(dsn=dbconfig.database,
                         uid=dbconfig.user,
                         pwd=dbconfig.password)
        self.conn = DB2.connect(**conn_args)

        self.create_table()

        # insert counter -- presumably drives periodic commits; confirm
        self.cpt = 0
Beispiel #15
0
    def getPartIONodes(self, partname):
        '''retrieves the IOnodes for the specified partition'''
        # with --nodb2, answer from a hard-coded test map instead of DB2
        if '--nodb2' in sys.argv:
            iodict = {
                '32wayN0': ['R00-M0-N0:J18-U01'],
                '32wayN1': ['R00-M0-N1:J18-U01'],
                '32wayN2': ['R00-M0-N2:J18-U01'],
                '32wayN3': ['R00-M0-N3:J18-U01'],
                '64wayN01': ['R00-M0-N0:J18-U01', 'R00-M0-N1:J18-U01'],
                '64wayN23': ['R00-M0-N2:J18-U01', 'R00-M0-N3:J18-U01'],
                '128wayN0123': [
                    'R00-M0-N0:J18-U01', 'R00-M0-N1:J18-U01',
                    'R00-M0-N2:J18-U01', 'R00-M0-N3:J18-U01'
                ]
            }
            return iodict.get(partname, None)

        ionodes = []
        db2 = DB2.connect(uid=self.config.get('db2uid'),
                          pwd=self.config.get('db2pwd'),
                          dsn=self.config.get('db2dsn')).cursor()

        # first get blocksize in nodes
        db2.execute("select size from BGLBLOCKSIZE where blockid='%s'" %
                    partname)
        blocksize = db2.fetchall()
        print 'blocksize is', blocksize[0][0], 'nodes'

        # blocks below 512 nodes are "small blocks" with explicit I/O node
        # rows; larger blocks map to whole base partitions instead
        if int(blocksize[0][0]) < 512:
            print "small block"
            #tBGLSMALLBLOCK (BLOCKID, POSINMACHINE, PSETNUM, IONODEPOS, IONODECARD, IONODECHIP, COMPNODEPOS, NUMCOMPUTE)
            db2.execute(
                "select * from tBGLSMALLBLOCK where blockid='%s' order by ionodepos"
                % partname)
            result = db2.fetchall()
            for b in result:
                # POSINMACHINE: chars 1-2 look like the rack, last char
                # the midplane -- TODO confirm the position-string format
                rack = b[1].strip()[1:3]
                midplane = b[1].strip()[-1]
                ionodes.append(
                    "R%s-M%s-%s:%s-%s" %
                    (rack, midplane, self.basemachine.__ncdefs__[b[3].strip()],
                     b[4].strip(), b[5].strip()))
        else:  #match up rack and midplane(s)?
            db2.execute("select bpid from TBGLBPBLOCKMAP where blockid='%s'" %
                        partname)
            result = db2.fetchall()
            for b in result:
                rack = b[0].strip()[1:3]
                midplane = b[0].strip()[-1]
                print "R%s-M%s" % (rack, midplane)
                # NOTE(review): large-block branch only prints; ionodes
                # stays empty here (the lookup below is commented out)
                #ionodes = self.getIONodes(rack, midplane)
        db2.close()
        return ionodes
Beispiel #16
0
    def test_0042_TIMESTAMP(self):
        """Round-trip TIMESTAMP values: from ticks, from literal strings
        with varying fractional parts, and from CURRENT TIMESTAMP."""
        import time
        self.cs.execute("""CREATE TABLE %s (P1 TIMESTAMP) """ % self.tableName)
        now = time.time()
        self.cs.execute(
            """INSERT INTO %s
                VALUES (?)
            """ % self.tableName, DB2.TimestampFromTicks(now))
        self.cs.execute("SELECT * FROM %s" % self.tableName)
        row = self.cs.fetchone()
        # XXX
        self.assertEqual(row[0], str(DB2.TimestampFromTicks(now)))
        self.assertEqual(len(row[0]), 26)

        self.cs.execute("DELETE FROM %s" % self.tableName)

        # literal timestamps exercising each digit of the microsecond part
        value = []
        for frac in ['000000', '000010', '000100', '001000', '010000', '100000']:
            ts = '2005-03-09-08.24.59.%s' % frac
            value.append(ts)
            self.cs.execute(
                """INSERT INTO %s
                    VALUES (?)
                """ % self.tableName, ts)

        self.cs.execute("SELECT * FROM %s" % self.tableName)
        rows = self.cs.fetchall()
        for idx, row in enumerate(rows):
            self.assertEqual(row[0], value[idx])

        # server-generated timestamps must always come back 26 chars wide
        for _ in range(100):
            self.cs.execute("""INSERT INTO %s
                VALUES (CURRENT TIMESTAMP)""" % self.tableName)
            self.cs.execute("SELECT * FROM %s" % self.tableName)
            row = self.cs.fetchone()
            self.assertEqual(len(row[0]), 26)
            self.cs.execute("DELETE FROM %s" % self.tableName)
Beispiel #17
0
 def __init__(self, dbName):
     """Open the database and index the setup *.ref files by basename.

     Each .ref file's first line is "<implId> <platId> ..."; the pair is
     stored in self.__keyMap keyed by the file's extension-less basename.
     """
     self.__conn = DB2.Connection(dbName)
     self.__cursor = self.__conn.cursor()
     pathstr = installbase() + '/usr/opt/bluegene/bin/setup/*.ref'
     self.__refFileList = glob.glob(pathstr)
     self.__keyMap = {}
     for refPath in self.__refFileList:
         # 'with' closes each .ref file; the original leaked every handle
         with open(refPath, "r") as refFile:
             line = refFile.readline().split()
         implId = line[0]
         platId = line[1]
         base = os.path.split(refPath)[1]
         key = os.path.splitext(base)[0]
         self.__keyMap[key] = (implId, platId)
Beispiel #18
0
 def __init__(self, racks, psetsize):
     # Initialize the data set, optionally opening a DB2 cursor; the
     # --nodb2 flag skips all database access entirely.
     Cobalt.Data.DataSet.__init__(self)
     if '--nodb2' not in sys.argv:
         try:
             import DB2
             conn = DB2.connect(uid=self.config.get('db2uid'), pwd=self.config.get('db2pwd'),
                                dsn=self.config.get('db2dsn'))
             self.db2 = conn.cursor()
         except:
             # no DB2 means nothing here can work: report and abort
             print "Failed to connect to DB2"
             sys.exit(1)
     self.qmconnect = FailureMode("QM Connection")
     #TODO read racks and pset size from conf file
     self.basemachine = BaseSet(racks, psetsize)
Beispiel #19
0
 def __init__(self, racks, psetsize):
     # Build the base machine description; unless --nodb2 was given, a
     # DB2 cursor is opened for partition/block queries.
     Cobalt.Data.DataSet.__init__(self)
     if '--nodb2' not in sys.argv:
         try:
             import DB2
             conn = DB2.connect(uid=self.config.get('db2uid'), pwd=self.config.get('db2pwd'),
                                dsn=self.config.get('db2dsn'))
             self.db2 = conn.cursor()
         except:
             # fatal: without the database this component cannot run
             print "Failed to connect to DB2"
             sys.exit(1)
     self.qmconnect = FailureMode("QM Connection")
     #TODO read racks and pset size from conf file
     self.basemachine = BaseSet(racks, psetsize)
def _do_main(argv):
    """Parse -d/-u/-p/-t/-o options, connect to DB2, and emit a control
    file entry for each requested table.

    -d database, -u user, -p password, -t table (repeatable, upper-cased),
    -o output file (default dbctl.ctl).
    """
    global db_name, db_user, db_pwd
    exp_tbls = []
    output_file = 'dbctl.ctl'
    optlist, args = getopt.getopt(argv, 'd:u:p:t:o:')
    for x, v in optlist:
        if x == '-d':
            db_name = v
        elif x == '-u':
            db_user = v
        elif x == '-p':
            db_pwd = v
        elif x == '-t':
            exp_tbls.append(v.upper())
        elif x == '-o':
            output_file = v
        else:
            # unknown option: show it and print usage
            print x
            _print_usage()

    #print " -d %s -u %s -p %s -o %s " % (db_name,db_user,db_pwd,output_file)
    #print exp_tbls
    writer = None
    try:
        writer = open(output_file, "w+")
    except:
        print "cannot open file [%s] !!" % output_file
        sys.exit(1)

    conn = None
    cur = None
    try:
        conn = DB2.connect(db_name, db_user, db_pwd)
        cur = conn.cursor()
    except:
        print "Cannot connect to database "
        sys.exit(1)

    # one control-file section per requested table
    for x in exp_tbls:
        print "generate table [%s] ... " % x
        if _gen_fields_from_db(cur, writer, x) == 0:
            print "generate success!!"
        else:
            print "generate failed!!"

    cur.close()
    conn.close()
    writer.close()
Beispiel #21
0
def _do_main(argv):
    """Command-line driver: read -d/-u/-p/-t/-o options, open the database
    and write control-file entries for each -t table to the output file."""
    global db_name,db_user,db_pwd
    exp_tbls = []
    output_file = 'dbctl.ctl'
    optlist,args = getopt.getopt(argv,'d:u:p:t:o:')
    for x,v in optlist:
        if x == '-d':
            db_name = v
        elif x == '-u':
            db_user = v
        elif x == '-p':
            db_pwd = v
        elif x == '-t':
            # table names are matched upper-case in the catalog
            exp_tbls.append(v.upper())
        elif x == '-o':
            output_file = v
        else:
            print x
            _print_usage()

    #print " -d %s -u %s -p %s -o %s " % (db_name,db_user,db_pwd,output_file)
    #print exp_tbls
    writer = None
    try:
        writer = open(output_file,"w+")
    except:
        print "cannot open file [%s] !!" % output_file
        sys.exit(1)

    conn = None
    cur = None
    try:
        conn = DB2.connect(db_name,db_user,db_pwd)
        cur = conn.cursor()
    except:
        print "Cannot connect to database "
        sys.exit(1)

    for x in exp_tbls:
        print "generate table [%s] ... " % x
        # _gen_fields_from_db returns 0 on success
        if _gen_fields_from_db(cur,writer,x) == 0:
            print "generate success!!"
        else:
            print "generate failed!!"

    cur.close()
    conn.close()
    writer.close()
Beispiel #22
0
    def get_connection(self):
        """Open a SIMS reporting-database connection.

        Returns (dbconn, cursor) with the session schema already set.
        Raises SIMSProblem when reporting access is disabled or the
        database cannot be reached.
        """
        if settings.DISABLE_REPORTING_DB:
            raise SIMSProblem, "Reporting database access has been disabled in this deployment."
        elif feature_disabled('sims'):
            raise SIMSProblem, "Reporting database access has been temporarily disabled due to server maintenance or load."

        # imported lazily -- presumably so deployments without the DB2
        # module can still load this file; confirm
        import DB2
        # expose the driver's exception classes on the class for callers
        SIMSConn.DatabaseError = DB2.DatabaseError
        SIMSConn.DB2Error = DB2.Error
        try:
            dbconn = DB2.connect(dsn=self.sims_db, uid=self.sims_user, pwd=self.sims_passwd)
        except DB2._db2.DatabaseError:
            raise SIMSProblem, "Could not communicate with reporting database."
        cursor = dbconn.cursor()
        cursor.execute("SET SCHEMA "+self.schema)
        return dbconn, cursor
Beispiel #23
0
    def getPartIONodes(self, partname):
        '''retrieves the IOnodes for the specified partition'''
        # --nodb2 mode: answer from a static test map, no database needed
        if '--nodb2' in sys.argv:
            iodict = {'32wayN0':['R00-M0-N0:J18-U01'],
                      '32wayN1':['R00-M0-N1:J18-U01'],
                      '32wayN2':['R00-M0-N2:J18-U01'],
                      '32wayN3':['R00-M0-N3:J18-U01'],
                      '64wayN01':['R00-M0-N0:J18-U01','R00-M0-N1:J18-U01'],
                      '64wayN23':['R00-M0-N2:J18-U01','R00-M0-N3:J18-U01'],
                      '128wayN0123':['R00-M0-N0:J18-U01','R00-M0-N1:J18-U01',
                                     'R00-M0-N2:J18-U01','R00-M0-N3:J18-U01']}
            return iodict.get(partname, None)

        ionodes = []
        db2 = DB2.connect(uid=self.config.get('db2uid'),
                          pwd=self.config.get('db2pwd'), dsn=self.config.get('db2dsn')).cursor()

        # first get blocksize in nodes
        db2.execute("select size from BGLBLOCKSIZE where blockid='%s'" % partname)
        blocksize = db2.fetchall()
        print 'blocksize is', blocksize[0][0], 'nodes'

        # sub-512-node blocks have explicit I/O node rows; bigger blocks
        # map to whole base partitions
        if int(blocksize[0][0]) < 512:
            print "small block"
            #tBGLSMALLBLOCK (BLOCKID, POSINMACHINE, PSETNUM, IONODEPOS, IONODECARD, IONODECHIP, COMPNODEPOS, NUMCOMPUTE)
            db2.execute("select * from tBGLSMALLBLOCK where blockid='%s' order by ionodepos" % partname)
            result = db2.fetchall()
            for b in result:
                # POSINMACHINE chars 1-2 = rack, last char = midplane --
                # TODO confirm position-string layout
                rack = b[1].strip()[1:3]
                midplane = b[1].strip()[-1]
                ionodes.append("R%s-M%s-%s:%s-%s" % (rack, midplane, self.basemachine.__ncdefs__[b[3].strip()],
                                                     b[4].strip(), b[5].strip()))
        else:  #match up rack and midplane(s)?
            db2.execute("select bpid from TBGLBPBLOCKMAP where blockid='%s'" % partname)
            result = db2.fetchall()
            for b in result:
                rack = b[0].strip()[1:3]
                midplane = b[0].strip()[-1]
                print "R%s-M%s" % (rack, midplane)
                # NOTE(review): ionodes stays empty in this branch -- the
                # lookup below is commented out
                #ionodes = self.getIONodes(rack, midplane)
        db2.close()
        return ionodes
Beispiel #24
0
    def connect():
        """ Connect to a DB2 database.

            All subclasses of DB2_Query will use this database for queries.

            Credentials and schema come from Django settings; the open
            connection is stored on the class as DB2_Query.db.
        """

        if settings.DISABLE_REPORTING_DB:
            raise SIMSProblem, "Reporting database access has been disabled in this deployment."

        sims_user = settings.SIMS_USER
        sims_passwd = settings.SIMS_PASSWORD
        sims_db_name = settings.SIMS_DB_NAME
        sims_db_schema = settings.SIMS_DB_SCHEMA

        # imported lazily -- presumably so the module loads even where the
        # DB2 driver is not installed; confirm
        import DB2
        dbconn = DB2.connect(dsn=sims_db_name, uid=sims_user, pwd=sims_passwd)
        cursor = dbconn.cursor()
        cursor.execute("SET SCHEMA "+sims_db_schema)
        DB2_Query.db = dbconn
Beispiel #25
0
 def setUp(self):
     # Fresh connection and cursor for each test; type_data pairs each
     # SQL column type with a sample Python value for round-trip tests.
     self.db = DB2.connect(**Config.ConnDict)
     self.tableName = 'PYDB2TEST_1'
     self.cs = self.db.cursor()
     self.type_data = [
         ('CHAR(3)', 'CCC'),
         ('VARCHAR(3)', 'V'),
         ('DATE', '2005-03-03'),
         ('TIME', '00:01:02'),
         #TODO fix timestamp
         ('TIMESTAMP', '2005-03-03-00.01.02.000000'),
         ('SMALLINT', 1),
         ('INTEGER', 2),
         ('BIGINT', 3L),
         ('REAL', 1.0),
         ('FLOAT', 1.0),
         ('DOUBLE', 1.0),
         ('REAL', 100.5),
         ('FLOAT', 100.5),
         ('DECIMAL(6,2)', 100.5),
     ]
Beispiel #26
0
 def setUp(self):
     # Open a connection/cursor for this test case and define the
     # (SQL type, sample value) pairs the generic type tests iterate over.
     self.db = DB2.connect(**Config.ConnDict)
     self.tableName = 'PYDB2TEST_1'
     self.cs = self.db.cursor()
     self.type_data = [
             ('CHAR(3)', 'CCC'),
             ('VARCHAR(3)', 'V'),
             ('DATE', '2005-03-03'),
             ('TIME', '00:01:02'),
             #TODO fix timestamp
             ('TIMESTAMP', '2005-03-03-00.01.02.000000'),
             ('SMALLINT', 1),
             ('INTEGER', 2),
             ('BIGINT', 3L),
             ('REAL', 1.0),
             ('FLOAT', 1.0),
             ('DOUBLE', 1.0),
             ('REAL', 100.5),
             ('FLOAT', 100.5),
             ('DECIMAL(6,2)', 100.5),
         ]
Beispiel #27
0
 def free_block(self, blockid, owner, status, jobid, username, job_status):
     """Free *blockid* (killing any running job first) and wait for it to
     reach the 'F' (free) state, polling the database every 10s.

     Returns None once the block is free, or the last observed status if
     it is still not free after 10 polls.  Note: the *status* parameter
     is reused as the polling variable (kept for interface compatibility).
     """
     if jobid > 0:  # there is a job running in this block
         self.setusername(username)
         self.do_cmd("killjob " + blockid + " " + str(jobid))
         self.do_cmd("waitjob " + blockid + " " + str(jobid))
     self.setusername(owner)
     self.do_cmd("free " + blockid)
     conn = DB2.connect('bgdb0', 'bglsysdb', 'db24bgls')
     # parameter marker rather than quoting blockid into the SQL text
     cmd = "select status from bglsysdb.tbglblock where blockid=?"
     cur = conn.cursor()
     status = ''
     waitCount = 0
     while status != 'F' and waitCount < 10:
         time.sleep(10)
         cur.execute(cmd, (blockid,))
         status = cur.fetchone()[0]
         waitCount = waitCount + 1
     if waitCount >= 10:
         return status
     return None
Beispiel #28
0
  def __init__(self,xmlparser=None,lwtparser=None,ldb=None):
    """
    Connects to the database and creates a cursor. Initializes the unique
    id table for this LIGO lw document.

    ldb = LIGOMetadataDatabase object
    xmlparser = pyRXP XML to tuple parser object
    lwtparser = LIGOLwParser object (tuple parser)
    """
    self.ldb = ldb
    # without a database object, run in parse-only mode (no connection)
    if self.ldb:
      self.dbcon = DB2.connect(dsn=self.ldb.database, uid='', pwd='')
      self.curs = self.dbcon.cursor()
    else:
      self.dbcon = None
      self.curs = None
    self.xmlparser = xmlparser
    self.lwtparser = lwtparser
    if lwtparser:
      self.lwtparser.unique = None
    self.table = {}
    # translation table escaping &, <, >, backslash and double quote for XML output
    self.strtoxml = Xlator({ r'&' : r'&amp;', r'>' : r'&gt;', r'<' : r'&lt;', '\\' : '\\\\', '\"' : '\\\"' }) # Note: see https://www.gravity.phy.syr.edu/dokuwiki/doku.php?id=rpfisher:gluebughunt if this is confusing, the parser just cleanly handles the conversion of everything
Beispiel #29
0
 def __init__(self,database):
   """
   database = the name of the LIGO database to initialize
   """
   self.database = database
   self.uniqueids = {}
   conn = DB2.connect(dsn=database, uid='', pwd='')
   curs = conn.cursor()
   # user-defined tables, parents first so they can be loaded in order
   curs.execute("SELECT tabname FROM syscat.tables WHERE definer<>'SYSIBM' "
     "AND TYPE='T' ORDER BY PARENTS ASC")
   self.tables = curs.fetchall()
   # CHAR(13) NOT NULL columns hold the ilwd:char unique ids
   curs.execute("SELECT tabname, colname FROM syscat.columns " +
     "WHERE typename = 'CHARACTER' AND length = 13 AND nulls = 'N'")
   for tab, col in curs.fetchall():
     tab = tab.lower()
     col = col.lower()
     try:
       self.uniqueids[tab][col] = 'ilwd:char'
     except KeyError:
       # first column seen for this table: create its inner dict
       self.uniqueids[tab] = {}
       self.uniqueids[tab][col] = 'ilwd:char'
   curs.close()
   conn.close()
Beispiel #30
0
import DB2, sys, os, re

# module-level connection and cursor shared by the helpers below
conn = DB2.connect(dsn='db', uid='uid', pwd='pwd')
c = conn.cursor()
c.execute('SET SCHEMA db2inst1')
DEST = "."  # output directory for generated files

def read_pmid_file(pmids_f):
    """Read a file of PMIDs (one per line) and group them into bundles.

    pmids_f = path to the PMID file.

    Returns a list of lists, each holding at most 1000 lines.  Lines keep
    their trailing newline, matching the original behavior.
    """
    bundle = []
    pmids = []
    # 'with' + line iteration closes the file and avoids loading it all
    # at once; the original open(...).readlines() leaked the handle
    with open(pmids_f, 'r') as fh:
        for line in fh:
            pmids.append(line)
            if len(pmids) == 1000:
                bundle.append(pmids)
                pmids = []
    if pmids:
        bundle.append(pmids)
    return bundle

descriptor = {}
def load_mesh_descriptors():
    q = """select distinct mdtn.tree_number, md.descriptor_name
            from mesh_term_string_to_descriptor_ui AS mtstd 
            join mesh_desc_tree_number AS mdtn on mtstd.descriptor_ui = mdtn.descriptor_ui
            join mesh_descriptor AS md on md.descriptor_ui = mdtn.descriptor_ui"""
    c.execute(q)

    rows = c.fetchall()
    count, total = 0, len(rows)
    for (tree_number, descriptor_name) in rows:
Beispiel #31
0
def connect(dsn, username=None, password=None):
    """Create a connection to the specified database.

    This utility method attempts to connect to the database named by dsn using
    the (optional) username and password provided. The method attempts to use a
    variety of connection frameworks (PyDB2, pyodbc, IBM's official DB2 driver,
    PythonWin's ODBC stuff and mxODBC) depending on the underlying platform.

    Note that the queries in the methods below are written to be agnostic to
    the quirks of the various connection frameworks (e.g. PythonWin's ODBC
    module doesn't correctly handle certain dates hence why all DATE and
    TIMESTAMP fields are CAST to CHAR in the queries below).
    """
    logging.info('Connecting to database "%s"' % dsn)
    # Try the "official" IBM DB2 Python driver
    try:
        import ibm_db
        import ibm_db_dbi
    except ImportError:
        pass
    else:
        logging.info('Using IBM DB2 Python driver')
        if username is not None:
            return ibm_db_dbi.connect(dsn, username, password)
        else:
            return ibm_db_dbi.connect(dsn)
    # Try the PyDB2 driver
    try:
        import DB2
    except ImportError:
        pass
    else:
        logging.info('Using PyDB2 driver')
        if username is not None:
            return DB2.connect(dsn, username, password)
        else:
            return DB2.connect(dsn)
    # Try the pyodbc driver
    try:
        import pyodbc
    except ImportError:
        pass
    else:
        logging.info('Using pyodbc driver')
        # XXX Check whether escaping/quoting is required
        # XXX Should there be a way to specify the driver name? Given that on
        # unixODBC the driver alias is specified in odbcinst.ini, and on
        # Windows with DB2 9+ one can have multiple DB2 ODBC drivers installed
        # with differentiating suffixes
        if username is not None:
            return pyodbc.connect(
                'driver=IBM DB2 ODBC DRIVER;dsn=%s;uid=%s;pwd=%s' %
                (dsn, username, password))
        else:
            return pyodbc.connect('driver=IBM DB2 ODBC DRIVER;dsn=%s' % dsn)
    # Try the PythonWin ODBC driver
    try:
        import dbi
        import odbc
    except ImportError:
        pass
    else:
        logging.info('Using PyWin32 odbc driver')
        if username is not None:
            # XXX Check whether escaping/quoting is required
            return odbc.odbc("%s/%s/%s" % (dsn, username, password))
        else:
            return odbc.odbc(dsn)
    raise ImportError(
        'Unable to find a suitable connection framework; please install PyDB2, pyodbc, PyWin32, or mxODBC'
    )
#!/opt/discoverytools/anaconda/bin/python2.7

# Demo of basic PyDB2 usage: connect, run a bounded query, print rows.
# This uses the PyDB2 package
import DB2

# credentials are placeholders -- fill in before running
USER = '******'
PASSWORD = ''
DATASOURCE = 'FDW2P'

# Open a connection
conn = DB2.connect(dsn=DATASOURCE, uid=USER, pwd=PASSWORD)

# Get a cursor, which allows buffered fetching of rows
curs = conn.cursor()
curs.execute('SELECT LOS_EST_DIM_ID FROM FDWAE.LOS_EST_VW FETCH FIRST 10 ROWS ONLY')

# curs.fetchall() will fetch all of the results
rows = curs.fetchall()
for rownum, r in enumerate(rows):
    print rownum,":",repr(r)

# Close cursors when you're done
curs.close()


# Cursors can be used to iterate through large requests
curs = conn.cursor()
# Note the lack of a limit on this query
curs.execute('SELECT LOS_EST_DIM_ID FROM FDWAE.LOS_EST_VW')
rows = curs.fetchmany(size=2)
for rownum, r in enumerate(rows):
Beispiel #33
0
                          port="5432")

######################
# ODBC Databases
######################
# Connection-string examples for each supported database family.
import pyodbc
#SQL Server
dbconn = pyodbc.connect(
    'Driver={odbcdriver};Server=servername;Database=dbname;Trusted_Connection=yes;'
)

#MS Access
dbconn = pyodbc.connect(
    'Driver={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=db_file_path;')
######################
# Oracle Databases
######################
import cx_Oracle
# build a TNS descriptor, then connect with it
dsn_tns = cx_Oracle.makedsn('Host Name',
                            'Port Number',
                            service_name='Service Name')
dbconn = cx_Oracle.connect(user='******',
                           password='******',
                           dsn=dsn_tns)

######################
# DB2 Databases
######################
import DB2
dbconn = DB2.connect(dsn='sample', uid='db2inst1', pwd='******')
Beispiel #34
0
   def __init__(self, p_db):
      """Open a connection and cursor for the database described by *p_db*.

      getDbStringDetails() splits *p_db* into (dbtype, driver, hostname,
      port, dbname, user, pwd); the dbtype then selects the client
      library used (pyodbc, Sybase, sqlanydb, pymssql, MySQLdb, DB2,
      psycopg2, cx_Oracle, netezza Connect, hdbcli or ingresdbi).  On
      success ``self.db`` holds the connection (where the driver
      provides one) and ``self.cursor`` the cursor; an unknown dbtype is
      reported via perror().

      NOTE(review): Python 2 code (bare ``print`` statements).
      """

      db          = p_db
      s           = ""
      self.db     = None
      self.cursor = None

     
      (self.dbtype, driver, hostname, port, dbname, user, pwd) = getDbStringDetails(db)
  
      # Teradata/MaxDB (or an explicit "-odbc" driver) go through ODBC.
      if (self.dbtype in ["teradata", "maxdb"]) or (driver == "-odbc"):
       dsn        = self.odbc(hostname, port, dbname)
       print dsn
       self.db    = pyodbc.connect(dsn=dsn, user=user, password=pwd, ansi=True, autocommit=True)
       self.cursor=self.db.cursor()

      elif self.dbtype == "ase": 
         # hostname defined in interface file
         self.db     = Sybase.connect(dsn=hostname, user=user, passwd=pwd, database=dbname, auto_commit=True)                                 
         self.cursor = self.db.cursor()
         self.cursor.execute("set quoted_identifier on")

      elif self.dbtype in ["asa", "iq"]: 
         import sqlanydb                      # Module for Sybase ASA or IQ
         s           = "%s" % (hostname)
         print s
         self.db     = sqlanydb.connect(eng=s, userid=user, password=pwd, dbn=dbname)                   
         self.cursor = self.db.cursor()
         
      elif self.dbtype == "mssql":
         s           = "%s:%s" % (hostname, port)
         self.db     = pymssql.connect(host=s, user=user, password=pwd, database=dbname, as_dict=False)
         self.cursor = self.db.cursor()

      elif self.dbtype == "mysql":
         self.db     = MySQLdb.connect (host=hostname, port=int(port), user = user, passwd = pwd, db=dbname)
         self.cursor = self.db.cursor()

      elif self.dbtype == "db2":
         self.db     = DB2.connect (dsn=dbname, uid=user, pwd=pwd)
         self.cursor = self.db.cursor()

      elif self.dbtype in ["postgres", "greenplum"]:
         # NOTE(review): the user/password slots in this format string
         # were scrubbed to '******' in this copy; with five arguments
         # and only three '%s' placeholders this line raises TypeError
         # as written -- restore "user='%s' password='%s'".
         s           = "host='%s' port='%s' user='******' password='******' dbname='%s'" % (hostname, port, user, pwd, dbname)
         self.db     = psycopg2.connect (s)
         self.db.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
         self.cursor = self.db.cursor()

      elif self.dbtype == "oracle":
         s = "%s/%s@(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=%s)(PORT=%s))(CONNECT_DATA=(SERVICE_NAME=%s)))"
         s = s % (user, pwd, hostname, port, dbname) 
         self.db     = cx_Oracle.connect(s)
         self.cursor = self.db.cursor()

      elif self.dbtype == "netezza":
         # conn="DRIVER={MySQL ODBC 3.51 Driver}; SERVER=localhost; PORT=3306; DATABASE=mysql; UID=joe; 
         # PASSWORD=bloggs; OPTION=3;SOCKET=/var/run/mysqld/mysqld.sock;"   
         # NOTE(review): Connect() here leaves self.db as None -- only the
         # cursor is kept for netezza.
         self.cursor = Connect (hostname, user, pwd)

      elif self.dbtype in ["hana"]:
         from hdbcli import dbapi
         # HANA instance port convention: 3<instance>15, hence 30015+port
         self.db                       = dbapi.connect(address=hostname, port=30015+int(port), user=user, password=pwd, autocommit=True)
         self.cursor                   = self.db.cursor()

      elif self.dbtype in ["progress"]:
         dsn        = self.odbc(hostname, port, dbname) 
         self.db    = pyodbc.connect(dsn=dsn, user=user, password=pwd, autocommit=True)
         self.cursor=self.db.cursor()

      elif self.dbtype in ["ingres", "vectorwise"]:
         # vnode = @host,protocol,port[;attribute=value{;attribute=value}][[user,password]]
         s = "@%s,tcp_ip,%s;connection_type=direct" % (hostname, port)
         self.db=ingresdbi.connect(database=dbname, vnode=s, uid=user, pwd=pwd, dbms_pwd=pwd, autocommit = "Y")   # trace = (7, "dbi.log")
         self.cursor=self.db.cursor()

      else:
         perror("Unknown_db_type", self.dbtype)
Beispiel #35
0
def coalesce_seg(database, start_time, end_time):
    """Coalesce segment and segment_summary rows in a DB2 segment database.

    Connects to *database*, registers this run in the ``process`` table,
    then for every distinct segment type whose summary rows overlap
    [start_time, end_time]: reads the matching ``segment`` and
    ``segment_summary`` rows back through temporary views, merges the
    intervals with ``segments.segmentlist.coalesce()``, re-inserts the
    coalesced rows under the new process_id, and deletes the old
    uncoalesced rows.

    Returns 0 on success, or the stringified exception text on failure.

    NOTE(review): Python 2 code (``print >>``, ``except Exception, e``).
    SQL is assembled by string interpolation throughout (values come from
    the database itself and integer-cast arguments), and view cleanup
    relies on bare ``except`` clauses.
    """
    ret = 0  #assume execution successufl

    try:
        st = int(start_time)
        et = int(end_time)
        db = str(database.strip())
        #-------------------------------------------------------------------
        # Set up environment and get needed values
        #-------------------------------------------------------------------
        # Set up connection to the database
        dbconn = DB2.connect(dsn=db, uid='', pwd='', autoCommit=True)
        curs = dbconn.cursor()

        # create a new process_id
        sql = "select hex(GENERATE_UNIQUE()) from sysibm.sysdummy1"
        curs.execute(sql)
        hex_procid = curs.fetchone()[0]
        process_id = 'x' + '\'' + hex_procid + '\''

        # determine the local creator_db
        sql = "SELECT DEFAULT FROM SYSCAT.COLUMNS WHERE "
        sql += "TABNAME = 'PROCESS' AND COLNAME = 'CREATOR_DB'"
        curs.execute(sql)
        creator_db = int(curs.fetchone()[0])

        # prepare values for the new row to be inserted into the process table
        program = os.path.abspath(sys.argv[0])
        node = socket.gethostname()
        username = pwd.getpwuid(os.getuid()).pw_name
        unix_procid = os.getpid()
        proc_start_time = gpstime.GpsSecondsFromPyUTC(time.time())
        end_time = None
        jobid = 0
        domain = 'coalesce_local'

        # insert new row into process table
        sql = "INSERT INTO process "
        sql += "(program, is_online, node, username, unix_procid, start_time, jobid, domain, process_id, creator_db) "
        sql += "VALUES ('%s', 0, '%s', '%s', %d, %d, %d, '%s',%s, %d)" % (
            program, node, username, unix_procid, proc_start_time, jobid,
            domain, process_id, creator_db)
        curs.execute(sql)

        # get the BLOB process_id for later reference
        sql = "SELECT BLOB(process_id) from process where hex(process_id)='%s' " % hex_procid
        curs.execute(sql)
        blob_procid = curs.fetchone()[0]

        #========================================================================
        #
        #                                Main
        #
        #========================================================================
        # Algorithm:
        # 1. Find distinct version 1 segment type from segment_summary table witnin start_time, end_time range
        # 2. Find segments and intervals to coalesce
        # 3. Coalesce segments and intervals
        # 4. Insert coaleseced segments back in to the database
        # 5. Delete uncoalesced segments and intervals from the database

        # 1. Find distinct segment types matching our criteria from segment_summary within the specified time range
        sql = "SELECT distinct(hex(segment_summary.segment_def_id)) FROM segment_summary, segment_definer, process "
        sql += "WHERE segment_summary.segment_def_id=segment_definer.segment_def_id "
        sql += "AND segment_summary.segment_def_cdb=segment_definer.creator_db "
        sql += "AND segment_summary.process_id=process.process_id "
        sql += "AND segment_summary.creator_db=process.creator_db "
        # Removed next line so that all segments are coalesced: this will be slower up front but faster for queries and the long run
        #sql += "AND ((segment_definer.name like 'DMT-%' and segment_definer.version=1) or (process.ifos='V1' and process.program='SegOnline')) "
        sql += "AND segment_summary.start_time <=%d " % et
        sql += "AND segment_summary.end_time >= %d " % st
        curs.execute(sql)
        def_ids = curs.fetchall()
        # NOTE(review): data_existence is set but never read again in this
        # function.
        if not def_ids:
            data_existence = 0
        else:
            data_existence = 1

        # loop in the segment types to fetch, coalesce, insert and delete
        for d in def_ids:
            # get the BLOB segment_def_id for later use
            sql = "SELECT BLOB(segment_def_id), ifos, name, version, creator_db "
            sql += "FROM segment_definer "
            sql += "WHERE hex(segment_def_id) = '%s' " % d[0]

            curs.execute(sql)
            result = curs.fetchone()
            blob_defid = result[0]
            ifos = result[1].strip()
            name = result[2]
            ver = result[3]
            def_cdb = result[4]

            # 2. Find segments and intervals to coalesce
            # get the segment start_time, end_time to coalesce, and according primary key to delete
            try:
                curs.execute("drop view seg_view")
            except:
                pass
            sql = "CREATE view seg_view (st,et,seg_id) AS "
            sql += "SELECT start_time,end_time, segment_id from segment "
            sql += "WHERE hex(segment_def_id) = '%s' " % d[0]
            sql += "AND segment.start_time <=%d " % et
            sql += "AND segment.end_time >= %d " % st
            print >> sys.stdout, (
                "Selecting segments to coalesce for %s version:%d %s ... " %
                (ifos, ver, name))
            curs.execute(sql)

            curs.execute("SELECT st,et from seg_view")
            seg_bf_cos = curs.fetchall()  # get the segments to coalesce

            # get the summary start_time, end_time to coalesce, and according primary key to delete
            try:
                curs.execute("drop view sum_view")
            except:
                pass
            sql = "CREATE view sum_view (st,et,sum_id) AS "
            sql += "SELECT start_time,end_time, segment_sum_id from segment_summary "
            sql += "WHERE hex(segment_def_id) = '%s' " % d[0]
            sql += "AND segment_summary.start_time <=%d " % et
            sql += "AND segment_summary.end_time >= %d " % st
            curs.execute(sql)

            curs.execute("SELECT st,et from sum_view")
            sum_bf_cos = curs.fetchall()  # get the summarys to coalesce

            # 3. Coalesce segments and intervals
            print >> sys.stdout, "Coalescing segments ... "
            segs = segments.segmentlist([])
            sums = segments.segmentlist([])
            for bf in seg_bf_cos:
                seg = segments.segment(int(bf[0]), int(bf[1]))
                segs.append(seg)
            for bf in sum_bf_cos:
                sum = segments.segment(int(bf[0]), int(bf[1]))
                sums.append(sum)

            segs.coalesce()
            sums.coalesce()

            # 4. Insert coaleseced segments back in to the database
            # insert coalesced segs into segment table
            insert_list = []
            for s in segs:
                # generate unique id for insertion
                curs.execute("VALUES BLOB(GENERATE_UNIQUE())")
                prim_id = curs.fetchone()[0]
                # generate a list of values to insert using executemany()
                insert_list.append((prim_id, creator_db, s[0], s[1],
                                    blob_defid, def_cdb, blob_procid))

            sql = "INSERT INTO segment "
            sql += "(segment_id, creator_db, start_time, end_time, segment_def_id, segment_def_cdb, process_id) "
            sql += "VALUES (?,?,?,?,?,?,?) "
            print >> sys.stdout, "Inserting coalesced segments back in ... "
            curs.executemany(sql, insert_list)

            # insert coalesced sums into segment_summary table
            insert_list = []
            for s in sums:
                # generate unique id for insertion
                curs.execute("VALUES BLOB(GENERATE_UNIQUE())")
                prim_id = curs.fetchone()[0]
                # generate a list of values to insert using executemany()
                insert_list.append((prim_id, creator_db, s[0], s[1],
                                    blob_defid, def_cdb, blob_procid))
            sql = "INSERT INTO segment_summary "
            sql += "(segment_sum_id, creator_db, start_time, end_time, segment_def_id, segment_def_cdb, process_id) "
            sql += "VALUES (?,?,?,?,?,?,?) "
            curs.executemany(sql, insert_list)

            # 5. Delete uncoalesced segments and intervals from the database
            print >> sys.stdout, "Deleting un-coaleseced segments ... "
            print >> sys.stdout
            sql = "DELETE FROM segment "
            sql += "WHERE segment_id in (select seg_id from seg_view) "
            sql += "AND process_id != %s " % process_id
            curs.execute(sql)

            sql = "DELETE FROM segment_summary "
            sql += "WHERE segment_sum_id in (select sum_id from sum_view) "
            sql += "AND process_id != %s " % process_id
            curs.execute(sql)

        # update end_time in process table
        sql = "update process set end_time=%d where hex(process_id)='%s' " % (
            gpstime.GpsSecondsFromPyUTC(time.time()), hex_procid)
        curs.execute(sql)

        try:
            curs.execute("drop view seg_view")
            curs.execute("drop view sum_view")
        except:
            pass
        curs.close()

    except Exception, e:
        ret = str(e)
        print >> sys.stdout, ("%s" % ret)
Beispiel #36
0
    def getIONodes(self):
        """Return the sorted I/O-node locations for this system.

        Queries DB2's ``tbglippool`` table for (location, ipaddress)
        pairs, unless ``--nodb2`` was given on the command line, in
        which case a hard-coded sample map for a 1:32 system is used
        instead.

        When ``self.psetsize`` is 32 or 128 only one I/O node per I/O
        processor card is in use, so the second (``U11``) entry of each
        card is dropped.  The ``-I`` marker is stripped from every
        returned location string.
        """
        if '--nodb2' not in sys.argv:
            db2 = DB2.connect(uid=self.config.get('db2uid'),
                              pwd=self.config.get('db2pwd'),
                              dsn=self.config.get('db2dsn')).cursor()

            db2.execute("SELECT LOCATION,IPADDRESS FROM tbglippool")
            results = db2.fetchall()
            ioreturn = [(location.strip(), ip) for (location, ip) in results]
            db2.close()
        else:
            # sample for a 1:32 system
            ioreturn = [('R00-M1-NA-I:J18-U01', '172.30.0.53'),
                        ('R00-M1-NA-I:J18-U11', '172.30.0.54'),
                        ('R00-M1-NB-I:J18-U01', '172.30.0.55'),
                        ('R00-M1-NB-I:J18-U11', '172.30.0.56'),
                        ('R00-M1-NC-I:J18-U01', '172.30.0.57'),
                        ('R00-M1-NC-I:J18-U11', '172.30.0.58'),
                        ('R00-M1-ND-I:J18-U01', '172.30.0.59'),
                        ('R00-M1-ND-I:J18-U11', '172.30.0.60'),
                        ('R00-M1-NE-I:J18-U01', '172.30.0.61'),
                        ('R00-M1-NE-I:J18-U11', '172.30.0.62'),
                        ('R00-M1-NF-I:J18-U01', '172.30.0.63'),
                        ('R00-M1-NF-I:J18-U11', '172.30.0.64'),
                        ('R00-M1-N9-I:J18-U11', '172.30.0.52'),
                        ('R00-M0-N1-I:J18-U11', '172.30.0.4'),
                        ('R00-M0-N2-I:J18-U01', '172.30.0.5'),
                        ('R00-M0-N2-I:J18-U11', '172.30.0.6'),
                        ('R00-M0-N3-I:J18-U01', '172.30.0.7'),
                        ('R00-M0-N3-I:J18-U11', '172.30.0.8'),
                        ('R00-M0-N4-I:J18-U01', '172.30.0.9'),
                        ('R00-M0-N4-I:J18-U11', '172.30.0.10'),
                        ('R00-M0-N5-I:J18-U01', '172.30.0.11'),
                        ('R00-M0-N5-I:J18-U11', '172.30.0.12'),
                        ('R00-M0-N6-I:J18-U01', '172.30.0.13'),
                        ('R00-M0-N6-I:J18-U11', '172.30.0.14'),
                        ('R00-M0-N7-I:J18-U01', '172.30.0.15'),
                        ('R00-M0-N7-I:J18-U11', '172.30.0.16'),
                        ('R00-M0-N8-I:J18-U01', '172.30.0.17'),
                        ('R00-M0-N8-I:J18-U11', '172.30.0.18'),
                        ('R00-M0-N9-I:J18-U01', '172.30.0.19'),
                        ('R00-M0-N9-I:J18-U11', '172.30.0.20'),
                        ('R00-M0-NA-I:J18-U01', '172.30.0.21'),
                        ('R00-M0-NA-I:J18-U11', '172.30.0.22'),
                        ('R00-M0-NB-I:J18-U01', '172.30.0.23'),
                        ('R00-M0-NB-I:J18-U11', '172.30.0.24'),
                        ('R00-M0-NC-I:J18-U01', '172.30.0.25'),
                        ('R00-M0-NC-I:J18-U11', '172.30.0.26'),
                        ('R00-M0-ND-I:J18-U01', '172.30.0.27'),
                        ('R00-M0-ND-I:J18-U11', '172.30.0.28'),
                        ('R00-M0-NE-I:J18-U01', '172.30.0.29'),
                        ('R00-M0-NE-I:J18-U11', '172.30.0.30'),
                        ('R00-M0-NF-I:J18-U01', '172.30.0.31'),
                        ('R00-M0-N0-I:J18-U01', '172.30.0.1'),
                        ('R00-M0-N0-I:J18-U11', '172.30.0.2'),
                        ('R00-M0-N1-I:J18-U01', '172.30.0.3'),
                        ('R00-M0-NF-I:J18-U11', '172.30.0.32'),
                        ('R00-M1-N0-I:J18-U01', '172.30.0.33'),
                        ('R00-M1-N0-I:J18-U11', '172.30.0.34'),
                        ('R00-M1-N1-I:J18-U01', '172.30.0.35'),
                        ('R00-M1-N1-I:J18-U11', '172.30.0.36'),
                        ('R00-M1-N2-I:J18-U01', '172.30.0.37'),
                        ('R00-M1-N2-I:J18-U11', '172.30.0.38'),
                        ('R00-M1-N3-I:J18-U01', '172.30.0.39'),
                        ('R00-M1-N3-I:J18-U11', '172.30.0.40'),
                        ('R00-M1-N4-I:J18-U01', '172.30.0.41'),
                        ('R00-M1-N4-I:J18-U11', '172.30.0.42'),
                        ('R00-M1-N5-I:J18-U01', '172.30.0.43'),
                        ('R00-M1-N5-I:J18-U11', '172.30.0.44'),
                        ('R00-M1-N6-I:J18-U01', '172.30.0.45'),
                        ('R00-M1-N6-I:J18-U11', '172.30.0.46'),
                        ('R00-M1-N7-I:J18-U01', '172.30.0.47'),
                        ('R00-M1-N7-I:J18-U11', '172.30.0.48'),
                        ('R00-M1-N8-I:J18-U01', '172.30.0.49'),
                        ('R00-M1-N8-I:J18-U11', '172.30.0.50'),
                        ('R00-M1-N9-I:J18-U01', '172.30.0.51')]

        ioreturn.sort()

        # BUG FIX: the original removed entries from ioreturn while
        # iterating over it (``for i in ioreturn: ... ioreturn.remove(i)``),
        # which silently skips the element following each removal and only
        # worked because U01/U11 entries happen to alternate after sorting.
        # Build a filtered copy instead.
        if self.psetsize in [32, 128]:
            ioreturn = [entry for entry in ioreturn if 'U11' not in entry[0]]

        return [re.sub('-I', '', entry[0]) for entry in ioreturn]
Beispiel #37
0
 def setUp(self):
     """Open a fresh DB2 connection, pick the scratch table name, and
     create the cursor the test methods use."""
     connection = DB2.connect(**Config.ConnDict)
     self.db = connection
     self.tableName = 'PYDB2TEST_0'
     self.cs = connection.cursor()
Beispiel #38
0
 def test_001_ConnectSuccess(self):
     """DB2.connect() - Successful"""
     # A successful connect yields a connection object that closes
     # cleanly; any failure raises out of the test.
     connection = DB2.connect(**Config.ConnDict)
     self.db = connection
     connection.close()
 def __init__(self, dbName):
     """Record the target database name and open a DB2 connection to it."""
     target = dbName
     self.__dbName = target
     self.__conn = DB2.Connection(target)
Beispiel #40
0
def coalesce_seg(database, start_time, end_time):
  """Coalesce segment and segment_summary rows in a DB2 segment database.

  Connects to *database*, registers this run in the ``process`` table,
  then for every distinct segment type whose summary rows overlap
  [start_time, end_time]: reads the matching ``segment`` and
  ``segment_summary`` rows back through temporary views, merges the
  intervals with ``segments.segmentlist.coalesce()``, re-inserts the
  coalesced rows under the new process_id, and deletes the old
  uncoalesced rows.

  Returns 0 on success, or the stringified exception text on failure.

  NOTE(review): Python 2 code (``print >>``, ``except Exception,e``).
  SQL is assembled by string interpolation and view cleanup relies on
  bare ``except`` clauses.
  """
  ret = 0            #assume execution successufl

  try:
    st = int(start_time)
    et = int(end_time)
    db = str(database.strip())
    #-------------------------------------------------------------------
    # Set up environment and get needed values
    #-------------------------------------------------------------------
    # Set up connection to the database
    dbconn = DB2.connect(dsn=db, uid='', pwd='', autoCommit=True)
    curs = dbconn.cursor()

    # create a new process_id
    sql = "select hex(GENERATE_UNIQUE()) from sysibm.sysdummy1"
    curs.execute(sql)
    hex_procid = curs.fetchone()[0]
    process_id = 'x' + '\'' + hex_procid + '\''

    # determine the local creator_db
    sql = "SELECT DEFAULT FROM SYSCAT.COLUMNS WHERE "
    sql += "TABNAME = 'PROCESS' AND COLNAME = 'CREATOR_DB'"
    curs.execute(sql)
    creator_db = int(curs.fetchone()[0])

  
    # prepare values for the new row to be inserted into the process table
    program = os.path.abspath(sys.argv[0])
    node = socket.gethostname()
    username = pwd.getpwuid(os.getuid()).pw_name
    unix_procid = os.getpid()
    proc_start_time = gpstime.GpsSecondsFromPyUTC(time.time())
    end_time = None
    jobid = 0
    domain = 'coalesce_local'

    # insert new row into process table
    sql = "INSERT INTO process "
    sql += "(program, is_online, node, username, unix_procid, start_time, jobid, domain, process_id, creator_db) "
    sql += "VALUES ('%s', 0, '%s', '%s', %d, %d, %d, '%s',%s, %d)" % (program, node, username, unix_procid, proc_start_time, jobid, domain, process_id, creator_db)
    curs.execute(sql)

    # get the BLOB process_id for later reference
    sql = "SELECT BLOB(process_id) from process where hex(process_id)='%s' " % hex_procid
    curs.execute(sql)
    blob_procid = curs.fetchone()[0]


    #========================================================================
    #
    #                                Main
    #
    #========================================================================
    # Algorithm:
    # 1. Find distinct version 1 segment type from segment_summary table witnin start_time, end_time range 
    # 2. Find segments and intervals to coalesce
    # 3. Coalesce segments and intervals
    # 4. Insert coaleseced segments back in to the database
    # 5. Delete uncoalesced segments and intervals from the database


    # 1. Find distinct segment types matching our criteria from segment_summary within the specified time range
    sql  = "SELECT distinct(hex(segment_summary.segment_def_id)) FROM segment_summary, segment_definer, process "
    sql += "WHERE segment_summary.segment_def_id=segment_definer.segment_def_id "
    sql += "AND segment_summary.segment_def_cdb=segment_definer.creator_db "
    sql += "AND segment_summary.process_id=process.process_id "
    sql += "AND segment_summary.creator_db=process.creator_db "
    # Removed next line so that all segments are coalesced: this will be slower up front but faster for queries and the long run
    #sql += "AND ((segment_definer.name like 'DMT-%' and segment_definer.version=1) or (process.ifos='V1' and process.program='SegOnline')) "
    sql += "AND segment_summary.start_time <=%d " % et
    sql += "AND segment_summary.end_time >= %d " % st
    curs.execute(sql)
    def_ids = curs.fetchall()
    # NOTE(review): data_existence is set but never read again here.
    if not def_ids:
      data_existence = 0
    else:
      data_existence = 1

    # loop in the segment types to fetch, coalesce, insert and delete
    for d in def_ids:
      # get the BLOB segment_def_id for later use 
      sql = "SELECT BLOB(segment_def_id), ifos, name, version, creator_db " 
      sql += "FROM segment_definer " 
      sql += "WHERE hex(segment_def_id) = '%s' " % d[0]

      curs.execute(sql)
      result = curs.fetchone()
      blob_defid = result[0]
      ifos = result[1].strip() 
      name = result[2]
      ver = result[3]
      def_cdb = result[4]

      # 2. Find segments and intervals to coalesce
      # get the segment start_time, end_time to coalesce, and according primary key to delete
      try:
        curs.execute("drop view seg_view")
      except:
        pass
      sql = "CREATE view seg_view (st,et,seg_id) AS "
      sql += "SELECT start_time,end_time, segment_id from segment "
      sql += "WHERE hex(segment_def_id) = '%s' " % d[0]
      sql += "AND segment.start_time <=%d " % et
      sql += "AND segment.end_time >= %d " % st
      print >> sys.stdout, ("Selecting segments to coalesce for %s version:%d %s ... " % (ifos,ver, name))
      curs.execute(sql)

      curs.execute("SELECT st,et from seg_view")
      seg_bf_cos = curs.fetchall()   # get the segments to coalesce

      # get the summary start_time, end_time to coalesce, and according primary key to delete
      try:
        curs.execute("drop view sum_view")
      except:
        pass
      sql = "CREATE view sum_view (st,et,sum_id) AS "
      sql += "SELECT start_time,end_time, segment_sum_id from segment_summary "
      sql += "WHERE hex(segment_def_id) = '%s' " % d[0]
      sql += "AND segment_summary.start_time <=%d " % et
      sql += "AND segment_summary.end_time >= %d " % st
      curs.execute(sql)

      curs.execute("SELECT st,et from sum_view")
      sum_bf_cos = curs.fetchall()   # get the summarys to coalesce

      # 3. Coalesce segments and intervals
      print >> sys.stdout, "Coalescing segments ... "
      segs = segments.segmentlist([]) 
      sums = segments.segmentlist([]) 
      for bf in seg_bf_cos:
        seg = segments.segment(int(bf[0]), int(bf[1]))
        segs.append(seg) 
      for bf in sum_bf_cos:
        sum = segments.segment(int(bf[0]), int(bf[1]))
        sums.append(sum) 

      segs.coalesce()
      sums.coalesce()


      # 4. Insert coaleseced segments back in to the database
      # insert coalesced segs into segment table
      insert_list = []
      for s in segs:
        # generate unique id for insertion
        curs.execute("VALUES BLOB(GENERATE_UNIQUE())")
        prim_id = curs.fetchone()[0]
        # generate a list of values to insert using executemany()
        insert_list.append((prim_id, creator_db, s[0], s[1], blob_defid, def_cdb, blob_procid))

      sql = "INSERT INTO segment "
      sql += "(segment_id, creator_db, start_time, end_time, segment_def_id, segment_def_cdb, process_id) "
      sql += "VALUES (?,?,?,?,?,?,?) "
      print >> sys.stdout, "Inserting coalesced segments back in ... "
      curs.executemany(sql, insert_list)

      # insert coalesced sums into segment_summary table
      insert_list = []
      for s in sums:
        # generate unique id for insertion
        curs.execute("VALUES BLOB(GENERATE_UNIQUE())")
        prim_id = curs.fetchone()[0]
        # generate a list of values to insert using executemany()
        insert_list.append((prim_id, creator_db, s[0], s[1], blob_defid, def_cdb, blob_procid))
      sql = "INSERT INTO segment_summary "
      sql += "(segment_sum_id, creator_db, start_time, end_time, segment_def_id, segment_def_cdb, process_id) "
      sql += "VALUES (?,?,?,?,?,?,?) "
      curs.executemany(sql, insert_list)

      # 5. Delete uncoalesced segments and intervals from the database
      print >> sys.stdout, "Deleting un-coaleseced segments ... "
      print >> sys.stdout 
      sql = "DELETE FROM segment "
      sql += "WHERE segment_id in (select seg_id from seg_view) "
      sql += "AND process_id != %s " % process_id
      curs.execute(sql)

      sql = "DELETE FROM segment_summary "
      sql += "WHERE segment_sum_id in (select sum_id from sum_view) "
      sql += "AND process_id != %s " % process_id
      curs.execute(sql)

    # update end_time in process table
    sql = "update process set end_time=%d where hex(process_id)='%s' " % (gpstime.GpsSecondsFromPyUTC(time.time()),hex_procid)
    curs.execute(sql)
  
    try:  
      curs.execute("drop view seg_view")
      curs.execute("drop view sum_view")
    except:
      pass
    curs.close()

  except Exception,e:
    ret = str(e)
    print >> sys.stdout, ("%s" % ret)
Beispiel #41
0
# Clone an existing row of experiment.ctp_table (and its ct_params rows),
# letting the identity column generate a fresh ctp_id.
#
# Usage: <script> dbName ctp_id
import sys
import mx.DateTime
import time
import os.path
import pwd

import DB2  # BUG FIX: was missing -- DB2.connect() below raised NameError

if len(sys.argv) < 3:
    print(sys.argv[0], "dbName ctp_id (to be cloned)", file=sys.stderr)
    sys.exit(-1)
dbName = sys.argv[1]
old_ctp = sys.argv[2]
creator = pwd.getpwuid(os.getuid())[0]  # login name of the invoking user
created = mx.DateTime.TimestampFromTicks(time.time())
desc = "created by cloning ctp_id " + str(old_ctp)
fileName = "cloned"

conn = DB2.connect(dbName)
cur = conn.cursor()
# NOTE(review): SQL below is built by string concatenation from
# command-line input, so it is injectable; parameterize ('?') if this
# script ever runs on untrusted arguments.
cmd = "insert into experiment.ctp_table (ctp_id, created, creator, filename, desc) values(DEFAULT, \'" + str(created) + "\', \'" + str(creator) + "\', \'" +\
      str(fileName) + "\', \'" + str(desc) +"\')"
cur.execute(cmd)
# IDENTITY_VAL_LOCAL() returns the identity value generated by the insert above.
cmd = "select ctp_id from experiment.ctp_table where ctp_id = integer(IDENTITY_VAL_LOCAL())"
cur.execute(cmd)
foo = cur.fetchone()
new_ctp = foo[0]
print("New ctp_id: ", str(new_ctp))
# Copy the old ctp's parameter rows over to the new ctp_id.
cmd = "insert into experiment.ct_params (ctp_id, type, name, value) select value(" +\
      str(new_ctp) +\
      "," + str(new_ctp) + "), type, name, value from experiment.ct_params where ctp_id = " +\
      str(old_ctp)
cur.execute(cmd)
 def connect(self):
     """Open the DB2 connection described by the stored configuration."""
     database = self.readConfig('database')
     user = self.readConfig('user')
     password = self.readConfig('password')
     self._conn = DB2.connect(database, user, password)
'''
Created on Dec 5, 2013
@author: markito

Streams tweet texts out of GemFireXD in batches and accumulates a word
frequency Counter over every word of length >= 3.
'''
import DB2
from ConfigParser import ConfigParser
from collections import Counter

config = ConfigParser()
config.read('config')

conn = DB2.connect(config.get('GemFireXD', 'database'),
                   config.get('GemFireXD', 'user'),
                   config.get('GemFireXD', 'password'))

SQL = 'SELECT text FROM tweets'
cursor = conn.cursor()
cursor.execute(SQL)

BATCH_SIZE = 1000
result = cursor.fetchmany(BATCH_SIZE)
wordCounter = Counter()

while result:
    # every word with at least 3 characters from every tweet in the batch
    words = [w
             for t in result
             for w in t[0].split()
             if len(w) >= 3]
    wordCounter.update(words)
    # BUG FIX: the original never fetched the next batch inside the loop,
    # so any non-empty first batch made this spin forever over the same
    # 1000 rows.  Advance the cursor; fetchmany returns [] when exhausted.
    result = cursor.fetchmany(BATCH_SIZE)
Beispiel #44
0
def update_status_DB2(logfile):
    """Connect to DB2 in preparation for recording status from *logfile*.

    NOTE(review): this copy appears truncated -- the connection and
    cursor are created but never used and *logfile* is unread; the
    connect arguments ('dbname', 'username', 'password') are literal
    placeholders.
    """
    db = DB2.connect('dbname', 'username', 'password')
    cursor = db.cursor()
Beispiel #45
0
    def __init__(self, p_db, connect = True):
        '''
            Parameter db="dbtype://*****:*****@(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=%s)(PORT=%s))(CONNECT_DATA=(SERVICE_NAME=%s)))"
                s = s % (user, pwd, hostname, port, dbname)
                self.db = cx_Oracle.connect(s)
                self.cursor = self.db.cursor()
    
            elif self.dbtype == "netezza":
                # conn="DRIVER={MySQL ODBC 3.51 Driver}; SERVER=localhost; PORT=3306; DATABASE=mysql; UID=joe;
                # PASSWORD=bloggs; OPTION=3;SOCKET=/var/run/mysqld/mysqld.sock;"
                self.cursor = Connect(hostname, user, pwd)
    
            elif self.dbtype in ["hana"]:
                from hdbcli import dbapi
                self.db = dbapi.connect(
                    address=hostname, port=30015+int(port), user=user, password=pwd, autocommit=True)
                self.cursor = self.db.cursor()
    
            elif self.dbtype in ["progress"]:
                dsn = self.odbc(hostname, port, dbname)
                self.db = pyodbc.connect(
                    dsn=dsn, user=user, password=pwd, autocommit=True)
                self.cursor = self.db.cursor()
    
            elif self.dbtype in ["zen"]:
                # Example: driver={Pervasive ODBC Interface};server=localhost;DBQ=demodata'
                # Example: driver={Pervasive ODBC Interface};server=hostname:port;serverdsn=dbname'
                dsn = dbname
                connString = "DRIVER={Pervasive ODBC Interface};SERVER=%s;ServerDSN=%s;UID=%s;PWD=%s;" % (
                    hostname, dsn, user, pwd)
                if connect:
                    self.db = pyodbc.connect(connString, autocommit=True)
                    self.cursor = self.db.cursor()

            elif self.dbtype in ["ingres", "vector", "vectorh", "actianx", "avalanche"]:
                connString = "DRIVER={Ingres};SERVER=@%s,tcp_ip,%s;DATABASE=%s;SERVERTYPE=INGRES;UID=%s;PWD=%s;" % (
                    hostname, port, dbname, user, pwd)
                if connect:
                    self.db = pyodbc.connect(connString, autocommit=True)
                    self.cursor = self.db.cursor()
            else:
                perror("Unknown_db_type", self.dbtype)
        except Exception as ex:
            self.logger.exception(ex)
Beispiel #46
0
 def setUp(self):
     """Connect to the configured test database and create the ``test1``
     scratch table used by the test methods."""
     connection = DB2.connect(**Config.ConnDict)
     self.db = connection
     ddl = "CREATE TABLE test1(A BIGINT, B VARCHAR(50), C BIGINT)"
     cur = connection.cursor()
     cur.execute(ddl)
     cur.close()
Beispiel #47
0
 def setUp(self):
     """Open the test database connection and build the ``test1`` table."""
     self.db = DB2.connect(**Config.ConnDict)
     create_stmt = "CREATE TABLE test1(A BIGINT, B VARCHAR(50), C BIGINT)"
     work_cursor = self.db.cursor()
     work_cursor.execute(create_stmt)
     work_cursor.close()
# item_id (pmid), title_lower_case

# facets.tsv
# top_category_lower_case, top_category, mesh top heading

# top_category_lower_case_terms.tsv
# term_id, mesh_hierarchy

# top_category_lower_case_map.tsv
# item_id (pmid), term_id

# publication dates, searchable title, lucene index full text, sortkeys

import DB2, sys, os, re

# Module-level DB2 session shared by the export helpers below.
# NOTE(review): dsn/uid/pwd are placeholder values in this copy.
conn = DB2.connect(dsn="db", uid="uid", pwd="pwd")
c = conn.cursor()
c.execute("SET SCHEMA db2inst1")
# Output directory for the generated .tsv files
DEST = "."

# tree_number -> descriptor_name lookup; presumably populated by
# load_mesh_descriptors() below -- that function is truncated here, verify.
descriptor = {}


def load_mesh_descriptors():
    """Fetch distinct (tree_number, descriptor_name) pairs from the MeSH tables.

    Uses the module-level cursor ``c``. NOTE(review): this function appears
    truncated at this point in the file — the fetched ``rows`` are presumably
    folded into the module-level ``descriptor`` dict; confirm against the
    complete source.
    """
    q = """select distinct mdtn.tree_number, md.descriptor_name
            from mesh_term_string_to_descriptor_ui AS mtstd 
            join mesh_desc_tree_number AS mdtn on mtstd.descriptor_ui = mdtn.descriptor_ui
            join mesh_descriptor AS md on md.descriptor_ui = mdtn.descriptor_ui"""
    c.execute(q)

    rows = c.fetchall()
 # Flag we want:
 
 #ifo="L1"
 #name="DMT-SCIENCE"
 #version=2
 
 # Command-line options select which segment-definer row to look up.
 options=parse_command_line()
 ifo=options.ifo
 name=options.name
 version=int(options.segversion)
 segment_url=options.segment_url
 
 # DB2 connection
 
 #import DB2
 conn=DB2.connect(dsn='seg_cit',uid='',pwd='')
 curs=conn.cursor()
 
 # Grab the process information
 
 # NOTE(review): ifo/name/version are interpolated into the SQL with
 # %-formatting — acceptable for trusted CLI input, but a parameterized
 # query would be safer; confirm inputs are trusted.
 
 curs.execute("select segment_definer.ifos, segment_definer.name, segment_definer.version, process.start_time, process.end_time,process.program,process.creator_db,process.version,process.cvs_repository,process.cvs_entry_time,process.comment,process.is_online,process.node,process.username,process.unix_procid,process.jobid,process.domain,process.param_set,process.ifos,process.insertion_time, segment_definer.comment from segment_definer, process where process.process_id = segment_definer.process_id AND (segment_definer.ifos = '%s' AND segment_definer.name = '%s' AND segment_definer.version = %d)" %(ifo,name,version))
 procresult=curs.fetchall()
 
 # Per the SELECT list above: column 0 = segment_definer.ifos,
 # 3 = process.start_time, 4 = process.end_time, 5 = process.program,
 # 6 = process.creator_db.
 ifo=procresult[0][0].strip()
 proc_start_time=procresult[0][3]
 proc_end_time=procresult[0][4]
 
 proc_program=procresult[0][5]
 
 proc_creator_db=procresult[0][6]
Beispiel #50
0
 def test_001_ConnectSuccess(self):
     """DB2.connect() - Successful"""
     # A connection that opens and closes without raising counts as a pass.
     db = DB2.connect(**Config.ConnDict)
     self.db = db
     self.db.close()
Beispiel #51
0
 def setUp(self):
     """Open a DB2 connection and a cursor; record the scratch table name."""
     self.tableName = 'PYDB2TEST_0'
     self.db = DB2.connect(**Config.ConnDict)
     self.cs = self.db.cursor()
Beispiel #52
0
def connect(dsn, username=None, password=None):
    """Create a connection to the specified database.

    This utility method attempts to connect to the database named by dsn using
    the (optional) username and password provided. The method attempts to use a
    variety of connection frameworks (PyDB2, pyodbc, IBM's official DB2 driver,
    PythonWin's ODBC stuff and mxODBC) depending on the underlying platform.

    Note that the queries in the methods below are written to be agnostic to
    the quirks of the various connection frameworks (e.g. PythonWin's ODBC
    module doesn't correctly handle certain dates hence why all DATE and
    TIMESTAMP fields are CAST to CHAR in the queries below).
    """
    logging.info('Connecting to database "%s"' % dsn)
    # Try the "official" IBM DB2 Python driver
    try:
        import ibm_db
        import ibm_db_dbi
    except ImportError:
        pass
    else:
        logging.info('Using IBM DB2 Python driver')
        if username is not None:
            return ibm_db_dbi.connect(dsn, username, password)
        else:
            return ibm_db_dbi.connect(dsn)
    # Try the PyDB2 driver
    try:
        import DB2
    except ImportError:
        pass
    else:
        logging.info('Using PyDB2 driver')
        if username is not None:
            return DB2.connect(dsn, username, password)
        else:
            return DB2.connect(dsn)
    # Try the pyodbc driver
    try:
        import pyodbc
    except ImportError:
        pass
    else:
        logging.info('Using pyodbc driver')
        # XXX Check whether escaping/quoting is required
        # XXX Should there be a way to specify the driver name? Given that on
        # unixODBC the driver alias is specified in odbcinst.ini, and on
        # Windows with DB2 9+ one can have multiple DB2 ODBC drivers installed
        # with differentiating suffixes
        if username is not None:
            return pyodbc.connect('driver=IBM DB2 ODBC DRIVER;dsn=%s;uid=%s;pwd=%s' % (dsn, username, password))
        else:
            return pyodbc.connect('driver=IBM DB2 ODBC DRIVER;dsn=%s' % dsn)
    # Try the PythonWin ODBC driver
    try:
        import dbi
        import odbc
    except ImportError:
        pass
    else:
        logging.info('Using PyWin32 odbc driver')
        if username is not None:
            # XXX Check whether escaping/quoting is required
            return odbc.odbc("%s/%s/%s" % (dsn, username, password))
        else:
            return odbc.odbc(dsn)
    raise ImportError('Unable to find a suitable connection framework; please install PyDB2, pyodbc, PyWin32, or mxODBC')