Пример #1
1
    def mail(self):
        password = self.__getMySQLPassword()

        try:
            db = MySQLdb.connection(self.base.mysql["host"], self.base.mysql["user"], password)
        except Exception:
            db = MySQLdb.connection(self.base.mysql["host"], self.base.mysql["user"], "")
        except Exception, msg:
            error_message(msg)
Пример #2
1
def host_usagetest_consumer():
    """host_usagetest_consumer() -> takes usage test results from producers of  
           such a metric ( 1-Nagios, 2-url of a UI) and populates WMSMonitor database
        """

    import os, commands, sys, fpformat

    sys.path.append("../common")
    import time
    import datetime
    import readconf_func
    import logging
    import socket
    import MySQLdb
    import urllib

    confvar = readconf_func.readconf()

    # CONNECTING TO DB
    # Opening myslq db connection
    logger.info("Starting db connection")
    try:
        db = MySQLdb.connection(
            host=confvar.get("WMSMON_DB_HOST"),
            user=confvar.get("WMSMON_DB_USER"),
            passwd=confvar.get("WMSMON_DB_PWD"),
            db=confvar.get("WMSMON_DB_NAME"),
        )

    except Exception, e:
        stri2 = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(stri2)
        logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
        sys.exit(1)
Пример #3
1
def authenticate(u, p):
    """Return 1 when (u, p) matches an enabled squid user, else 0.

    `u` and `p` come from the squid auth helper protocol, i.e. they are
    untrusted input.
    """
    # Establish a connection
    db = MySQLdb.connection(host="localhost", user="xxx", passwd="xxx", db="xxx")

    # SECURITY FIX: the original interpolated u/p raw into the SQL text,
    # allowing trivial injection (e.g. p = "' OR '1'='1"). The low-level
    # connection object provides escape_string for exactly this case.
    safe_u = db.escape_string(u)
    safe_p = db.escape_string(p)

    # Run a MySQL query from Python and get the result set
    query = (
        "SELECT squid_user, squid_pass FROM surfwijzer_users "
        "WHERE squid_enabled='1' AND squid_user='" + safe_u + "' AND squid_pass='" + safe_p + "'"
    )
    db.query(query)
    r = db.store_result()

    # Iterate through the (at most one-row) result set; the extra in-Python
    # password comparison is kept as a belt-and-braces check.
    result = 0
    for row in r.fetch_row(1):
        if row[1] == p:
            result = 1

    return result
Пример #4
1
def get_one_record(sql):
    """Run *sql* against the master DB and return the first row, or None."""
    conn = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    conn.select_db(MYSQL_DB)
    conn.query(sql)
    rows = conn.store_result().fetch_row()
    return rows[0] if rows else None
Пример #5
1
def get_user(id):
    """Look up a user row by Facebook id and return a JSON OK response.

    NOTE(review): the fetched row is not used in the response — presumably
    a stub; confirm against the caller before extending.
    """
    connection = mdb.connection(host="localhost", user="root", passwd="", db="fbhack", use_unicode=True, charset="utf8")
    cursor = connection.cursor()

    # BUG FIX: the original wrote  "SELECT ... %s"(id)  — calling the string
    # literal itself, a guaranteed TypeError. Use driver-side parameter
    # binding, which also prevents SQL injection.
    cursor.execute("SELECT * FROM users WHERE facebook_id = %s", (id,))

    row = cursor.fetchone()

    return jsonify(result="OK")
Пример #6
1
    def ftp(self):
        if not fileExists(self.base.vsftpd["bin"]):
            error_message("Vsftpd not installed!")

        password = self.__getMySQLPassword()

        try:
            db = MySQLdb.connection(self.base.mysql["host"], self.base.mysql["user"], password)
        except Exception, msg:
            error_message(msg)
Пример #7
0
    def __init__(self):
        # Universe agent server: port 7000, 20 backlog, non-persistent.
        framework.server.__init__(self, 7000, 20, framework.GlobalHandler, False)

        # Local MySQL connection shared by the packet handlers below.
        self.db = MySQLdb.connection(host="127.0.0.1", user="root", passwd="", db="aoc")

        # Opcode -> handler dispatch table.
        universe_handlers = {
            0: onInitiateAuthentification.onInitiateAuthentification,
            1: onAnswerChallenge.onAnswerChallenge,
        }
        self.packetMgr = {"UniverseAgent": universe_handlers}
Пример #8
0
def get_raw_filelist(sql):
    """Run *sql* and return a plain-text listing, one file record per line."""
    conn = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    conn.select_db(MYSQL_DB)
    conn.query(sql)
    res = conn.store_result()

    # Collect line fragments and join once, instead of re-building the
    # whole string on every iteration.
    chunks = []
    row = res.fetch_row()
    while row:
        chunks.append(" \nfilename=%s id=%s newxx=%s" % (row[0][3], row[0][0], row[0][1]))
        row = res.fetch_row()

    return "".join(chunks)
Пример #9
0
    def __init__(self):
        # Game agent server: port 7040, 20 backlog, persistent connections.
        framework.server.__init__(self, 7040, 20, framework.GlobalHandler, True)

        # Local MySQL connection shared by the packet handlers below.
        self.db = MySQLdb.connection(host="127.0.0.1", user="root", passwd="", db="aoc")

        # Opcode -> handler dispatch tables, one per agent namespace.
        game_handlers = {
            1: onPing.onPing,
            3: onAuthenticate2.onAuthenticate2,
            5: onUpdateGameServerStats.onUpdateGameServerStats,
        }
        self.packetMgr = {"GameAgent": game_handlers, "GameCharAgent": {}}
Пример #10
0
    def __init__(self, db=None, user="sahana", password="password", dbname="sahana"):
        """Connect to database, init graph.

        If a database connection is supplied in *db*, use that; otherwise
        connect with the supplied (or defaulted) user, password and
        database name.
        """

        import MySQLdb

        OrderTables.__init__(self)
        # Reuse the caller's connection when given, else open a fresh one.
        self.db = db or MySQLdb.connection(host="localhost", user=user, passwd=password, db=dbname)
def retrieve_chrinfo(schema, outpath):
    try:
        print "trying to retrieve chromosome length from ucsc (%s)..." % schema
        db = MySQLdb.connection(host="genome-mysql.cse.ucsc.edu", user="genome", db=schema)
        db.query("""select chrom, size from chromInfo""")
        rows = db.store_result()
        outfile = file("%s/%s" % (outpath, schema), "w")
        for row in rows.fetch_row(10000000):
            outfile.write("%s\t%s\n" % (row[0], row[1]))

    except (OperationalError, ProgrammingError):
        print "The schema %s doesnt have the table chromInfo" % schema
    finally:
        db.close()
Пример #12
0
def get_record(sql):
    """Run *sql* and return rows joined with "<br/>"; "0" when empty.

    NOTE(review): the first row's first column seeds the result and the
    loop then appends the same first row again — preserved exactly as the
    original behaved.
    """
    conn = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    conn.select_db(MYSQL_DB)
    conn.query(sql)
    res = conn.store_result()

    row = res.fetch_row()
    result = row[0][0] if row else "0"
    while row:
        result = "%s <br/> %s" % (result, row[0])
        row = res.fetch_row()

    return result
Пример #13
0
def connect():
    """ Connect to MySQL database by loading database settings from config.ini. """

    # Load configurations
    db_config = ReadMySQLConfig()

    conn = None
    try:
        print("Connecting to MySQL database...")
        conn = mdb.connection(**db_config)
        print("connection established.")

    except mdb.Error as e:
        print(e)

    finally:
        # BUG FIX: when mdb.connection() raised, `conn` was unbound and the
        # original `finally` died with a NameError; only close a connection
        # that was actually established.
        if conn is not None:
            conn.close()
            print("connection closed.")
Пример #14
0
    def __init__(self):
        # Player agent server: port 7001, 20 backlog, non-persistent.
        framework.server.__init__(self, 7001, 20, framework.GlobalHandler, False)

        self.db = MySQLdb.connection(host="127.0.0.1", user="root", passwd="", db="aoc")

        # Opcode -> handler dispatch table.
        player_handlers = {
            0: onAuthenticate.onAuthenticate,
            1: onCreateCharacter.onCreateCharacter,
            6: onGetStartupData.onGetStartupData,
        }
        self.packetMgr = {"PlayerAgent": player_handlers}

        # Cache the `dimensions` table once at startup so handlers do not
        # query it on every request.
        self.dimensions = {}
        self.db.query("SELECT * FROM `dimensions`")
        result = self.db.store_result()
        for record in result.fetch_row(result.num_rows(), how=1):
            self.dimensions[int(record["dimension_id"])] = record
Пример #15
0
def update_record(table, which, who, what, where):
    """Insert a row into `newxx`.<table>; return the new row id, 0 on error.

    `table` must come from trusted code — identifiers cannot be escaped the
    way values can.
    """
    connection = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    try:
        # SECURITY FIX: the original interpolated all caller values raw into
        # the statement (SQL injection); escape each value.
        connection.query(
            "INSERT INTO `newxx`.`" + table
            + "` ( `who`, `which`, `what`, `where`) VALUES ( '"
            + connection.escape_string(who) + "', '"
            + connection.escape_string(which) + "', '"
            + connection.escape_string(what) + "', '"
            + connection.escape_string(where) + "')"
        )
        return connection.insert_id()
    except Exception:
        # Keep the original best-effort contract (0 signals failure), but
        # stop swallowing SystemExit/KeyboardInterrupt via a bare except.
        return 0
Пример #16
0
def upload_file(table, newxxid, author, filename, content):
    connection = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    try:
        connection.query(
            "INSERT INTO `newxx`.`"
            + table
            + "` ( `fileid`, `author`, `filename`, `content`) VALUES ( '"
            + newxxid
            + "', '"
            + author
            + "', '"
            + filename
            + "', '"
            + content
            + "')"
        )
        return "ok"
    except NameError, e:
        print e
Пример #17
0
def get_filelist(sql):
    """Run *sql* and return an HTML listing with one anchor line per row."""
    conn = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
    conn.select_db(MYSQL_DB)
    conn.query(sql)
    res = conn.store_result()

    # Accumulate fragments and join once at the end.
    pieces = []
    row = res.fetch_row()
    while row:
        rec = row[0]
        pieces.append(
            " <br/>\n<id=%s> <a href='%s'>%s</a> %s %s %s"
            % (rec[0], rec[1], rec[1], rec[2], rec[3], rec[5])
        )
        row = res.fetch_row()

    return "".join(pieces)
def main():
    schema = sys.argv[1]
    # get all the database names from the information schema in ucsc
    if schema == "all":
        print "retrieving all schema names..."
        try:
            db = MySQLdb.connection(host="genome-mysql.cse.ucsc.edu", user="genome", db="information_schema")
            db.query("""select schema_name from schemata""")
            rows = db.store_result()
            schemas = []
            for row in rows.fetch_row(10000000):
                schemas.extend(row)

        finally:
            db.close()
        # retrieve all chrinfo that exist
        for schema in schemas:
            retrieve_chrinfo(schema, os.path.dirname(__file__))
    else:
        retrieve_chrinfo(schema, os.path.dirname(__file__))
Пример #19
0
def plot_wmslist_inALIAS():
    """plot_wmslist_inALIAS() -> utility to plot on file the list 
           of wms in aliases defined for your site 
        """

    fileout = open("/var/www/html/wmsmon/main/wmspoolinfo.txt", "w")
    fileout.write("GENERAL INFO ABOUT CNAF WMS/LB INSTANCES POOL ON: " + commands.getoutput("date"))

    print "Starting db connection"
    try:
        db = MySQLdb.connection(
            host=confvar.get("WMSMON_DB_HOST"),
            user=confvar.get("WMSMON_DB_USER"),
            passwd=confvar.get("WMSMON_DB_PWD"),
            db=confvar.get("WMSMON_DB_NAME"),
        )

    except Exception, e:
        stri2 = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        print stri2
        print "ERROR: Please check mysql daemon is running and connection parameters are correct!"
        sys.exit(1)
Пример #20
0
def wms_balancing_arbiter():
    """wms_balancing_arbiter() -> updating wms instances available behind an alias
           depending on the load of the instances according to the load metric provided by
           wms_balancing_metric function 
           Return None if errors are raised during calculation.
        """

    import os, commands, sys, fpformat

    sys.path.append("../common")
    import time
    import datetime
    import readconf_func
    import logging
    import socket
    import MySQLdb
    import logpredef

    logger = logging.getLogger("wms_balancing_arbiter")
    conf = readconf_func.readconf()

    # +++++++++++++++++++++++++++++
    # Opening myslq db connection
    try:
        db = MySQLdb.connection(
            host=conf.get("WMSMON_DB_HOST"),
            user=conf.get("WMSMON_DB_USER"),
            passwd=conf.get("WMSMON_DB_PWD"),
            db=conf.get("WMSMON_DB_NAME"),
        )
        logger.info("Starting db connection")
    except Exception, e:
        strxx = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(strxx)
        logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
        sys.exit(1)
def lib():
    # NOTE(review): this function looks garbled/truncated by scraping and
    # does not parse as-is:
    #   * the `if d.startswith('d '):` branch has no body -> SyntaxError
    #   * `recv` is undefined; `recv_con` (built just above) was likely meant
    #   * `mydict` in the .format(...) call is undefined; likely `recv_con`
    #   * the trailing `finally:` has no body and the function is cut short
    data=request.body.read()
    root=ET.fromstring(data)
    # Map each XML child element's tag to its text (incoming message fields).
    recv_con={child.tag:child.text for child in root}
    d=recv['Content']
    if d.startswith('d '):
    elif d in ['l','list']:
        connection=MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS)
        connection.select_db(MYSQL_DB)
        cursor=connection.cursor()
        cursor.execute('SELECT * FROM MYSQL_diary')
        # Column index 2 presumably holds the diary text -- TODO confirm schema.
        rows=[item[2] for item in cursor.fetchall()]
        data=''.join(rows)
        myxml = '''\
    <xml>
    <ToUserName><![CDATA[{}]]></ToUserName>
    <FromUserName><![CDATA[{}]]></FromUserName>
    <CreateTime>12345678</CreateTime>
    <MsgType><![CDATA[text]]></MsgType>
    <Content><![CDATA[{}]]></Content></xml>
    '''.format(mydict['FromUserName'],mydict['ToUserName'],data)
        return myxml
    elif d.startswith('b '):
        userid = recv_con['FromUserName']

        try:
            connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT, \
        	                             user=MYSQL_USER, passwd=MYSQL_PASS, db="user_table")
            cursor = connection.cursor()
            cursor.execute('SELECT * FROM user_table')
            results = cursor.fetchall()
        except MySQLdb.Error, e:
            print "Error %d: %s" % (e.args[0],e.args[1])
            # reply_content = "system server error" (commented-out reply text)
            return None # should surface an error message to the user here
        finally:
Пример #22
0
    def run(self):
        # INIZIALIZATION
        logger = logging.getLogger("data_collector")
        TIME_AT_START = time.time()
        logger.info("THIS IS WMSMonitor data_collector_daemon")
        logger.info("Reading wmsmon conf file")
        confvar = readconf_func.readconf()

        # CONNECTING TO DB
        # Opening myslq db connection
        logger.info("Starting db connection")
        try:
            db = MySQLdb.connection(
                host=confvar.get("WMSMON_DB_HOST"),
                user=confvar.get("WMSMON_DB_USER"),
                passwd=confvar.get("WMSMON_DB_PWD"),
                db=confvar.get("WMSMON_DB_NAME"),
            )

        except Exception, e:
            str = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
            logger.error(str)
            logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
            sys.exit(1)
Пример #23
0
# enqueuing the parts of the sentence, like
#  self.ttsfile1 = IvrAudioFile()
#  self.ttsfile1.open("your_account_balance.wav", AUDIO_READ)
#  self.enqueue(self.ttsfile1, None)
#  self.ttsfile2 = IvrAudioFile()
#  self.ttsfile2.open("%i.wav"%int(res[0][0]), AUDIO_READ)
#  self.enqueue(self.ttsfile2, None)
#  etc...
#

from log import *
from ivr import *

import MySQLdb

# Module-level connection to the billing database, shared by all dialogs.
# NOTE(review): credentials are hard-coded; prefer the config-file variant
# shown commented out below rather than shipping a password in source.
db = MySQLdb.connection(host="127.0.0.1", user="root", passwd="sa07", db="business")
# or, when using config file db_balance.conf:
# db = MySQLdb.connection(host=config["db_host"], user=config["db_user"], passwd=config["db_pwd"], db=config["db_db"])


class IvrDialog(IvrDialogBase):
    # TTS prompt built on session start; stays None until the lookup succeeds.
    ttsfile = None

    def onSessionStart(self, hdrs):
        """Look up the caller's balance and synthesize the announcement."""
        # NOTE(review): the user value is interpolated into SQL unescaped.
        db.query("select bal_int, bal_decimal from accounts where user='%s'" % self.dialog.user)
        rows = db.store_result().fetch_row(1)
        if len(rows):
            dollars = int(rows[0][0])
            cents = int(rows[0][1])
            self.ttsfile = IvrAudioFile().tts(
                "Your account balance is %i dollars and %i cents" % (dollars, cents)
            )
Пример #24
0
        print "Saved %s" % p
    else:
        print "For some reason, isfile(%s) returned True" % p

# Open file and read
fcount = 1

# Template for one exported blog post; title/blurb/content are CDATA-wrapped.
post_xml = '<post image="%s" post_date="%s" last_updated="%s" url="%s" num-comments="%s" blogger="%s"><post_title><![CDATA[%s]]></post_title><post_blurb><![CDATA[%s]]></post_blurb><post_content><![CDATA[%s]]></post_content></post>'

# Select post IDs
# Demo: tag:blogger.com,1999:blog-32316390.post-1712101221610946611 from posts.blogger_id
# NOTE(review): credentials are deliberately blank; fill in locally.
dbuser = ""
dbpass = ""
dbbase = ""

# `sql` is presumably a MySQLdb-style module imported under an alias
# elsewhere in this file -- TODO confirm.
db = sql.connection(host="localhost", user=dbuser, passwd=dbpass, db=dbbase)
db.query("SELECT last_updated, `blogger_id` FROM posts")
r = db.store_result()
current_posts = {}  # blogger_id -> last_updated datetime

if "test" in sys.argv:
    sql_table = "posts_test"
    do_media = False
    save_file = "actual/probably_total_test.sql"
else:
    # maxrows=0 fetches every remaining row in a single call.
    for s in r.fetch_row(maxrows=0):
        current_posts[s[1]] = datetime.strptime(s[0], "%Y-%m-%d %H:%M:%S")  # 0 = updated, 1 = blogger_id
    sql_table = "posts"
    do_media = True
    save_file = "actual/probably_total.sql"
Пример #25
0
    def __buildSkillTree(self):
        """
            WARNING: This should only be run manually to build the entire database, not all the time

            Builds the entire skill tree in the MySQL database
            Currently manual labor of cleaning out the table is required
        """
        # Accumulator for the skill currently being parsed out of the API XML.
        info = {}
        info["required"] = []
        info["description"] = ""  # There may not be a description

        # connection to the database
        # NOTE(review): this call passes `password=` while every other
        # connection in this codebase uses `passwd=` (the classic MySQLdb
        # keyword) -- confirm the installed driver accepts `password=`.
        if self.DATABASE_PASSWORD != "":
            db_conn = MySQLdb.connection(
                user=self.DATABASE_USER, db=self.DATABASE_NAME, host=self.DATABASE_HOST, password=self.DATABASE_PASSWORD
            )
        else:
            db_conn = MySQLdb.connection(user=self.DATABASE_USER, db=self.DATABASE_NAME, host=self.DATABASE_HOST)
        db_conn.autocommit(True)

        # Get the tree from the API
        tree = self.apiSelect("skilltree")

        skills = tree.read()

        # For all the information in the API, go line by line
        # Each regex picks one field out of the current XML line; a skill is
        # considered complete once its <secondaryAttribute> has been seen.
        for sline in skills.split("\r\n"):  # While not found and not end of data
            pgroupname_groupid = compile(""".*<row groupName="(.*)" groupID="(.*)">.*""").match(sline)
            pname_groupid_id = compile(""".*<row typeName="(.*)" groupID=".*" typeID="(.*)">.*""").match(sline)

            pdescription = compile(""".*<description>(.*)</description>.*""").match(sline)
            prank = compile(""".*<rank>(.*)</rank>.*""").match(sline)
            prequired = compile(""".*<row typeID="(.*)" skillLevel=".*"/>.*""").match(sline)
            pprimary = compile(""".*<primaryAttribute>(.*)</primaryAttribute>.*""").match(sline)
            psecondary = compile(""".*<secondaryAttribute>(.*)</secondaryAttribute>.*""").match(sline)

            if pgroupname_groupid:  # Happens only once
                info["groupname"] = pgroupname_groupid.groups()[0]
                info["groupid"] = int(pgroupname_groupid.groups()[1])
            if pname_groupid_id:
                info["name"] = pname_groupid_id.groups()[0]
                info["id"] = int(pname_groupid_id.groups()[1])
            if pdescription:
                info["description"] += pdescription.groups()[0]
            if prank:
                info["rank"] = int(prank.groups()[0])
            if prequired:
                info["required"].append(int(prequired.groups()[0]))
            if pprimary:
                info["primary"] = pprimary.groups()[0]
            if psecondary:
                info["secondary"] = psecondary.groups()[0]

            if info.has_key("secondary"):  # Last object to be built
                # SKILLTREE[id] = Skill(id, 0, 0, name=name, rank=rank, primary=primary, secondary=secondary, groupname=groupname, groupid=groupid, description=description, dependencies=required)

                info["skilltable"] = TABLE_SKILL
                info["grouptable"] = TABLE_GROUP
                info["attributetable"] = TABLE_ATTRIBUTE

                # Lookup-or-insert the group row, then cache its primary key.
                # NOTE(review): all queries below build SQL via % interpolation
                # of API-derived text; only `description` is quote-escaped
                # later -- a quote in a group/attribute name would break these.
                db_conn.query(
                    "SELECT id FROM %(grouptable)s WHERE group_name = '%(groupname)s'" % info
                )  # query for the groupname id
                groupid = db_conn.store_result()

                if groupid.num_rows() == 0:
                    db_conn.query(
                        "INSERT INTO %(grouptable)s (group_name, group_id) VALUES ('%(groupname)s', '%(groupid)s')"
                        % info
                    )  # query for the groupname id

                    db_conn.query(
                        "SELECT id FROM %(grouptable)s WHERE group_name = '%(groupname)s'" % info
                    )  # query for the groupname id
                    groupid = db_conn.store_result()

                # Finally
                info["groupname_id"] = int(groupid.fetch_row()[0][0])  # Grabs the single item

                # Lookup-or-insert the primary attribute row.
                db_conn.query(
                    "SELECT id FROM %(attributetable)s WHERE attribute_name = '%(primary)s'" % info
                )  # query for the attribute id
                primaryid = db_conn.store_result()

                if primaryid.num_rows() == 0:
                    db_conn.query(
                        "INSERT INTO %(attributetable)s (attribute_name) VALUES ('%(primary)s')" % info
                    )  # query for the groupname id

                    db_conn.query(
                        "SELECT id FROM %(attributetable)s WHERE attribute_name = '%(primary)s'" % info
                    )  # query for the attribute id
                    primaryid = db_conn.store_result()

                # Finally
                info["primary_id"] = int(primaryid.fetch_row()[0][0])  # Grabs the single item

                # Lookup-or-insert the secondary attribute row.
                db_conn.query(
                    "SELECT id FROM %(attributetable)s WHERE attribute_name = '%(secondary)s'" % info
                )  # query for the attribute id
                secondaryid = db_conn.store_result()

                if secondaryid.num_rows() == 0:
                    db_conn.query(
                        "INSERT INTO %(attributetable)s (attribute_name) VALUES ('%(secondary)s')" % info
                    )  # query for the groupname id

                    db_conn.query(
                        "SELECT id FROM %(attributetable)s WHERE attribute_name = '%(secondary)s'" % info
                    )  # query for the attribute id
                    secondaryid = db_conn.store_result()

                # Finally
                info["secondary_id"] = int(secondaryid.fetch_row()[0][0])  # Grabs the single item

                # Replace the single quote with a double backslash and single quote
                info["description"] = info["description"].replace("'", "\\'")

                # Insert the fully assembled skill row.
                db_conn.query(
                    "INSERT INTO %(skilltable)s (skill_id, skill_name, rank, description, primary_attribute_id, secondary_attribute_id, groupname_id) VALUES (%(id)s, '%(name)s', %(rank)s, '%(description)s', %(primary_id)s, %(secondary_id)s, %(groupname_id)s)"
                    % info
                )  # commit skill

                # Hold the groupname and groupid [why?]
                groupname = info["groupname"]
                groupid = info["groupid"]
                # Reset everything
                info = {}
                info["groupname"] = groupname
                info["groupid"] = groupid
                info["description"] = ""
                info["required"] = []
Пример #26
0
def lb_query(lbhost, STARTDATE, ENDDATE, DBTYPE):

    # Initializing logger
    import logging

    logger = logging.getLogger("lb_query")

    confvar = readconf_func.readconf()

    users_stats = []
    # Establish a connection

    if DBTYPE == "LBPROXY":
        lbhost = confvar["LBPROXY_DB_HOST"]
        dbuser = confvar["LBPROXY_DB_USER"]
        dbname = confvar["LBPROXY_DB_NAME"]
    elif DBTYPE == "LBSERVER":
        lbhost = confvar["LB_DB_HOST"]
        dbuser = confvar["LB_DB_USER"]
        dbname = confvar["LB_DB_NAME"]

    logger.info("Establishing a connection with mysql DB")
    db = MySQLdb.connection(host=lbhost, user=dbuser, db=dbname, passwd=confvar["SERVER_MYSQL_PASSWORD"][1:-1])

    ################ MAIN DATA CONTAINER LIST INITIALIZATION ######
    wmsdata_list = []
    ###############################################################

    def put_into_wmsdata(wmsdata_list, wmshostname, userdn, fieldlist, valuelist):
        wmsFOUND = False
        for wmsdata in wmsdata_list:
            if wmsdata.host == wmshostname:
                wmsFOUND = True
                try:
                    wmsdata.add_user(userdn)
                except wmsdata_class.UserPresent:
                    #              logger.warning('User Already present in wmdata for host: ' + wmsdata.host)
                    for field in fieldlist:
                        wmsdata[userdn][field] = valuelist[fieldlist.index(field)]
        if not wmsFOUND:
            wmsdata = wmsdata_class.wmsdata(wmshostname)
            wmsdata.add_user(userdn)
            for field in fieldlist:
                wmsdata[userdn][field] = valuelist[fieldlist.index(field)]
            wmsdata_list.append(wmsdata)

    # Run a MySQL query to find the number of single jobs submitted in a given time interval PER USER and PER WMS
    logger.info(
        "Running a MySQL query to find the number of single jobs submitted in a given time interval PER USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='17' and time_stamp>'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and name='NSUBJOBS' and value='0' group by users.cert_subj,host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    WMP_in = 0
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            #  logger.debug('FOUND ROW: ' + row )
            if row:
                dn = row[0][0]
                rowhost = row[0][1]
                rowWMP_in = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["WMP_in"], [rowWMP_in])

    ######################################################################################################################
    ### We decided to take anymore the avg and the std of nodes per collection because they are not summable on more lb
    ### WHat we do is to take PER USER the total number of jobs in collection, the min and max of nodes per collection
    ### This are summable and avg calculation can be done on collector side
    ### Anyway we sum over user on sensors side and we return alse the total number of jobs per collection, min and max of nodes PER WMS
    ### Summing over wmsdata data will be done at the end of this function ore on the wrapper if the wmsdata_list is returned
    ##########################################################################################################################

    # Run a query to find per user and per host the number of collection, the total number of nodes in collection the min and max of nodes per collection

    logger.info(
        "Running a query to find per user and per host the number of collection, the total number of nodes in collection the min and max of nodes per collection"
    )
    querystr = (
        "select users.cert_subj, host, COUNT(value), sum(value), min(value),max(value) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='17' and time_stamp>'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and name='NSUBJOBS' and short_fields.event='0' and value>'0' group by users.cert_subj,host"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                dn = row[0][0]
                rowhost = row[0][1]
                rowWMP_in_col = row[0][2]
                rowWMP_in_col_nodes = row[0][3]
                rowWMP_in_col_min_nodes = row[0][4]
                rowWMP_in_col_max_nodes = row[0][5]

                put_into_wmsdata(
                    wmsdata_list,
                    rowhost,
                    dn,
                    ["WMP_in_col", "WMP_in_col_nodes", "WMP_in_col_min_nodes", "WMP_in_col_max_nodes"],
                    [rowWMP_in_col, rowWMP_in_col_nodes, rowWMP_in_col_min_nodes, rowWMP_in_col_max_nodes],
                )

    #  Run a query to find PER USER and PER WMS the number of jobs enqued to WM from WMP in a given time interval
    logger.info(
        "Run a query to find PER USER and PER WMS the number of jobs enqued to WM from WMP in a given time interval"
    )
    querystr = (
        "select  users.cert_subj, host, COUNT(events.jobid) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='4' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and  prog='NetworkServer' and name='RESULT' and value='OK' group by users.cert_subj,host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                dn = row[0][0]
                rowhost = row[0][1]
                rowWM_in = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["WM_in"], [rowWM_in])

    # Run a MySQL query to find the number both collection and single jobs enqueued to WM in a given time interval from LogMonitor (i.e. Resubmitted)
    logger.info(
        "Run a MySQL query to find the number both collection and single jobs enqueued to WM in a given time interval from LogMonitor (i.e. Resubmitted) PER USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='4' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='LogMonitor' group by users.cert_subj, host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                usernew = row[0][0]
                index = row[0][0].find("/CN=proxy/CN=proxy")
                if index != -1:
                    usernew = row[0][0][0:index]
                dn = usernew
                rowhost = row[0][1]
                rowWM_in_res = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["WM_in_res"], [rowWM_in_res])

    # Run a MySQL query to find the number single jobs enqueued to Job Controller from WM in a given time interval PER WMS and PER USER
    logger.info(
        "Run a MySQL query to find the number single jobs enqueued to Job Controller from WM in a given time interval per USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='4' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='WorkloadManager' group by users.cert_subj,host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                usernew = row[0][0]
                index = row[0][0].find("/CN=proxy/CN=proxy")
                if index != -1:
                    usernew = row[0][0][0:index]
                dn = usernew
                rowhost = row[0][1]
                rowJC_in = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["JC_in"], [rowJC_in])

    # Run a MySQL query to find the number single jobs enqueued to Condor from Job Controller in a given time interval PER USER and PER WMS
    logger.info(
        "Run a MySQL query to find the number single jobs enqueued to Condor from Job Controller in a given time interval PER USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='1' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='JobController' group by users.cert_subj,host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                usernew = row[0][0]
                index = row[0][0].find("/CN=proxy/CN=proxy")
                if index != -1:
                    usernew = row[0][0][0:index]
                dn = usernew
                rowhost = row[0][1]
                rowJC_out = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["JC_out"], [rowJC_out])

    # Run a MySQL query to find the number of jobs done in a given time interval PER USER and PER WMS
    logger.info(
        "Run a MySQL query to find the number single jobs done successfully in a given time interval PER USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where events.jobid=short_fields.jobid and code='10' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and prog='LogMonitor' and name='REASON' and (value='Job terminated successfully' or value='Job Terminated Successfully') group by users.cert_subj,host;"
    )
    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                usernew = row[0][0]
                index = row[0][0].find("/CN=proxy/CN=proxy")
                if index != -1:
                    usernew = row[0][0][0:index]
                dn = usernew
                rowhost = row[0][1]
                rowJOB_DONE = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["JOB_DONE"], [rowJOB_DONE])

    # Run a MySQL query to find the number of jobs aborted in a given time interval PER USER and PER WMS
    logger.info(
        "Run a MySQL query to find the number single jobs aborted in a given time interval PER USER and PER WMS"
    )
    querystr = (
        "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events inner join users on events.userid=users.userid where code='12' and time_stamp >'"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' group by users.cert_subj,host;"
    )

    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                usernew = row[0][0]
                index = row[0][0].find("/CN=proxy/CN=proxy")
                if index != -1:
                    usernew = row[0][0][0:index]
                dn = usernew
                rowhost = row[0][1]
                rowJOB_ABORTED = row[0][2]

                put_into_wmsdata(wmsdata_list, rowhost, dn, ["JOB_ABORTED"], [rowJOB_ABORTED])

    # Run a MySQL query to find the DEST_CE of jobs in a given time interval PER WMS
    logger.info("Run a MySQL query to find  DEST_CE of jobs in a given time interval PER WMS")

    ##### old ce query - this double counts ce for jobs landed onto cream ce

    # querystr="select value, host, COUNT(value) from (select DISTINCT(short_fields.event),events.jobid, short_fields.value, host from events,short_fields where events.jobid=short_fields.jobid  and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and prog='WorkloadManager' and name='DEST_HOST' and value!='localhost' and value!='unavailable' and code='15') as temp group by value, host;"
    ##################################################

    ##### New query not to double counting ce for jobs landed onto cream ce
    querystr = (
        "select value,host,  count(value) from (select distinct(short_fields.jobid), value, host from short_fields inner join events where events.code='15' and events.prog = 'WorkloadManager' and name='DEST_HOST' and time_stamp > '"
        + STARTDATE
        + "' and time_stamp <='"
        + ENDDATE
        + "' and value!='localhost' and value!='unavailable' and events.jobid=short_fields.jobid) as temp group by value, host;"
    )
    ##################################################

    logger.info("Query is : " + querystr)
    db.query(querystr)
    r = db.store_result()
    # Iterate through the result set
    if r:
        for i in range(1, r.num_rows() + 1):
            row = r.fetch_row()
            if row:
                rowCE = row[0][0]
                rowhost = row[0][1]
                rowCEcount = row[0][2]
                wmsFOUND = False
                for wmsdata in wmsdata_list:
                    if wmsdata.host == rowhost:
                        wmsFOUND = True
                        try:
                            wmsdata.add_ce(rowCE)
                            wmsdata.add_ce_count(rowCE, rowCEcount)
                        except wmsdata_class.CEPresent:
                            #                   logger.warning('User Already present in wmdata for host: ' + wmsdata.host)
                            wmsdata.add_CE_count(rowCEcount)
                if not wmsFOUND:
                    wmsdata = wmsdata_class.wmsdata(rowhost)
                    wmsdata.add_ce(rowCE)
                    wmsdata.add_ce_count(rowCE, rowCEcount)
                    wmsdata_list.append(wmsdata)

    # Run a MySQL query to find the LB used to store the jobs in a given time interval
    # Available only if DBTYPE = LBPROXY

    if DBTYPE == "LBPROXY":
        logger.info("Run a MySQL query to find the LB used to store the jobs in a given time interval")
        querystr = (
            "select distinct dg_jobid from jobs inner join events on jobs.jobid=events.jobid where events.code = '17' and time_stamp > '"
            + STARTDATE
            + "' and time_stamp < '"
            + ENDDATE
            + "';"
        )
        logger.info("Query is : " + querystr)
        db.query(querystr)
        r = db.store_result()
        # Iterate through the result set
        if r:
            for i in range(1, r.num_rows() + 1):
                row = r.fetch_row()
                if row:
                    rowLB = row[0][0]
                    LBstr = LBstr = rowLB[rowLB.find("//") + 2 : rowLB.find(":9000")]
                    for wmsdata in wmsdata_list:
                        wmsdata.add_lb(LBstr)

    db.close()

    #   filename= confvar['INSTALL_PATH'] +'/sensors//tmp/USERSTATS_' +  lbhost + '_' + wmshost + '.txt'

    #   fileusersstats = open(filename,'w')
    #   fileusersstats.write('START OF FILE\n')
    #   for i in range(0,len(users_stats)):
    #      fileusersstats.write(str(users_stats[i][0]) + '|' + str(users_stats[i][1]) + '|' + str(users_stats[i][2]) + '|' + str(users_stats[i][3]) + '|' + str(users_stats[i][4]) + '|' + str(users_stats[i][5]) + '|' + str(users_stats[i][6]) + '|' + str(users_stats[i][7]) + '|' + str(users_stats[i][8]) + '|\n')

    #   fileusersstats.write('END OF FILE\n')
    #   fileusersstats.close()

    return wmsdata_list
Пример #27
0
        # initializing some variables
        logger.info("Reading wmsmon conf file")
        confvar = readconf_func.readconf()
        wmshost = line_tmp[0]
        wmshostport = confvar.get("PORT")
        lbhost = line_tmp[1]
        VO = line_tmp[2]
        dbhost = confvar.get("WMSMON_DB_HOST")
        dbuser = confvar.get("LB_DB_USER")
        dbname = confvar.get("LB_DB_NAME")
        STEPDATE = int(confvar.get("STEPDATE"))

        logger.info("Starting db connection")
        db = MySQLdb.connection(
            host=confvar.get("WMSMON_DB_HOST"),
            user=confvar.get("WMSMON_DB_USER"),
            passwd=confvar.get("WMSMON_DB_PWD"),
            db=confvar.get("WMSMON_DB_NAME"),
        )

        logger.info("Determining start and date")
        # normally these would be the values .....

        STARTDATE = time.strftime("%Y-%m-%d 00:05:00", time.localtime())
        ENDDATE = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        DBDATE = ENDDATE

        # but if we are at the very beginning of the day (between midnight and 01:00:00  (this should be parametrized)
        # run the query for the whole day before, this is done to prevent loss of day near midnight of the same day
        # So....

        now = time.localtime()
Пример #28
0
#!/usr/bin/env python
import cgi, os, sys, getpass, cgitb, Cookie, sha, time, MySQLdb
import session as ss

form = cgi.FieldStorage()  # parsed CGI request parameters for this page
sss = ss.Session()  # session object from the local "session" module (imported as ss above)

# MySQL connection settings; credentials are blanked-out placeholders here.
DBServer = "127.0.0.1"
DBUser = ""
DBPass = ""
DBName = ""
# NOTE(review): MySQLdb's documented factory is MySQLdb.connect(); "connection"
# is the low-level _mysql-style constructor — confirm it resolves in this setup.
db = MySQLdb.connection(host=DBServer, user=DBUser, passwd=DBPass, db=DBName)
# Shorthand so later code can run raw SQL as DB("<statement>").
DB = db.query

from HTMLParser import HTMLParser


class MLStripper(HTMLParser):
    """HTML parser that discards all markup and keeps only the text nodes."""

    def __init__(self):
        # Text fragments accumulate here; reset() prepares the parser state.
        self.fed = []
        self.reset()

    def handle_data(self, data):
        # Invoked by feed() for each run of character data between tags.
        self.fed.append(data)

    def get_data(self):
        # Join the collected fragments back together in document order.
        return "".join(self.fed)


def strip_tags(html):
    s = MLStripper()
Пример #29
0
import urllib2

import sys
import time
import os

import MySQLdb as sql

# Select post IDs
# Demo: tag:blogger.com,1999:blog-32316390.post-1712101221610946611 from posts.blogger_id
# Open the "rugbydump" database via the low-level MySQLdb API and fetch every
# post's blogger id/title; "r" is a client-side result set consumed below.
db = sql.connection(host="localhost", user="root", passwd="a", db="rugbydump")
db.query("SELECT blogger_id, post_title FROM posts")
r = db.store_result()

posts = []

# Blogger ids look like "tag:blogger.com,1999:blog-NNN.post-NNN"; splitting on
# "-" yields the numeric post id as the third field.
for row in r.fetch_row(maxrows=0):
    _Id = row[0]

    # Rows for normal (non-blogger) comments don't follow that format — the
    # old [todo] noted they "won't be able to split" — so skip any id that
    # doesn't produce at least three fields instead of dying with IndexError.
    parts = _Id.split("-")
    if len(parts) < 3:
        continue

    posts.append(parts[2])

#
# Comment-feed URL template: %s is the numeric post id; max-results caps the
# number of returned comments at 600 per request.
proper_url = "http://rugbydump.blogspot.com/feeds/%s/comments/default?max-results=600"
# NOTE(review): purpose unclear from this fragment — presumably a counter used
# by later code; confirm before renaming or removing.
whatisthisvar = 1
failed = []  # post ids whose feed fetch failed (filled in by later code)


def get_feed(post_id):
Пример #30
0
import MySQLdb

# If possible, read the password for root from /root/.my.cnf
config = "/root/.my.cnf"
# Default to an empty password so the MySQLdb.connection() calls below never
# hit a NameError when the config file is missing, unreadable, or contains no
# "password=" line (previously `passwd` could be left unbound here).
passwd = ""
if os.access(config, os.R_OK):
    # "with" guarantees the handle is closed even if readlines() raises.
    with open(config, "r") as f:
        lines = f.readlines()
    # Loop-invariant marker hoisted out of the loop; the last matching line
    # wins, matching the original behaviour.
    findstring = "password="
    for line in lines:
        if findstring in line:
            passwd = line.replace(findstring, "").strip()

# DB1 is the db to sync to (Test or new Live)
# NOTE(review): `user`, `new_db` and `old_db` are not defined anywhere in this
# fragment — presumably set earlier in the original file; confirm before use.
db1 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=new_db)

# DB2 is the db to sync from (backup of Live)
db2 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=old_db)


def tablelist(db):
    """Return the names of all tables in the connected database.

    Parameters:
        db: an open low-level MySQL connection exposing query() and
            store_result() (the _mysql-style API used throughout this script).

    Returns:
        A list of table-name strings in the order MySQL reports them.
    """
    db.query("SHOW TABLES;")
    r = db.store_result()
    # maxrows=0 fetches every remaining row in one call; the previous
    # hard-coded fetch_row(300) silently dropped tables beyond the first 300.
    return [row[0] for row in r.fetch_row(maxrows=0)]


# Dict to load up the database Structure
def tablestruct(db):
    tablestruct = {}