Example #1
def doSendAllResults(fo, setids):
    datasets = {}
    data = {}
    fo.write("{ resultcode: 0,")
    cur = db.cursor()
    setids = [int(x) for x in setids.split(",")]

    datasets[setids[0]] = {}
    # Not using bind variables, but we know that all of the values are integers because of the previous line
    sql = "SELECT IFNULL(data,0), a.value as `" + str(setids[0]) + "`"
    for x in setids[1:]:
        sql += ", IFNULL( ( SELECT value from dataset_values where time = a.time and dataset_id = " + str(x) + " ),0) as `" + str(x) + "`"
        datasets[x] = {}

    sql += """
                FROM dataset_values AS a
                LEFT JOIN dataset_extra_data as ded ON
                    a.dataset_id = ded.dataset_id AND a.time = ded.time
                WHERE a.dataset_id = """ + str(setids[0]) + """ ORDER BY a.time"""

    cur.execute(sql)
    i = 0
    for row in cur:
        data[i] = row[0]
        j = 1
        for x in setids:
            datasets[x][i] = row[j]
            j += 1
        i += 1
    cur.close()
    ## FIXME: this needs to be changed to generate a json data structure,
    ## not string concatenation:
    fo.write("results: {")
    for x in datasets:
        fo.write("'%s': [" % x)
        i = 0
        for y in datasets[x]:
            fo.write("%s,%s," % (y, datasets[x][y]))
        fo.write("],")

    fo.write("},")
    fo.write("rawdata: [")
    for x in data:
        fo.write("%s,'%s'," % (x, data[x]))

    fo.write("],")
    fo.write("stats: {")
    for x in setids:
        cur = db.cursor()
        cur.execute("SELECT avg(value), max(value), min(value) from dataset_values where dataset_id = ?  GROUP BY dataset_id", (x,))
        for row in cur:
            fo.write("'%s': [%s, %s, %s,]," % (x, row[0], row[1], row[2]))
        cur.close()
    fo.write("},")
    fo.write("}")
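
The FIXME above points at the real fix: build a plain Python structure and let json.dumps handle quoting and commas instead of hand-writing the braces. A minimal sketch under that assumption; write_results and its stats argument are hypothetical names, with stats taken to be a {set_id: (avg, max, min)} mapping gathered by the same stats queries as above.

import json

def write_results(fo, datasets, data, stats):
    # Mirror the hand-written output above as a dict, then serialize once.
    payload = {
        "resultcode": 0,
        # each dataset becomes a flat [index, value, index, value, ...] list,
        # matching the pairs the string-concatenation loop emits
        "results": dict(
            (str(set_id),
             [item for idx in sorted(points) for item in (idx, points[idx])])
            for set_id, points in datasets.items()),
        "rawdata": [item for idx in sorted(data) for item in (idx, data[idx])],
        # stats is assumed to be {set_id: (avg, max, min)}
        "stats": dict((str(set_id), list(row)) for set_id, row in stats.items()),
    }
    fo.write(json.dumps(payload))
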
Example #2
def doSendAllResults(fo, setids):
    datasets = {}
    data = {}
    fo.write("{ resultcode: 0,")
    cur = db.cursor()
    setids = [int(x) for x in setids.split(",")]

    datasets[setids[0]] = {}
    # Not using bind variables, but we know that all of the values are integers because of the previous line
    sql = "SELECT IFNULL(data,0), a.value as `" + str(setids[0]) + "`"
    for x in setids[1:]:
        sql += ", IFNULL( ( SELECT value from dataset_values where time = a.time and dataset_id = " + str(x) + " ),0) as `" + str(x) + "`"
        datasets[x] = {}

    sql += """
                FROM dataset_values AS a
                LEFT JOIN dataset_extra_data as ded ON
                    a.dataset_id = ded.dataset_id AND a.time = ded.time
                WHERE a.dataset_id = """ + str(setids[0]) + """ ORDER BY a.time"""

    cur.execute(sql)
    i = 0
    for row in cur:
        data[i] = row[0]
        j = 1
        for x in setids:
            datasets[x][i] = row[j]
            j += 1
        i += 1
    cur.close()
    ## FIXME: this needs to be changed to generate a json data structure,
    ## not string concatenation:
    fo.write("results: {")
    for x in datasets:
        fo.write("'%s': [" % x)
        i = 0
        for y in datasets[x]:
            fo.write("%s,%s," % (y, datasets[x][y]))
        fo.write("],")

    fo.write("},")
    fo.write("rawdata: [")
    for x in data:
        fo.write("%s,'%s'," % (x, data[x]))

    fo.write("],")
    fo.write("stats: {")
    for x in setids:
        cur = db.cursor()
        cur.execute("SELECT avg(value), max(value), min(value) from dataset_values where dataset_id = ?  GROUP BY dataset_id", (x,))
        for row in cur:
            fo.write("'%s': [%s, %s, %s,]," % (x, row[0], row[1], row[2]))
        cur.close()
    fo.write("},")
    fo.write("}")
Example #3
def getTestRunValues(id):
    sql = """SELECT test_run_values.*, pages.name as page FROM test_run_values
            LEFT JOIN pages ON(test_run_values.page_id = pages.id)
            WHERE test_run_values.test_run_id = %s"""

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (id,))

    testRunValues = []

    if cursor.rowcount > 0:
        rows = cursor.fetchall()
        for row in rows:
            testRun = {'interval': row['interval_id'], 'value': row['value']}
            if row['page'] != None:
                testRun['page'] = row['page']

            testRunValues.append(testRun)
        result = {'stat': 'ok', 'values': testRunValues}
    else:
        result = {
            'stat': 'fail',
            'code': '105',
            'message': 'No values found for test run ' + str(id)
        }

    return result
Example #4
File: api.py Project: jfsiii/graphs
def update_combos_last_updated(last_updated):
    """Sets the valid_test_combinations_updated.last_updated field"""
    sql = """
    DELETE FROM valid_test_combinations_updated;
    INSERT INTO valid_test_combinations_updated VALUES (%s);
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (last_updated,))
Example #5
def update_combos_last_updated(last_updated):
    """Sets the valid_test_combinations_updated.last_updated field"""
    sql = """
    DELETE FROM valid_test_combinations_updated;
    INSERT INTO valid_test_combinations_updated VALUES (%s);
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (last_updated, ))
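
A caveat with the snippet above: the SQL string contains two statements separated by ';', and a default MySQLdb connection (without the multi-statement client flag) will generally refuse to run both in one cursor.execute() call. A hedged alternative, assuming the same module-level db connection used throughout these examples, is to issue the statements separately:

def update_combos_last_updated(last_updated):
    """Same intent as above, issued as two separate statements."""
    cursor = db.cursor()
    try:
        cursor.execute("DELETE FROM valid_test_combinations_updated")
        cursor.execute(
            "INSERT INTO valid_test_combinations_updated VALUES (%s)",
            (last_updated,))
        db.commit()  # assuming autocommit is off, as in the other helpers here
    finally:
        cursor.close()
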
Example #6
def application(req):
    resp = Response(content_type='text/plain')
    resp.headers['Access-Control-Allow-Origin'] = '*'

    id = req.params.get('id')
    if 'show' in req.params:  # Legacy url?
        # legacy ?show= may carry several comma-separated ids; the code below
        # expects a single id, so use the first one
        id = req.params.get('show').split(',')[0]

    if id:
        id = int(id)
        sel = req.params.get('sel', '')
        selections = []
        if sel:
            selections = sel.split(',')
        if len(selections) == 2:
            start, end = int(selections[0]), int(selections[1])
        else:
            start = False

        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        sql = """SELECT tests.id, tests.name, machines.name AS machine_name, builds.ref_build_id, MAX(test_runs.date_run)
            FROM tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
            INNER JOIN builds ON (builds.id = test_runs.build_id)
            INNER JOIN machines ON (test_runs.machine_id = machines.id) WHERE tests.id = %s GROUP BY tests.id"""

        cursor.execute(sql, (id,))

        if cursor.rowcount == 1:
            test = cursor.fetchone()
            resp.write("dataset,machine,branch,test\n")
            resp.write(','.join([str(test['id']), test['machine_name'],
                                 str(test['ref_build_id']), test['name']]))
            resp.write('\n')
            resp.write("dataset,time,value,buildid,data\n")

            sql = """SELECT date_run, average, builds.ref_build_id FROM test_runs INNER JOIN builds ON(test_runs.build_id = builds.id)
                    WHERE test_runs.test_id = %s"""
            params = (id,)

            if start:
                sql = sql + " AND date_run > %s AND date_run < %s"
                params = (id, start, end)

            cursor.execute(sql, params)

            if cursor.rowcount > 0:
                rows = cursor.fetchall()
                for row in rows:
                    resp.write('%s,%s,%s,%s\n' %
                               (test['id'], row['date_run'],
                                row['average'], row['ref_build_id']))
    else:
        resp.write("Test not found")

    return resp
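
One fragile spot in the handler above is the hand-rolled CSV: a machine or test name containing a comma would corrupt a row. A small sketch (Python 2, like the rest of these examples) that lets the csv module handle quoting; format_csv_rows is a hypothetical helper, not part of the project:

import csv
from StringIO import StringIO

def format_csv_rows(rows):
    # Quote values properly instead of joining with ',' by hand.
    buf = StringIO()
    writer = csv.writer(buf)
    for row in rows:
        writer.writerow(row)
    return buf.getvalue()

# usage sketch:
#   resp.write(format_csv_rows([
#       ["dataset", "machine", "branch", "test"],
#       [test['id'], test['machine_name'], test['ref_build_id'], test['name']],
#   ]))
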
Example #7
def application(req):
    resp = Response(content_type='text/plain')
    resp.headers['Access-Control-Allow-Origin'] = '*'

    id = req.params.get('id')
    if 'show' in req.params:  # Legacy url?
        # legacy ?show= may carry several comma-separated ids; the code below
        # expects a single id, so use the first one
        id = req.params.get('show').split(',')[0]

    if id:
        id = int(id)
        sel = req.params.get('sel', '')
        selections = []
        if sel:
            selections = sel.split(',')
        if len(selections) == 2:
            start, end = int(selections[0]), int(selections[1])
        else:
            start = False

        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        sql = """SELECT tests.id, tests.name, machines.name AS machine_name, builds.ref_build_id, MAX(test_runs.date_run)
            FROM tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
            INNER JOIN builds ON (builds.id = test_runs.build_id)
            INNER JOIN machines ON (test_runs.machine_id = machines.id) WHERE tests.id = %s GROUP BY tests.id"""

        cursor.execute(sql, (id,))

        if cursor.rowcount == 1:
            test = cursor.fetchone()
            resp.write("dataset,machine,branch,test\n")
            resp.write(','.join([str(test['id']), test['machine_name'],
                                 str(test['ref_build_id']), test['name']]))
            resp.write('\n')
            resp.write("dataset,time,value,buildid,data\n")

            sql = """SELECT date_run, average, builds.ref_build_id FROM test_runs INNER JOIN builds ON(test_runs.build_id = builds.id)
                    WHERE test_runs.test_id = %s"""
            params = (id,)

            if start:
                sql = sql + " AND date_run > %s AND date_run < %s"
                params = (id, start, end)

            cursor.execute(sql, params)

            if cursor.rowcount > 0:
                rows = cursor.fetchall()
                for row in rows:
                    resp.write('%s,%s,%s,%s\n' %
                               (test['id'], row['date_run'],
                                row['average'], row['ref_build_id']))
    else:
        resp.write("Test not found")

    return resp
Example #8
def getTestOptions():
    """Get just the combinations of os/platform/test/branch that are valid
    (i.e., where there is at least one result)"""
    results = {}
    testMap = results['testMap'] = {}
    platformMap = results['platformMap'] = {}
    branchMap = results['branchMap'] = {}
    sql = """SELECT tests.id AS id, tests.pretty_name AS name FROM tests"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        testMap[row['id']] = {
            'name': row['name'],
            'platformIds': set(),
            'branchIds': set(),
        }
    sql = """SELECT os_list.id AS id, os_list.name AS name FROM os_list"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        platformMap[row['id']] = {
            'name': row['name'],
            'testIds': set(),
            'branchIds': set(),
        }
    sql = """SELECT branches.id AS id, branches.name AS name FROM branches"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        branchMap[row['id']] = {
            'name': row['name'],
            'platformIds': set(),
            'testIds': set(),
        }
    for row in get_test_combos():
        testMap[row['test_id']]['platformIds'].add(row['os_id'])
        testMap[row['test_id']]['branchIds'].add(row['branch_id'])
        platformMap[row['os_id']]['testIds'].add(row['test_id'])
        platformMap[row['os_id']]['branchIds'].add(row['branch_id'])
        branchMap[row['branch_id']]['testIds'].add(row['test_id'])
        branchMap[row['branch_id']]['platformIds'].add(row['os_id'])
    return results
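
Note that the maps built above hold set() values, which json.dumps cannot serialize directly. If the result is meant to go out as JSON, a small conversion pass like the following (jsonable_options is a hypothetical helper) could turn the id sets into sorted lists first:

def jsonable_options(options):
    # Convert every set (platformIds/branchIds/testIds) into a sorted list
    # so the structure can be passed straight to json.dumps().
    out = {}
    for map_name, entries in options.items():
        out[map_name] = {}
        for key, info in entries.items():
            out[map_name][key] = dict(
                (field, sorted(value) if isinstance(value, set) else value)
                for field, value in info.items())
    return out

# e.g. json.dumps(jsonable_options(getTestOptions()))
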
Example #9
def doFindContinuousTest(fo, testname, machine, branch):
    cur = db.cursor()
    row = {}
    cur.execute("SELECT id FROM dataset_info WHERE test=? AND machine=? AND branch=?", (testname, machine, branch))

    if cur.rowcount == 1:
        row = cur.fetchone()
        test = {"id": row[0]}
    else:
        test = {}

    fo.write(json.dumps({"resultcode": 0, "test": test}))
Example #10
def doTestInfo(fo, id):
    cur = db.cursor()
    row = {}
    cur.execute("SELECT dataset_info.*, dataset_branchinfo.branchid FROM dataset_info JOIN dataset_branchinfo ON dataset_branchinfo.dataset_id = dataset_info.id WHERE dataset_info.id= ? LIMIT 1", (id,))

    if cur.rowcount == 1:
        row = cur.fetchone()
        testinfo = {"id": row[0], "machine": row[2], "test": row[3], "extra_data": row[5], "branch": row[6], "date": row[7], "buildid": row[8]}
    else:
        testinfo = {}

    fo.write(json.dumps({"resultcode": 0, "test": testinfo}))
Example #11
def doTestInfo(fo, id):
    cur = db.cursor()
    row = {}
    cur.execute("SELECT dataset_info.*, dataset_branchinfo.branchid FROM dataset_info JOIN dataset_branchinfo ON dataset_branchinfo.dataset_id = dataset_info.id WHERE dataset_info.id= ? LIMIT 1", (id,))

    if cur.rowcount == 1:
        row = cur.fetchone()
        testinfo = {"id": row[0], "machine": row[2], "test": row[3], "extra_data": row[5], "branch": row[6], "date": row[7], "buildid": row[8]}
    else:
        testinfo = {}

    fo.write(json.dumps({"resultcode": 0, "test": testinfo}))
Example #12
def doFindContinuousTest(fo, testname, machine, branch):
    cur = db.cursor()
    row = {}
    cur.execute("SELECT id FROM dataset_info WHERE test=? AND machine=? AND branch=?", (testname, machine, branch))

    if cur.rowcount == 1:
        row = cur.fetchone()
        test = {"id": row[0]}
    else:
        test = {}

    fo.write(json.dumps({"resultcode": 0, "test": test}))
Example #13
def get_test_combos():
    """Select the test combinations (not in the form we send to the browser,
    just a list of rows)"""
    sql = """
    SELECT valid_test_combinations.test_id AS test_id,
           valid_test_combinations.branch_id AS branch_id,
           valid_test_combinations.os_id AS os_id
    FROM valid_test_combinations
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    return cursor.fetchall()
Example #14
File: api.py Project: jfsiii/graphs
def get_test_combos():
    """Select the test combinations (not in the form we send to the browser,
    just a list of rows)"""
    sql = """
    SELECT valid_test_combinations.test_id AS test_id,
           valid_test_combinations.branch_id AS branch_id,
           valid_test_combinations.os_id AS os_id
    FROM valid_test_combinations
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    return cursor.fetchall()
Example #15
File: api.py Project: jfsiii/graphs
def getTestOptions():
    """Get just the combinations of os/platform/test/branch that are valid
    (i.e., where there is at least one result)"""
    results = {}
    testMap = results['testMap'] = {}
    platformMap = results['platformMap'] = {}
    branchMap = results['branchMap'] = {}
    sql = """SELECT tests.id AS id, tests.pretty_name AS name FROM tests"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        testMap[row['id']] = {'name': row['name'],
                              'platformIds': set(),
                              'branchIds': set(),
                              }
    sql = """SELECT os_list.id AS id, os_list.name AS name FROM os_list"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        platformMap[row['id']] = {'name': row['name'],
                                  'testIds': set(),
                                  'branchIds': set(),
                                  }
    sql = """SELECT branches.id AS id, branches.name AS name FROM branches"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    for row in cursor.fetchall():
        branchMap[row['id']] = {'name': row['name'],
                                'platformIds': set(),
                                'testIds': set(),
                                }
    for row in get_test_combos():
        testMap[row['test_id']]['platformIds'].add(row['os_id'])
        testMap[row['test_id']]['branchIds'].add(row['branch_id'])
        platformMap[row['os_id']]['testIds'].add(row['test_id'])
        platformMap[row['os_id']]['branchIds'].add(row['branch_id'])
        branchMap[row['branch_id']]['testIds'].add(row['test_id'])
        branchMap[row['branch_id']]['platformIds'].add(row['os_id'])
    return results
Example #16
File: api.py Project: jfsiii/graphs
def getAnnotations(test_run_id, returnType='dictionary'):
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    sql = "SELECT * FROM annotations WHERE test_run_id = %s"
    annotations = []
    cursor.execute(sql, (test_run_id,))

    if cursor.rowcount > 0:
        annRows = cursor.fetchall()
        for annotation in annRows:
            if(returnType == 'dictionary'):
                annotations.append({'note': annotation['note'], 'bug_id': annotation['bug_id']})
            elif returnType == 'array':
                annotations.append([annotation['note'], annotation['bug_id']])
    return annotations
Example #17
def get_os_for_machine(machine):
    cur = db.cursor()
    cur.execute("""\
    SELECT os_list.name
    FROM os_list, machines
    WHERE machines.os_id = os_list.id
          AND machines.name = %s
    """, (machine,))
    row = cur.fetchone()
    if ' ' in row[0]:
        os_name, version = row[0].split(None, 1)
    else:
        os_name, version = row[0], ''
    return os_name, version
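
fetchone() returns None when no machine matches, and the code above would then fail on row[0]. A hedged variant with that guard; get_os_for_machine_safe is a hypothetical name, not part of the project:

def get_os_for_machine_safe(machine):
    cur = db.cursor()
    try:
        cur.execute("""\
        SELECT os_list.name
        FROM os_list, machines
        WHERE machines.os_id = os_list.id
              AND machines.name = %s
        """, (machine,))
        row = cur.fetchone()
    finally:
        cur.close()
    if row is None:
        # unknown machine name: signal it instead of raising a TypeError
        return None, None
    if ' ' in row[0]:
        os_name, version = row[0].split(None, 1)
    else:
        os_name, version = row[0], ''
    return os_name, version
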
Example #18
def getLatestTestRunValues(id, req):
    #first get build information

    machineid = int(req.params['machineid'])
    branchid = int(req.params['branchid'])

    sql = """SELECT
                test_runs.*,
                builds.id as build_id,
                builds.ref_build_id,
                builds.ref_changeset,
                date_run
               FROM
                    test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                        INNER JOIN branches ON (builds.branch_id = branches.id)
                                INNER JOIN machines ON (test_runs.machine_id = machines.id)
               WHERE
                    test_runs.test_id = %s
                    AND machines.id = %s
                    AND branches.id = %s
               ORDER BY
                    date_run DESC
               LIMIT 1
                    """

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (id, machineid, branchid))

    if cursor.rowcount == 1:
        testRun = cursor.fetchone()
        values = getTestRunValues(testRun['id'])
        if values['stat'] == 'ok':
            result = {
                'stat': 'ok',
                'id': testRun['id'],
                'date_run': testRun['date_run'],
                'build_id': testRun['ref_build_id'],
                'values': values['values'],
            }
        else:
            result = values
    else:
        result = {
            'stat': 'fail',
            'code': '106',
            'message': 'No values found for test ' + str(id)
        }

    return result
Example #19
def doGetList(fo, type, branch, machine, testname):
    results = []
    s1 = ""
    if branch:
        s1 = "SELECT DISTINCT branch FROM dataset_info"
    if machine:
        s1 = "SELECT DISTINCT machine FROM dataset_info"
    if testname:
        s1 = "SELECT DISTINCT test FROM dataset_info"
    cur = db.cursor()
    cur.execute(s1 + " WHERE type = ?", (type,))
    for row in cur:
        results.append({"value": row[0]})
    cur.close()
    fo.write(json.dumps({"resultcode": 0, "results": results}))
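
If none of branch, machine or testname is truthy, the snippet above executes " WHERE type = ?" with no SELECT clause, which is invalid SQL. A sketch that picks the column from a fixed whitelist and fails fast otherwise, assuming the same module-level db connection and json import; the resultcode -1 error shape is an assumption, not part of the project:

def doGetList(fo, type, branch, machine, testname):
    if branch:
        column = "branch"
    elif machine:
        column = "machine"
    elif testname:
        column = "test"
    else:
        fo.write(json.dumps({"resultcode": -1, "error": "no list type given"}))
        return
    cur = db.cursor()
    # column comes from the fixed whitelist above, so concatenating it is safe
    cur.execute("SELECT DISTINCT " + column + " FROM dataset_info WHERE type = ?",
                (type,))
    results = [{"value": row[0]} for row in cur]
    cur.close()
    fo.write(json.dumps({"resultcode": 0, "results": results}))
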
Example #20
def get_os_for_machine(machine):
    cur = db.cursor()
    cur.execute(
        """\
    SELECT os_list.name
    FROM os_list, machines
    WHERE machines.os_id = os_list.id
          AND machines.name = %s
    """, (machine, ))
    row = cur.fetchone()
    if ' ' in row[0]:
        os_name, version = row[0].split(None, 1)
    else:
        os_name, version = row[0], ''
    return os_name, version
Example #21
def application(req):
    cursor = db.cursor()
    sql = open(sql_file).read()
    sql = [s.strip() for s in sql.split(';') if s.strip()]
    resp = Response(content_type='text/plain')
    for chunk in sql:
        try:
            cursor.execute(chunk)
        except:
            import sys
            print >> resp, "Bad SQL: %s" % chunk
            raise
    cursor.close()
    print >> resp, 'Setup ok'
    return resp
Example #22
def doGetList(fo, type, branch, machine, testname):
    results = []
    s1 = ""
    if branch:
        s1 = "SELECT DISTINCT branch FROM dataset_info"
    if machine:
        s1 = "SELECT DISTINCT machine FROM dataset_info"
    if testname:
        s1 = "SELECT DISTINCT test FROM dataset_info"
    cur = db.cursor()
    cur.execute(s1 + " WHERE type = ?", (type,))
    for row in cur:
        results.append({"value": row[0]})
    cur.close()
    fo.write(json.dumps({"resultcode": 0, "results": results}))
Example #23
def doListTests(fo, type, datelimit, branch, machine, testname, graphby):
    results = []
    s1 = ""

    # FIXME: This could be vulnerable to SQL injection!  Although it looks like checkstring should catch bad strings.
    if branch:
        s1 += " AND branch = '" + branch + "' "
    if machine:
        s1 += " AND machine = '" + machine + "' "
    if testname:
        s1 += " AND test = '" + testname + "' "

    cur = db.cursor()
    if graphby and graphby == 'bydata':
        cur.execute("SELECT id, machine, test, test_type, dataset_extra_data.data, extra_data, branch FROM dataset_extra_data JOIN dataset_info di ON dataset_extra_data.dataset_id = dataset_info.id WHERE type = ? AND test_type != ? AND (date >= ?) " + s1 + " GROUP BY machine,test,test_type,dataset_extra_data.data, extra_data, branch", (type, "baseline", datelimit))
    elif type == 'discrete' and graphby and graphby == 'buildid':
        cur.execute("SELECT DISTINCT(di.id), di.machine, di.test, di.test_type, di.date, di.extra_data, di.branch, dbi.branchid FROM dataset_info di LEFT JOIN dataset_branchinfo dbi ON di.id=dbi.dataset_id WHERE type = ? AND test_type != ? AND (date >= ?)" + s1 + " ORDER BY di.date ASC", (type, "baseline", datelimit))
    elif type == 'discrete' and not branch and not machine and not testname:
        cur.execute("SELECT MAX(id), machine, test, test_type, MAX(date), extra_data, branch FROM dataset_info WHERE type = ? AND test_type != ? AND (date >= ?) " + s1 + " GROUP BY machine, branch, test", (type, "baseline", datelimit))
    else:
        cur.execute("SELECT id, machine, test, test_type, date, extra_data, branch FROM dataset_info WHERE type = ? AND test_type != ? AND (date >= ?)" + s1, (type, "baseline", datelimit))
    for row in cur:
        buildid = ""
        if len(row) == 8:
            buildid = row[7]
        if graphby and graphby == 'bydata':
            results.append({"id": row[0],
                            "machine": row[1],
                            "test": row[2],
                            #"test_type": row[3],
                            "data": row[4],
                            "extra_data": row[5],
                            "branch": row[6],
                            "buildid": buildid})

        else:
            results.append({"id": row[0],
                            "machine": row[1],
                            "test": row[2],
                            #"test_type": row[3],
                            "date": row[4],
                            "extra_data": row[5],
                            "branch": row[6],
                            "buildid": buildid})

    cur.close()

    fo.write(json.dumps({"resultcode": 0, "results": results}))
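
The FIXME above is about the quoted string concatenation for branch/machine/testname. A hedged sketch of the parameterized alternative, as a hypothetical helper using the same '?' placeholder style as the rest of this module; base_sql and tail_sql in the usage comment are placeholders for the surrounding query text:

def build_filters(branch, machine, testname):
    # Collect optional filters as placeholder clauses plus their values,
    # so the user-supplied strings are bound rather than concatenated.
    clauses, params = [], []
    if branch:
        clauses.append(" AND branch = ? ")
        params.append(branch)
    if machine:
        clauses.append(" AND machine = ? ")
        params.append(machine)
    if testname:
        clauses.append(" AND test = ? ")
        params.append(testname)
    return "".join(clauses), tuple(params)

# usage sketch:
#   s1, extra = build_filters(branch, machine, testname)
#   cur.execute(base_sql + s1 + tail_sql, (type, "baseline", datelimit) + extra)
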
Example #24
def doListTests(fo, type, datelimit, branch, machine, testname, graphby):
    results = []
    s1 = ""

    # FIXME: This could be vulnerable to SQL injection!  Although it looks like checkstring should catch bad strings.
    if branch:
        s1 += " AND branch = '" + branch + "' "
    if machine:
        s1 += " AND machine = '" + machine + "' "
    if testname:
        s1 += " AND test = '" + testname + "' "

    cur = db.cursor()
    if graphby and graphby == 'bydata':
        cur.execute("SELECT id, machine, test, test_type, dataset_extra_data.data, extra_data, branch FROM dataset_extra_data JOIN dataset_info di ON dataset_extra_data.dataset_id = dataset_info.id WHERE type = ? AND test_type != ? AND (date >= ?) " + s1 + " GROUP BY machine,test,test_type,dataset_extra_data.data, extra_data, branch", (type, "baseline", datelimit))
    elif type == 'discrete' and graphby and graphby == 'buildid':
        cur.execute("SELECT DISTINCT(di.id), di.machine, di.test, di.test_type, di.date, di.extra_data, di.branch, dbi.branchid FROM dataset_info di LEFT JOIN dataset_branchinfo dbi ON di.id=dbi.dataset_id WHERE type = ? AND test_type != ? AND (date >= ?)" + s1 + " ORDER BY di.date ASC", (type, "baseline", datelimit))
    elif type == 'discrete' and not branch and not machine and not testname:
        cur.execute("SELECT MAX(id), machine, test, test_type, MAX(date), extra_data, branch FROM dataset_info WHERE type = ? AND test_type != ? AND (date >= ?) " + s1 + " GROUP BY machine, branch, test", (type, "baseline", datelimit))
    else:
        cur.execute("SELECT id, machine, test, test_type, date, extra_data, branch FROM dataset_info WHERE type = ? AND test_type != ? AND (date >= ?)" + s1, (type, "baseline", datelimit))
    for row in cur:
        buildid = ""
        if len(row) == 8:
            buildid = row[7]
        if graphby and graphby == 'bydata':
            results.append({"id": row[0],
                            "machine": row[1],
                            "test": row[2],
                            #"test_type": row[3],
                            "data": row[4],
                            "extra_data": row[5],
                            "branch": row[6],
                            "buildid": buildid})

        else:
            results.append({"id": row[0],
                            "machine": row[1],
                            "test": row[2],
                            #"test_type": row[3],
                            "date": row[4],
                            "extra_data": row[5],
                            "branch": row[6],
                            "buildid": buildid})

    cur.close()

    fo.write(json.dumps({"resultcode": 0, "results": results}))
Example #25
def getAnnotations(test_run_id, returnType='dictionary'):
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    sql = "SELECT * FROM annotations WHERE test_run_id = %s"
    annotations = []
    cursor.execute(sql, (test_run_id,))

    if cursor.rowcount > 0:
        annRows = cursor.fetchall()
        for annotation in annRows:
            if (returnType == 'dictionary'):
                annotations.append({
                    'note': annotation['note'],
                    'bug_id': annotation['bug_id']
                })
            elif returnType == 'array':
                annotations.append([annotation['note'], annotation['bug_id']])
    return annotations
Example #26
def application(req):
    cursor = db.cursor()
    sql = open(sql_file).read()
    sql = [
        s.strip()
        for s in sql.split(';')
        if s.strip()]
    resp = Response(content_type='text/plain')
    for chunk in sql:
        try:
            cursor.execute(chunk)
        except:
            import sys
            print >> resp, "Bad SQL: %s" % chunk
            raise
    cursor.close()
    print >> resp, 'Setup ok'
    return resp
Example #27
def getTest(id, attribute, req):
    if (attribute == 'runs'):
        return getTestRuns(id)
    else:
        sql = """SELECT
            tests.id,
            tests.pretty_name AS test_name,
            machines.name as machine_name,
            branches.name AS branch_name,
            os_list.name AS os_name,
            test_runs.date_run
        FROM
            tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
                INNER JOIN machines ON (machines.id = test_runs.machine_id)
                    INNER JOIN os_list ON (machines.os_id = os_list.id)
                        INNER JOIN builds ON (test_runs.build_id = builds.id)
                            INNER JOIN branches on (builds.branch_id = branches.id)
        WHERE
            tests.id = %s
        ORDER BY
            test_runs.date_run DESC
        LIMIT 1"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id, ))

        if cursor.rowcount == 1:
            row = cursor.fetchone()
            #change column names to the names used here, then we don't need to re-label them
            test = {
                'id': row['id'],
                'name': row['test_name'],
                'branch': row['branch_name'],
                'os': row['os_name'],
                'machine': row['machine_name'],
            }
            result = {'stat': 'ok', 'test': test}
        else:
            result = {
                'stat': 'fail',
                'code': '101',
                'message': 'Test not found'
            }

        return result
Example #28
File: api.py Project: jfsiii/graphs
def getLatestTestRunValues(id, req):
    #first get build information

    machineid = int(req.params['machineid'])
    branchid = int(req.params['branchid'])

    sql = """SELECT
                test_runs.*,
                builds.id as build_id,
                builds.ref_build_id,
                builds.ref_changeset,
                date_run
               FROM
                    test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                        INNER JOIN branches ON (builds.branch_id = branches.id)
                                INNER JOIN machines ON (test_runs.machine_id = machines.id)
               WHERE
                    test_runs.test_id = %s
                    AND machines.id = %s
                    AND branches.id = %s
               ORDER BY
                    date_run DESC
               LIMIT 1
                    """

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (id, machineid, branchid))

    if cursor.rowcount == 1:
        testRun = cursor.fetchone()
        values = getTestRunValues(testRun['id'])
        if values['stat'] == 'ok':
            result = {'stat': 'ok',
                      'id': testRun['id'],
                      'date_run': testRun['date_run'],
                      'build_id': testRun['ref_build_id'],
                      'values': values['values'],
                      }
        else:
            result = values
    else:
        result = {'stat': 'fail', 'code': '106', 'message': 'No values found for test ' + str(id)}

    return result
Example #29
def getRevisionValues(req):
    """Returns a set of values for a given revision"""
    revisions = req.params.getall('revision')
    result = {
        'stat': 'ok',
        'revisions': {},
    }

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    for rev in revisions:
        testRuns = result['revisions'].setdefault(rev, {})
        sql = """SELECT
                    test_runs.*,
                    tests.name as test_name,
                    tests.pretty_name,
                    builds.id as build_id,
                    builds.ref_build_id,
                    builds.ref_changeset,
                    os_list.name AS os_name
                FROM
                    test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                        INNER JOIN tests ON (test_runs.test_id = tests.id)
                            INNER JOIN machines ON (machines.id = test_runs.machine_id)
                                INNER JOIN os_list ON (machines.os_id = os_list.id)
                WHERE
                    builds.ref_changeset = %s
                """

        cursor.execute(sql, (rev, ))
        for row in cursor.fetchall():
            testData = testRuns.setdefault(row['test_name'], {
                'name': row['pretty_name'],
                'id': row['test_id'],
                'test_runs': {}
            })
            # list.append() returns None, so there is nothing useful to assign;
            # setdefault() already hands back the per-OS list to append to
            testData['test_runs'].setdefault(row['os_name'], []).append(
                [row['id'], row['ref_build_id'], row['date_run'],
                 row['average']])

    return result
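
For reference, the nested structure this builds looks roughly like the following; all of the concrete values are made up purely for illustration:

example_result = {
    'stat': 'ok',
    'revisions': {
        'abc123def456': {            # one entry per requested revision
            'ts': {                  # keyed by tests.name
                'name': 'Ts',        # tests.pretty_name
                'id': 42,            # test_runs.test_id
                'test_runs': {
                    'Linux': [       # keyed by os_list.name
                        # [test_run id, ref_build_id, date_run, average]
                        [1001, '20100101', 1262304000, 123.4],
                    ],
                },
            },
        },
    },
}
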
Example #30
def main():
    options, args = parser.parse_args()
    sql = ''
    if options.drop:
        fn = os.path.join(sql_dir, 'schema_drop.sql')
        sql += open(fn).read()
    if options.create or options.drop:
        fn = os.path.join(sql_dir, 'schema.sql')
        sql += open(fn).read()
    fn = os.path.join(sql_dir, 'test_data.sql')
    sql += open(fn).read()
    if options.sql:
        print sql
        return
    sqls = list(sql_lines(sql))
    for sql in sqls:
        print 'Executing %s' % sql.strip()
        cursor = db.cursor()
        cursor.execute(sql)
        cursor.close()
        db.commit()
Example #31
def main():
    options, args = parser.parse_args()
    sql = ''
    if options.drop:
        fn = os.path.join(sql_dir, 'schema_drop.sql')
        sql += open(fn).read()
    if options.create or options.drop:
        fn = os.path.join(sql_dir, 'schema.sql')
        sql += open(fn).read()
    fn = os.path.join(sql_dir, 'test_data.sql')
    sql += open(fn).read()
    if options.sql:
        print sql
        return
    sqls = list(sql_lines(sql))
    for sql in sqls:
        print 'Executing %s' % sql.strip()
        cursor = db.cursor()
        cursor.execute(sql)
        cursor.close()
        db.commit()
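
sql_lines() is not included in these examples; a minimal stand-in consistent with how main() uses it (splitting the combined schema text into individual statements) might look like the following. The real helper may well be smarter about semicolons inside string literals:

def sql_lines(text):
    # Naive stand-in: yield each non-empty statement, splitting on ';'.
    for statement in text.split(';'):
        statement = statement.strip()
        if statement:
            yield statement
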
Example #32
File: api.py Project: jfsiii/graphs
def getTest(id, attribute, req):
    if(attribute == 'runs'):
        return getTestRuns(id)
    else:
        sql = """SELECT
            tests.id,
            tests.pretty_name AS test_name,
            machines.name as machine_name,
            branches.name AS branch_name,
            os_list.name AS os_name,
            test_runs.date_run
        FROM
            tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
                INNER JOIN machines ON (machines.id = test_runs.machine_id)
                    INNER JOIN os_list ON (machines.os_id = os_list.id)
                        INNER JOIN builds ON (test_runs.build_id = builds.id)
                            INNER JOIN branches on (builds.branch_id = branches.id)
        WHERE
            tests.id = %s
        ORDER BY
            test_runs.date_run DESC
        LIMIT 1"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id,))

        if cursor.rowcount == 1:
            row = cursor.fetchone()
            #change column names to the names used here, then we don't need to re-label them
            test = {'id': row['id'],
                    'name': row['test_name'],
                    'branch': row['branch_name'],
                    'os': row['os_name'],
                    'machine': row['machine_name'],
                    }
            result = {'stat': 'ok', 'test': test}
        else:
            result = {'stat': 'fail', 'code': '101', 'message': 'Test not found'}

        return result
Example #33
def getTestRun(id, attribute, req):
    if attribute == 'values':
        return getTestRunValues(id)
    elif attribute == 'latest':
        return getLatestTestRunValues(id, req)
    elif attribute == 'revisions':
        return getRevisionValues(req)
    else:
        sql = """SELECT test_runs.*, builds.id as build_id, builds.ref_build_id as ref_build_id, builds.ref_changeset as changeset
                FROM test_runs INNER JOIN builds ON (test_runs.build_id = builds.id)
                WHERE test_runs.id = %s"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id,))

        if cursor.rowcount == 1:
            testRun = cursor.fetchone()
            annotations = getAnnotations(id, 'dictionary')
            result = {
                'stat': 'ok',
                'testrun': {
                    'id': testRun['id'],
                    'build': {
                        'id': testRun['build_id'],
                        'build_id': testRun['ref_build_id'],
                        'changeset': testRun['changeset']
                    },
                    'date_run': testRun['date_run'],
                    'average': testRun['average'],
                    'annotations': annotations
                },
            }
        else:
            return {
                'stat': 'fail',
                'code': '104',
                'message': 'Test run not found'
            }
    return result
Example #34
File: api.py Project: jfsiii/graphs
def getTestRunValues(id):
    sql = """SELECT test_run_values.*, pages.name as page FROM test_run_values
            LEFT JOIN pages ON(test_run_values.page_id = pages.id)
            WHERE test_run_values.test_run_id = %s"""

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (id,))

    testRunValues = []

    if cursor.rowcount > 0:
        rows = cursor.fetchall()
        for row in rows:
            testRun = {'interval': row['interval_id'], 'value': row['value']}
            if row['page'] != None:
                testRun['page'] = row['page']

            testRunValues.append(testRun)
        result = {'stat': 'ok', 'values': testRunValues}
    else:
        result = {'stat': 'fail', 'code': '105', 'message': 'No values found for test run ' + str(id)}

    return result
Example #35
File: api.py Project: jfsiii/graphs
def getRevisionValues(req):
    """Returns a set of values for a given revision"""
    revisions = req.params.getall('revision')
    result = {'stat': 'ok',
              'revisions': {},
              }

    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    for rev in revisions:
        testRuns = result['revisions'].setdefault(rev, {})
        sql = """SELECT
                    test_runs.*,
                    tests.name as test_name,
                    tests.pretty_name,
                    builds.id as build_id,
                    builds.ref_build_id,
                    builds.ref_changeset,
                    os_list.name AS os_name
                FROM
                    test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                        INNER JOIN tests ON (test_runs.test_id = tests.id)
                            INNER JOIN machines ON (machines.id = test_runs.machine_id)
                                INNER JOIN os_list ON (machines.os_id = os_list.id)
                WHERE
                    builds.ref_changeset = %s
                """

        cursor.execute(sql, (rev,))
        for row in cursor:
            testData = testRuns.setdefault(row['test_name'],
                    {'name': row['pretty_name'], 'id': row['test_id'], 'test_runs': {}})
            # append() returns None; just call it on the list setdefault() returns
            testData['test_runs'].setdefault(row['os_name'], []).append(
                [row['id'], row['ref_build_id'], row['date_run'], row['average']])

    return result
Example #36
File: api.py Project: jfsiii/graphs
def getTestRun(id, attribute, req):
    if attribute == 'values':
        return getTestRunValues(id)
    elif attribute == 'latest':
        return getLatestTestRunValues(id, req)
    elif attribute == 'revisions':
        return getRevisionValues(req)
    else:
        sql = """SELECT test_runs.*, builds.id as build_id, builds.ref_build_id as ref_build_id, builds.ref_changeset as changeset
                FROM test_runs INNER JOIN builds ON (test_runs.build_id = builds.id)
                WHERE test_runs.id = %s"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id,))

        if cursor.rowcount == 1:
            testRun = cursor.fetchone()
            annotations = getAnnotations(id, 'dictionary')
            result = {'stat': 'ok',
                      'testrun': {'id': testRun['id'], 'build': {'id': testRun['build_id'], 'build_id': testRun['ref_build_id'], 'changeset': testRun['changeset']},
                                  'date_run': testRun['date_run'], 'average': testRun['average'], 'annotations': annotations},
                      }
        else:
            return {'stat': 'fail', 'code': '104', 'message': 'Test run not found'}
    return result
Example #37
File: api.py Project: jfsiii/graphs
def getTestRuns(id, attribute, req):

    machineid = int(req.params.get('machineid', -1))
    branchid = int(req.params['branchid'])
    platformid = int(req.params.get('platformid', -1))

    days = int(req.params.get('days', 365))
    age = datetime.utcnow() - timedelta(days=days)

    if platformid == -1 and machineid != -1:
        sql = """
    SELECT test_runs.*, builds.id as build_id, builds.ref_build_id, builds.ref_changeset
    FROM test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                   INNER JOIN branches ON (builds.branch_id = branches.id)
                   INNER JOIN machines ON (test_runs.machine_id = machines.id)
    WHERE test_runs.test_id = %s
          AND machines.id = %s
          AND branches.id = %s
          AND machines.is_active <> 0
          AND date_run >= %s
    ORDER BY date_run ASC
"""

        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id, machineid, branchid, mktime(age.timetuple())))
    elif machineid == -1 and platformid != -1:
        sql = """
    SELECT test_runs.*, builds.id as build_id, builds.ref_build_id, builds.ref_changeset
    FROM test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                   INNER JOIN branches ON (builds.branch_id = branches.id)
                   INNER JOIN machines ON (test_runs.machine_id = machines.id)
                   INNER JOIN os_list ON (machines.os_id = os_list.id)
    WHERE test_runs.test_id = %s
          AND os_list.id = %s
          AND branches.id = %s
          AND machines.is_active <> 0
          AND date_run >= %s
    ORDER BY date_run ASC
"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id, platformid, branchid, mktime(age.timetuple())))
    else:
        raise exc.HTTPBadRequest("You must provide one machineid *or* platformid")

    if cursor.rowcount > 0:
        rows = cursor.fetchall()
        averages = {}
        ave_totals = {}
        testRuns = []
        for row in rows:
            row_avg = 0
            if row['average'] != None:
                row_avg = row['average']
            averages[row['ref_changeset']] = averages.get(row['ref_changeset'], 0) + row_avg
            ave_totals[row['ref_changeset']] = ave_totals.get(row['ref_changeset'], 0) + 1
            annotations = getAnnotations(row['id'], 'array')
            testRuns.append([row['id'], [row['build_id'], row['ref_build_id'], row['ref_changeset']], row['date_run'], row_avg, row['run_number'], annotations, row['machine_id']])

        averages = dict(
            (changeset, total / ave_totals[changeset])
            for changeset, total in averages.iteritems())
        result = {'age': mktime(age.timetuple()), 'stat': 'ok', 'test_runs': testRuns,
                  'averages': averages,
                  'min': min(row['average'] for row in rows),
                  'max': max(row['average'] for row in rows),
                  'date_range': [min(r['date_run'] for r in rows),
                                 max(r['date_run'] for r in rows)]}
    else:
        result = {'stat': 'fail', 'code': '102', 'message': 'No test runs found for test id ' + str(id)}

    return result
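
A small caveat in the per-changeset averages computed above: under Python 2, dividing an integer sum by the integer count floors the result. If the 'average' column can ever come back as an integer type, a hypothetical helper that forces float division would avoid that:

def per_changeset_averages(sums, counts):
    # force float division so integer sums do not get floored under Python 2
    return dict((changeset, sums[changeset] / float(counts[changeset]))
                for changeset in sums)
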
Example #38
def application(req):
    resp = Response(content_type='text/plain')
    link_format = "RETURN:%s:%.2f:%sshow=%d\n"
    link_str = ""

    # incoming query string has the following parameters:
    # type=discrete|continuous
    #  indicates discrete vs. continuous dataset, defaults to continuous
    # value=n
    #  (REQUIRED) value to be recorded as the actual test value
    # tbox=foopy
    #  (REQUIRED) name of the tinderbox reporting the value (or rather, the name that is to be given this set of data)
    # testname=test
    #  (REQUIRED) the name of this test
    # data=rawdata
    #  raw data for this test
    # time=seconds
    #  time since the epoch in GMT of this test result; if ommitted, current time at time of script run is used
    # date
    #  date that the test was run - this is for discrete graphs
    # branch=1.8.1,1.8.0 or 1.9.0
    #  name of the branch that the build was generated for
    # branchid=id
    #  date of the build
    #  http://wiki.mozilla.org/MozillaQualityAssurance:Build_Ids

    #takes as input a file for parsing in csv with the format:
    # value,testname,tbox,time,data,branch,branchid,type,data

    # Create the DB schema if it doesn't already exist
    # XXX can pull out dataset_info.machine and dataset_info.{test,test_type} into two separate tables,
    # if we need to.

    # value,testname,tbox,time,data,branch,branchid,type,data

    fields = ["value", "testname", "tbox", "timeval", "date", "branch", "branchid", "type", "data"]
    strFields = ["type", "data", "tbox", "testname", "branch", "branchid"]
    numFields = ["date", "timeval", "value"]
    d_ids = []
    all_ids = []
    all_types = []
    values = {}
    if 'filename' in req.POST:
        val = req.POST["filename"]
        if val.file:
            resp.write('found a file\n')
            for line in val.file:
                line = line.rstrip("\n\r")
                ## FIXME: not actually CSV:
                contents = line.split(',')
                #clear any previous content in the fields variables - stops reuse of data over lines
                for field in fields:
                    ## FIXME: just terrible, just terrible
                    values[field] = ''
                if len(contents) < 7:
                    raise exc.HTTPBadRequest("Incompatible file format")
                for field, content in zip(fields, contents):
                    ## FIXME: more terrible
                    values[field] = content
                for strField in strFields:
                    if strField not in values:
                        continue
                    if not checkString(values[strField]):
                        raise exc.HTTPBadRequest(
                            "Invalid string arg: %s '%s'" % (strField, values[strField]))
                for numField in numFields:
                    if numField not in values:
                        continue
                    if not checkNumber(values[numField]):
                        raise exc.HTTPBadRequest(
                            "Invalid numeric arg: %s '%s'" % (numField, values[numField]))

                #do some checks to ensure that we are enforcing the requirement rules of the script
                if (not values['type']):
                    values['type'] = "continuous"

                if (not values['timeval']):
                    values['timeval'] = int(time.time())

                if values['type'] == "discrete" and not values['date']:
                    raise exc.HTTPBadRequest("Bad args, need a valid date")

                if not values['value'] or not values['tbox'] or not values['testname']:
                    raise exc.HTTPBadRequest("Bad args")

                # figure out our dataset id
                setid = -1

                # Not a big fan of this while loop.  If something goes wrong with the select it will insert until the script times out.
                while setid == -1:
                    cur = db.cursor()
                    cur.execute("SELECT id FROM dataset_info WHERE type <=> ? AND machine <=> ? AND test <=> ? AND test_type <=> ? AND extra_data <=> ? AND branch <=> ? AND date <=> ? limit 1",
                                (values['type'], values['tbox'], values['testname'], "perf", "branch=" + values['branch'], values['branch'], values['date']))
                    res = cur.fetchall()
                    cur.close()

                    if len(res) == 0:
                        db.execute("INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
                                   (values['type'], values['tbox'], values['testname'], "perf", "branch=" + values['branch'], values['branch'], values['date']))
                    else:
                        setid = res[0][0]

                #determine if we've seen this set of data before
                if values['type'] == "discrete" and int(values['timeval']) == 0:
                    cur = db.cursor()
                    cur.execute("SELECT dataset_id FROM dataset_values WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) != 0:
                        print "found a matching discrete data set"
                        db.execute("DELETE FROM dataset_values WHERE dataset_id = ?", (setid,))
                        db.execute("DELETE FROM dataset_branchinfo WHERE dataset_id = ?", (setid,))
                        db.execute("DELETE FROM dataset_extra_data WHERE dataset_id = ?", (setid,))
                        db.execute("DELETE FROM annotations WHERE dataset_id = ?", (setid,))
                elif values['type'] == "continuous":
                    cur = db.cursor()
                    cur.execute("SELECT dataset_id FROM dataset_values WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) != 0:
                        print "found a matching continuous data point"
                        db.execute("DELETE FROM dataset_values WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))
                        db.execute("DELETE FROM dataset_branchinfo WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))
                        db.execute("DELETE FROM dataset_extra_data WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))
                        db.execute("DELETE FROM annotations WHERE dataset_id = ? AND time = ?", (setid, values['timeval']))

                db.execute("INSERT INTO dataset_values (dataset_id, time, value) VALUES (?,?,?)", (setid, values['timeval'], values['value']))
                db.execute("INSERT INTO dataset_branchinfo (dataset_id, time, branchid) VALUES (?,?,?)", (setid, values['timeval'], values['branchid']))
                if values.get('data'):
                    db.execute("INSERT INTO dataset_extra_data (dataset_id, time, data) VALUES (?,?,?)", (setid, values['timeval'], values['data']))

                if values['type'] == "discrete":
                    if not setid in d_ids:
                        d_ids.append(setid)
                if not setid in all_ids:
                    all_ids.append(setid)
                    all_types.append(values['type'])

        for setid, t in zip(all_ids, all_types):
            cur = db.cursor()
            cur.execute("SELECT MIN(time), MAX(time), test FROM dataset_values, dataset_info WHERE dataset_id = ? and id = dataset_id GROUP BY test", (setid,))
            res = cur.fetchall()
            cur.close()
            tstart = res[0][0]
            tend = res[0][1]
            testname = res[0][2]
            if t == "discrete":
                link_str += (link_format % (testname, float(-1), "graph.html#type=series&", setid,))
            else:
                tstart = 0
                link_str += (link_format % (testname, float(-1), "graph.html#", setid,))

        #this code auto-adds a set of continuous data for each series of discrete data sets - creating an overview of the data
        # generated by a given test (matched by machine, test, test_type, extra_data and branch)
        for setid in d_ids:
            cur = db.cursor()
            #throw out the largest value and take the average of the rest
            cur.execute("SELECT AVG(value) FROM dataset_values WHERE dataset_id = ? and value != (SELECT MAX(value) from dataset_values where dataset_id = ?)", (setid, setid))
            res = cur.fetchall()
            cur.close()
            avg = res[0][0]
            if avg is not None:
                cur = db.cursor()
                cur.execute("SELECT machine, test, test_type, extra_data, branch, date FROM dataset_info WHERE id = ?", (setid,))
                res = cur.fetchall()
                cur.close()
                tbox = res[0][0]
                testname = res[0][1]
                test_type = res[0][2]
                extra_data = res[0][3]
                branch = str(res[0][4])
                timeval = res[0][5]
                date = ''
                cur = db.cursor()
                cur.execute("SELECT branchid FROM dataset_branchinfo WHERE dataset_id = ?", (setid,))
                res = cur.fetchall()
                cur.close()
                branchid = res[0][0]
                dsetid = -1
                while dsetid == -1:
                    cur = db.cursor()
                    cur.execute("SELECT id from dataset_info where type = ? AND machine <=> ? AND test = ? AND test_type = ? AND extra_data = ? AND branch <=> ? AND date <=> ? limit 1",
                            ("continuous", tbox, testname + "_avg", "perf", "branch=" + branch, branch, date))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) == 0:
                        db.execute("INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
                               ("continuous", tbox, testname + "_avg", "perf", "branch=" + branch, branch, date))
                    else:
                        dsetid = res[0][0]
                cur = db.cursor()
                cur.execute("SELECT * FROM dataset_values WHERE dataset_id=? AND time <=> ? limit 1", (dsetid, timeval))
                res = cur.fetchall()
                cur.close()
                if len(res) == 0:
                    db.execute("INSERT INTO dataset_values (dataset_id, time, value) VALUES (?,?,?)", (dsetid, timeval, avg))
                    db.execute("INSERT INTO dataset_branchinfo (dataset_id, time, branchid) VALUES (?,?,?)", (dsetid, timeval, branchid))
                else:
                    db.execute("UPDATE dataset_values SET value=? WHERE dataset_id=? AND time <=> ?", (avg, dsetid, timeval))
                    db.execute("UPDATE dataset_branchinfo SET branchid=? WHERE dataset_id=? AND time <=> ?", (branchid, dsetid, timeval))
                cur = db.cursor()
                cur.execute("SELECT MIN(time), MAX(time) FROM dataset_values WHERE dataset_id = ?", (dsetid,))
                res = cur.fetchall()
                cur.close()
                tstart = 0
                tend = res[0][1]
                link_str += (link_format % (testname, float(avg), "graph.html#", dsetid,))

        db.commit()
    resp.write('Inserted.\n')
    resp.write(link_str)
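
checkString() and checkNumber() are referenced above but not shown in these examples. Hypothetical stand-ins consistent with how they are used (reject anything outside a conservative character set, and accept empty numeric fields since the handler fills in defaults afterwards):

import re

_STRING_RE = re.compile(r'^[A-Za-z0-9_,.=\- ]*$')

def checkString(value):
    # stand-in: allow only a conservative character set in string fields
    return bool(_STRING_RE.match(value))

def checkNumber(value):
    # stand-in: empty is fine (defaults are applied later), otherwise numeric
    if value == '':
        return True
    try:
        float(value)
        return True
    except ValueError:
        return False
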
Example #39
def getTestRuns(id, attribute, req):

    machineid = int(req.params.get('machineid', -1))
    branchid = int(req.params['branchid'])
    platformid = int(req.params.get('platformid', -1))

    days = int(req.params.get('days', 365))
    age = datetime.utcnow() - timedelta(days=days)

    if platformid == -1 and machineid != -1:
        sql = """
    SELECT test_runs.*, builds.id as build_id, builds.ref_build_id, builds.ref_changeset
    FROM test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                   INNER JOIN branches ON (builds.branch_id = branches.id)
                   INNER JOIN machines ON (test_runs.machine_id = machines.id)
    WHERE test_runs.test_id = %s
          AND machines.id = %s
          AND branches.id = %s
          AND machines.is_active <> 0
          AND date_run >= %s
    ORDER BY date_run ASC
"""

        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql, (id, machineid, branchid, mktime(age.timetuple())))
    elif machineid == -1 and platformid != -1:
        sql = """
    SELECT test_runs.*, builds.id as build_id, builds.ref_build_id, builds.ref_changeset
    FROM test_runs INNER JOIN builds ON (builds.id = test_runs.build_id)
                   INNER JOIN branches ON (builds.branch_id = branches.id)
                   INNER JOIN machines ON (test_runs.machine_id = machines.id)
                   INNER JOIN os_list ON (machines.os_id = os_list.id)
    WHERE test_runs.test_id = %s
          AND os_list.id = %s
          AND branches.id = %s
          AND machines.is_active <> 0
          AND date_run >= %s
    ORDER BY date_run ASC
"""
        cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
        cursor.execute(sql,
                       (id, platformid, branchid, mktime(age.timetuple())))
    else:
        raise exc.HTTPBadRequest(
            "You must provide one machineid *or* platformid")

    if cursor.rowcount > 0:
        rows = cursor.fetchall()
        averages = {}
        ave_totals = {}
        testRuns = []
        for row in rows:
            row_avg = 0
            if row['average'] is not None:
                row_avg = row['average']
            averages[row['ref_changeset']] = averages.get(
                row['ref_changeset'], 0) + row_avg
            ave_totals[row['ref_changeset']] = ave_totals.get(
                row['ref_changeset'], 0) + 1
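            # accumulate per-changeset sum and sample count; the mean per
            # changeset is computed from these after the loop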
            annotations = getAnnotations(row['id'], 'array')
            testRuns.append([
                row['id'],
                [row['build_id'], row['ref_build_id'],
                 row['ref_changeset']], row['date_run'], row_avg,
                row['run_number'], annotations, row['machine_id']
            ])

        averages = dict((changeset, total / ave_totals[changeset])
                        for changeset, total in averages.iteritems())
        result = {
            'age': mktime(age.timetuple()),
            'stat': 'ok',
            'test_runs': testRuns,
            'averages': averages,
            'min': min(row['average'] for row in rows),
            'max': max(row['average'] for row in rows),
            'date_range': [
                min(r['date_run'] for r in rows),
                max(r['date_run'] for r in rows)
            ]
        }
    else:
        result = {
            'stat': 'fail',
            'code': '102',
            'message': 'No test runs found for test id ' + str(id)
        }

    return result
Example #40
0
File: api.py Project: jfsiii/graphs
def getTests(id, attribute, req):
    if attribute == 'short':
        update_valid_test_combinations()
        result = getTestOptions()
        result['stat'] = 'ok'
        result['from'] = 'db'
        return result

    sql = """SELECT DISTINCT
                tests.id,
                tests.pretty_name AS test_name,
                machines.name as machine_name,
                machines.id as machine_id,
                branches.name AS branch_name,
                branches.id AS branch_id,
                os_list.id AS os_id ,
                os_list.name AS os_name
            FROM
                tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
                    INNER JOIN machines ON (machines.id = test_runs.machine_id)
                        INNER JOIN os_list ON (machines.os_id = os_list.id)
                            INNER JOIN builds ON (test_runs.build_id = builds.id)
                                INNER JOIN branches on (builds.branch_id = branches.id)
            WHERE machines.is_active <> 0
            ORDER BY branches.id, machines.id"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    tests = []
    #check the row count before fetching the rows
    if attribute == 'short':
        testMap = {}
        branchMap = {}
        platformMap = {}
        machineMap = {}
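        # in 'short' mode, build cross-reference maps instead of a flat list:
        # which branches/platforms/machines each test has runs on, and vice versa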
    if cursor.rowcount > 0:
        rows = cursor.fetchall()

        for row in rows:
            if attribute == 'short':
                if row['id'] not in testMap:
                    testMap[row['id']] = {'name': row['test_name'],
                                          'branch': set(),
                                          'platform': set(),
                                          'machine': set(),
                                          }
                testMap[row['id']]['branch'].add(row['branch_id'])
                testMap[row['id']]['platform'].add(row['os_id'])
                testMap[row['id']]['machine'].add(row['machine_id'])
                if row['branch_id'] not in branchMap:
                    branchMap[row['branch_id']] = {'name': row['branch_name'],
                                                   'test': set(),
                                                   'platform': set(),
                                                   'machine': set(),
                                                  }
                branchMap[row['branch_id']]['test'].add(row['id'])
                branchMap[row['branch_id']]['platform'].add(row['os_id'])
                branchMap[row['branch_id']]['machine'].add(row['machine_id'])
                if row['os_id'] not in platformMap:
                    platformMap[row['os_id']] = {'name': row['os_name'],
                                                 'test': set(),
                                                 'branch': set(),
                                                 'machine': set(),
                                                 }
                platformMap[row['os_id']]['test'].add(row['id'])
                platformMap[row['os_id']]['branch'].add(row['branch_id'])
                platformMap[row['os_id']]['machine'].add(row['machine_id'])
                if row['machine_id'] not in machineMap:
                    machineMap[row['machine_id']] = {'name': row['machine_name'],
                                                     'test': set(),
                                                     'platform': set(),
                                                     'branch': set(),
                                                     }
                machineMap[row['machine_id']]['test'].add(row['id'])
                machineMap[row['machine_id']]['branch'].add(row['branch_id'])
                machineMap[row['machine_id']]['platform'].add(row['os_id'])
                continue

            tests.append(
                {'id': row['id'],
                 'name': row['test_name'],
                 'branch': {'name': row['branch_name'], 'id': row['branch_id']},
                 'platform': {'name': row['os_name'], 'id': row['os_id']},
                 'machine': {'name': row['machine_name'], 'id': row['machine_id']}})

        if attribute == 'short':
            for item in testMap, machineMap, branchMap, platformMap:
                for id in item:
                    for key in item[id]:
                        if isinstance(item[id][key], set):
                            item[id][key] = list(item[id][key])
            result = {'stat': 'ok', 'from': 'db', 'testMap': testMap,
                      'machineMap': machineMap, 'branchMap': branchMap,
                      'platformMap': platformMap}
            return result

        result = {'stat': 'ok', "from": "db", 'tests': tests}
    else:
        #if we don't find any tests, we have a problem
        result = {'stat': 'fail', 'code': '103', 'message': 'No tests found'}
    return result
Example #41
0
def doSendResults(fo, setid, starttime, endtime, raw, graphby, extradata=None):
    s1 = ""
    s2 = ""
    if starttime:
        s1 = " AND time >= " + starttime
    if endtime:
        s2 = " AND time <= " + endtime
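    # note: starttime/endtime are concatenated into the SQL below, so callers are
    # expected to pass already-validated numeric strings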

    fo.write("{ resultcode: 0,")

    cur = db.cursor()
    if not graphby or graphby == "time":
        cur.execute("SELECT time, value FROM dataset_values WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
    else:
        getByDataResults(cur, setid, extradata, starttime, endtime)
    fo.write("results: [")
    for row in cur:
        if row[1] == 'nan':
            continue
        fo.write("%s,%s," % (row[0], row[1]))
    cur.close()
    fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT time, value FROM annotations WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
    fo.write("annotations: [")
    for row in cur:
        fo.write("%s,'%s'," % (row[0], row[1]))
    cur.close()
    fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT test FROM dataset_info WHERE id = ?", (setid,))
    row = cur.fetchone()
    test_name = row[0]

    cur.execute("SELECT id, extra_data FROM dataset_info WHERE test = ? and test_type = ?", (test_name, "baseline"))
    baselines = cur.fetchall()
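    # each baseline dataset for this test contributes one entry keyed by its
    # extra_data string, using a single stored value (LIMIT 1)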

    fo.write("baselines: {")
    for baseline in baselines:
        cur.execute("SELECT value FROM dataset_values WHERE dataset_id = ? LIMIT 1", (baseline[0],))
        row = cur.fetchone()
        fo.write("'%s': '%s'," % (baseline[1], row[0]))
    fo.write("},")
    cur.close()

    if raw:
        cur = db.cursor()
        cur.execute("SELECT time, data FROM dataset_extra_data WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
        fo.write("rawdata: [")
        for row in cur:
            blob = row[1]
            if "\\" in blob:
                blob = blob.replace("\\", "\\\\")
            if "'" in blob:
                blob = blob.replace("'", "\\'")
            fo.write("%s,'%s'," % (row[0], blob))
        cur.close()
        fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT avg(value), max(value), min(value) from dataset_values where dataset_id = ? " + s1 + s2 + " GROUP BY dataset_id", (setid,))
    fo.write("stats: [")
    for row in cur:
        fo.write("%s, %s, %s," % (row[0], row[1], row[2]))
    cur.close()
    fo.write("],")

    fo.write("}")
Example #42
0
def application(req):
    resp = Response(content_type='text/plain')
    link_format = "RETURN:%s:%.2f:%sshow=%d\n"
    link_str = ""
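    # each processed dataset appends one line of the form
    # "RETURN:<testname>:<value>:<graph url>show=<dataset id>" to the response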

    # incoming query string has the following parameters:
    # type=discrete|continuous
    #  indicates discrete vs. continuous dataset, defaults to continuous
    # value=n
    #  (REQUIRED) value to be recorded as the actual test value
    # tbox=foopy
    #  (REQUIRED) name of the tinderbox reporting the value (or rather, the name that is to be given this set of data)
    # testname=test
    #  (REQUIRED) the name of this test
    # data=rawdata
    #  raw data for this test
    # time=seconds
    #  time since the epoch in GMT of this test result; if omitted, the current time when the script runs is used
    # date
    #  date that the test was run - this is for discrete graphs
    # branch=1.8.1,1.8.0 or 1.9.0
    #  name of the branch that the build was generated for
    # branchid=id
    #  date of the build
    #  http://wiki.mozilla.org/MozillaQualityAssurance:Build_Ids

    # takes as input a file to parse, one comma-separated record per line in the format:
    # value,testname,tbox,time,date,branch,branchid,type,data

    # Create the DB schema if it doesn't already exist
    # XXX can pull out dataset_info.machine and dataset_info.{test,test_type} into two separate tables,
    # if we need to.

    # value,testname,tbox,time,date,branch,branchid,type,data
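    # a hypothetical example line (values are illustrative only):
    #   123.4,ts,qm-mini-001,1285795200,,1.9.0,2010092904,continuous,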

    fields = [
        "value", "testname", "tbox", "timeval", "date", "branch", "branchid",
        "type", "data"
    ]
    strFields = ["type", "data", "tbox", "testname", "branch", "branchid"]
    numFields = ["date", "timeval", "value"]
    d_ids = []
    all_ids = []
    all_types = []
    values = {}
    if 'filename' in req.POST:
        val = req.POST["filename"]
        if val.file:
            resp.write('found a file\n')
            for line in val.file:
                line = line.rstrip("\n\r")
                ## FIXME: not actually CSV:
                contents = line.split(',')
                #clear any previous content in the field variables - prevents values from one line leaking into the next
                for field in fields:
                    ## FIXME: just terrible, just terrible
                    values[field] = ''
                if len(contents) < 7:
                    raise exc.HTTPBadRequest("Incompatable file format")
                for field, content in zip(fields, contents):
                    ## FIXME: more terrible
                    values[field] = content
                for strField in strFields:
                    if strField not in values:
                        continue
                    if not checkString(values[strField]):
                        raise exc.HTTPBadRequest("Invalid string arg: ",
                                                 strField,
                                                 " '" + values[strField] + "'")
                for numField in numFields:
                    if numField not in values:
                        continue
                    if not checkNumber(values[numField]):
                        raise exc.HTTPBadRequest("Invalid string arg: ",
                                                 numField,
                                                 " '" + values[numField] + "'")

                #apply defaults and enforce the script's required arguments
                if (not values['type']):
                    values['type'] = "continuous"

                if (not values['timeval']):
                    values['timeval'] = int(time.time())

                if type == "discrete" and not values['date']:
                    raise exc.HTTPBadRequest("Bad args, need a valid date")

                if not values['value'] or not values['tbox'] or not values[
                        'testname']:
                    raise exc.HTTPBadRequest("Bad args")

                # figure out our dataset id
                setid = -1

                # Not a big fan of this while loop.  If something goes wrong with the select it will insert until the script times out.
                while setid == -1:
                    cur = db.cursor()
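                    # `<=>` is MySQL's NULL-safe equality operator, so rows where
                    # e.g. branch or date is NULL still match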
                    cur.execute(
                        "SELECT id FROM dataset_info WHERE type <=> ? AND machine <=> ? AND test <=> ? AND test_type <=> ? AND extra_data <=> ? AND branch <=> ? AND date <=> ? limit 1",
                        (values['type'], values['tbox'], values['testname'],
                         "perf", "branch=" + values['branch'],
                         values['branch'], values['date']))
                    res = cur.fetchall()
                    cur.close()

                    if len(res) == 0:
                        db.execute(
                            "INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
                            (values['type'], values['tbox'],
                             values['testname'], "perf",
                             "branch=" + values['branch'], values['branch'],
                             values['date']))
                    else:
                        setid = res[0][0]

                #determine if we've seen this set of data before
                if values['type'] == "discrete" and int(
                        values['timeval']) == 0:
                    cur = db.cursor()
                    cur.execute(
                        "SELECT dataset_id FROM dataset_values WHERE dataset_id = ? AND time = ?",
                        (setid, values['timeval']))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) != 0:
                        print "found a matching discrete data set"
                        db.execute(
                            "DELETE FROM dataset_values WHERE dataset_id = ?",
                            (setid, ))
                        db.execute(
                            "DELETE FROM dataset_branchinfo WHERE dataset_id = ?",
                            (setid, ))
                        db.execute(
                            "DELETE FROM dataset_extra_data WHERE dataset_id = ?",
                            (setid, ))
                        db.execute(
                            "DELETE FROM annotations WHERE dataset_id = ?",
                            (setid, ))
                elif values['type'] == "continuous":
                    cur = db.cursor()
                    cur.execute(
                        "SELECT dataset_id FROM dataset_values WHERE dataset_id = ? AND time = ?",
                        (setid, values['timeval']))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) != 0:
                        print "found a matching continuous data point"
                        db.execute(
                            "DELETE FROM dataset_values WHERE dataset_id = ? AND time = ?",
                            (setid, values['timeval']))
                        db.execute(
                            "DELETE FROM dataset_branchinfo WHERE dataset_id = ? AND time = ?",
                            (setid, values['timeval']))
                        db.execute(
                            "DELETE FROM dataset_extra_data WHERE dataset_id = ? AND time = ?",
                            (setid, values['timeval']))
                        db.execute(
                            "DELETE FROM annotations WHERE dataset_id = ? AND time = ?",
                            (setid, values['timeval']))

                db.execute(
                    "INSERT INTO dataset_values (dataset_id, time, value) VALUES (?,?,?)",
                    (setid, values['timeval'], values['value']))
                db.execute(
                    "INSERT INTO dataset_branchinfo (dataset_id, time, branchid) VALUES (?,?,?)",
                    (setid, values['timeval'], values['branchid']))
                if values.get('data'):
                    db.execute(
                        "INSERT INTO dataset_extra_data (dataset_id, time, data) VALUES (?,?,?)",
                        (setid, values['timeval'], values['data']))

                if values['type'] == "discrete":
                    if setid not in d_ids:
                        d_ids.append(setid)
                if setid not in all_ids:
                    all_ids.append(setid)
                    all_types.append(values['type'])

        for setid, t in zip(all_ids, all_types):
            cur = db.cursor()
            cur.execute(
                "SELECT MIN(time), MAX(time), test FROM dataset_values, dataset_info WHERE dataset_id = ? and id = dataset_id GROUP BY test",
                (setid, ))
            res = cur.fetchall()
            cur.close()
            tstart = res[0][0]
            tend = res[0][1]
            testname = res[0][2]
            if t == "discrete":
                link_str += (link_format % (
                    testname,
                    float(-1),
                    "graph.html#type=series&",
                    setid,
                ))
            else:
                tstart = 0
                link_str += (link_format % (
                    testname,
                    float(-1),
                    "graph.html#",
                    setid,
                ))

        #this code auto-adds a set of continuous data for each series of discrete data sets - creating an overview of the data
        # generated by a given test (matched by machine, test, test_type, extra_data and branch)
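        # the overview dataset reuses the machine/branch of the discrete set and is
        # stored under the test name with an "_avg" suffix (see below)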
        for setid in d_ids:
            cur = db.cursor()
            #throw out the largest value and take the average of the rest
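            # note: if every value equals the max (e.g. a single data point), the
            # subquery filters out all rows and AVG() returns NULL, hence the
            # `avg is not None` check below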
            cur.execute(
                "SELECT AVG(value) FROM dataset_values WHERE dataset_id = ? and value != (SELECT MAX(value) from dataset_values where dataset_id = ?)",
                (setid, setid))
            res = cur.fetchall()
            cur.close()
            avg = res[0][0]
            if avg is not None:
                cur = db.cursor()
                cur.execute(
                    "SELECT machine, test, test_type, extra_data, branch, date FROM dataset_info WHERE id = ?",
                    (setid, ))
                res = cur.fetchall()
                cur.close()
                tbox = res[0][0]
                testname = res[0][1]
                test_type = res[0][2]
                extra_data = res[0][3]
                branch = str(res[0][4])
                timeval = res[0][5]
                date = ''
                cur = db.cursor()
                cur.execute(
                    "SELECT branchid FROM dataset_branchinfo WHERE dataset_id = ?",
                    (setid, ))
                res = cur.fetchall()
                cur.close()
                branchid = res[0][0]
                dsetid = -1
                while dsetid == -1:
                    cur = db.cursor()
                    cur.execute(
                        "SELECT id from dataset_info where type = ? AND machine <=> ? AND test = ? AND test_type = ? AND extra_data = ? AND branch <=> ? AND date <=> ? limit 1",
                        ("continuous", tbox, testname + "_avg", "perf",
                         "branch=" + branch, branch, date))
                    res = cur.fetchall()
                    cur.close()
                    if len(res) == 0:
                        db.execute(
                            "INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
                            ("continuous", tbox, testname + "_avg", "perf",
                             "branch=" + branch, branch, date))
                    else:
                        dsetid = res[0][0]
                cur = db.cursor()
                cur.execute(
                    "SELECT * FROM dataset_values WHERE dataset_id=? AND time <=> ? limit 1",
                    (dsetid, timeval))
                res = cur.fetchall()
                cur.close()
                if len(res) == 0:
                    db.execute(
                        "INSERT INTO dataset_values (dataset_id, time, value) VALUES (?,?,?)",
                        (dsetid, timeval, avg))
                    db.execute(
                        "INSERT INTO dataset_branchinfo (dataset_id, time, branchid) VALUES (?,?,?)",
                        (dsetid, timeval, branchid))
                else:
                    db.execute(
                        "UPDATE dataset_values SET value=? WHERE dataset_id=? AND time <=> ?",
                        (avg, dsetid, timeval))
                    db.execute(
                        "UPDATE dataset_branchinfo SET branchid=? WHERE dataset_id=? AND time <=> ?",
                        (branchid, dsetid, timeval))
                cur = db.cursor()
                cur.execute(
                    "SELECT MIN(time), MAX(time) FROM dataset_values WHERE dataset_id = ?",
                    (dsetid, ))
                res = cur.fetchall()
                cur.close()
                tstart = 0
                tend = res[0][1]
                link_str += (link_format % (
                    testname,
                    float(avg),
                    "graph.html#",
                    dsetid,
                ))

        db.commit()
    resp.write('Inserted.\n')
    resp.write(link_str)
Example #43
0
def doSendResults(fo, setid, starttime, endtime, raw, graphby, extradata=None):
    s1 = ""
    s2 = ""
    if starttime:
        s1 = " AND time >= " + starttime
    if endtime:
        s2 = " AND time <= " + endtime

    fo.write("{ resultcode: 0,")

    cur = db.cursor()
    if not graphby or graphby == "time":
        cur.execute("SELECT time, value FROM dataset_values WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
    else:
        getByDataResults(cur, setid, extradata, starttime, endtime)
    fo.write("results: [")
    for row in cur:
        if row[1] == 'nan':
            continue
        fo.write("%s,%s," % (row[0], row[1]))
    cur.close()
    fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT time, value FROM annotations WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
    fo.write("annotations: [")
    for row in cur:
        fo.write("%s,'%s'," % (row[0], row[1]))
    cur.close()
    fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT test FROM dataset_info WHERE id = ?", (setid,))
    row = cur.fetchone()
    test_name = row[0]

    cur.execute("SELECT id, extra_data FROM dataset_info WHERE test = ? and test_type = ?", (test_name, "baseline"))
    baselines = cur.fetchall()

    fo.write("baselines: {")
    for baseline in baselines:
        cur.execute("SELECT value FROM dataset_values WHERE dataset_id = ? LIMIT 1", (baseline[0],))
        row = cur.fetchone()
        fo.write("'%s': '%s'," % (baseline[1], row[0]))
    fo.write("},")
    cur.close()

    if raw:
        cur = db.cursor()
        cur.execute("SELECT time, data FROM dataset_extra_data WHERE dataset_id = ? " + s1 + s2 + " ORDER BY time", (setid,))
        fo.write("rawdata: [")
        for row in cur:
            blob = row[1]
            if "\\" in blob:
                blob = blob.replace("\\", "\\\\")
            if "'" in blob:
                blob = blob.replace("'", "\\'")
            fo.write("%s,'%s'," % (row[0], blob))
        cur.close()
        fo.write("],")

    cur = db.cursor()
    cur.execute("SELECT avg(value), max(value), min(value) from dataset_values where dataset_id = ? " + s1 + s2 + " GROUP BY dataset_id", (setid,))
    fo.write("stats: [")
    for row in cur:
        fo.write("%s, %s, %s," % (row[0], row[1], row[2]))
    cur.close()
    fo.write("],")

    fo.write("}")
Example #44
0
    test_type = "perf"
else:
    test_type = "baseline"

if len(args) != 4:
    parser.print_help()
    sys.exit()

(testname, tbox, mtype, branch) = args[0:4]
branchid = "xxxxxxxx"
date = ""
graph_type = "continuous"

setid = -1
while setid == -1:
    cur = db.cursor()
    if options.baseline:
        cur.execute(
            "SELECT id FROM dataset_info WHERE type = ? AND machine <=> ? AND test <=> ? AND test_type=? AND extra_data <=> ? AND branch <=> ?",
            (graph_type, tbox, testname, test_type, options.baseline, branch))
    else:
        cur.execute(
            "SELECT id FROM dataset_info WHERE type = ? AND machine <=> ? AND test <=> ? AND test_type=? AND branch<=>? AND extra_data IS NULL",
            (graph_type, tbox, testname, test_type, branch))
    res = cur.fetchall()
    cur.close()

    if len(res) == 0:
        db.execute(
            "INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
            (graph_type, tbox, testname, test_type, options.baseline, branch,
             date))
    else:
        setid = res[0][0]
Example #45
0
def update_valid_test_combinations(reporter=None):
    """Updates the list of valid test combinations"""
    sql = """SELECT last_updated FROM valid_test_combinations_updated"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    last_updated = cursor.fetchone()
    if not last_updated:
        last_updated = 0
    else:
        last_updated = last_updated['last_updated']
    existing_combos = set()
    for row in get_test_combos():
        existing_combos.add((row['test_id'], row['os_id'], row['branch_id']))
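    # only scan runs newer than the last recorded update; older runs are assumed
    # to already be reflected in valid_test_combinations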
    sql = """
    SELECT test_runs.test_id AS test_id,
           machines.os_id AS os_id,
           builds.branch_id AS branch_id,
           test_runs.date_run AS date_run
    FROM test_runs, machines, builds
    WHERE machines.id = test_runs.machine_id
          AND test_runs.build_id = builds.id
          AND machines.is_active
          AND test_runs.date_run >= %s
    ORDER BY test_runs.date_run
    LIMIT 100000
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (last_updated, ))
    new_last_updated = last_updated
    if reporter:
        if last_updated:
            reporter('Updating combos since %s' % last_updated)
        else:
            reporter('Updating combos from scratch')
    count = 0
    for row in cursor.fetchall():
        count += 1
        if reporter and (not count % 1000):
            reporter('Read %s records' % count)
        if not count % 10000:
            # Just update every so often
            update_combos_last_updated(new_last_updated)
            db.commit()
        try:
            key = (row['test_id'], row['os_id'], row['branch_id'])
            if key not in existing_combos:
                sql = """
                INSERT INTO valid_test_combinations (test_id, os_id, branch_id)
                VALUES (%s, %s, %s)
                """
                cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
                cursor.execute(sql, key)
                existing_combos.add(key)
        except:
            # This way if things are interrupted we can still record our progress
            if new_last_updated > last_updated:
                if reporter:
                    reporter('Exception; updated last_updated to %s' %
                             new_last_updated)
                try:
                    update_combos_last_updated(new_last_updated)
                    db.commit()
                except:
                    pass
            raise
        new_last_updated = max(row['date_run'], new_last_updated)
    if reporter:
        reporter(
            'Finished completely (%s items), updating last_updated to %s' %
            (count, new_last_updated))
    update_combos_last_updated(new_last_updated)
    db.commit()
Example #46
0
    test_type = "perf"
else:
    test_type = "baseline"

if len(args) != 4:
    parser.print_help()
    sys.exit()

(testname, tbox, mtype, branch) = args[0:4]
branchid = "xxxxxxxx"
date = ""
graph_type = "continuous"

setid = -1
while setid == -1:
    cur = db.cursor()
    if options.baseline:
        cur.execute("SELECT id FROM dataset_info WHERE type = ? AND machine <=> ? AND test <=> ? AND test_type=? AND extra_data <=> ? AND branch <=> ?",
            (graph_type, tbox, testname, test_type, options.baseline, branch))
    else:
        cur.execute("SELECT id FROM dataset_info WHERE type = ? AND machine <=> ? AND test <=> ? AND test_type=? AND branch<=>? AND extra_data IS NULL",
            (graph_type, tbox, testname, test_type, branch))
    res = cur.fetchall()
    cur.close()

    if len(res) == 0:
        db.execute("INSERT INTO dataset_info (type, machine, test, test_type, extra_data, branch, date) VALUES (?,?,?,?,?,?,?)",
                   (graph_type, tbox, testname, test_type, options.baseline, branch, date))
    else:
        setid = res[0][0]
Example #47
0
def getTests(id, attribute, req):
    if attribute == 'short':
        result = getTestOptions()
        result['stat'] = 'ok'
        result['from'] = 'db'
        return result

    sql = """SELECT DISTINCT
                tests.id,
                tests.pretty_name AS test_name,
                machines.name as machine_name,
                machines.id as machine_id,
                branches.name AS branch_name,
                branches.id AS branch_id,
                os_list.id AS os_id ,
                os_list.name AS os_name
            FROM
                tests INNER JOIN test_runs ON (tests.id = test_runs.test_id)
                    INNER JOIN machines ON (machines.id = test_runs.machine_id)
                        INNER JOIN os_list ON (machines.os_id = os_list.id)
                            INNER JOIN builds ON (test_runs.build_id = builds.id)
                                INNER JOIN branches on (builds.branch_id = branches.id)
            WHERE machines.is_active <> 0
            ORDER BY branches.id, machines.id"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    tests = []
    #check the row count before fetching the rows
    if attribute == 'short':
        testMap = {}
        branchMap = {}
        platformMap = {}
        machineMap = {}
    if cursor.rowcount > 0:
        rows = cursor.fetchall()

        for row in rows:
            if attribute == 'short':
                if row['id'] not in testMap:
                    testMap[row['id']] = {
                        'name': row['test_name'],
                        'branch': set(),
                        'platform': set(),
                        'machine': set(),
                    }
                testMap[row['id']]['branch'].add(row['branch_id'])
                testMap[row['id']]['platform'].add(row['os_id'])
                testMap[row['id']]['machine'].add(row['machine_id'])
                if row['branch_id'] not in branchMap:
                    branchMap[row['branch_id']] = {
                        'name': row['branch_name'],
                        'test': set(),
                        'platform': set(),
                        'machine': set(),
                    }
                branchMap[row['branch_id']]['test'].add(row['id'])
                branchMap[row['branch_id']]['platform'].add(row['os_id'])
                branchMap[row['branch_id']]['machine'].add(row['machine_id'])
                if row['os_id'] not in platformMap:
                    platformMap[row['os_id']] = {
                        'name': row['os_name'],
                        'test': set(),
                        'branch': set(),
                        'machine': set(),
                    }
                platformMap[row['os_id']]['test'].add(row['id'])
                platformMap[row['os_id']]['branch'].add(row['branch_id'])
                platformMap[row['os_id']]['machine'].add(row['machine_id'])
                if row['machine_id'] not in machineMap:
                    machineMap[row['machine_id']] = {
                        'name': row['machine_name'],
                        'test': set(),
                        'platform': set(),
                        'branch': set(),
                    }
                machineMap[row['machine_id']]['test'].add(row['id'])
                machineMap[row['machine_id']]['branch'].add(row['branch_id'])
                machineMap[row['machine_id']]['platform'].add(row['os_id'])
                continue

            tests.append({
                'id': row['id'],
                'name': row['test_name'],
                'branch': {
                    'name': row['branch_name'],
                    'id': row['branch_id']
                },
                'platform': {
                    'name': row['os_name'],
                    'id': row['os_id']
                },
                'machine': {
                    'name': row['machine_name'],
                    'id': row['machine_id']
                }
            })

        if attribute == 'short':
            for item in testMap, machineMap, branchMap, platformMap:
                for id in item:
                    for key in item[id]:
                        if isinstance(item[id][key], set):
                            item[id][key] = list(item[id][key])
            result = {
                'stat': 'ok',
                'from': 'db',
                'testMap': testMap,
                'machineMap': machineMap,
                'branchMap': branchMap,
                'platformMap': platformMap
            }
            return result

        result = {'stat': 'ok', "from": "db", 'tests': tests}
    else:
        #if we don't find any tests, we have a problem
        result = {'stat': 'fail', 'code': '103', 'message': 'No tests found'}
    return result
Example #48
0
File: api.py Project: jfsiii/graphs
def update_valid_test_combinations(reporter=None):
    """Updates the list of valid test combinations"""
    sql = """SELECT last_updated FROM valid_test_combinations_updated"""
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql)
    last_updated = cursor.fetchone()
    if not last_updated:
        last_updated = 0
    else:
        last_updated = last_updated['last_updated']
    existing_combos = set()
    for row in get_test_combos():
        existing_combos.add((row['test_id'], row['os_id'], row['branch_id']))
    sql = """
    SELECT test_runs.test_id AS test_id,
           machines.os_id AS os_id,
           builds.branch_id AS branch_id,
           test_runs.date_run AS date_run
    FROM test_runs, machines, builds
    WHERE machines.id = test_runs.machine_id
          AND test_runs.build_id = builds.id
          AND machines.is_active
          AND test_runs.date_run >= %s
    ORDER BY test_runs.date_run
    LIMIT 100000
    """
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    cursor.execute(sql, (last_updated,))
    new_last_updated = last_updated
    if reporter:
        if last_updated:
            reporter('Updating combos since %s' % last_updated)
        else:
            reporter('Updating combos from scratch')
    count = 0
    for row in cursor.fetchall():
        count += 1
        if reporter and (not count % 1000):
            reporter('Read %s records' % count)
        if not count % 10000:
            # Just update every so often
            update_combos_last_updated(new_last_updated)
            db.commit()
        try:
            key = (row['test_id'], row['os_id'], row['branch_id'])
            if key not in existing_combos:
                sql = """
                INSERT INTO valid_test_combinations (test_id, os_id, branch_id)
                VALUES (%s, %s, %s)
                """
                cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
                cursor.execute(sql, key)
                existing_combos.add(key)
        except:
            # This way if things are interrupted we can still record our progress
            if new_last_updated > last_updated:
                if reporter:
                    reporter('Exception; updated last_updated to %s'
                             % new_last_updated)
                try:
                    update_combos_last_updated(new_last_updated)
                    db.commit()
                except:
                    pass
            raise
        new_last_updated = max(row['date_run'], new_last_updated)
    if reporter:
        reporter('Finished completely (%s items), updating last_updated to %s'
                 % (count, new_last_updated))
    update_combos_last_updated(new_last_updated)
    db.commit()