Ejemplo n.º 1
0
    def __init__(self, config):
        """Configure subunit2sql from *config* and open a DB session.

        :param config: ConfigParser-style object; the ``subunit_uri``
            option is read from its ``subunit`` section.
        """
        self.log = logging.getLogger("gerrkins")
        # Connection string for the subunit2sql results database.
        self.db_uri = config.get('subunit', 'subunit_uri')

        # Register subunit2sql's oslo.config options (no CLI args), then
        # point its database.connection setting at our URI before the
        # session is created — get_session() reads that setting.
        shell.parse_args([])
        shell.CONF.set_override('connection', self.db_uri, group='database')
        self.session = api.get_session()
Ejemplo n.º 2
0
def main():
    """Generate a subunit stream into a temp file and load it into testr."""
    init_testr()
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')
    # The temp file only needs to live for the duration of the load.
    with tempfile.NamedTemporaryFile() as stream:
        generate_subunit_stream(stream)
        populate_testrepository(stream.name)
def main():
    """Dump every stored run as a preseed ``.subunit`` stream file."""
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')
    db_session = api.get_session()
    runs = get_run_ids(db_session)
    db_session.close()
    target_dir = os.path.join(TEMPEST_PATH, 'preseed-streams')
    os.mkdir(target_dir)
    for run_id in runs:
        out_name = os.path.join(target_dir, run_id + '.subunit')
        with open(out_name, 'w') as out:
            write_subunit.sql2subunit(run_id, out)
Ejemplo n.º 4
0
def main():
    """Write one preseed subunit file per run recorded in the database."""
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')
    session = api.get_session()
    ids = get_run_ids(session)
    session.close()
    preseed_dir = os.path.join(TEMPEST_PATH, 'preseed-streams')
    os.mkdir(preseed_dir)
    for rid in ids:
        stream_path = os.path.join(preseed_dir, '%s.subunit' % rid)
        with open(stream_path, 'w') as stream:
            write_subunit.sql2subunit(rid, stream)
 def __init__(self, gearman_worker, filters, subunit2sql_conf, mqtt=None):
     """Set up a subunit retriever worker.

     :param gearman_worker: gearman worker used to fetch subunit jobs.
     :param filters: filters to apply to retrieved subunit streams.
     :param subunit2sql_conf: path to the subunit2sql config file.
     :param mqtt: optional MQTT client; ``None`` disables publishing.
     """
     super(SubunitRetriever, self).__init__()
     self.gearman_worker = gearman_worker
     self.filters = filters
     # Initialize subunit2sql settings
     self.config = subunit2sql_conf
     shell.cli_opts()
     extensions = shell.get_extensions()
     # No CLI args; all options come from the config file instead.
     shell.parse_args([], [self.config])
     self.extra_targets = shell.get_targets(extensions)
     self.mqtt = mqtt
Ejemplo n.º 6
0
def main():
    """Start the jenkins2sql web app with settings from the config file.

    Reads host, port and db_uri from etc/jenkins2sql.conf, defaulting to
    127.0.0.1:5000 when host/port are absent.
    """
    config = ConfigParser.ConfigParser()
    config.read(['etc/jenkins2sql.conf'])
    # BUG FIX: ConfigParser.get() takes no positional default — the old
    # call passed '127.0.0.1' as the `raw` flag, so a missing 'host'
    # option raised NoOptionError instead of falling back.
    try:
        host = config.get('default', 'host')
    except ConfigParser.NoOptionError:
        host = '127.0.0.1'
    try:
        port = config.getint('default', 'port')
    except ConfigParser.NoOptionError:
        port = 5000
    db_uri = config.get('default', 'db_uri')
    shell.cli_opts()
    shell.parse_args([])
    shell.CONF.set_override('connection', db_uri, group='database')
    app.run(debug=True, host=host, port=port)
def main():
    """Preseed subunit streams for the ten latest successful neutron runs."""
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')
    session = api.get_session()
    recent = api.get_recent_successful_runs_by_run_metadata(
        'build_name', 'gate-tempest-dsvm-neutron-full',
        num_runs=10, session=session)
    session.close()
    dest = os.path.join(TEMPEST_PATH, 'preseed-streams')
    if not os.path.isdir(dest):
        os.mkdir(dest)
    for recent_run in recent:
        stream_path = os.path.join(dest, recent_run.uuid + '.subunit')
        with open(stream_path, 'w') as stream:
            write_subunit.sql2subunit(recent_run.uuid, stream)
Ejemplo n.º 8
0
def main():
    """Write preseed streams for the last ten successful neutron-full runs."""
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')
    db = api.get_session()
    successful = api.get_recent_successful_runs_by_run_metadata(
        'build_name',
        'gate-tempest-dsvm-neutron-full',
        num_runs=10,
        session=db)
    db.close()
    out_dir = os.path.join(TEMPEST_PATH, 'preseed-streams')
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    for run in successful:
        target = os.path.join(out_dir, run.uuid + '.subunit')
        with open(target, 'w') as out:
            write_subunit.sql2subunit(run.uuid, out)
Ejemplo n.º 9
0
def main():
    """Plot per-project average and max gate-job counts as histograms."""
    shell.parse_args(sys.argv)
    session = api.get_session()
    per_project = get_jobs_per_project(session)
    stats = []
    for name, changes in per_project.items():
        counts = list(changes.values())
        stats.append((name, np.mean(counts), np.amax(counts)))

    # Order projects by their average job count (ascending).
    stats.sort(key=lambda row: row[1])
    labels = [name.split("/")[1] for name, _, _ in stats]
    avgs = [avg for _, avg, _ in stats]
    peaks = [peak for _, _, peak in stats]
    plot_histogram(labels, avgs, 'job_per_changes',
                   'Average gate jobs per project')
    plot_histogram(labels, peaks, 'max_job_per_changes',
                   'Max gate jobs per project')
Ejemplo n.º 10
0
def main():
    """CLI entry point: write a subunit stream for one run or an average run.

    Exactly one of CONF.run_id / CONF.average must be set; returns 1 for
    invalid option combinations. Writes to CONF.out_path when given,
    otherwise to stdout.
    """
    cli_opts()
    shell.parse_args(sys.argv)
    if not CONF.run_id and not CONF.average:
        print("You must specify either a run_id or generate an average run"
              " stream")
        return 1
    if CONF.run_id and CONF.average:
        print("You can either generate a stream for a run_id or an average run"
              " stream, but not both.")
        return 1
    if CONF.out_path:
        fd = open(CONF.out_path, "w")
    else:
        fd = sys.stdout
    # FIX: close the output file even if stream generation raises;
    # previously an exception leaked the open file handle.
    try:
        if not CONF.average:
            sql2subunit(CONF.run_id, fd)
        else:
            avg_sql2subunit(fd)
    finally:
        # Only close files we opened ourselves — never stdout.
        if CONF.out_path:
            fd.close()
Ejemplo n.º 11
0
def main():
    """Preseed subunit streams from the DB, falling back to static copies.

    On any failure while fetching/writing preseeds from the database, the
    static .subunit files shipped in /opt/nodepool-scripts/ are copied
    into the preseed directory instead (deliberate best-effort behavior).
    """
    shell.parse_args([])
    shell.CONF.set_override('connection', DB_URI, group='database')

    preseed_path = os.path.join(TEMPEST_PATH, 'preseed-streams')
    os.mkdir(preseed_path)
    try:
        session = api.get_session()
        run_ids = api.get_recent_successful_runs(num_runs=10, session=session)
        session.close()
        for run in run_ids:
            with open(os.path.join(preseed_path, run + '.subunit'), 'w') as fd:
                write_subunit.sql2subunit(run, fd)
    # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception so the process can still be interrupted.
    except Exception:
        # copy the static preseed files if failed to get preseeds from logstash
        src_dir = "/opt/nodepool-scripts/"
        # `name` instead of `file` — avoid shadowing the builtin.
        for name in os.listdir(src_dir):
            if name.endswith(".subunit"):
                shutil.copy(os.path.join(src_dir, name), preseed_path)
Ejemplo n.º 12
0
def main():
    """Write a subunit stream for CONF.run_id or an averaged run.

    Exactly one of CONF.run_id / CONF.average must be set; returns 1 for
    invalid option combinations. Writes to CONF.out_path when given,
    otherwise to stdout.
    """
    cli_opts()
    shell.parse_args(sys.argv)
    if not CONF.run_id and not CONF.average:
        print('You must specify either a run_id or generate an average run'
              ' stream')
        return 1
    if CONF.run_id and CONF.average:
        print('You can either generate a stream for a run_id or an average run'
              ' stream, but not both.')
        return 1
    if CONF.out_path:
        fd = open(CONF.out_path, 'w')
    else:
        fd = sys.stdout
    # FIX: guarantee the output file is closed even when stream
    # generation raises; previously an exception leaked the handle.
    try:
        if not CONF.average:
            sql2subunit(CONF.run_id, fd)
        else:
            avg_sql2subunit(fd)
    finally:
        # Only close files we opened ourselves — never stdout.
        if CONF.out_path:
            fd.close()
Ejemplo n.º 13
0
def main():
    """Run the selected graph subcommand and print where it was saved."""
    cli_opts()
    shell.parse_args(sys.argv)
    CONF.command.func()
    message = 'Graph saved at: %s' % CONF.output
    print(message)
def main():
    """Initialize subunit2sql's oslo.config options with no CLI arguments."""
    shell.parse_args([])
 def __init__(self, subunitq, subunit2sql_conf):
     """Store the work queue and initialize subunit2sql settings.

     :param subunitq: queue of subunit streams to process.
     :param subunit2sql_conf: path to the subunit2sql config file.
     """
     self.subunitq = subunitq
     self.config = subunit2sql_conf
     # Initialize subunit2sql settings
     shell.cli_opts()
     shell.parse_args([], [self.config])
Ejemplo n.º 16
0
def main():
    """Register CLI options, dispatch the chosen graph command, and report."""
    cli_opts()
    shell.parse_args(sys.argv)
    # Invoke the handler bound to the selected subcommand.
    CONF.command.func()
    saved_to = CONF.output
    print('Graph saved at: %s' % saved_to)
Ejemplo n.º 17
0
def main():
    """Build the run-time series for the test selected on the command line."""
    cli_opts()
    shell.parse_args(sys.argv)
    run_times = generate_series(CONF.test_id)  # noqa: F841 — kept as in original
def main():
    """Initialize subunit2sql's oslo.config options with no CLI arguments."""
    shell.parse_args([])
 def __init__(self, subunitq, subunit2sql_conf):
     """Store the work queue and initialize subunit2sql settings.

     :param subunitq: queue of subunit streams to process.
     :param subunit2sql_conf: path to the subunit2sql config file.
     """
     self.subunitq = subunitq
     self.config = subunit2sql_conf
     # Initialize subunit2sql settings
     shell.cli_opts()
     shell.parse_args([], [self.config])