def main():
    # Validate that all required configuration is present; the values that are
    # not used directly here are still checked by get_env().
    get_env('INFRABOX_VERSION')
    get_env('INFRABOX_DATABASE_DB')
    get_env('INFRABOX_DATABASE_USER')
    get_env('INFRABOX_DATABASE_PASSWORD')
    get_env('INFRABOX_DATABASE_HOST')
    get_env('INFRABOX_DATABASE_PORT')
    get_env('INFRABOX_GERRIT_PORT')
    get_env('INFRABOX_GERRIT_HOSTNAME')
    get_env('INFRABOX_GERRIT_USERNAME')
    get_env('INFRABOX_GERRIT_KEY_FILENAME')
    cluster_name = get_env('INFRABOX_CLUSTER_NAME')

    conn = connect_db()
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    logger.info("Connected to database")

    elect_leader(conn, 'gerrit-review', cluster_name)

    curs = conn.cursor()
    curs.execute("LISTEN job_update;")
    logger.info("Waiting for job updates")

    while True:
        if not is_active(conn, cluster_name):
            logger.info("cluster is inactive or disabled, sleeping")
            time.sleep(5)
            continue

        is_leader(conn, 'gerrit-review', cluster_name)

        # Executing a statement makes psycopg2 read pending NOTIFY messages
        # into conn.notifies; drain the queue and handle each job update.
        curs.execute('commit;')
        while conn.notifies:
            notify = conn.notifies.pop(0)
            logger.debug("got notify: %s", notify.payload)
            handle_job_update(conn, json.loads(notify.payload))

        time.sleep(3)
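# For context: the loop above relies on PostgreSQL LISTEN/NOTIFY. Executing a
# statement (or polling the connection) lets psycopg2 collect pending
# notifications into conn.notifies. Below is a minimal, self-contained sketch
# of that pattern using conn.poll() and a select()-based wait; the connection
# parameters are illustrative placeholders, not InfraBox's actual settings.
import json
import select

import psycopg2
import psycopg2.extensions

def listen_for_job_updates():
    conn = psycopg2.connect(host="localhost", dbname="infrabox",
                            user="infrabox", password="secret")
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

    curs = conn.cursor()
    curs.execute("LISTEN job_update;")

    while True:
        # Wait up to 5 seconds for the connection socket to become readable.
        if select.select([conn], [], [], 5) == ([], [], []):
            continue
        # Let psycopg2 read pending messages and fill conn.notifies.
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print("job update:", json.loads(notify.payload))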
def main():
    # Validate required configuration; the Gerrit settings are read into
    # variables for the SSH connection below.
    get_env('INFRABOX_VERSION')
    get_env('INFRABOX_DATABASE_DB')
    get_env('INFRABOX_DATABASE_USER')
    get_env('INFRABOX_DATABASE_PASSWORD')
    get_env('INFRABOX_DATABASE_HOST')
    get_env('INFRABOX_DATABASE_PORT')
    gerrit_port = int(get_env('INFRABOX_GERRIT_PORT'))
    gerrit_hostname = get_env('INFRABOX_GERRIT_HOSTNAME')
    gerrit_username = get_env('INFRABOX_GERRIT_USERNAME')
    gerrit_key_filename = get_env('INFRABOX_GERRIT_KEY_FILENAME')
    cluster_name = get_env('INFRABOX_CLUSTER_NAME')

    conn = connect_db()
    logger.info("Connected to db")

    # Open an SSH connection to Gerrit and keep it alive.
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(username=gerrit_username,
                   hostname=gerrit_hostname,
                   port=gerrit_port,
                   key_filename=gerrit_key_filename)
    client.get_transport().set_keepalive(60)
    logger.info("Connected to gerrit")

    _, stdout, _ = client.exec_command('gerrit stream-events')
    logger.info("Waiting for stream-events")

    for line in stdout:
        # Process each event at most twice: once normally, and once more after
        # reconnecting to the database on an operational error.
        for _ in range(0, 2):
            try:
                event = json.loads(line)

                if event['type'] in ("patchset-created",
                                     "draft-published",
                                     "change-merged"):
                    logger.debug(json.dumps(event, indent=4))

                    if not is_active(conn, cluster_name):
                        logger.info("cluster is inactive or disabled, skipping")
                        break

                    handle_patchset_created(conn, event)

                break
            except psycopg2.IntegrityError:
                # The commit was already recorded; skip it and refresh the
                # database connection.
                logger.info('duplicated key, skip this commit')
                try:
                    conn.close()
                except Exception:
                    pass
                conn = connect_db()
                logger.info("reconnected to db")
                break
            except psycopg2.OperationalError:
                # Lost the database connection; reconnect and retry the event.
                try:
                    conn.close()
                except Exception:
                    pass
                conn = connect_db()
                logger.info("reconnected to db")
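# For reference: `gerrit stream-events` emits one JSON object per line. The
# sketch below shows roughly what a patchset-created event looks like and how
# the type filter above applies to it. Field names loosely follow Gerrit's
# documented stream-events format; all values are invented placeholders, not
# captured output from a real server.
example_event = {
    "type": "patchset-created",
    "change": {
        "project": "example/project",
        "branch": "master",
        "id": "I8473b95934b5732ac55d26311a706c9c2bde9940",
        "number": "4711",
        "subject": "Fix a bug",
        "url": "https://gerrit.example.com/4711",
    },
    "patchSet": {
        "number": "1",
        "revision": "0123456789abcdef0123456789abcdef01234567",
        "ref": "refs/changes/11/4711/1",
    },
}

if example_event["type"] in ("patchset-created", "draft-published", "change-merged"):
    # In the service above, this is the point where handle_patchset_created()
    # would be called with the parsed event.
    print("would trigger builds for", example_event["change"]["project"])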