def run(self):
    """Main scheduler loop: reconnect, handle pending work, sleep, repeat."""
    self.logger.info("Starting scheduler")

    while True:
        # A fresh autocommit connection per cycle keeps the loop resilient
        # to dropped connections.
        self.conn = connect_db()
        self.conn.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

        self.handle()

        self.conn.close()
        time.sleep(1)
def main():
    """Validate required settings, then run database migrations."""
    # Fail fast when any required environment variable is missing.
    for name in ('INFRABOX_SERVICE',
                 'INFRABOX_VERSION',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT'):
        get_env(name)

    conn = connect_db()
    migrate_db(conn)
    conn.close()
def before_request():
    """Attach a request-scoped DB handle to g and register a release hook."""
    g.db = DB(connect_db())

    def release_db():
        # Close the request-scoped connection if one is still attached.
        db = getattr(g, 'db', None)
        if db:
            db.close()
            g.db = None

    g.release_db = release_db
def main():
    """Wait for the API server and database, then run the test suite.

    Polls INFRABOX_ROOT_URL once per second until the server answers with
    200 or 404 (either means it is up), then writes JUnit XML results.
    """
    root_url = os.environ['INFRABOX_ROOT_URL']
    print("ROOT_URL: %s" % root_url)

    while True:
        time.sleep(1)

        try:
            r = requests.get(root_url)

            if r.status_code in (200, 404):
                break

            print(r.text)
        except requests.exceptions.RequestException:
            # Server not reachable yet; keep polling. The original bare
            # `except:` also swallowed KeyboardInterrupt/SystemExit.
            pass

        print("Server not yet ready")

    connect_db()  # Wait for DB

    with open('results.xml', 'wb') as output:
        unittest.main(testRunner=xmlrunner.XMLTestRunner(output=output))
def main():
    """Entry point for the scheduler service."""
    # Command-line arguments.
    parser = argparse.ArgumentParser(prog="scheduler.py")
    parser.add_argument("--docker-registry", required=True, type=str,
                        help="Host for the registry to use")
    parser.add_argument("--loglevel", choices=['debug', 'info', 'warning'],
                        help="Log level")
    parser.add_argument("--tag", required=True, type=str,
                        help="Image tag to use for internal images")
    args = parser.parse_args()

    # Validate that every required setting is present.
    for name in ('INFRABOX_SERVICE',
                 'INFRABOX_VERSION',
                 'INFRABOX_CLUSTER_NAME',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT',
                 'INFRABOX_ROOT_URL',
                 'INFRABOX_GENERAL_DONT_CHECK_CERTIFICATES',
                 'INFRABOX_GENERAL_WORKER_NAMESPACE',
                 'INFRABOX_JOB_MAX_OUTPUT_SIZE',
                 'INFRABOX_JOB_MOUNT_DOCKER_SOCKET',
                 'INFRABOX_JOB_SECURITY_CONTEXT_CAPABILITIES_ENABLED'):
        get_env(name)

    if get_env('INFRABOX_GERRIT_ENABLED') == 'true':
        for name in ('INFRABOX_GERRIT_USERNAME',
                     'INFRABOX_GERRIT_HOSTNAME',
                     'INFRABOX_GERRIT_PORT'):
            get_env(name)

    # Read the in-cluster service account token from the filesystem.
    with open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r') as f:
        args.token = str(f.read()).strip()

    args.api_server = "https://" + get_env('INFRABOX_KUBERNETES_MASTER_HOST') \
                      + ":" + get_env('INFRABOX_KUBERNETES_MASTER_PORT')

    os.environ['REQUESTS_CA_BUNDLE'] = \
        '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'

    conn = connect_db()
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

    scheduler = Scheduler(conn, args)
    scheduler.run()
def setUp(self):
    """Create a test client and reset the auth_token table."""
    self.app = server.app.test_client()
    server.app.testing = True

    # Start every test from an empty auth_token table.
    self.conn = connect_db()
    cursor = self.conn.cursor()
    cursor.execute('TRUNCATE auth_token')
    cursor.close()
    self.conn.commit()

    # Fixed fixture ids shared by the individual tests.
    self.project_id = 'a514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.project_token = 'bb14af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.job_id = 'c514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
def __listen(socketio):
    """Forward Postgres 'job_update' notifications to socketio handlers."""
    conn = connect_db()
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    cur = conn.cursor()
    cur.execute("LISTEN job_update")

    while True:
        # Cooperatively block until the connection becomes readable.
        trampoline(conn, read=True)
        conn.poll()

        while conn.notifies:
            notification = conn.notifies.pop()
            payload = json.loads(notification.payload)
            socketio.start_background_task(__handle_event, payload, socketio)
def main():
    """Validate settings, run migrations and configure the admin account."""
    # Fail fast when any required environment variable is missing.
    for name in ('INFRABOX_VERSION',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT',
                 'INFRABOX_ADMIN_PASSWORD',
                 'INFRABOX_ADMIN_EMAIL'):
        get_env(name)

    conn = connect_db()
    migrate_db(conn)
    configure_admin(conn)
    conn.close()
def main():  # pragma: no cover
    """Validate settings, start the OPA push loop and serve the API."""
    for name in ('INFRABOX_VERSION',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_PORT',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_OPA_HOST',
                 'INFRABOX_OPA_PORT',
                 'INFRABOX_OPA_PUSH_INTERVAL'):
        get_env(name)

    # NOTE(review): the connection is never used afterwards — presumably this
    # just blocks until the database is reachable; confirm before removing.
    conn = connect_db()

    opa_start_push_loop()

    wsgi.server(eventlet.listen(('0.0.0.0', 8081)), app)
def setUp(self):
    """Reset all tables and insert a minimal user/project/token fixture.

    Also exports INFRABOX_CLI_TOKEN so the CLI under test can authenticate.
    """
    conn = connect_db()
    cur = conn.cursor()

    # Wipe every table the tests touch; order respects FK dependencies.
    for table in ('job', 'auth_token', 'collaborator', 'project', '"user"',
                  'source_upload', 'build', 'test_run', 'job_stat',
                  'measurement', 'test', 'job_markup', 'secret'):
        cur.execute('DELETE FROM %s' % table)

    cur.execute(
        '''INSERT INTO "user"(id, github_id, avatar_url, name,
                              email, github_api_token, username)
           VALUES(%s, 1, 'avatar', 'name', 'email', 'token', 'login')''',
        (self.user_id, ))
    cur.execute(
        '''INSERT INTO project(name, type, id, public)
           VALUES('test', 'upload', %s, true)''',
        (self.project_id, ))
    cur.execute(
        '''INSERT INTO collaborator(project_id, user_id, owner)
           VALUES(%s, %s, true)''',
        (self.project_id, self.user_id, ))
    cur.execute(
        '''INSERT INTO auth_token(project_id, id, description,
                                  scope_push, scope_pull)
           VALUES(%s, %s, 'asd', true, true)''',
        (self.project_id, self.token_id, ))
    cur.execute(
        '''INSERT INTO secret(project_id, name, value)
           VALUES(%s, 'SECRET_ENV', %s)''',
        (self.project_id, encrypt_secret('hello world')))
    conn.commit()

    os.environ['INFRABOX_CLI_TOKEN'] = encode_project_token(
        self.token_id, self.project_id)
    # BUG FIX: Python-2-only `print` statement replaced with the function
    # form, which behaves identically on 2 and 3 for a single argument.
    print(os.environ['INFRABOX_CLI_TOKEN'])

    self.root_url = os.environ['INFRABOX_ROOT_URL']
def main():  # pragma: no cover
    """Periodically push InfraBox status to Cachet (every 10 seconds)."""
    for name in ('INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_PORT',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_CACHET_ENDPOINT',
                 'INFRABOX_CACHET_API_TOKEN'):
        get_env(name)

    while True:
        c = Cachet()
        db = None
        try:
            db = DB(connect_db())
            c.update(db)
        except Exception as e:
            logger.exception(e)
        finally:
            # BUG FIX: the original opened a new DB connection every
            # iteration and never closed it, leaking one connection per
            # cycle. Close it best-effort before sleeping.
            if db is not None:
                try:
                    db.close()
                except Exception:
                    pass
            time.sleep(10)
def setUp(self):
    """Reset the schema and insert a user/project/repository fixture."""
    self.app = server.app.test_client()
    server.app.testing = True
    self.conn = connect_db()

    # Start from empty tables.
    for table in ('"user"', 'project', 'collaborator', 'repository',
                  'commit', 'build', 'job'):
        self.execute('TRUNCATE %s' % table)

    # Fixed fixture ids shared by the individual tests.
    self.project_id = '1514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.user_id = '2514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.repo_id = '3514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.build_id = '4514af82-3c4f-4bb5-b1da-a89a0ced5e6f'
    self.sha = 'd670460b4b4aece5915caf5c68d12f560a9fe3e4'

    self.execute("""
        INSERT INTO collaborator (user_id, project_id, owner)
        VALUES (%s, %s, true);
    """, (self.user_id, self.project_id))

    self.execute("""
        INSERT INTO "user" (id, github_id, username, avatar_url)
        VALUES (%s, 1, 'testuser', 'url');
    """, (self.user_id, ))

    self.execute("""
        INSERT INTO project(id, name, type)
        VALUES (%s, 'testproject', 'gerrit');
    """, (self.project_id, ))

    self.execute("""
        INSERT INTO repository(id, name, html_url, clone_url,
                               github_id, project_id, private)
        VALUES (%s, 'testrepo', 'url', 'clone_url', 0, %s, true);
    """, (self.repo_id, self.project_id))
def main(): parser = argparse.ArgumentParser(prog="checker.py") args = parser.parse_args() # Validate if env vars are setted get_env('INFRABOX_VERSION') get_env('INFRABOX_CLUSTER_NAME') get_env('INFRABOX_DATABASE_DB') get_env('INFRABOX_DATABASE_USER') get_env('INFRABOX_DATABASE_PASSWORD') get_env('INFRABOX_DATABASE_HOST') get_env('INFRABOX_DATABASE_PORT') get_env('INFRABOX_ROOT_URL') get_env('INFRABOX_HA_CHECK_INTERVAL') get_env('INFRABOX_HA_ACTIVE_TIMEOUT') urllib3.disable_warnings() logger = get_logger("checker_main") # Try to read from filesystem with open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r') as f: args.token = str(f.read()).strip() kube_apiserver_host = get_env('INFRABOX_KUBERNETES_MASTER_HOST') kube_apiserver_port = get_env('INFRABOX_KUBERNETES_MASTER_PORT') args.api_server = "https://" + kube_apiserver_host + ":" + kube_apiserver_port conn = connect_db() conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) is_monitoring_enabled = get_env("INFRABOX_MONITORING_ENABLED") == 'true' if is_monitoring_enabled: logger.info("Monitoring enabled. Starting HTTP server for metrics") server_port = os.environ.get('INFRABOX_PORT', 8080) start_http_server(server_port) checker = Checker(conn, args) checker.run()
def main():
    """Post gerrit review feedback for job updates.

    Runs per-cluster leader election: only the active, elected instance
    processes Postgres 'job_update' notifications.
    """
    # Validate required settings up front.
    get_env('INFRABOX_VERSION')
    get_env('INFRABOX_DATABASE_DB')
    get_env('INFRABOX_DATABASE_USER')
    get_env('INFRABOX_DATABASE_PASSWORD')
    get_env('INFRABOX_DATABASE_HOST')
    get_env('INFRABOX_DATABASE_PORT')
    get_env('INFRABOX_GERRIT_PORT')
    get_env('INFRABOX_GERRIT_HOSTNAME')
    get_env('INFRABOX_GERRIT_USERNAME')
    get_env('INFRABOX_GERRIT_KEY_FILENAME')
    cluster_name = get_env('INFRABOX_CLUSTER_NAME')

    conn = connect_db()
    # Autocommit so LISTEN/NOTIFY delivery is not held back by a transaction.
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    logger.info("Connected to database")

    elect_leader(conn, 'gerrit-review', cluster_name)

    curs = conn.cursor()
    curs.execute("LISTEN job_update;")

    logger.info("Waiting for job updates")

    while True:
        # Idle while this cluster is not active.
        if not is_active(conn, cluster_name):
            logger.info("cluster is inactive or disabled, sleeping")
            time.sleep(5)
            continue

        # NOTE(review): return value ignored — presumably is_leader refreshes
        # leadership state as a side effect; confirm against its definition.
        is_leader(conn, 'gerrit-review', cluster_name)

        # NOTE(review): the 'commit;' round trip appears to be what makes
        # psycopg2 collect pending notifications here (instead of an explicit
        # conn.poll()) — confirm before changing.
        curs.execute('commit;')

        # Drain all queued notifications and handle each payload.
        while conn.notifies:
            notify = conn.notifies.pop(0)
            logger.debug("got notify: %s" % notify.payload)
            handle_job_update(conn, json.loads(notify.payload))

        time.sleep(3)
def main():  # pragma: no cover
    """Dispatch Postgres 'job_update' notifications to handle_job_update."""
    for name in ('INFRABOX_VERSION',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT'):
        get_env(name)

    conn = connect_db()
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    logger.info("Connected to database")

    cursor = conn.cursor()
    cursor.execute("LISTEN job_update;")

    logger.info("Waiting for job updates")

    while True:
        # Wait up to 5 seconds for the connection to become readable.
        if select.select([conn], [], [], 5) == ([], [], []):
            continue

        conn.poll()
        while conn.notifies:
            notification = conn.notifies.pop(0)
            handle_job_update(conn, json.loads(notification.payload))
def main():
    """Stream gerrit events and trigger builds for new patchsets."""
    for name in ('INFRABOX_SERVICE',
                 'INFRABOX_VERSION',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT'):
        get_env(name)

    gerrit_port = int(get_env('INFRABOX_GERRIT_PORT'))
    gerrit_hostname = get_env('INFRABOX_GERRIT_HOSTNAME')
    gerrit_username = get_env('INFRABOX_GERRIT_USERNAME')
    gerrit_key_filename = get_env('INFRABOX_GERRIT_KEY_FILENAME')

    conn = connect_db()
    logger.info("Connected to db")

    elect_leader(conn, "gerrit-trigger")

    # SSH into gerrit and subscribe to its event stream.
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(username=gerrit_username,
                   hostname=gerrit_hostname,
                   port=gerrit_port,
                   key_filename=gerrit_key_filename)
    client.get_transport().set_keepalive(60)
    logger.info("Connected to gerrit")

    _, stdout, _ = client.exec_command('gerrit stream-events')
    logger.info("Waiting for stream-events")

    for line in stdout:
        event = json.loads(line)
        if event['type'] != "patchset-created":
            continue

        logger.info(json.dumps(event, indent=4))
        handle_patchset_created(conn, event)
def __init__(self):
    """Open an autocommit database connection for this instance."""
    conn = connect_db()
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    self.conn = conn
    # Cached daemon configuration; populated elsewhere.
    self.daemon_json = None
import os
import base64

from unittest import TestCase

import eventlet
eventlet.monkey_patch()

import psycopg2
import psycopg2.extensions
import requests

from pyinfraboxutils.db import DB, connect_db
from pyinfraboxutils.token import encode_project_token
from pyinfraboxutils.ibopa import opa_push_all

# Module-level connection shared by all tests; autocommit so writes are
# visible to the server under test immediately.
conn = connect_db()
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)


class InputTests(TestCase):
    """Tests for the docker-registry authentication endpoint."""

    def get(self, url, password='******'):
        # Build a Basic auth header whose password field carries a project
        # token for a fixed project id.
        # NOTE(review): b64encode on a str implies Python 2 — under Python 3
        # this would need bytes; confirm the target runtime.
        auth = base64.b64encode('infrabox:%s' % encode_project_token(
            password, '2514af82-3c4f-4bb5-b1da-a89a0ced5e6f'))
        headers = {'authorization': "Basic " + auth}
        return requests.get(url, headers=headers)

    def test_token_does_not_exist(self):
        # A token that is not in the database must be rejected with 401.
        r = self.get('http://docker-registry:8080/v2')
        self.assertEqual(r.status_code, 401)
def before_request():
    """Attach a fresh request-scoped DB handle to the Flask g object."""
    connection = connect_db()
    g.db = DB(connection)
class TestClient:
    """Thin wrapper around the Flask test client with DB and auth helpers."""

    app = server.app.test_client()
    server.app.testing = True
    conn = connect_db()

    @staticmethod
    def execute(stmt, args=None):
        # Run a statement that returns no rows, then commit.
        cursor = TestClient.conn.cursor()
        cursor.execute(stmt, args)
        cursor.close()
        TestClient.conn.commit()

    @staticmethod
    def execute_many(stmt, args=None):
        # Run a query and return all rows as dict-like rows.
        cursor = TestClient.conn.cursor(
            cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(stmt, args)
        rows = cursor.fetchall()
        cursor.close()
        TestClient.conn.commit()
        return rows

    @staticmethod
    def execute_one(stmt, args=None):
        # First row of execute_many.
        return TestClient.execute_many(stmt, args)[0]

    @staticmethod
    def get_user_authorization(user_id):  # pragma: no cover
        return {'Authorization': 'token %s' % encode_user_token(user_id)}

    @staticmethod
    def get_job_authorization(job_id):  # pragma: no cover
        return {'Authorization': 'token %s' % encode_job_token(job_id)}

    @staticmethod
    def get_project_authorization(token_id, project_id):  # pragma: no cover
        project_token = encode_project_token(token_id, project_id, 'myproject')
        return {'Authorization': 'token %s' % project_token}

    @staticmethod
    def _decode(response):
        # Decode a JSON response body; return anything else untouched.
        if response.mimetype == 'application/json':
            return json.loads(response.data)
        return response

    @staticmethod
    def get(url, headers):  # pragma: no cover
        if not headers:
            return
        return TestClient._decode(TestClient.app.get(url, headers=headers))

    @staticmethod
    def delete(url, headers):  # pragma: no cover
        if not headers:
            return
        return TestClient._decode(TestClient.app.delete(url, headers=headers))

    @staticmethod
    def post(url, data, headers, content_type='application/json'):  # pragma: no cover
        if not headers:
            return

        if content_type == 'application/json':
            data = json.dumps(data)

        response = TestClient.app.post(url, data=data, headers=headers,
                                       content_type=content_type)
        return TestClient._decode(response)

    @staticmethod
    def opa_push():
        opa_push_all()
def _reconnect(old_conn):
    """Close old_conn best-effort and return a fresh DB connection."""
    try:
        old_conn.close()
    except Exception:
        # Close failures are irrelevant; we are replacing the connection.
        pass
    conn = connect_db()
    logger.info("reconnected to db")
    return conn


def main():
    """Stream gerrit events and trigger builds, reconnecting to the
    database on transient errors.

    Each event is attempted at most twice: a psycopg2.OperationalError
    forces a reconnect and one retry; a duplicate key skips the event.
    """
    for name in ('INFRABOX_VERSION',
                 'INFRABOX_DATABASE_DB',
                 'INFRABOX_DATABASE_USER',
                 'INFRABOX_DATABASE_PASSWORD',
                 'INFRABOX_DATABASE_HOST',
                 'INFRABOX_DATABASE_PORT'):
        get_env(name)

    gerrit_port = int(get_env('INFRABOX_GERRIT_PORT'))
    gerrit_hostname = get_env('INFRABOX_GERRIT_HOSTNAME')
    gerrit_username = get_env('INFRABOX_GERRIT_USERNAME')
    gerrit_key_filename = get_env('INFRABOX_GERRIT_KEY_FILENAME')
    cluster_name = get_env('INFRABOX_CLUSTER_NAME')

    conn = connect_db()
    logger.info("Connected to db")

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(username=gerrit_username,
                   hostname=gerrit_hostname,
                   port=gerrit_port,
                   key_filename=gerrit_key_filename)
    client.get_transport().set_keepalive(60)
    logger.info("Connected to gerrit")

    _, stdout, _ = client.exec_command('gerrit stream-events')
    logger.info("Waiting for stream-events")

    for line in stdout:
        for _ in range(0, 2):
            try:
                event = json.loads(line)

                if event['type'] in ("patchset-created",
                                     "draft-published",
                                     "change-merged"):
                    logger.debug(json.dumps(event, indent=4))

                    if not is_active(conn, cluster_name):
                        logger.info(
                            "cluster is inactive or disabled, skipping")
                        break

                    handle_patchset_created(conn, event)
                break
            except psycopg2.IntegrityError:
                # Duplicate commit: nothing to do, but replace the
                # connection since it may be in an aborted transaction.
                # (Original used bare `except:` on close, which also
                # caught KeyboardInterrupt/SystemExit.)
                logger.info('duplicated key, skip this commit')
                conn = _reconnect(conn)
                break
            except psycopg2.OperationalError:
                # Transient DB failure: reconnect and retry the event once.
                conn = _reconnect(conn)