Ejemplo n.º 1
0
 def get(self):
     """Trigger SAML Single Logout, falling back to the root URL on error."""
     auth = init_saml_auth()
     try:
         target = auth.logout()
     except Exception as exc:
         # Best effort: if SLO cannot be started, send the user home.
         logger.error("Could not initiate Single Logout: %s", exc)
         target = get_root_url("global")
     return redirect(target)
Ejemplo n.º 2
0
    def get(self):
        """Process a SAML Single Logout (SLO) message from the IdP.

        On success, clears the session ``token`` cookie and redirects to
        the URL returned by the SLO processing (or the global root URL if
        none was supplied). On any failure, redirects to the global root
        URL without touching the cookie.
        """
        auth = init_saml_auth()
        try:
            redirect_url = auth.process_slo()
        except Exception as e:
            logger.error("Single Logout failed: %s", e)
            return redirect(get_root_url("global"))

        errors = auth.get_errors()
        if errors:
            logger.error("Single Logout failed: %s", "; ".join(errors))
            return redirect(get_root_url("global"))

        # process_slo() may return None when the IdP supplies no RelayState.
        # (Fixed a duplicated assignment: `redirect_url = redirect_url = ...`.)
        if redirect_url is None:
            redirect_url = get_root_url("global")

        response = redirect(redirect_url)
        # Expire the auth cookie so the user is logged out locally as well.
        response.set_cookie("token", "", expires=0)
        return response
Ejemplo n.º 3
0
    def post(self):
        """Consume the SAML response from the IdP and log the user in.

        Creates the user record on first login, then sets the session
        token cookie and redirects to the dashboard.
        """
        auth = init_saml_auth()
        auth.process_response()
        errors = auth.get_errors()

        logger.info("Request: %s %s", request, request.headers)

        if errors:
            logger.error("Authentication failed: %s", "; ".join(errors))
            abort(500, "Authentication failed")

        if not auth.is_authenticated():
            logger.error("User returned unauthorized from IdP")
            abort(401, "Unauthorized")

        attrs = get_attribute_dict(auth)
        logger.debug("User data: %s", attrs)

        email = format_user_field(
            get_env("INFRABOX_ACCOUNT_SAML_EMAIL_FORMAT"), attrs).lower()

        # Look up an existing account for this email address.
        user = g.db.execute_one_dict(
            """
                SELECT id FROM "user"
                WHERE email = %s
            """, [email])

        if not user:
            # First login: create the account from the SAML attributes.
            display_name = format_user_field(
                get_env("INFRABOX_ACCOUNT_SAML_NAME_FORMAT"), attrs)
            username = format_user_field(
                get_env("INFRABOX_ACCOUNT_SAML_USERNAME_FORMAT"), attrs)

            user = g.db.execute_one_dict(
                """
                INSERT INTO "user" (name, username, email)
                VALUES (%s, %s, %s) RETURNING id
            """, [display_name, username, email])

        session_token = encode_user_token(user["id"])

        g.db.commit()

        target = get_root_url("global") + "/dashboard/"
        logger.debug("Redirecting authenticated user to %s", target)
        resp = redirect(target)
        resp.set_cookie("token", session_token)
        return resp
Ejemplo n.º 4
0
    def get(self):
        '''
        Returns the cluster settings
        '''
        def flag(key):
            # Boolean settings are stored as the literal string 'true'.
            return os.environ[key] == 'true'

        github_enabled = flag('INFRABOX_GITHUB_ENABLED')

        o = {
            'INFRABOX_GITHUB_ENABLED': github_enabled,
            'INFRABOX_SSO_LOGIN_ENABLED': flag('INFRABOX_ACCOUNT_SAML_ENABLED'),
            'INFRABOX_GERRIT_ENABLED': flag('INFRABOX_GERRIT_ENABLED'),
            'INFRABOX_ACCOUNT_SIGNUP_ENABLED': flag('INFRABOX_ACCOUNT_SIGNUP_ENABLED'),
            'INFRABOX_ACCOUNT_LDAP_ENABLED': flag('INFRABOX_ACCOUNT_LDAP_ENABLED'),
            'INFRABOX_ROOT_URL': get_root_url('global'),
            'INFRABOX_GENERAL_REPORT_ISSUE_URL': os.environ['INFRABOX_GENERAL_REPORT_ISSUE_URL'],
            'INFRABOX_CLUSTER_NAME': os.environ['INFRABOX_CLUSTER_NAME'],
            'INFRABOX_LEGAL_PRIVACY_URL': os.environ['INFRABOX_LEGAL_PRIVACY_URL'],
            'INFRABOX_LEGAL_TERMS_OF_USE_URL': os.environ['INFRABOX_LEGAL_TERMS_OF_USE_URL'],
        }

        # The GitHub login toggle is only defined when GitHub is enabled.
        if github_enabled:
            o['INFRABOX_GITHUB_LOGIN_ENABLED'] = flag('INFRABOX_GITHUB_LOGIN_ENABLED')

        return o
Ejemplo n.º 5
0
    def post(self):
        '''
        Create new project

        Supported types: 'github' (connects the repo, registers a webhook
        and a read-only deploy key), 'gerrit', and plain projects.
        The caller becomes the 'Owner' collaborator.
        '''
        user_id = g.token['user']['id']

        b = request.get_json()
        name = b['name']
        typ = b['type']
        private = b['private']

        # Limit how many projects a single user may own.
        projects = g.db.execute_one_dict(
            '''
            SELECT COUNT(*) as cnt
            FROM project p
            INNER JOIN collaborator co
            ON p.id = co.project_id
            AND co.user_id = %s
        ''', [user_id])

        if projects['cnt'] > 50:
            abort(400, 'too many projects')

        # Project names are globally unique.
        project = g.db.execute_one_dict(
            '''
            SELECT *
            FROM project
            WHERE name = %s
        ''', [name])

        if project:
            abort(400, 'A project with this name already exists')

        if typ == 'github':
            github_repo_name = b.get('github_repo_name', None)

            if not github_repo_name:
                abort(400, 'github_repo_name not set')

            split = github_repo_name.split('/')
            # Guard against malformed values (previously an unhandled
            # IndexError -> HTTP 500 for e.g. "myrepo" without an owner).
            if len(split) != 2:
                abort(400, 'github_repo_name must be of the form owner/repo')
            owner = split[0]
            repo_name = split[1]

            user = g.db.execute_one_dict(
                '''
                SELECT github_api_token
                FROM "user"
                WHERE id = %s
            ''', [user_id])

            if not user:
                abort(404)

            api_token = user['github_api_token']

            headers = {
                "Authorization": "token " + api_token,
                "User-Agent": "InfraBox"
            }
            url = '%s/repos/%s/%s' % (os.environ['INFRABOX_GITHUB_API_URL'],
                                      owner, repo_name)

            # TODO(ib-steffen): allow custom ca bundles
            r = requests.get(url, headers=headers, verify=False)

            if r.status_code != 200:
                abort(400, 'Failed to get github repo')

            repo = r.json()

            # Only repo admins may connect it (needed for hooks/keys below).
            if not repo['permissions']['admin']:
                abort(400, 'You are not allowed to connect this repo')

            r = g.db.execute_one_dict(
                '''
                SELECT *
                FROM repository
                WHERE github_id = %s
            ''', [repo['id']])

            if r:
                # BUG FIX: abort() takes the HTTP status code first; the old
                # call abort('Repo already connected') raised instead of
                # returning a 400 to the client.
                abort(400, 'Repo already connected')

        project = g.db.execute_one_dict(
            '''
            INSERT INTO project (name, type, public)
            VALUES (%s, %s, %s) RETURNING id
        ''', [name, typ, not private])
        project_id = project['id']

        # The creating user becomes the project owner.
        g.db.execute(
            '''
            INSERT INTO collaborator (user_id, project_id, role)
            VALUES (%s, %s, 'Owner')
        ''', [user_id, project_id])

        if typ == 'github':
            # owner/repo_name/repo/api_token were computed above; the old
            # code re-split github_repo_name here redundantly.
            clone_url = repo['clone_url']
            if repo['private']:
                # Private repos must be cloned over SSH with the deploy key.
                clone_url = repo['ssh_url']

            g.db.execute(
                '''
                INSERT INTO repository (name, html_url, clone_url, github_id,
                                        private, project_id, github_owner)
                VALUES (%s, %s, %s, %s, %s, %s, %s)
            ''', [
                    repo['name'], repo['html_url'], clone_url, repo['id'],
                    repo['private'], project_id, repo['owner']['login']
                ])

            insecure_ssl = "0"
            if os.environ[
                    'INFRABOX_GENERAL_DONT_CHECK_CERTIFICATES'] == 'true':
                insecure_ssl = "1"

            # Register the webhook that triggers builds on repo events.
            webhook_config = {
                'name': "web",
                'active': True,
                'events':
                ["create", "delete", "public", "pull_request", "push"],
                'config': {
                    'url': get_root_url('global') + '/github/hook',
                    'content_type': "json",
                    'secret': os.environ['INFRABOX_GITHUB_WEBHOOK_SECRET'],
                    'insecure_ssl': insecure_ssl
                }
            }

            headers = {
                "Authorization": "token " + api_token,
                "User-Agent": "InfraBox"
            }
            url = '%s/repos/%s/%s/hooks' % (
                os.environ['INFRABOX_GITHUB_API_URL'], owner, repo_name)

            # TODO(ib-steffen): allow custom ca bundles
            r = requests.post(url,
                              headers=headers,
                              json=webhook_config,
                              verify=False)

            if r.status_code != 201:
                abort(400, 'Failed to create github webhook')

            hook = r.json()

            g.db.execute(
                '''
                UPDATE repository SET github_hook_id = %s
                WHERE github_id = %s
            ''', [hook['id'], repo['id']])

            # Generate and register a read-only deploy key for cloning.
            key = RSA.generate(2048)
            private_key = key.exportKey('PEM')
            public_key = key.publickey().exportKey('OpenSSH')
            deploy_key_config = {
                'title': "InfraBox",
                'key': public_key,
                'read_only': True
            }

            url = '%s/repos/%s/%s/keys' % (
                os.environ['INFRABOX_GITHUB_API_URL'], owner, repo_name)

            # TODO(ib-steffen): allow custom ca bundles
            r = requests.post(url,
                              headers=headers,
                              json=deploy_key_config,
                              verify=False)

            if r.status_code != 201:
                abort(400, 'Failed to create deploy key')

            g.db.execute(
                '''
                UPDATE repository SET private_key = %s
                WHERE github_id = %s
            ''', [private_key, repo['id']])

        elif typ == 'gerrit':
            # Gerrit projects get a placeholder repository row.
            g.db.execute(
                '''
                INSERT INTO repository (name, private, project_id, html_url, clone_url, github_id)
                VALUES (%s, false, %s, '', '', 0)
            ''', [name, project_id])

        g.db.commit()

        # Push updated collaborator and project data to Open Policy Agent
        opa_push_project_data(g.db)
        opa_push_collaborator_data(g.db)

        return OK('Project added')
Ejemplo n.º 6
0
def handle_job_update(conn, event):
    """Push a job's state to GitHub as a commit status.

    Returns True when the status was posted successfully, False when the
    update was skipped (unknown job/project/build/commit, non-GitHub
    project, missing API token) or the GitHub API call failed.
    """
    job_id = event['job_id']

    jobs = execute_sql(
        conn, '''
        SELECT id, state, name, project_id, build_id
        FROM job
        WHERE id = %s
    ''', [job_id])

    if not jobs:
        return False

    job = jobs[0]

    project_id = job['project_id']
    build_id = job['build_id']

    projects = execute_sql(
        conn, '''
        SELECT id, name, type
        FROM project
        WHERE id = %s
    ''', [project_id])

    if not projects:
        return False

    project = projects[0]

    # Only GitHub projects receive commit statuses.
    if project['type'] != 'github':
        return False

    builds = execute_sql(
        conn, '''
        SELECT id, build_number, restart_counter, commit_id
        FROM build
        WHERE id = %s
    ''', [build_id])

    if not builds:
        return False

    build = builds[0]

    project_name = project['name']
    job_state = job['state']
    job_name = job['name'].split(".")[0]
    commit_sha = build['commit_id']
    build_id = build['id']
    build_number = build['build_number']
    build_restart_counter = build['restart_counter']

    # Map InfraBox job states onto GitHub's commit states
    # (success / pending / failure / error).
    state = 'success'
    if job_state in ('scheduled', 'running', 'queued'):
        state = 'pending'

    if job_state in ('failure', 'skipped', 'killed', 'unstable'):
        state = 'failure'

    if job_state == 'error':
        state = 'error'

    logger.info("")
    logger.info("Handle job %s", job_id)
    logger.info("Setting state to %s", state)

    # Use the project owner's token for the status API call.
    token = execute_sql(
        conn, '''
        SELECT github_api_token FROM "user" u
        INNER JOIN collaborator co
            ON co.role = 'Owner'
            AND co.project_id = %s
            AND co.user_id = u.id
    ''', [project_id])

    if not token:
        logger.warning("No API token, not updating status")
        return False

    github_api_token = token[0]['github_api_token']

    # Guard against a missing commit row (previously an unhandled
    # IndexError when the commit was not found).
    commits = execute_sql(
        conn, '''
        SELECT github_status_url
        FROM "commit"
        WHERE id = %s
        AND project_id = %s
    ''', [commit_sha, project_id])

    if not commits:
        logger.warning("No commit found, not updating status")
        return False

    github_status_url = commits[0]['github_status_url']

    ha_mode = os.environ.get('INFRABOX_HA_ENABLED') == 'true'
    if ha_mode:
        dashboard_url = get_root_url('global')
    else:
        dashboard_url = execute_sql(
            conn, '''
                    SELECT root_url
                    FROM cluster
                    WHERE name = 'master'
                ''', [])[0]['root_url']

    target_url = '%s/dashboard/#/project/%s/build/%s/%s/job/%s' % (
        dashboard_url, project_name, build_number, build_restart_counter,
        urllib.quote_plus(job_name).replace('+', '%20'))

    payload = {
        "state": state,
        "target_url": target_url,
        "description": "InfraBox",
        "context": "Job: %s" % job_name
    }

    headers = {
        "Authorization": "token " + github_api_token,
        "User-Agent": "InfraBox"
    }

    # TODO(ib-steffen): support ca bundles
    try:
        r = requests.post(github_status_url,
                          data=json.dumps(payload),
                          headers=headers,
                          timeout=10,
                          verify=False)

        if r.status_code != 201:
            logger.warning("Failed to update github status: %s", r.text)
            logger.warning(github_status_url)
        else:
            logger.info("Successfully updated github status")
    except Exception as e:
        logger.warning("Failed to update github status: %s", e)
        return False

    return True
Ejemplo n.º 7
0
def handle_job_update(conn, event):
    """Vote on a Gerrit change according to the build's aggregate job state.

    Posts a `gerrit review` over SSH with InfraBox=+1/0/-1 once jobs leave
    the queued/scheduled/running states. The vote label is skipped when a
    newer restart of the same build exists.
    """
    if event['type'] != 'UPDATE':
        return

    job_id = event['job_id']

    c = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    c.execute(
        '''
        SELECT id, state, name, project_id, build_id
        FROM job
        WHERE id = %s
    ''', [job_id])

    job = c.fetchone()
    c.close()

    if not job:
        return

    project_id = job['project_id']
    build_id = job['build_id']

    c = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    c.execute(
        '''
        SELECT id, name, type
        FROM project
        WHERE id = %s
    ''', [project_id])
    project = c.fetchone()
    c.close()

    if not project:
        return

    # Only Gerrit projects are handled here.
    if project['type'] != 'gerrit':
        return

    c = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    c.execute(
        '''
        SELECT id, build_number, restart_counter, commit_id
        FROM build
        WHERE id = %s
    ''', [build_id])
    build = c.fetchone()
    c.close()

    # Guard against a missing build row (previously an unhandled TypeError,
    # unlike the sibling GitHub handler which checks its query results).
    if not build:
        return

    project_name = project['name']
    project_id = project['id']
    job_state = job['state']
    job_name = job['name']
    commit_sha = build['commit_id']
    build_id = build['id']
    build_number = build['build_number']
    build_restart_counter = build['restart_counter']

    # Nothing to report while the job is still in flight.
    if job_state in ('queued', 'scheduled', 'running'):
        return

    gerrit_port = int(get_env('INFRABOX_GERRIT_PORT'))
    gerrit_hostname = get_env('INFRABOX_GERRIT_HOSTNAME')
    gerrit_username = get_env('INFRABOX_GERRIT_USERNAME')
    gerrit_key_filename = get_env('INFRABOX_GERRIT_KEY_FILENAME')

    ha_mode = os.environ.get('INFRABOX_HA_ENABLED') == 'true'
    if ha_mode:
        dashboard_url = get_root_url('global')
    else:
        dashboard_url = execute_sql(
            conn, '''
                SELECT root_url
                FROM cluster
                WHERE name = 'master'
            ''', [])[0]['root_url']

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(username=gerrit_username,
                   hostname=gerrit_hostname,
                   port=gerrit_port,
                   key_filename=gerrit_key_filename)
    client.get_transport().set_keepalive(60)

    try:
        project_name_quote = urllib.quote_plus(project_name).replace('+', '%20')
        build_url = "%s/dashboard/#/project/%s/build/%s/%s" % (
            dashboard_url, project_name_quote, build_number, build_restart_counter)

        # Don't overwrite the vote when a newer restart already exists.
        c = conn.cursor()
        c.execute(
            '''
            SELECT count(*)
            FROM build
            WHERE build_number = %s
            AND restart_counter > %s
            AND project_id = %s''',
            [build_number, build_restart_counter, project_id])
        newer_builds = c.fetchone()[0]
        c.close()

        update_vote = True
        if newer_builds > 0:
            update_vote = False

        c = conn.cursor()
        c.execute(
            '''SELECT state, count(*) FROM job WHERE build_id = %s GROUP BY state''',
            [build_id])
        states = c.fetchall()
        c.close()

        vote = None
        if len(states) == 1 and states[0][0] == 'finished':
            # all finished
            vote = "+1"
            message = "Build finished: %s" % build_url
        else:
            for s in states:
                if s[0] in ('running', 'scheduled', 'queued'):
                    # still some running
                    vote = "0"
                    message = "Build running: %s" % build_url
                    break
                elif s[0] != 'finished':
                    # not successful
                    vote = "-1"
                    message = "Build failed: %s" % build_url

        if (job_name == 'Create Jobs' and vote == '0') or vote in ('-1', '+1'):
            logger.info('Setting InfraBox=%s for sha=%s', vote, commit_sha)
            cmd = 'gerrit review --project %s -m "%s" ' % (project_name, message)

            if update_vote:
                cmd += '--label InfraBox=%s ' % vote

            cmd += commit_sha
            execute_ssh_cmd(client, cmd)
    finally:
        # Always release the SSH connection, even when a query or the
        # review command raises (the old code leaked it on errors).
        client.close()
Ejemplo n.º 8
0
        def post(self, project_id):
            '''
            Start a build for an "upload" project from the posted project.zip.

            Stores the archive, replicates it to all other active clusters,
            then creates the source_upload, build and initial "Create Jobs"
            rows, committing everything in a single transaction at the end.
            Returns the new build id/number and a dashboard URL.
            '''
            project = g.db.execute_one_dict(
                '''
                SELECT type
                FROM project
                WHERE id = %s
            ''', [project_id])

            if not project:
                abort(404, 'Project not found')

            # Only projects created as "upload" accept direct source uploads.
            if project['type'] != 'upload':
                abort(400, 'Project is not of type "upload"')

            # The build id doubles as the storage key of the uploaded zip.
            build_id = str(uuid.uuid4())
            key = '%s.zip' % build_id

            stream = request.files['project.zip'].stream
            storage.upload_project(stream, key)

            # Replicate the upload to every other active+enabled cluster.
            clusters = g.db.execute_many_dict(
                '''
                SELECT root_url
                FROM cluster
                WHERE active = true
                AND enabled = true
                AND name != %s
            ''', [os.environ['INFRABOX_CLUSTER_NAME']])

            for c in clusters:
                # Rewind: the stream was consumed by the previous upload.
                stream.seek(0)
                url = '%s/api/v1/projects/%s/upload/%s/' % (
                    c['root_url'], project_id, build_id)
                files = {'project.zip': stream}
                # NOTE(review): the token name 'myproject' looks like a
                # placeholder — confirm it is intentional.
                token = encode_project_token(g.token['id'], project_id,
                                             'myproject')
                headers = {'Authorization': 'bearer ' + token}
                logger.info('Also uploading to %s', url)

                # TODO(ib-steffen): allow custom ca bundles
                r = requests.post(url,
                                  files=files,
                                  headers=headers,
                                  timeout=120,
                                  verify=False)

                if r.status_code != 200:
                    abort(500, "Failed to upload data")

            # Next build number = count of distinct existing numbers + 1.
            build_number = g.db.execute_one_dict(
                '''
                SELECT count(distinct build_number) + 1 AS build_number
                FROM build AS b
                WHERE b.project_id = %s
            ''', [project_id])['build_number']

            source_upload_id = g.db.execute_one(
                '''
                INSERT INTO source_upload(filename, project_id, filesize) VALUES (%s, %s, 0) RETURNING ID
            ''', [key, project_id])[0]

            g.db.execute(
                '''
                INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
                VALUES (null, %s, %s, %s, %s)
            ''', [build_number, project_id, source_upload_id, build_id])

            # Default resource limits for the job-matrix creator job.
            definition = {
                'build_only': False,
                'resources': {
                    'limits': {
                        'cpu': 0.5,
                        'memory': 1024
                    }
                }
            }

            # Seed the build with the 'Create Jobs' job; cluster_name is NULL
            # so the scheduler may place it on any cluster.
            g.db.execute(
                '''
                INSERT INTO job (id, state, build_id, type, name, project_id,
                                 dockerfile, definition, cluster_name)
                VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                        'Create Jobs', %s, '', %s, %s);
            ''', [build_id, project_id,
                  json.dumps(definition), None])

            project_name = g.db.execute_one(
                '''
                SELECT name FROM project WHERE id = %s
            ''', [project_id])[0]

            root_url = get_root_url('global')
            url = '%s/dashboard/#/project/%s/build/%s/1' % (
                root_url, project_name, build_number)

            data = {
                'build': {
                    'id': build_id,
                    'number': build_number
                },
                'url': url
            }

            g.db.commit()

            return OK('successfully started build', data=data)
Ejemplo n.º 9
0
    def get(self, project_id):
        job_name = request.args.get('job_name', None)
        filename = request.args.get('filename', None)
        branch = request.args.get('branch', None)
        if not filename or not job_name:
            abort(404)

        p = g.db.execute_one_dict(
            '''
            SELECT type, name FROM project WHERE id = %s
        ''', [project_id])
        project_type = p['type']
        project_name = p['name']

        result = None

        if branch and project_type in ('github', 'gerrit'):
            result = g.db.execute_one_dict(
                '''
                SELECT j.id, name, c.id commit_id, b.build_number, b.restart_counter, c.branch
                FROM (
                    SELECT id, name, unnest(archive) AS archive, definition->>'name' AS realname, start_date, build_id
                    FROM job
                    WHERE project_id = %s
                ) j
                JOIN build b ON b.id = j.build_id
                JOIN "commit" c ON b.commit_id = c.id
                WHERE archive->>'filename' = %s AND realname = %s AND c.branch = %s
                ORDER BY start_date DESC;
            ''', [project_id, filename, job_name, branch])
        else:
            result = g.db.execute_one_dict(
                '''
                SELECT j.id, name, c.id commit_id, b.build_number, b.restart_counter, c.branch
                FROM (
                    SELECT id, name, unnest(archive) AS archive, definition->>'name' AS realname, start_date, build_id
                    FROM job
                    WHERE project_id = %s
                ) j
                JOIN build b ON b.id = j.build_id
                LEFT JOIN commit c ON b.commit_id = c.id
                WHERE archive->>'filename' = %s AND realname = %s
                ORDER BY start_date DESC;
            ''', [project_id, filename, job_name])

        if not result:
            abort(404)

        job_id = result['id']

        build_number = '{build_number}.{restart_counter}'.format(
            build_number=result['build_number'],
            restart_counter=result['restart_counter'])
        root_url = get_root_url('global')
        build_url = '%s/dashboard/#/project/%s/build/%s/%s' % (
            root_url, project_name, result['build_number'],
            result['restart_counter'])
        job_url = '%s/job/%s' % (build_url, result['name'])

        headers = {
            'Infrabox-Build-Number': build_number,
            'Infrabox-Build-Url': build_url,
            'Infrabox-Job-Url': job_url,
        }
        if result['branch']:
            headers['Infrabox-Branch'] = result['branch']
        if result['commit_id']:
            headers['Infrabox-Commit'] = result['commit_id']

        f = storage.download_archive('%s/%s' % (job_id, filename))

        if not f:
            abort(404)

        filename = os.path.basename(filename)

        resp = make_response(
            send_file(f,
                      attachment_filename=filename,
                      mimetype=mimetypes.guess_type(filename)[0]))
        resp.headers = headers
        return resp
Ejemplo n.º 10
0
    def get(self):
        job_id = g.token['job']['id']
        data = {}

        # get all the job details
        r = g.db.execute_one('''
            SELECT
                j.name,
                null,
                j.dockerfile,
                p.id,
                p.type,
                p.name,
                b.id,
                b.commit_id,
                b.source_upload_id,
                b.build_number,
                u.github_api_token,
                u.username,
                null,
                j.type,
                null,
                null,
                j.repo,
                null,
                j.state,
                null,
                j.env_var,
                j.env_var_ref,
                null,
                null,
                u.id,
                j.build_arg,
                j.deployment,
                null,
                b.restart_counter,
                j.definition
            FROM job j
            INNER JOIN build b
                ON j.build_id = b.id
                AND j.project_id = b.project_id
            INNER JOIN collaborator co
                ON co.project_id = j.project_id
                AND co.role = 'Owner'
            INNER JOIN "user" u
                ON co.user_id = u.id
            INNER JOIN project p
                ON co.project_id = p.id
            WHERE j.id = %s
        ''', [job_id])

        limits = {}
        definition = r[29]
        build_only = True

        if definition:
            limits = definition['resources']['limits']
            build_only = definition.get('build_only', True)

        data['job'] = {
            "id": job_id,
            "name": r[0],
            "dockerfile": r[2],
            "build_only": build_only,
            "type": r[13],
            "repo": r[16],
            "state": r[18],
            "cpu": limits.get('cpu', 1),
            "memory": limits.get('memory', 1024),
            "build_arguments": r[25],
            "definition": r[29]
        }

        state = data['job']['state']
        if state in ("finished", "error", "failure", "skipped", "killed", "unstable"):
            abort(409, 'job not running anymore')

        env_vars = r[20]
        env_var_refs = r[21]
        deployments = r[26]

        data['project'] = {
            "id": r[3],
            "type": r[4],
            "name": r[5],
        }

        data['build'] = {
            "id": r[6],
            "commit_id": r[7],
            "source_upload_id": r[8],
            "build_number": r[9],
            "restart_counter": r[28]
        }

        data['repository'] = {
            "owner": r[11],
            "name": None,
            "github_api_token": r[10],
            "private": False
        }

        data['commit'] = {
            "branch": None,
            "tag": None
        }

        pull_request_id = None
        if data['project']['type'] == 'github' or data['project']['type'] == 'gerrit':
            r = g.db.execute_one('''
                SELECT
                    r.clone_url, r.name, r.private
                FROM repository r
                WHERE r.project_id = %s
            ''', [data['project']['id']])

            data['repository']['clone_url'] = r[0]
            data['repository']['name'] = r[1]
            data['repository']['private'] = r[2]

            # A regular commit
            r = g.db.execute_one('''
                SELECT
                    c.branch, c.committer_name, c.tag, c.pull_request_id
                FROM commit c
                WHERE c.id = %s
                    AND c.project_id = %s
            ''', [data['build']['commit_id'], data['project']['id']])

            data['commit'] = {
                "id": data['build']['commit_id'],
                "branch": r[0],
                "committer_name": r[1],
                "tag": r[2]
            }
            pull_request_id = r[3]

        if data['project']['type'] == 'upload':
            r = g.db.execute_one('''
                SELECT filename FROM source_upload
                WHERE id = %s
            ''', [data['build']['source_upload_id']])

            data['source_upload'] = {
                "filename": r[0]
            }

        # get dependencies
        r = g.db.execute_many('''
              WITH RECURSIVE next_job(id, parent) AS (
                      SELECT j.id, (p->>'job-id')::uuid parent
                      FROM job j, jsonb_array_elements(j.dependencies) AS p
                      WHERE j.id = %s
                  UNION
                      SELECT j.id, (p->>'job-id')::uuid parent
                      FROM job j
                      LEFT JOIN LATERAL jsonb_array_elements(j.dependencies) AS p ON true,
                      next_job nj WHERE j.id = nj.parent
              )
              SELECT id, name, state, start_date, end_date, dependencies
              FROM job WHERE id IN (SELECT distinct id FROM next_job WHERE id != %s)
        ''', [data['job']['id'], data['job']['id']])

        data['dependencies'] = []

        for d in r:
            data['dependencies'].append({
                "id": d[0],
                "name": d[1],
                "state": d[2],
                "start_date": str(d[3]),
                "end_date": str(d[4]),
                "depends_on": d[5]
            })

        # get parents
        r = g.db.execute_many('''
          SELECT id, name FROM job where id
              IN (SELECT (deps->>'job-id')::uuid FROM job, jsonb_array_elements(job.dependencies) as deps WHERE id = %s)
        ''', [data['job']['id']])

        data['parents'] = []

        for d in r:
            data['parents'].append({
                "id": d[0],
                "name": d[1]
            })

        # get the secrets
        secrets = g.db.execute_many('''
             SELECT name, value
             FROM secret
             WHERE project_id = %s
        ''', [data['project']['id']])

        is_fork = data['job'].get('fork', False)
        def get_secret(name):
            if is_fork:
                abort(400, 'Access to secret %s is not allowed from a fork' % name)

            for ev in secrets:
                if ev[0] == name:
                    return decrypt_secret(ev[1])
            return None

        # Deployments
        data['deployments'] = []
        if deployments:
            for dep in deployments:
                if dep['type'] == 'docker-registry':
                    if 'password' not in dep:
                        data['deployments'].append(dep)
                        continue

                    secret_name = dep['password']['$secret']
                    secret = get_secret(secret_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_name)

                    dep['password'] = secret
                    data['deployments'].append(dep)
                elif dep['type'] == 'gcr':
                    service_account = dep['service_account']['$secret']
                    secret = get_secret(service_account)

                    if secret is None:
                        abort(400, "Secret %s not found" % service_account)

                    dep['service_account'] = secret
                    data['deployments'].append(dep)
                elif dep['type'] == 'ecr':
                    access_key_id_name = dep['access_key_id']['$secret']
                    secret = get_secret(access_key_id_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % access_key_id_name)

                    dep['access_key_id'] = secret

                    secret_access_key_name = dep['secret_access_key']['$secret']
                    secret = get_secret(secret_access_key_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_access_key_name)

                    dep['secret_access_key'] = secret
                    data['deployments'].append(dep)
                else:
                    abort(400, "Unknown deployment type")

        # Registries
        data['registries'] = []
        definition = data['job']['definition']
        registries = None

        if definition:
            registries = definition.get('registries', None)

        if registries:
            for r in registries:
                if r['type'] == 'docker-registry':
                    secret_name = r['password']['$secret']
                    secret = get_secret(secret_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_name)

                    r['password'] = secret
                    data['registries'].append(r)
                elif r['type'] == 'ecr':
                    access_key_id_name = r['access_key_id']['$secret']
                    secret = get_secret(access_key_id_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % access_key_id_name)

                    r['access_key_id'] = secret

                    secret_access_key_name = r['secret_access_key']['$secret']
                    secret = get_secret(secret_access_key_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_access_key_name)

                    r['secret_access_key'] = secret
                    data['registries'].append(r)
                else:
                    abort(400, "Unknown deployment type")

        root_url = get_root_url("global")

        # Default env vars
        project_name = urllib.quote_plus(data['project']['name']).replace('+', '%20')
        job_name = urllib.quote_plus(data['job']['name']).replace('+', '%20')
        build_url = "%s/dashboard/#/project/%s/build/%s/%s" % (root_url,
                                                               project_name,
                                                               data['build']['build_number'],
                                                               data['build']['restart_counter'])
        job_url = "%s/dashboard/#/project/%s/build/%s/%s/job/%s" % (root_url,
                                                                    project_name,
                                                                    data['build']['build_number'],
                                                                    data['build']['restart_counter'],
                                                                    job_name)

        job_api_url = "%s/api/v1/projects/%s/jobs/%s" % (root_url,
                                                         data['project']['id'],
                                                         data['job']['id'])

        build_api_url = "%s/api/v1/projects/%s/builds/%s" % (root_url,
                                                             data['project']['id'],
                                                             data['build']['id'])

        data['env_vars'] = {
            "TERM": "xterm-256color",
            "INFRABOX_JOB_ID": data['job']['id'],
            "INFRABOX_JOB_URL": job_url,
            "INFRABOX_JOB_API_URL": job_api_url,
            "INFRABOX_BUILD_API_URL": build_api_url,
            "INFRABOX_BUILD_NUMBER": "%s" % data['build']['build_number'],
            "INFRABOX_BUILD_RESTART_COUNTER": "%s" % data['build']['restart_counter'],
            "INFRABOX_BUILD_URL": build_url,
        }

        data['secrets'] = {}

        if data['commit']['branch']:
            data['env_vars']['INFRABOX_GIT_BRANCH'] = data['commit']['branch']

        if data['commit']['tag']:
            data['env_vars']['INFRABOX_GIT_TAG'] = data['commit']['tag']

        if pull_request_id:
            data['env_vars']['INFRABOX_GITHUB_PULL_REQUEST'] = "true"

        if env_vars:
            for name, value in env_vars.iteritems():
                data['env_vars'][name] = str(value)

        if env_var_refs:
            for name, value in env_var_refs.iteritems():
                secret = get_secret(value)

                if secret is None:
                    abort(400, "Secret %s not found" % value)

                data['secrets'][name] = secret

        return jsonify(data)
Ejemplo n.º 11
0
    def get(self):
        """Return the full execution context for the job in g.token as JSON.

        Collects job, project, build, repository, commit, dependency and
        parent information from the database, resolves secrets (plain
        project secrets or Vault references) for deployments, registries
        and env var refs, and builds the default INFRABOX_* env vars.

        Aborts:
            409 -- the job is no longer running
            400 -- unknown deployment/registry type or unresolvable secret
        """
        job_id = g.token['job']['id']
        data = {}

        # get all the job details
        # NOTE: the "null" columns are placeholders that keep the positional
        # indices r[0]..r[29] used below stable.
        r = g.db.execute_one(
            '''
            SELECT
                j.name,
                null,
                j.dockerfile,
                p.id,
                p.type,
                p.name,
                b.id,
                b.commit_id,
                b.source_upload_id,
                b.build_number,
                u.github_api_token,
                u.username,
                null,
                j.type,
                null,
                null,
                j.repo,
                null,
                j.state,
                null,
                j.env_var,
                j.env_var_ref,
                null,
                null,
                u.id,
                j.build_arg,
                j.deployment,
                null,
                b.restart_counter,
                j.definition
            FROM job j
            INNER JOIN build b
                ON j.build_id = b.id
                AND j.project_id = b.project_id
            INNER JOIN collaborator co
                ON co.project_id = j.project_id
                AND co.role = 'Owner'
            INNER JOIN "user" u
                ON co.user_id = u.id
            INNER JOIN project p
                ON co.project_id = p.id
            WHERE j.id = %s
        ''', [job_id])

        limits = {}
        definition = r[29]  # job definition document, may be NULL
        build_only = True

        if definition:
            limits = definition['resources']['limits']
            build_only = definition.get('build_only', True)

        data['job'] = {
            "id": job_id,
            "name": r[0],
            "dockerfile": r[2],
            "build_only": build_only,
            "type": r[13],
            "repo": r[16],
            "state": r[18],
            "cpu": limits.get('cpu', 1),
            "memory": limits.get('memory', 1024),
            "build_arguments": r[25],
            "definition": r[29]
        }

        # A job in a terminal state must not fetch its context anymore.
        state = data['job']['state']
        if state in ("finished", "error", "failure", "skipped", "killed",
                     "unstable"):
            abort(409, 'job not running anymore')

        env_vars = r[20]       # plain env vars defined on the job
        env_var_refs = r[21]   # env vars referencing secrets by name
        deployments = r[26]

        data['project'] = {
            "id": r[3],
            "type": r[4],
            "name": r[5],
        }

        data['build'] = {
            "id": r[6],
            "commit_id": r[7],
            "source_upload_id": r[8],
            "build_number": r[9],
            "restart_counter": r[28]
        }

        data['repository'] = {
            "owner": r[11],
            "name": None,
            "github_api_token": r[10],
            "private": False
        }

        data['commit'] = {"branch": None, "tag": None}

        pull_request_id = None
        commit_env = None
        # github/gerrit projects additionally have repository and commit rows
        if data['project']['type'] == 'github' or data['project'][
                'type'] == 'gerrit':
            r = g.db.execute_one(
                '''
                SELECT
                    r.clone_url, r.name, r.private
                FROM repository r
                WHERE r.project_id = %s
            ''', [data['project']['id']])

            data['repository']['clone_url'] = r[0]
            data['repository']['name'] = r[1]
            data['repository']['private'] = r[2]

            # A regular commit
            r = g.db.execute_one(
                '''
                SELECT
                    c.branch, c.committer_name, c.tag, c.pull_request_id, c.env
                FROM commit c
                WHERE c.id = %s
                    AND c.project_id = %s
            ''', [data['build']['commit_id'], data['project']['id']])

            data['commit'] = {
                "id": data['build']['commit_id'],
                "branch": r[0],
                "committer_name": r[1],
                "tag": r[2]
            }
            pull_request_id = r[3]
            commit_env = r[4]  # extra env vars attached to the commit

        if data['project']['type'] == 'upload':
            r = g.db.execute_one(
                '''
                SELECT filename FROM source_upload
                WHERE id = %s
            ''', [data['build']['source_upload_id']])

            data['source_upload'] = {"filename": r[0]}

        # get dependencies
        # Recursive CTE walks the dependency graph upwards from this job and
        # returns every transitive dependency, excluding the job itself.
        r = g.db.execute_many(
            '''
              WITH RECURSIVE next_job(id, parent) AS (
                      SELECT j.id, (p->>'job-id')::uuid parent
                      FROM job j, jsonb_array_elements(j.dependencies) AS p
                      WHERE j.id = %s
                  UNION
                      SELECT j.id, (p->>'job-id')::uuid parent
                      FROM job j
                      LEFT JOIN LATERAL jsonb_array_elements(j.dependencies) AS p ON true,
                      next_job nj WHERE j.id = nj.parent
              )
              SELECT id, name, state, start_date, end_date, dependencies
              FROM job WHERE id IN (SELECT distinct id FROM next_job WHERE id != %s)
        ''', [data['job']['id'], data['job']['id']])

        data['dependencies'] = []

        for d in r:
            data['dependencies'].append({
                "id": d[0],
                "name": d[1],
                "state": d[2],
                "start_date": str(d[3]),
                "end_date": str(d[4]),
                "depends_on": d[5]
            })

        # get parents (direct dependencies only, unlike the CTE above)
        r = g.db.execute_many(
            '''
          SELECT id, name FROM job where id
              IN (SELECT (deps->>'job-id')::uuid FROM job, jsonb_array_elements(job.dependencies) as deps WHERE id = %s)
        ''', [data['job']['id']])

        data['parents'] = []

        for d in r:
            data['parents'].append({"id": d[0], "name": d[1]})

        # get the secrets
        secrets = g.db.execute_many(
            '''
             SELECT name, value
             FROM secret
             WHERE project_id = %s
        ''', [data['project']['id']])

        # NOTE(review): 'fork' is never set on data['job'] above, so this
        # looks like it is always False here -- confirm against callers.
        is_fork = data['job'].get('fork', False)

        def get_secret_type(name):
            """Classify a secret reference: a name that parses as JSON is
            treated as a Vault reference, anything else as a plain secret."""
            try:
                json.loads(name)
                return 'vault'
            except ValueError:
                return 'secret'

        def get_auth_type(res):
            """Choose the Vault auth method from a vault DB row: 'token' when
            a token is stored, otherwise 'appRole', or 'error' when the
            role_id/secret_id pair is incomplete."""
            token, role_id, secret_id = res[2], res[5], res[6]
            if token:
                validate_res = 'token'
            else:
                # validate appRole
                validate_res = 'appRole'
                if not role_id or not secret_id:
                    validate_res = 'error'
            return validate_res

        def get_secret(name, project_id=None):
            """Resolve a secret reference to its plaintext value.

            Vault references (JSON names) are fetched over HTTP from the
            project's configured Vault (v1 or v2 KV layout, token or appRole
            auth) and require project_id; plain names are looked up in the
            project's secrets and decrypted. Returns None for an unknown
            plain secret; aborts with 400 on Vault errors or when a fork
            tries to read a plain secret.
            """
            secret_type = get_secret_type(name)
            if secret_type == 'vault':
                vault = json.loads(name)
                vault_name = vault['$vault']
                secret_path = vault['$vault_secret_path']
                secret_key = vault['$vault_secret_key']

                if not project_id:
                    abort(
                        400,
                        "project_id is essential for getting Vault: '%s' " %
                        vault_name)

                result = g.db.execute_one(
                    """
                  SELECT url, version, token, ca, namespace, role_id, secret_id FROM vault WHERE name = %s and project_id = %s
                """, [vault_name, project_id])

                if not result:
                    abort(
                        400, "Cannot get Vault '%s' in project '%s' " %
                        (vault_name, project_id))

                url, version, token, ca, namespace, role_id, secret_id = result[
                    0], result[1], result[2], result[3], result[4], result[
                        5], result[6]
                if not namespace:
                    namespace = ''
                if version == 'v1':
                    url += '/v1/' + namespace + '/' + secret_path
                elif version == 'v2':
                    # KV v2 inserts '/data/' after the mount point
                    paths = secret_path.split('/')
                    url += '/v1/' + namespace + '/' + paths[
                        0] + '/data/' + '/'.join(paths[1:])
                # choose validate way
                validate_res = get_auth_type(result)
                if validate_res == 'token':
                    app.logger.info('validate way is token')
                elif validate_res == 'appRole':
                    # log in with role_id/secret_id to obtain a client token
                    app_role = {'role_id': role_id, 'secret_id': secret_id}
                    json_data = json.dumps(app_role)
                    app_role_url = result[
                        0] + '/v1/' + namespace + '/auth/approle/login'
                    # NOTE(review): TLS verification is disabled for this call
                    res = requests.post(url=app_role_url,
                                        data=json_data,
                                        verify=False)
                    if res.status_code == 200:
                        json_res = json.loads(res.content)
                        token = json_res['auth']['client_token']
                    else:
                        abort(
                            400,
                            "Getting value from vault error: url is '%s', validate way is appRole "
                            % (url))
                else:
                    abort(
                        400, "Validate way is '%s' ! result is '%s' " %
                        (validate_res, result))

                if not ca:
                    # NOTE(review): no CA configured -> TLS verification off
                    res = requests.get(url=url,
                                       headers={'X-Vault-Token': token},
                                       verify=False)
                else:
                    # write the CA bundle to a temp file so requests can use
                    # it as the verify= argument
                    with tempfile.NamedTemporaryFile(delete=False) as f:
                        f.write(ca)
                        f.flush()  # ensure all data written
                        res = requests.get(url=url,
                                           headers={'X-Vault-Token': token},
                                           verify=f.name)
                if res.status_code == 200:
                    json_res = json.loads(res.content)
                    # KV v2 nests values under data.data, v1 directly under data
                    if json_res['data'].get('data') and isinstance(
                            json_res['data'].get('data'), dict):
                        value = json_res['data'].get('data').get(secret_key)
                    else:
                        value = json_res['data'].get(secret_key)
                    return value
                else:
                    abort(
                        400,
                        "Getting value from vault error: url is '%s', token is '%s' "
                        % (url, result))
            else:
                if is_fork:
                    abort(
                        400, 'Access to secret %s is not allowed from a fork' %
                        name)

                for ev in secrets:
                    if ev[0] == name:
                        return decrypt_secret(ev[1])
                return None

        # Deployments
        # Replace every {'$secret': name} reference with its plaintext value.
        # NOTE(review): get_secret is called here without project_id, so a
        # Vault-style reference in a deployment would abort -- confirm
        # whether that is intended.
        data['deployments'] = []
        if deployments:
            for dep in deployments:
                if dep['type'] == 'docker-registry':
                    if 'password' not in dep:
                        data['deployments'].append(dep)
                        continue

                    secret_name = dep['password']['$secret']
                    secret = get_secret(secret_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_name)

                    dep['password'] = secret
                    data['deployments'].append(dep)
                elif dep['type'] == 'gcr':
                    service_account = dep['service_account']['$secret']
                    secret = get_secret(service_account)

                    if secret is None:
                        abort(400, "Secret %s not found" % service_account)

                    dep['service_account'] = secret
                    data['deployments'].append(dep)
                elif dep['type'] == 'ecr':
                    access_key_id_name = dep['access_key_id']['$secret']
                    secret = get_secret(access_key_id_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % access_key_id_name)

                    dep['access_key_id'] = secret

                    secret_access_key_name = dep['secret_access_key'][
                        '$secret']
                    secret = get_secret(secret_access_key_name)

                    if secret is None:
                        abort(400,
                              "Secret %s not found" % secret_access_key_name)

                    dep['secret_access_key'] = secret
                    data['deployments'].append(dep)
                else:
                    abort(400, "Unknown deployment type")

        # Registries
        # Same secret substitution for the registries from the job definition.
        data['registries'] = []
        definition = data['job']['definition']
        registries = None

        if definition:
            registries = definition.get('registries', None)

        if registries:
            for r in registries:
                if r['type'] == 'docker-registry':
                    secret_name = r['password']['$secret']
                    secret = get_secret(secret_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % secret_name)

                    r['password'] = secret
                    data['registries'].append(r)
                elif r['type'] == 'gcr':
                    service_account = r['service_account']['$secret']
                    secret = get_secret(service_account)

                    if secret is None:
                        abort(400, "Secret %s not found" % service_account)

                    r['service_account'] = secret
                    data['registries'].append(r)
                elif r['type'] == 'ecr':
                    access_key_id_name = r['access_key_id']['$secret']
                    secret = get_secret(access_key_id_name)

                    if secret is None:
                        abort(400, "Secret %s not found" % access_key_id_name)

                    r['access_key_id'] = secret

                    secret_access_key_name = r['secret_access_key']['$secret']
                    secret = get_secret(secret_access_key_name)

                    if secret is None:
                        abort(400,
                              "Secret %s not found" % secret_access_key_name)

                    r['secret_access_key'] = secret
                    data['registries'].append(r)
                else:
                    abort(400, "Unknown registry type")

        root_url = get_root_url("global")

        # Default env vars
        # URL-encode names for the dashboard links (quote_plus, then spaces
        # as %20 instead of '+'; note: Python 2 urllib).
        project_name = urllib.quote_plus(data['project']['name']).replace(
            '+', '%20')
        job_name = urllib.quote_plus(data['job']['name']).replace('+', '%20')
        build_url = "%s/dashboard/#/project/%s/build/%s/%s" % (
            root_url, project_name, data['build']['build_number'],
            data['build']['restart_counter'])
        job_url = "%s/dashboard/#/project/%s/build/%s/%s/job/%s" % (
            root_url, project_name, data['build']['build_number'],
            data['build']['restart_counter'], job_name)

        job_api_url = "%s/api/v1/projects/%s/jobs/%s" % (
            root_url, data['project']['id'], data['job']['id'])

        build_api_url = "%s/api/v1/projects/%s/builds/%s" % (
            root_url, data['project']['id'], data['build']['id'])

        data['env_vars'] = {
            "TERM":
            "xterm-256color",
            "INFRABOX_JOB_ID":
            data['job']['id'],
            "INFRABOX_JOB_URL":
            job_url,
            "INFRABOX_JOB_API_URL":
            job_api_url,
            "INFRABOX_BUILD_API_URL":
            build_api_url,
            "INFRABOX_BUILD_NUMBER":
            "%s" % data['build']['build_number'],
            "INFRABOX_BUILD_RESTART_COUNTER":
            "%s" % data['build']['restart_counter'],
            "INFRABOX_BUILD_URL":
            build_url,
        }

        data['secrets'] = {}

        if data['commit']['branch']:
            data['env_vars']['INFRABOX_GIT_BRANCH'] = data['commit']['branch']

        if data['commit']['tag']:
            data['env_vars']['INFRABOX_GIT_TAG'] = data['commit']['tag']

        if data['commit'].get('id', None):
            data['env_vars']['INFRABOX_COMMIT_ID'] = data['commit']['id']

        if pull_request_id:
            data['env_vars']['INFRABOX_GITHUB_PULL_REQUEST'] = "true"

        if commit_env:
            data['env_vars'].update(commit_env)

        # User-defined env vars (iteritems: this module targets Python 2)
        if env_vars:
            for name, value in env_vars.iteritems():
                try:
                    data['env_vars'][name] = str(value)
                except UnicodeEncodeError:
                    # non-ASCII values cannot be str()'d in Python 2
                    data['env_vars'][name] = value.encode('utf-8')

        # Env var refs become entries in data['secrets'], not env_vars
        if env_var_refs:
            for name, value in env_var_refs.iteritems():
                secret = get_secret(value, data['project']['id'])

                if secret is None:
                    abort(400, "Secret %s not found" % value)

                data['secrets'][name] = secret

        return jsonify(data)
Ejemplo n.º 12
0
    def get(self):
        """GitHub OAuth callback.

        Exchanges the authorization code for an access token, fetches the
        GitHub user profile, then either logs the user in (creating the
        account on first login) or links the GitHub account to an existing
        user identified by the 't' api token, and finally sets the session
        cookie and redirects to the dashboard.
        """
        state = request.args.get('state')
        code = request.args.get('code')
        api_token = request.args.get('t', None)

        # The state nonce must be one we handed out earlier.
        if not states.get(state, None):
            abort(401)

        del states[state]

        # TODO(ib-steffen): allow custom ca bundles
        token_response = requests.post(
            GITHUB_TOKEN_URL,
            data={
                'client_id': GITHUB_CLIENT_ID,
                'client_secret': GITHUB_CLIENT_SECRET,
                'code': code,
                'state': state
            },
            headers={'Accept': 'application/json'},
            verify=False)

        if token_response.status_code != 200:
            logger.error(token_response.text)
            abort(500)

        access_token = token_response.json()['access_token']
        check_org(access_token)

        # TODO(ib-steffen): allow custom ca bundles
        profile_response = requests.get(
            GITHUB_USER_PROFILE_URL,
            headers={
                'Accept': 'application/json',
                'Authorization': 'token %s' % access_token
            },
            verify=False)
        github_user = profile_response.json()

        github_id = github_user['id']

        if os.environ['INFRABOX_GITHUB_LOGIN_ENABLED'] == 'true':
            # Login via GitHub: look the user up by github_id and create a
            # new account on first login.
            user = g.db.execute_one_dict(
                '''
                SELECT id FROM "user"
                WHERE github_id = %s
            ''', [github_id])

            if not user:
                user = g.db.execute_one_dict(
                    '''
                    INSERT INTO "user" (github_id, username, avatar_url, name)
                    VALUES (%s, %s, %s, %s) RETURNING id
                ''', [
                        github_id, github_user['login'],
                        github_user['avatar_url'], github_user['name']
                    ])

            user_id = user['id']
        else:
            # Account linking only: the caller must present an existing
            # InfraBox api token via the 't' query parameter.
            if not api_token:
                abort(404)

            user = g.db.execute_one_dict(
                '''
                SELECT id
                FROM "user"
                WHERE github_api_token = %s
            ''', [api_token])

            if not user:
                abort(404)

            user_id = user['id']

        # Persist the (possibly refreshed) token and GitHub id.
        g.db.execute(
            '''
            UPDATE "user" SET github_api_token = %s, github_id = %s
            WHERE id = %s
        ''', [access_token, github_id, user_id])

        g.db.commit()

        token = encode_user_token(user_id)
        url = get_root_url('global') + '/dashboard/'
        logger.debug("Redirecting GitHub user to %s", url)
        res = redirect(url)
        res.set_cookie('token', token)
        return res
Ejemplo n.º 13
0
import os

import requests
from flask_restplus import Resource

from pyinfraboxutils import get_logger, get_root_url
from pyinfraboxutils.ibrestplus import api
from pyinfraboxutils.token import encode_user_token

logger = get_logger('github')

# GitHub OAuth application credentials and endpoint URLs, all read from the
# environment at import time (a missing variable raises KeyError on import).
GITHUB_CLIENT_ID = os.environ['INFRABOX_GITHUB_CLIENT_ID']
GITHUB_CLIENT_SECRET = os.environ['INFRABOX_GITHUB_CLIENT_SECRET']
GITHUB_AUTHORIZATION_URL = os.environ[
    'INFRABOX_GITHUB_LOGIN_URL'] + "/oauth/authorize"
GITHUB_TOKEN_URL = os.environ[
    'INFRABOX_GITHUB_LOGIN_URL'] + "/oauth/access_token"
GITHUB_USER_PROFILE_URL = os.environ['INFRABOX_GITHUB_API_URL'] + "/user"
GITHUB_CALLBACK_URL = get_root_url('global') + "/github/auth/callback"

# TODO(ib-steffen): move into DB
# In-process store of issued OAuth state nonces; lost on restart and not
# shared between worker processes (hence the TODO above).
states = {}


def get_next_page(r):
    link = r.headers.get('Link', None)

    if not link:
        return None

    n1 = link.find('rel=\"next\"')

    if n1 < 0:
        return None