Ejemplo n.º 1
0
def main():
    """Extract repo data from the launch table into the repo table.

    Parses CLI arguments, sets up a timestamped logger, then delegates
    to ``create_repo_table``. Uses module-level ``verbose``/``logger``.
    """
    global verbose
    global logger

    args = get_args()
    db_name = args.db_name
    check_if_exists(db_name)
    # Quote each provider name so it can be used in an SQL IN (...) clause.
    # FIX: use the providers from CLI args; a hard-coded "GitHub,Gist"
    # debug leftover previously ignored args.providers entirely.
    providers = ['"' + p.strip() + '"' for p in args.providers.split(",")]
    launch_limit = args.launch_limit
    access_token = args.access_token
    if not access_token:
        # Normalize empty string to None so downstream code can rely on it.
        access_token = None
        print(
            "No token for GitHub API, no additional data will be fetched from GitHub API."
        )
    max_workers = args.max_workers
    verbose = args.verbose

    _, script_ts_safe = get_utc_ts()
    # Log name: <script basename without .py>_at_<timestamp>, "-" replaced
    # so the name stays filesystem/identifier friendly.
    logger_name = f'{os.path.basename(__file__)[:-3]}_at_{script_ts_safe}'.replace(
        "-", "_")
    logger = get_logger(logger_name)
    if verbose:
        print(f"Logs are in {logger_name}.log")

    create_repo_table(db_name, providers, launch_limit, access_token,
                      max_workers)
    print(f"""\n
    Repo data is extracted from `{launch_table}` table and saved into `{repo_table}` table.
    You can open this database with `sqlite3 {db_name}` command and then run any sqlite3 command, 
    e.g., `select count(*) from {repo_table}` to get number of repos.
    """)
Ejemplo n.º 2
0
def deploy_grafana_config(conf_file):
    '''Render the Grafana config template and store it as a K8s secret.'''
    secret_name = 'grafana-config'
    src_file = os.path.join(os.getcwd(),
                            "deploy/monitoring/grafana/" + conf_file)
    dst_file = os.path.join(os.getcwd(), conf_file)

    # Substitute the target namespace into the template before deploying.
    with open(src_file) as src:
        rendered = src.read()
    rendered = rendered.replace('REPLACE_NAMESPACE',
                                f'"{deploy_options.namespace}"')
    with open(dst_file, 'w') as dst:
        dst.write(rendered)

    create_cmd = "{} create secret generic {} --namespace={} --from-file=grafana.ini={}".format(
        CMD_BIN, secret_name, deploy_options.namespace, dst_file)
    secret_exists = utils.check_if_exists(k8s_object='secret',
                                          k8s_object_name=secret_name,
                                          target=deploy_options.target,
                                          namespace=deploy_options.namespace,
                                          profile=deploy_options.profile)
    if secret_exists:
        # Existing secret: replace it via delete + create.
        print("Updating Grafana Configuration")
        delete_cmd = "{} delete secret {} --namespace={}".format(
            CMD_BIN, secret_name, deploy_options.namespace)
        utils.check_output(delete_cmd)
        utils.check_output(create_cmd)
    else:
        print("Creating Grafana Configuration")
        utils.check_output(create_cmd)
Ejemplo n.º 3
0
def main():
    """Install OLM on plain K8s targets; OCP already ships with it."""
    utils.set_profile(deploy_options.target, deploy_options.profile)

    if deploy_options.target == "oc-ingress":
        # OCP bundles OLM, nothing to do.
        print("OLM Deployment not necessary")
        return

    # K8s: only install when the 'olm' namespace is absent.
    if utils.check_if_exists('namespace', 'olm', namespace='olm'):
        return

    manifest_urls = (
        "https://github.com/operator-framework/operator-lifecycle-manager/releases/download/0.15.1/crds.yaml",
        "https://github.com/operator-framework/operator-lifecycle-manager/releases/download/0.15.1/olm.yaml",
    )
    for url in manifest_urls:
        local_name = "build/{}".format(os.path.basename(urlparse(url).path))
        dst_file = os.path.join(os.getcwd(), local_name)
        print("Deploying {}".format(dst_file))
        urlretrieve(url, dst_file)
        utils.apply(dst_file)

    check_deployment()
Ejemplo n.º 4
0
def index():
    """Index page: validate a submitted address and render form or page.

    Returns a form fragment for AJAX requests, otherwise the full page.
    """
    count = Address.query.count()
    form = SpammerForm()
    # try to validate, and check for AJAX submission
    if form.validate_on_submit():
        # FIX: branch body was over-indented (12 spaces); normalized to
        # standard 4-space nesting without changing control flow.
        if not utils.check_if_exists(form.address.data):
            flash(
                u"We've added %s to the database." % form.address.data,
                "text-success")
        else:
            # NOTE(review): other views flash duplicates with "text-error";
            # confirm "text-success" is intentional for this message.
            flash(
                u"We already know about %s, though." % form.address.data,
                "text-success")
    if request.is_xhr:
        # AJAX: OK to send back a fragment
        return render_template(
            'form.jinja',
            form=form,
            )
    # GET or no JS, so render a full page
    return render_template(
        'index.jinja',
        form=form,
        count=count,
        recaptcha_public_key=app.config['RECAPTCHA_PUBLIC_KEY'])
Ejemplo n.º 5
0
def main():
    '''Deploy OLM manifests when targeting plain K8s (OCP has OLM built in).'''
    utils.verify_build_directory(deploy_options.namespace)

    if deploy_options.target == "oc-ingress":
        # OCP already ships with OLM.
        print("OLM Deployment not necessary")
        return

    already_deployed = utils.check_if_exists(k8s_object='namespace',
                                             k8s_object_name='olm',
                                             target=deploy_options.target,
                                             namespace='olm',
                                             profile=deploy_options.profile)
    if already_deployed:
        return

    manifest_urls = (
        "https://github.com/operator-framework/operator-lifecycle-manager/releases/download/0.15.1/crds.yaml",
        "https://github.com/operator-framework/operator-lifecycle-manager/releases/download/0.15.1/olm.yaml",
    )
    for url in manifest_urls:
        dst_file = os.path.join(os.getcwd(), 'build',
                                deploy_options.namespace,
                                os.path.basename(urlparse(url).path))
        print("Deploying {}".format(dst_file))
        urlretrieve(url, dst_file)
        utils.apply(target=deploy_options.target,
                    namespace=None,
                    profile=deploy_options.profile,
                    file=dst_file)

    check_deployment()
Ejemplo n.º 6
0
def deploy_oauth_reqs():
    """Prepare OAuth prerequisites for the Grafana proxy.

    Creates the session-secret and service account (if missing), annotates
    the service account for OAuth redirects, and renders/applies a secret
    carrying the cluster's CA certificate.
    """
    # Random session secret for the oauth proxy sidecar.
    session_secret = secrets.token_hex(43)
    secret_name = 'grafana-proxy'
    if not utils.check_if_exists('secret', secret_name):
        utils.check_output(
            "{} -n assisted-installer create secret generic {} --from-literal=session_secret={}"
            .format(CMD_BIN, secret_name, session_secret))

    # Create and annotate the grafana service account.
    sa_name = 'grafana'
    if not utils.check_if_exists('sa', sa_name):
        utils.check_output(
            "{} -n assisted-installer create serviceaccount {} ".format(
                CMD_BIN, sa_name))
    json_manifest = '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"grafana"}}'
    annotation_name = 'serviceaccounts.openshift.io/oauth-redirectreference.grafana'
    utils.check_output(
        "{} -n assisted-installer annotate serviceaccount {} --overwrite {}='{}'"
        .format(CMD_BIN, sa_name, annotation_name, json_manifest))

    # CA secret already deployed: nothing more to do.
    if utils.check_if_exists('secret', 'openshift-custom-ca'):
        return

    # Fetch the router's TLS certificate from the ingress namespace.
    router_secret = 'router-certs-default'
    ns = 'openshift-ingress'
    template = '{{index .data "tls.crt"}}'
    ca_cert = utils.check_output(
        "{} get secret {} --namespace={} --template '{}'".format(
            CMD_BIN, router_secret, ns, template))

    # Render the manifest with the cluster's CA certificate and apply it.
    src_file = os.path.join(
        os.getcwd(),
        "deploy/monitoring/prometheus/assisted-installer-ocp-prometheus-custom-ca.yaml"
    )
    dst_file = os.path.join(
        os.getcwd(),
        "build/assisted-installer-ocp-prometheus-custom-ca.yaml")
    topic = 'OCP Custom CA'
    with open(src_file, "r") as src, open(dst_file, "w+") as dst:
        rendered = src.read().replace("BASE64_CERT", ca_cert)
        print("Deploying {}: {}".format(topic, dst_file))
        dst.write(rendered)
    utils.apply(dst_file)
Ejemplo n.º 7
0
def deploy_oauth_reqs():
    '''oauth Integration in OCP.

    Creates the proxy session secret, annotates the prometheus-k8s
    service account for OAuth redirects, and (once) renders and applies
    a secret carrying the cluster CA certificate.
    '''
    ## Token generation for session_secret
    session_secret = secrets.token_hex(43)
    secret_name = 'prometheus-k8s-proxy'
    if not utils.check_if_exists(k8s_object='secret',
                                 k8s_object_name=secret_name,
                                 target=deploy_options.target,
                                 namespace=deploy_options.namespace,
                                 profile=deploy_options.profile):
        cmd = "{} -n {} create secret generic {} --from-literal=session_secret={}" \
                .format(CMD_BIN, deploy_options.namespace, secret_name, session_secret)
        utils.check_output(cmd)

    ## Annotate Serviceaccount so the oauth proxy can redirect to the route
    json_manifest = '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"prometheus-assisted"}}'
    sa_name = 'prometheus-k8s'
    annotation_name = 'serviceaccounts.openshift.io/oauth-redirectreference.assisted-installer-prometheus'
    cmd = "{} -n {} annotate serviceaccount {} --overwrite {}='{}'"\
            .format(CMD_BIN, deploy_options.namespace, sa_name, annotation_name, json_manifest)
    utils.check_output(cmd)

    # Download OCP Certificate as a secret
    cert_secret_name = 'openshift-custom-ca'
    cmd = "{} -n {} get secret {} --no-headers".format(
        CMD_BIN, deploy_options.namespace, cert_secret_name)
    cert_secret = utils.check_output(cmd)
    if not cert_secret:
        # Get OCP Certificate from the default ingress router
        secret_name = 'router-certs-default'
        namespace = 'openshift-ingress'
        template = '{{index .data "tls.crt"}}'
        cmd = "{} get secret {} --namespace={} --template '{}'"\
                .format(CMD_BIN, secret_name, namespace, template)
        ca_cert = utils.check_output(cmd)

        # Render secret manifest with CA Certificate of the OCP Cluster
        src_file = os.path.join(os.getcwd(), \
                'deploy/monitoring/prometheus/assisted-installer-ocp-prometheus-custom-ca.yaml')
        dst_file = os.path.join(os.getcwd(), \
                'build', deploy_options.namespace, 'assisted-installer-ocp-prometheus-custom-ca.yaml')
        topic = 'OCP Custom CA'
        with open(src_file, "r") as src:
            with open(dst_file, "w+") as dst:
                data = src.read()
                data = data.replace("BASE64_CERT", ca_cert)
                data = data.replace('REPLACE_NAMESPACE',
                                    f'"{deploy_options.namespace}"')
                print("Deploying {}: {}".format(topic, dst_file))
                dst.write(data)
        utils.apply(target=deploy_options.target,
                    namespace=deploy_options.namespace,
                    profile=deploy_options.profile,
                    file=dst_file)
    else:
        # FIX: the placeholder was never formatted — print() was given the
        # template and the name as two separate arguments.
        print("Secret {} already exists".format(cert_secret_name))
Ejemplo n.º 8
0
def deploy_grafana_ds():
    '''Create the Grafana datasource secret if it does not already exist.'''
    namespace = 'assisted-installer'
    secret_name = 'grafana-datasources'
    datasource_file = os.path.join(
        os.getcwd(), "deploy/monitoring/grafana/prometheus.json")
    if utils.check_if_exists('secret', secret_name):
        # Secret already present; leave it untouched.
        return
    print("Creating Grafana Datasource")
    create_cmd = "{} create secret generic {} --namespace={} --from-file=prometheus.yaml={}".format(
        CMD_BIN, secret_name, namespace, datasource_file)
    utils.check_output(create_cmd)
Ejemplo n.º 9
0
def deploy_grafana_ds():
    '''Deploy grafana daemonSet'''
    secret_name = 'grafana-datasources'
    src_file = os.path.join(os.getcwd(), "deploy/monitoring/grafana/prometheus.json")
    dst_file = os.path.join(os.getcwd(), "build/prometheus.json")
    # Render the datasource template with the deployment namespace.
    with open(src_file) as src:
        rendered = src.read().replace('REPLACE_NAMESPACE',
                                      deploy_options.namespace)
    with open(dst_file, 'w') as out:
        out.write(rendered)
    if utils.check_if_exists('secret', secret_name, deploy_options.namespace):
        # Secret already exists; nothing to create.
        return
    print("Creating Grafana Datasource")
    create_cmd = "{} create secret generic {} --namespace={} --from-file=prometheus.yaml={}".format(
        CMD_BIN, secret_name, deploy_options.namespace, dst_file)
    utils.check_output(create_cmd)
Ejemplo n.º 10
0
def deploy_grafana_config(conf_file):
    '''Create or refresh the grafana-config secret from a config file.'''
    namespace = 'assisted-installer'
    secret_name = 'grafana-config'
    src_file = os.path.join(os.getcwd(),
                            "deploy/monitoring/grafana/" + conf_file)
    create_cmd = "{} create secret generic {} --namespace={} --from-file=grafana.ini={}".format(
        CMD_BIN, secret_name, namespace, src_file)
    if utils.check_if_exists('secret', secret_name):
        # Replace the existing secret by deleting and recreating it.
        print("Updating Grafana Configuration")
        delete_cmd = "{} delete secret {} --namespace={}".format(
            CMD_BIN, secret_name, namespace)
        utils.check_output(delete_cmd)
        utils.check_output(create_cmd)
    else:
        print("Creating Grafana Configuration")
        utils.check_output(create_cmd)
Ejemplo n.º 11
0
def main():
    """Generate an EC keypair for local auth and deploy it as a secret.

    With --apply-manifest disabled, only renders an empty-valued manifest.
    Otherwise generates a fresh prime256v1 keypair (unless the secret
    already exists) and applies the rendered secret to the cluster.
    """
    deploy_options = deployment_options.load_deployment_options()
    utils.verify_build_directory(deploy_options.namespace)

    # Render a file without values for the operator as we don't want every
    # deployment to have the same values
    if not deploy_options.apply_manifest:
        render_file(deploy_options.namespace, "", "")
        return

    secret_name = 'assisted-installer-local-auth-key'
    exists = utils.check_if_exists("secret",
                                   secret_name,
                                   target=deploy_options.target,
                                   namespace=deploy_options.namespace,
                                   profile=deploy_options.profile)

    if exists:
        print(
            f'Secret {secret_name} already exists in namespace {deploy_options.namespace}'
        )
        return

    # FIX: use the context manager so the directory holding the private
    # key is removed when done (previously it lingered until GC/exit).
    with tempfile.TemporaryDirectory() as output_dir:
        # FIX: dropped no-op f-string prefixes on constant filenames.
        priv_path = os.path.join(output_dir, 'ec-private-key.pem')
        pub_path = os.path.join(output_dir, 'ec-public-key.pem')

        print(
            utils.check_output(
                f'openssl ecparam -name prime256v1 -genkey -noout -out {priv_path}'
            ))
        print(
            utils.check_output(
                f'openssl ec -in {priv_path} -pubout -out {pub_path}'))

        # Keys are embedded into the manifest before the directory is cleaned.
        secret_file = render_file(deploy_options.namespace,
                                  encoded_contents(priv_path),
                                  encoded_contents(pub_path))

    utils.apply(target=deploy_options.target,
                namespace=deploy_options.namespace,
                profile=deploy_options.profile,
                file=secret_file)
Ejemplo n.º 12
0
def index():
    """ Index page """
    count = Address.query.count()
    form = SpammerForm()
    is_ajax_post = request.method == 'POST' and request.is_xhr
    if is_ajax_post:
        # put request vars into a form and try to validate
        form.address = request.form.get('address')
        form.csrf_token = request.form.get('csrf_token')
        fresh_form = SpammerForm()
        if not form.validate_on_submit():
            # Validation error: carry the errors over to the fresh form
            fresh_form._errors = form.errors
        elif utils.check_if_exists(form.address):
            # address exists, send back an error and a new form
            flash(u"We already know that spammer!", "text-error")
        else:
            # process the address; send back thank you and a new form
            flash(u"Thanks!", "text-success")
        return render_template('form.jinja', form=fresh_form)
    # GET, just render a page with a blank form
    return render_template('index.jinja', form=form, count=count)
Ejemplo n.º 13
0
def index():
    """ Index page """
    count = Address.query.count()
    form = SpammerForm()
    if request.method != "POST" or not request.is_xhr:
        # GET (or non-AJAX), just render a page with a blank form
        return render_template("index.jinja", form=form, count=count)

    # put request vars into a form and try to validate
    form.address = request.form.get("address")
    form.csrf_token = request.form.get("csrf_token")
    new_form = SpammerForm()
    if form.validate_on_submit():
        if utils.check_if_exists(form.address):
            # address exists, send back an error and a new form
            flash(u"We already know that spammer!", "text-error")
        else:
            # process the address; send back thank you, and a new form
            flash(u"Thanks!", "text-success")
    else:
        # Validation error
        new_form._errors = form.errors
    return render_template("form.jinja", form=new_form)
Ejemplo n.º 14
0
def main():
    """Backfill buildpack/build_error columns from repo2docker build logs.

    Scans each given build-log folder, parses every per-repo ``.log``
    file for the builder used and known errors, and updates the matching
    rows in the execution table.
    """
    args = get_args()
    # check inputs
    build_log_folders = []
    for f in args.build_log_folders.split(","):
        abs_f = os.path.abspath(f)
        check_if_exists(abs_f)
        build_log_folders.append(abs_f)
    db_name = args.db_name
    check_if_exists(db_name)

    # prepare logger (name derived from script name + timestamp)
    _, script_ts_safe = get_utc_ts()
    logger_name = f'{os.path.basename(__file__)[:-3]}_at_{script_ts_safe}'.replace(
        "-", "_")
    logger = get_logger(logger_name)
    logger.info("Start")
    print(f"Logs are in {logger_name}.log")

    db = Database(db_name)
    # add new columns (idempotent: skip when they already exist)
    if "buildpack" not in db[execution_table].columns_dict:
        db[execution_table].add_column("buildpack", str)
    if "build_error" not in db[execution_table].columns_dict:
        db[execution_table].add_column("build_error", str)

    # update values of new columns per each row
    for build_log_folder in build_log_folders:
        print(build_log_folder)
        logger.info(build_log_folder)
        # Folder names end with the script timestamp; restore ':' in the
        # time part (it was replaced with '-' for filesystem safety).
        folder_name = os.path.basename(build_log_folder)
        script_timestamp = folder_name.split("_")[-1]
        d, t = script_timestamp.split("T")
        script_timestamp = f"{d}T{t.replace('-', ':')}"

        log_files = [
            i for i in os.listdir(build_log_folder) if i.endswith(".log")
        ]
        len_log_files = len(log_files)
        logger.info(f"{len_log_files} log files")
        count = 0
        for log_file in log_files:
            # Log file names start with the repo id.
            repo_id = int(log_file.split("_")[0])
            log_file = os.path.join(build_log_folder, log_file)
            count += 1
            buildpack = "404"  # sentinel: builder line not found
            build_error = "None"
            with open(log_file, "r") as f:
                for line in f:
                    line = line.rstrip()
                    if buildpack == "404" and line.startswith(
                            "Using") and line.endswith("builder"):
                        buildpack = line.split(" ")[1]
                    # TODO what are other errors, how to detect them?
                    elif build_error == "None" and "ReadTimeoutError" in line:
                        # NOTE: this doesnt catch only docker timeouts, e.g. also from pip
                        # urllib3.exceptions.ReadTimeoutError: UnixHTTPConnectionPool(host='localhost', port=None) ->
                        # this is the timeout error from docker from repo2docker
                        build_error = "ReadTimeoutError"
                        logger.info(f"{repo_id}: ReadTimeoutError: {line}")
                        break
            # save new data into tables
            new_data = {"buildpack": buildpack, "build_error": build_error}
            if buildpack == "404":
                # TODO check why?
                #  - docker readtimeout
                #  - repo or ref doesnt exist anymore or another cloning error
                logger.warning(f"{repo_id}: {new_data}")
            # update only rows that this log file is related,
            # a repo can have many builds in different times for different r2d versions.
            # FIX: values are bound as parameters instead of being
            # interpolated into the SQL (log-derived strings could break
            # or inject into the statement); only the table name, which
            # is code-controlled, stays in the f-string.
            db.conn.execute(
                f"""UPDATE {execution_table}
                    SET buildpack=?, build_error=?
                    WHERE script_timestamp=? AND repo_id=?;""",
                (buildpack, build_error, script_timestamp, repo_id))
            db.conn.commit()
            print(f'{(count*100)/len_log_files:.3f}%\r', end="")

    logger.info("Done")
Ejemplo n.º 15
0
def main():
    """Backfill kernel/execution-time/error columns from notebook logs.

    Iterates execution rows that have an ``nb_log_file``, parses each log
    for the kernel name, execution time, and timeout errors, and writes
    the values back to the execution table.
    """
    args = get_args()
    # Quote timestamps for use in an SQL IN (...) clause; skip empties.
    script_timestamps = [
        '"' + st.strip() + '"' for st in args.script_timestamps.split(",")
        if st
    ]
    db_name = args.db_name
    check_if_exists(db_name)

    _, script_ts_safe = get_utc_ts()
    logger_name = f'{os.path.basename(__file__)[:-3]}_at_{script_ts_safe}'.replace(
        "-", "_")
    logger = get_logger(logger_name)
    logger.info("Start")
    print(f"Logs are in {logger_name}.log")

    db = Database(db_name)
    # add new columns (idempotent: skip when they already exist)
    if "kernel_name" not in db[execution_table].columns_dict:
        db[execution_table].add_column("kernel_name", str)
    if "nb_execution_time" not in db[execution_table].columns_dict:
        db[execution_table].add_column("nb_execution_time", int)
    if "nb_error" not in db[execution_table].columns_dict:
        db[execution_table].add_column("nb_error", str)

    execution_count = 0
    if script_timestamps:
        executions = db[execution_table].rows_where(
            f'nb_log_file is not null AND '
            f'script_timestamp IN ({", ".join(script_timestamps)})')
    else:
        executions = db[execution_table].rows_where('nb_log_file is not null')
    # add values of new columns per each row
    for execution in executions:
        execution_count += 1
        nb_log_file = execution["nb_log_file"]
        # TODO parse also logs of notebooks detection: use repo folder, it is in form of "notebooks_<TS>.log"

        kernel_name = "404"       # sentinel: kernel line not found
        nb_execution_time = 404   # sentinel: execution-time line not found
        nb_error = "None"
        with open(nb_log_file, "r") as f:
            for line in f:
                line = line.rstrip()
                if kernel_name == "404" and "execute:" in line and "Executing notebook with kernel:" in line:
                    kernel_name = line.split(" ")[-1]
                elif nb_execution_time == 404 and "inrepo:" in line and "Execution time is" in line:
                    nb_execution_time = int(line.split(" ")[-1])
                # TODO what are other errors, how to detect them?
                elif nb_error == "None" and "execute:" in line and "Timeout waiting for execute reply" in line:
                    nb_error = "TimeoutError"
                    break
        # save new data into tables.
        # FIX: bind values as parameters instead of interpolating them into
        # the SQL — nb_log_file and log-derived strings could contain
        # quotes and break or inject into the statement; only the
        # code-controlled table name stays in the f-string.
        db.conn.execute(
            f"""UPDATE {execution_table}
                SET kernel_name=?, nb_execution_time=?, nb_error=?
                WHERE nb_log_file=?;""",
            (kernel_name, nb_execution_time, nb_error, nb_log_file))
        db.conn.commit()
        print(f'{execution_count}\r', end="")

    logger.info(f"{execution_count} executions")
    logger.info("Done")
Ejemplo n.º 16
0
 def test_exists(self):
     """ Should pass because we've added the address during setup """
     # FIX: Python 2 print statements are syntax errors under Python 3;
     # converted to print() calls.
     print(Address.query.filter_by(address="@test-address.com").first().address)
     assert utils.check_if_exists('test-address.com')
     print(Address.query.filter_by(address="@test-address.com").first().address)