def revoke_cert(crl_path, cert_to_revoke, ca_cert, ca_key, ssl_conf):
    """Revoke a certificate with the CA and regenerate the CRL.

    Runs ``openssl ca -revoke`` followed by ``openssl ca -gencrl``; returns
    True only when both commands succeed.
    """
    # rhel5 needs -md sha1; it complains about the 'default_md' option in
    # the openssl config otherwise.
    revoke_cmd = ("openssl ca -revoke {0} -keyfile {1} -cert {2} "
                  "-config {3} -md sha1").format(cert_to_revoke, ca_key, ca_cert, ssl_conf)
    gencrl_cmd = ("openssl ca -gencrl -keyfile {0} -cert {1} -out {2} "
                  "-config {3} -crlexts crl_ext -md sha1").format(ca_key, ca_cert, crl_path, ssl_conf)
    for cmd in (revoke_cmd, gencrl_cmd):
        if not run_command(cmd):
            return False
    return True
def update_httpd_config(server_key, server_cert, ca_cert, app_ssl_config_file):
    """Point the SSL directives in *app_ssl_config_file* at the given paths.

    Each path is rewritten in place with ``sed``; '/' in the paths is escaped
    so it survives inside the sed ``s/.../.../`` expression.

    Returns True when all three sed edits succeed, False on the first failure.
    """
    # Fix: use raw strings — "\/" in a plain literal is an invalid escape
    # sequence (SyntaxWarning on modern Python), even though it happens to
    # yield the same two characters.
    server_key = server_key.replace("/", r"\/")
    server_cert = server_cert.replace("/", r"\/")
    ca_cert = ca_cert.replace("/", r"\/")
    # Same rewrite for each directive, in the original order.
    for directive, value in (("SSLCertificateFile", server_cert),
                             ("SSLCertificateKeyFile", server_key),
                             ("SSLCACertificateFile", ca_cert)):
        cmd = "sed -i 's/^%s.*/%s %s/' %s" % (directive, directive, value, app_ssl_config_file)
        if not run_command(cmd):
            return False
    return True
def comment_out_ssl_settings(httpd_ssl_config_file="/etc/httpd/conf.d/ssl.conf"):
    """Comment out the SSL certificate directives in httpd's ssl.conf.

    Returns True when every sed edit succeeds, False on the first failure.
    """
    directives = (
        "SSLCertificateFile",
        "SSLCertificateKeyFile",
        "SSLCACertificateFile",
        "SSLCertificateChainFile",
    )
    for directive in directives:
        sed_cmd = "sed -i 's/^%s/#%s/' %s" % (directive, directive, httpd_ssl_config_file)
        if not run_command(sed_cmd):
            return False
    return True
def update_httpd_config(server_key, server_cert, ca_cert, httpd_ssl_confd="/etc/httpd/conf.d/ssl.conf"):
    """Rewrite httpd's ssl.conf to use the given server key and certificate.

    NOTE(review): the SSLCACertificateFile rewrite was already commented out
    in the original; it stays disabled here to preserve behavior, so the
    ca_cert argument is escaped but otherwise unused.

    Returns True when both sed edits succeed, False otherwise.
    """
    # Fix: raw strings — "\/" in a plain literal is an invalid escape
    # sequence (SyntaxWarning on modern Python).
    server_key = server_key.replace("/", r"\/")
    server_cert = server_cert.replace("/", r"\/")
    ca_cert = ca_cert.replace("/", r"\/")
    cmd = "sed -i 's/^SSLCertificateFile.*/SSLCertificateFile %s/' %s" % (server_cert, httpd_ssl_confd)
    if not run_command(cmd):
        return False
    cmd = "sed -i 's/^SSLCertificateKeyFile.*/SSLCertificateKeyFile %s/' %s" % (server_key, httpd_ssl_confd)
    if not run_command(cmd):
        return False
    #cmd = "sed -i 's/^SSLCACertificateFile.*/SSLCACertificateFile %s/' %s" % (ca_cert, httpd_ssl_confd)
    #if not run_command(cmd):
    #    return False
    return True
def create_server_cert(server_cert, server_csr, ca_cert, ca_key, ca_serial):
    """Sign *server_csr* with the CA, writing a 10950-day server cert."""
    check_dirs(server_cert)
    parts = [
        "openssl x509 -req -days 10950",
        "-CA %s -CAkey %s" % (ca_cert, ca_key),
        "-in %s -out %s -CAserial %s" % (server_csr, server_cert, ca_serial),
    ]
    # No serial file yet: ask openssl to create it on this first signing.
    if not os.path.exists(ca_serial):
        parts.append("-CAcreateserial")
    return run_command(" ".join(parts))
def create_server_csr(server_key, csr, hostname):
    """Generate a CSR for *hostname* from an existing server key."""
    subject = "/C=US/ST=NC/L=Raleigh/O=Red Hat/OU=Pulp/CN=%s" % hostname
    return run_command("openssl req -new -key %s -out %s -subj '%s'" % (server_key, csr, subject))
def check_modulus(key, cert):
    """Return True when the RSA *key* and *cert* share the same modulus."""
    def _modulus(cmd):
        # run_command returns a (state, stdout, stderr) tuple, or a falsy
        # value on failure; report the modulus text or "".
        result = run_command(cmd)
        if not result:
            return ""
        _state, out, _err = result
        return out

    cert_modulus = _modulus("openssl x509 -noout -modulus -in %s " % (cert))
    key_modulus = _modulus("openssl rsa -noout -modulus -in %s " % (key))
    return bool(cert_modulus and key_modulus and cert_modulus == key_modulus)
#!/usr/bin/env python
import sys

from base import get_parser, run_command, add_hostname_option

if __name__ == "__main__":
    # Fetch a repomd.xml over TLS with client-certificate auth to verify
    # that the configured CA/cert/key grant access to the pulp repo.
    parser = get_parser(limit_options=["ca_key", "ca_cert", "client_cert", "client_key"])
    parser = add_hostname_option(parser)
    opts, args = parser.parse_args()
    ca_cert = opts.ca_cert
    client_cert = opts.client_cert
    client_key = opts.client_key
    hostname = opts.hostname
    url = "https://%s/pulp/repos/repos/pulp/pulp/fedora-15/i386/repodata/repomd.xml" % (hostname)
    cmd = "curl --cacert %s --cert %s --key %s %s" % (ca_cert, client_cert, client_key, url)
    result = run_command(cmd)
    if result:
        state, out, err = result
        # Fix: the Python-2-only 'print "%s" % out' statement form is a
        # syntax error on Python 3; use the print() function instead.
        print("%s" % (out))
        print("%s" % (err))
def create_server_csr(server_key, csr, hostname):
    """Create a certificate signing request for *hostname* using *server_key*."""
    cmd = ("openssl req -new -key {0} -out {1} "
           "-subj '/C=US/ST=NC/L=Raleigh/O=Red Hat/OU=Pulp/CN={2}'").format(server_key, csr, hostname)
    return run_command(cmd)
def docker_images_all_list():
    """List every docker image (including intermediates) via sudo."""
    cmd = ["sudo", "docker", "images", "-a"]
    print("Docker images Command: ", cmd)
    base.run_command(cmd)
def helm3_7_registry_login(ip, user, password):
    """Log the helm3.7 client in to the OCI registry at *ip*."""
    login_cmd = ["helm3.7", "registry", "login", ip, "-u", user, "-p", password]
    base.run_command(login_cmd)
def helm3_7_push(file_path, ip, project_name):
    """Push a packaged chart to oci://<ip>/<project_name> with helm3.7."""
    oci_ref = "oci://{}/{}".format(ip, project_name)
    base.run_command(["helm3.7", "push", file_path, oci_ref])
def oras_login(harbor_server, user, password):
    """Log oras in to *harbor_server* and return the command result.

    Fix: the original assigned base.run_command's result to an unused
    local; return it so callers can inspect the outcome (consistent with
    helm_push elsewhere in this file).
    """
    ret = base.run_command([oras_cmd, "login", "-u", user, "-p", password, harbor_server])
    return ret
def copy_file(src, dst):
    """Copy *src* to *dst* with cp; True on success, False on failure."""
    return bool(run_command("cp %s %s" % (src, dst)))
def set_police_on_project(project_id, file):
    """Apply the IAM policy in *file* to the given GCP project."""
    cmd = ['gcloud', 'projects', 'set-iam-policy', project_id, file]
    base.run_command(cmd)
def make(packages):
    """Build macOS desktop-editor disk-image packages and their Sparkle updates.

    For each "diskimage-*" entry in *packages*: run the matching fastlane
    lane, stage the update zip and release notes (EN and RU), regenerate the
    Sparkle appcast, rewrite its links to the download CDN, and delete
    staged files belonging to other versions.
    """
    # NOTE(review): base_dir is computed but never used below.
    base_dir = base.get_script_dir() + "/../out"
    git_dir = base.get_script_dir() + "/../.."
    for package in packages:
        # Only diskimage-* targets are handled; anything else is skipped.
        if -1 != package.find("diskimage"):
            macos_dir = os.path.abspath(git_dir + "/desktop-apps/macos")
            update_dir = macos_dir + "/build/update"
            changes_dir = macos_dir + "/ONLYOFFICE/update/updates/ONLYOFFICE/changes"
            # Map the package name to a fastlane lane and Xcode scheme.
            if (package == "diskimage-x86_64"):
                lane = "release_x86_64"
                scheme = "ONLYOFFICE-x86_64"
            elif (package == "diskimage-v8-x86_64"):
                lane = "release_v8"
                scheme = "ONLYOFFICE-v8"
            elif (package == "diskimage-arm64"):
                lane = "release_arm"
                scheme = "ONLYOFFICE-arm"
            else:
                # Unknown diskimage flavor: abort the whole build.
                exit(1)
            print("Build package " + scheme)
            print("$ bundler exec fastlane " + lane + " skip_git_bump:true")
            base.cmd_in_dir(macos_dir, "bundler",
                            ["exec", "fastlane", lane, "skip_git_bump:true"])
            print("Build updates")
            # Read the app version out of the freshly built bundle.
            app_version = base.run_command(
                "/usr/libexec/PlistBuddy -c 'print :CFBundleShortVersionString' "
                + macos_dir + "/build/ONLYOFFICE.app/Contents/Info.plist")['stdout']
            zip_filename = scheme + "-" + app_version
            macos_zip = macos_dir + "/build/" + zip_filename + ".zip"
            update_storage_dir = base.get_env(
                "ARCHIVES_DIR") + "/" + scheme + "/_updates"
            # Stage previous update artifacts plus the new zip together so
            # generate_appcast can diff against older versions.
            base.create_dir(update_dir)
            base.copy_dir_content(update_storage_dir, update_dir, ".zip")
            base.copy_dir_content(update_storage_dir, update_dir, ".html")
            base.copy_file(macos_zip, update_dir)
            # English release notes (placeholder when none were written).
            notes_src = changes_dir + "/" + app_version + "/ReleaseNotes.html"
            notes_dst = update_dir + "/" + zip_filename + ".html"
            cur_date = base.run_command(
                "LC_ALL=en_US.UTF-8 date -u \"+%B %e, %Y\"")['stdout']
            if base.is_exist(notes_src):
                base.copy_file(notes_src, notes_dst)
                # Stamp today's date into the releasedate span.
                base.replaceInFileRE(
                    notes_dst,
                    r"(<span class=\"releasedate\">).+(</span>)",
                    "\\1 - " + cur_date + "\\2")
            else:
                base.writeFile(notes_dst, "placeholder\n")
            # Russian release notes, same procedure with a ru_RU date.
            notes_src = changes_dir + "/" + app_version + "/ReleaseNotesRU.html"
            notes_dst = update_dir + "/" + zip_filename + ".ru.html"
            # NOTE(review): in the collapsed source this format string was
            # split across two physical lines (extraction artifact); it is
            # reassembled here as a single-line literal.
            cur_date = base.run_command(
                "LC_ALL=ru_RU.UTF-8 date -u \"+%e %B %Y\"")['stdout']
            if base.is_exist(notes_src):
                base.copy_file(notes_src, notes_dst)
                base.replaceInFileRE(
                    notes_dst,
                    r"(<span class=\"releasedate\">).+(</span>)",
                    "\\1 - " + cur_date + "\\2")
            else:
                base.writeFile(notes_dst, "placeholder\n")
            print("$ ./generate_appcast " + update_dir)
            base.cmd(macos_dir + "/Vendor/Sparkle/bin/generate_appcast", [update_dir])
            print("Edit Sparkle appcast links")
            # Rewrite appcast links to the public download location for
            # this package flavor.
            sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
            if (package == "diskimage-x86_64"):
                sparkle_base_url += "/x86_64"
            elif (package == "diskimage-v8-x86_64"):
                sparkle_base_url += "/v8"
            elif (package == "diskimage-arm64"):
                sparkle_base_url += "/arm"
            base.replaceInFileRE(
                update_dir + "/onlyoffice.xml",
                r"(<sparkle:releaseNotesLink>)(?:.+ONLYOFFICE-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
                "\\1" + sparkle_base_url + "/updates/changes/\\2/ReleaseNotes.html\\3")
            base.replaceInFileRE(
                update_dir + "/onlyoffice.xml",
                r"(<sparkle:releaseNotesLink xml:lang=\"ru\">)(?:ONLYOFFICE-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
                "\\1" + sparkle_base_url + "/updates/changes/\\2/ReleaseNotesRU.html\\3")
            base.replaceInFileRE(
                update_dir + "/onlyoffice.xml",
                r"(url=\")(?:.+/)(ONLYOFFICE.+\")",
                "\\1" + sparkle_base_url + "/updates/\\2")
            print("Delete unnecessary files")
            # Keep only zips/notes belonging to the version just built.
            for file in os.listdir(update_dir):
                if (-1 == file.find(app_version)) and \
                        (file.endswith(".zip") or file.endswith(".html")):
                    base.delete_file(update_dir + "/" + file)
    return
def set_police_on_organization(organization_id, file):
    """Apply the IAM policy in *file* to the given GCP organization."""
    cmd = ['gcloud', 'organizations', 'set-iam-policy', organization_id, file]
    base.run_command(cmd)
def sync_test_repo(repo_id):
    """Kick off a forced sync of the given pulp repo.

    Fix: the original interpolated repo["id"] from an undefined name
    'repo' (NameError at call time); the repo_id parameter is what was
    intended.
    """
    cmd = "sudo pulp-admin repo sync --id %s -F" % (repo_id)
    return run_command(cmd)
def create_test_repo(repo_id, repo_feed, ca_cert, client_cert, client_key):
    """Create a pulp repo wired with consumer CA/cert/key for auth testing."""
    cmd = ("sudo pulp-admin repo create --id {0} --feed {1} "
           "--consumer_ca {2} --consumer_cert {3} --consumer_key {4}").format(
               repo_id, repo_feed, ca_cert, client_cert, client_key)
    return run_command(cmd)
def create_ca_key(ca_key_name):
    """Generate a 2048-bit RSA CA key at *ca_key_name*."""
    check_dirs(ca_key_name)
    return run_command("openssl genrsa -out %s 2048" % ca_key_name)
def docker_images_all_list():
    """List all docker images, including intermediate layers."""
    base.run_command(["docker", "images", "-a"])
def install_deps():
    """Install build dependencies (apt packages, Node.js, npm tools, a JDK).

    Idempotent via a './packages_complete' marker file: once everything has
    been installed, the marker short-circuits subsequent calls.
    """
    if base.is_file("./packages_complete"):
        return

    # dependencies
    # NOTE(review): "libglu1-mesa-dev" appears twice in this list (harmless
    # to apt, but worth deduplicating).
    packages = [
        "apt-transport-https",
        "autoconf2.13",
        "build-essential",
        "ca-certificates",
        "cmake",
        "curl",
        "git",
        "glib-2.0-dev",
        "libglu1-mesa-dev",
        "libgtk-3-dev",
        "libpulse-dev",
        "libtool",
        "p7zip-full",
        "subversion",
        "gzip",
        "libasound2-dev",
        "libatspi2.0-dev",
        "libcups2-dev",
        "libdbus-1-dev",
        "libicu-dev",
        "libglu1-mesa-dev",
        "libgstreamer1.0-dev",
        "libgstreamer-plugins-base1.0-dev",
        "libx11-xcb-dev",
        "libxcb*",
        "libxi-dev",
        "libxrender-dev",
        "libxss1",
        "libncurses5"
    ]
    base.cmd("sudo", ["apt-get", "install", "-y"] + packages)

    # nodejs
    base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
    nodejs_cur = 0
    try:
        # Parse "vMAJOR.MINOR.PATCH" into a comparable MAJOR*1000+MINOR int.
        nodejs_version = base.run_command('node -v')['stdout']
        nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
        nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
        nodejs_cur = nodejs_cur_version_major * 1000 + nodejs_cur_version_minor
        print("Installed Node.js version: " + str(nodejs_cur_version_major) + "."
              + str(nodejs_cur_version_minor))
    except:
        # No usable node binary: force the reinstall branch below.
        nodejs_cur = 1
    if (nodejs_cur < 10020):
        print("Node.js version cannot be less 10.20")
        print("Reinstall")
        if (base.is_dir("./node_js_setup_10.x")):
            base.delete_dir("./node_js_setup_10.x")
        # Replace the distro nodejs with the NodeSource 10.x build.
        base.cmd("sudo", ["apt-get", "remove", "--purge", "-y", "nodejs"])
        base.download("https://deb.nodesource.com/setup_10.x", "./node_js_setup_10.x")
        base.cmd(
            'curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -'
        )
        base.cmd("sudo", ["bash", "./node_js_setup_10.x"])
        base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
        base.cmd("sudo", ["npm", "install", "-g", "npm@6"])
    else:
        print("OK")
    base.cmd("sudo", ["apt-get", "-y", "install", "npm", "yarn"], True)
    base.cmd("sudo", ["npm", "install", "-g", "grunt-cli"])
    base.cmd("sudo", ["npm", "install", "-g", "pkg"])

    # java
    # Prefer JDK 11; fall back to JDK 8, adding the openjdk-r PPA when the
    # distro repos carry neither.
    java_error = base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-11-jdk"], True)
    if (0 != java_error):
        java_error = base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-8-jdk"], True)
        if (0 != java_error):
            base.cmd("sudo", ["apt-get", "-y", "install", "software-properties-common"])
            base.cmd("sudo", ["add-apt-repository", "-y", "ppa:openjdk-r/ppa"])
            base.cmd("sudo", ["apt-get", "update"])
            base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-8-jdk"])
        # NOTE(review): indentation reconstructed from a collapsed source —
        # these selections appear to run whenever the JDK 8 fallback path
        # was taken; confirm against the original file.
        base.cmd("sudo", ["update-alternatives", "--config", "java"])
        base.cmd("sudo", ["update-alternatives", "--config", "javac"])

    # Drop the marker so the next call is a no-op.
    base.writeFile("./packages_complete", "complete")
    return
def docker_load_image(image):
    """Load a docker image from the tar archive at *image*."""
    base.run_command(["docker", "load", "-i", image])
def create_client_csr(client_key, csr):
    """Create a client CSR with the fixed Pulp content-cert subject."""
    subject = "/C=US/ST=NC/L=Raleigh/O=Red Hat/OU=Pulp/CN=Pulp_Content_Cert"
    return run_command("openssl req -new -key %s -out %s -subj '%s'" % (client_key, csr, subject))
def main_connector(args):
    """Provision the connector project: buckets, cloud functions, GAE, notifications.

    Each phase is guarded by an existence check, so re-running the command
    skips work that has already been done.
    """
    # validate need to choose translation mapper
    if not args.quiet:
        mapper_file = choose_translation_mapper()
        update_mapper_file_org_id(args.organization_id, mapper_file)
    connector_project_id = args.connector_project

    print('connector - partner bucket creation.')
    partner_bucket_name = args.connector_bucket
    partner_bucket_status = bucket_status(partner_bucket_name)
    if "NotFound" == partner_bucket_status:
        # Create the partner bucket via a Deployment Manager template; the
        # deployment name gets a UTC timestamp suffix to stay unique.
        bucket_template = os.path.join(helpers.BASE_DIR, 'connector', 'dm', 'bucket.py')
        cmd = [
            'gcloud', 'deployment-manager', 'deployments', 'create', '-'.join([
                'bucket-for-partner',
                datetime.utcnow().strftime('%Y%m%d%H%M%S')
            ]), '--template', bucket_template, '--properties', ",".join([
                'region:' + scape_to_os(args.region),
                'bucketname:' + partner_bucket_name,
            ]), '--project', connector_project_id
        ]
        run_command(cmd)

    print('connector - cloud function bucket creation.')
    cf_bucket_name = args.cf_bucket
    cf_bucket_status = bucket_status(cf_bucket_name)
    if "NotFound" == cf_bucket_status:
        # Same template, different deployment name/bucket for the CF code.
        bucket_template = os.path.join(helpers.BASE_DIR, 'connector', 'dm', 'bucket.py')
        cmd = [
            'gcloud', 'deployment-manager', 'deployments', 'create', '-'.join(
                ['bucket-for-cf',
                 datetime.utcnow().strftime('%Y%m%d%H%M%S')]), '--template',
            bucket_template, '--properties', ",".join([
                'region:' + scape_to_os(args.region),
                'bucketname:' + cf_bucket_name,
            ]), '--project', connector_project_id
        ]
        run_command(cmd)

    print('connector - connector application creation.')
    infra_dm_name = 'infra-for-partner'
    if not deployment_exists(connector_project_id, infra_dm_name):
        # Package each cloud function and stage the zips in the CF bucket
        # before deploying the infra template that references them.
        zip_and_store_cf('forwardfilelink', 'forwardfilelink.zip',
                         'gs://' + cf_bucket_name)
        zip_and_store_cf('flushbuffer', 'flushbuffer.zip',
                         'gs://' + cf_bucket_name)
        zip_and_store_cf('configuration', 'configuration.zip',
                         'gs://' + cf_bucket_name)
        zip_and_store_cf('translation', 'translation.zip',
                         'gs://' + cf_bucket_name,
                         translation_sa=args.connector_sa_file)
        zip_and_store_cf('cleanup', 'cleanup.zip', 'gs://' + cf_bucket_name)
        infra_template = os.path.join(helpers.BASE_DIR, 'connector', 'dm',
                                      'writeFindingsConnectorInfra.py')
        cmd = [
            'gcloud', 'deployment-manager', 'deployments', 'create',
            infra_dm_name, '--template', infra_template, '--properties',
            ",".join([
                'region:' + scape_to_os(args.region),
                'cfbucket:' + cf_bucket_name,
            ]), '--project', connector_project_id
        ]
        run_command(cmd)

    if not gae_exists(connector_project_id):
        print('Enable Google App Engine.')
        cmd = [
            'gcloud', 'app', 'create', '--region', args.gae_region,
            '--project', connector_project_id
        ]
        run_command(cmd)
    if not gae_service_exist(connector_project_id, 'default'):
        # A default GAE service must exist for Datastore to be usable.
        print('Deploy blank GAE app to activate Datastore.')
        cmd = [
            'gcloud', 'app', 'deploy',
            os.path.join(helpers.BASE_DIR, 'connector', 'gae_app', 'app.yaml'),
            '--quiet', '--project', connector_project_id
        ]
        run_command(cmd)

    print('connector - connector application turn on bucket notifications.')
    if not bucket_notification_exists(partner_bucket_name):
        # New objects in the partner bucket publish to the forwardfilelink
        # Pub/Sub topic.
        cmd = [
            'gsutil', 'notification', 'create', '-e', 'OBJECT_FINALIZE', '-t',
            'projects/' + connector_project_id + '/topics/forwardfilelink',
            '-f', 'json', 'gs://' + partner_bucket_name
        ]
        run_command(cmd)
def helm3_7_package(file_path):
    """Package the chart directory at *file_path* with helm3.7."""
    base.run_command(["helm3.7", "package", file_path])
def singularity_push_to_harbor(harbor_server, sif_file, project, image, tag):
    """Push a SIF image to harbor via singularity's ORAS transport.

    Fix: the command result was stored in an unused local; return it so
    callers can check the push outcome.
    """
    oras_ref = "oras://" + harbor_server + "/" + project + "/" + image + ":" + tag
    ret = base.run_command([singularity_cmd, "push", sif_file, oras_ref])
    return ret
def create_client_key(client_key):
    """Generate a 2048-bit RSA client key at *client_key*."""
    return run_command("openssl genrsa -out %s 2048" % client_key)
def singularity_pull(out_file, from_sif_file):
    """Pull *from_sif_file* to *out_file* with singularity.

    Fix: return base.run_command's result instead of dropping it in an
    unused local.
    """
    ret = base.run_command([singularity_cmd, "pull", out_file, from_sif_file])
    return ret
def docker_info_display():
    """Run 'docker info' with an OSType/Architecture format and print the result.

    Fix: the Python-2-only 'print x, y' statements are a syntax error on
    Python 3; use the print() function (as the rest of this file does).
    """
    command = ["docker", "info", "-f", "'{{.OSType}}/{{.Architecture}}'"]
    print("Docker Info: ", command)
    ret = base.run_command(command)
    print("Command return: ", ret)
def helm_login(harbor_server, user, password):
    """Log helm3 in to *harbor_server* with OCI support enabled."""
    # helm3 requires this env flag for its OCI registry commands.
    os.putenv("HELM_EXPERIMENTAL_OCI", "1")
    login_cmd = ["helm3", "registry", "login", harbor_server, "-u", user, "-p", password]
    ret = base.run_command(login_cmd)
    print("Command return: ", ret)
def create_server_key(server_key):
    """Generate a 2048-bit RSA server key at *server_key*."""
    return run_command("openssl genrsa -out %s 2048" % server_key)
def helm_save(chart_archive, harbor_server, project, repo_name):
    """Save *chart_archive* into helm3's local chart cache under harbor's ref."""
    chart_ref = harbor_server + "/" + project + "/" + repo_name
    base.run_command(["helm3", "chart", "save", chart_archive, chart_ref])
def create_server_cert(server_cert, server_csr, ca_cert, ca_key, ca_serial):
    """Sign *server_csr* with the CA key/cert, writing *server_cert*."""
    cmd = ("openssl x509 -req -days 10950 -CA {0} -CAkey {1} "
           "-in {2} -out {3} -CAserial {4}").format(
               ca_cert, ca_key, server_csr, server_cert, ca_serial)
    # First signing with this CA: have openssl create the serial file.
    if not os.path.exists(ca_serial):
        cmd += " -CAcreateserial"
    return run_command(cmd)
def enable_repo_auth(repo_auth_config="/etc/pulp/repo_auth.conf"):
    """Flip 'enabled: false' to 'enabled: true' in pulp's repo_auth config."""
    return run_command("sed -i 's/enabled: false/enabled: true/' %s" % repo_auth_config)
def test_02_SystemlevelRobotAccount(self):
    """
    Test case:
        Robot Account
    Test step and expected result:
        1. Define a number of access lists;
        2. Create the same number of private projects;
        3. Create a system robot account has permission for those projects;
        4. Verify the system robot account has the corresponding rights;
        5. Disable the system robot account;
        6. Verify the system robot account has no the corresponding rights;
        7. Enable the system robot account;
        8. Verify the system robot account has the corresponding rights;
        9. Refresh secret for the system robot account;
        10. Verify the system robot account has no the corresponding right with the old secret already;
        11. Verify the system robot account still has the corresponding right with the new secret;
        12. List system robot account, then add a new project to the system robot account project permission list;
        13. Delete this project;
        14. List system robot account successfully;
        15. Delete the system robot account;
        16. Verify the system robot account has no the corresponding right;
        17. Add a system robot account with all project coverd;
        18. Verify the system robot account has no the corresponding right;
    """
    #1. Define a number of access lists;
    # Test charts are fetched up front so the helm-chart steps below have
    # something to upload.
    CHART_FILE_LIST = [
        dict(name='prometheus', version='7.0.2'),
        dict(name='harbor', version='0.2.0')
    ]
    for i in range(2):
        base.run_command([
            "curl", r"-o", "./tests/apitests/python/{}-{}.tgz".format(
                CHART_FILE_LIST[i]["name"], CHART_FILE_LIST[i]["version"]),
            "https://storage.googleapis.com/harbor-builds/helm-chart-test-files/{}-{}.tgz"
            .format(CHART_FILE_LIST[i]["name"], CHART_FILE_LIST[i]["version"])
        ])

    #Make sure that whether 'True' or 'False' must be included in each line or row.
    # Each row drives one project; each column toggles one permission, in
    # the order exercised in step 4 below (pull, push, artifact delete,
    # chart read, chart create, chart delete, tag create, tag delete,
    # label create, scan create).
    check_list = [
        [True, True, True, True, True, True, False, True, False, True],
        [False, False, False, False, True, True, False, True, True, False],
        [True, False, True, False, True, False, True, False, True, True],
        [False, False, False, True, False, True, False, True, True, False]
    ]
    access_list_list = []
    for i in range(len(check_list)):
        access_list_list.append(
            self.robot.create_access_list(check_list[i]))

    #2. Create the same number of private projects;
    robot_account_Permissions_list = []
    project_access_list = []
    for i in range(len(check_list)):
        with created_user(TestRobotAccount.user_ra_password, _teardown=False) as (user_id, username):
            with created_project(metadata={"public": "false"}, user_id=user_id, _teardown=False) as (project_id, project_name):
                project_access_list.append(
                    dict(project_name=project_name,
                         project_id=project_id,
                         check_list=check_list[i]))
                robot_account_Permissions = v2_swagger_client.Permission(
                    kind="project",
                    namespace=project_name,
                    access=access_list_list[i])
                robot_account_Permissions_list.append(
                    robot_account_Permissions)

    #3. Create a system robot account has permission for those projects;
    system_robot_account_id, system_robot_account = self.robot.create_system_robot(
        robot_account_Permissions_list, 300)
    print("system_robot_account:", system_robot_account)
    SYSTEM_RA_CLIENT = dict(endpoint=TestRobotAccount.url,
                            username=system_robot_account.name,
                            password=system_robot_account.secret)
    SYSTEM_RA_CHART_CLIENT = dict(endpoint=CHART_API_CLIENT["endpoint"],
                                  username=SYSTEM_RA_CLIENT["username"],
                                  password=SYSTEM_RA_CLIENT["password"])

    #4. Verify the system robot account has the corresponding rights;
    # For every project/permission pair: expect success when the flag is
    # True and the documented failure (403 / error message) when False.
    for project_access in project_access_list:
        print(r"project_access:", project_access)
        if project_access["check_list"][1]:  #---repository:push---
            repo = push_self_build_image_to_project(
                project_access["project_name"], harbor_server,
                SYSTEM_RA_CLIENT["username"], SYSTEM_RA_CLIENT["password"],
                "test_pushable", "v6.8.1")
        else:
            push_self_build_image_to_project(
                project_access["project_name"], harbor_server,
                SYSTEM_RA_CLIENT["username"], SYSTEM_RA_CLIENT["password"],
                "test_unpushable", "v6.8.1",
                expected_error_message="unauthorized to access repository")

        # Push an artifact as admin so pull/delete checks have a target.
        tag_for_del = "v1.0.0"
        repo_name, tag = push_self_build_image_to_project(
            project_access["project_name"], harbor_server,
            ADMIN_CLIENT["username"], ADMIN_CLIENT["password"],
            "test_del_artifact", tag_for_del)
        if project_access["check_list"][0]:  #---repository:pull---
            pull_harbor_image(harbor_server, SYSTEM_RA_CLIENT["username"],
                              SYSTEM_RA_CLIENT["password"], repo_name,
                              tag_for_del)
        else:
            pull_harbor_image(
                harbor_server, SYSTEM_RA_CLIENT["username"],
                SYSTEM_RA_CLIENT["password"], repo_name, tag_for_del,
                expected_error_message=
                "action: pull: unauthorized to access repository")

        if project_access["check_list"][2]:  #---artifact:delete---
            self.artifact.delete_artifact(project_access["project_name"],
                                          repo_name.split('/')[1],
                                          tag_for_del, **SYSTEM_RA_CLIENT)
        else:
            self.artifact.delete_artifact(project_access["project_name"],
                                          repo_name.split('/')[1],
                                          tag_for_del,
                                          expect_status_code=403,
                                          **SYSTEM_RA_CLIENT)

        #Prepare for chart read and delete
        self.chart.upload_chart(
            project_access["project_name"],
            r'./tests/apitests/python/{}-{}.tgz'.format(
                CHART_FILE_LIST[1]["name"], CHART_FILE_LIST[1]["version"]),
            **CHART_API_CLIENT)
        if project_access["check_list"][3]:  #---helm-chart:read---
            library.helm.helm2_fetch_chart_file(
                "chart_repo_" + base._random_name("repo"), harbor_url,
                project_access["project_name"],
                SYSTEM_RA_CLIENT["username"], SYSTEM_RA_CLIENT["password"],
                CHART_FILE_LIST[1]["name"])
        else:
            library.helm.helm2_fetch_chart_file(
                "chart_repo_" + base._random_name("repo"), harbor_url,
                project_access["project_name"],
                SYSTEM_RA_CLIENT["username"], SYSTEM_RA_CLIENT["password"],
                CHART_FILE_LIST[1]["name"],
                expected_add_repo_error_message="403 Forbidden")

        if project_access["check_list"][4]:  #---helm-chart-version:create---
            self.chart.upload_chart(
                project_access["project_name"],
                r'./tests/apitests/python/{}-{}.tgz'.format(
                    CHART_FILE_LIST[0]["name"],
                    CHART_FILE_LIST[0]["version"]),
                **SYSTEM_RA_CHART_CLIENT)
        else:
            self.chart.upload_chart(
                project_access["project_name"],
                r'./tests/apitests/python/{}-{}.tgz'.format(
                    CHART_FILE_LIST[0]["name"],
                    CHART_FILE_LIST[0]["version"]),
                expect_status_code=403,
                **SYSTEM_RA_CHART_CLIENT)

        if project_access["check_list"][5]:  #---helm-chart-version:delete---
            self.chart.delete_chart_with_version(
                project_access["project_name"], CHART_FILE_LIST[1]["name"],
                CHART_FILE_LIST[1]["version"], **SYSTEM_RA_CHART_CLIENT)
        else:
            self.chart.delete_chart_with_version(
                project_access["project_name"], CHART_FILE_LIST[1]["name"],
                CHART_FILE_LIST[1]["version"],
                expect_status_code=403,
                **SYSTEM_RA_CHART_CLIENT)

        repo_name, tag = push_self_build_image_to_project(
            project_access["project_name"], harbor_server,
            ADMIN_CLIENT["username"], ADMIN_CLIENT["password"],
            "test_create_tag", "latest_1")
        self.artifact.create_tag(project_access["project_name"],
                                 repo_name.split('/')[1], tag, "for_delete",
                                 **ADMIN_CLIENT)
        if project_access["check_list"][6]:  #---tag:create---
            self.artifact.create_tag(project_access["project_name"],
                                     repo_name.split('/')[1], tag, "1.0",
                                     **SYSTEM_RA_CLIENT)
        else:
            self.artifact.create_tag(project_access["project_name"],
                                     repo_name.split('/')[1], tag, "1.0",
                                     expect_status_code=403,
                                     **SYSTEM_RA_CLIENT)

        if project_access["check_list"][7]:  #---tag:delete---
            self.artifact.delete_tag(project_access["project_name"],
                                     repo_name.split('/')[1], tag,
                                     "for_delete", **SYSTEM_RA_CLIENT)
        else:
            self.artifact.delete_tag(project_access["project_name"],
                                     repo_name.split('/')[1], tag,
                                     "for_delete",
                                     expect_status_code=403,
                                     **SYSTEM_RA_CLIENT)

        repo_name, tag = push_self_build_image_to_project(
            project_access["project_name"], harbor_server,
            ADMIN_CLIENT["username"], ADMIN_CLIENT["password"],
            "test_create_artifact_label", "latest_1")
        #Add project level label to artifact
        label_id, _ = self.label.create_label(
            project_id=project_access["project_id"], scope="p",
            **ADMIN_CLIENT)
        if project_access["check_list"][8]:  #---artifact-label:create---
            self.artifact.add_label_to_reference(
                project_access["project_name"],
                repo_name.split('/')[1], tag, int(label_id),
                **SYSTEM_RA_CLIENT)
        else:
            self.artifact.add_label_to_reference(
                project_access["project_name"],
                repo_name.split('/')[1], tag, int(label_id),
                expect_status_code=403,
                **SYSTEM_RA_CLIENT)

        if project_access["check_list"][9]:  #---scan:create---
            self.scan.scan_artifact(project_access["project_name"],
                                    repo_name.split('/')[1], tag,
                                    **SYSTEM_RA_CLIENT)
        else:
            self.scan.scan_artifact(project_access["project_name"],
                                    repo_name.split('/')[1], tag,
                                    expect_status_code=403,
                                    **SYSTEM_RA_CLIENT)

    #5. Disable the system robot account;
    self.robot.update_system_robot_account(system_robot_account_id,
                                           system_robot_account.name,
                                           robot_account_Permissions_list,
                                           disable=True, **ADMIN_CLIENT)
    #6. Verify the system robot account has no the corresponding rights;
    self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
    #7. Enable the system robot account;
    self.robot.update_system_robot_account(system_robot_account_id,
                                           system_robot_account.name,
                                           robot_account_Permissions_list,
                                           disable=False, **ADMIN_CLIENT)
    #8. Verify the system robot account has the corresponding rights;
    self.verify_repository_pushable(project_access_list, SYSTEM_RA_CLIENT)
    #9. Refresh secret for the system robot account;
    new_secret = "new_secret_At_321"
    self.robot.refresh_robot_account_secret(system_robot_account_id,
                                            new_secret, **ADMIN_CLIENT)
    #10. Verify the system robot account has no the corresponding right with the old secret already;
    self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
    #11. Verify the system robot account still has the corresponding right with the new secret;
    SYSTEM_RA_CLIENT["password"] = new_secret
    self.verify_repository_pushable(project_access_list, SYSTEM_RA_CLIENT)
    #12. List system robot account, then add a new project to the system robot account project permission list;
    self.robot.list_robot(**ADMIN_CLIENT)
    project_for_del_id, project_for_del_name = self.project.create_project(
        metadata={"public": "true"}, **ADMIN_CLIENT)
    robot_account_Permissions = v2_swagger_client.Permission(
        kind="project", namespace=project_for_del_name,
        access=access_list_list[0])
    robot_account_Permissions_list.append(robot_account_Permissions)
    self.robot.update_system_robot_account(system_robot_account_id,
                                           system_robot_account.name,
                                           robot_account_Permissions_list,
                                           **ADMIN_CLIENT)
    self.robot.list_robot(**ADMIN_CLIENT)
    #13. Delete this project;
    self.project.delete_project(project_for_del_id, **ADMIN_CLIENT)
    #14. List system robot account successfully;
    self.robot.list_robot(**ADMIN_CLIENT)
    #15. Delete the system robot account;
    self.robot.delete_robot_account(system_robot_account_id, **ADMIN_CLIENT)
    #16. Verify the system robot account has no the corresponding right;
    self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
    #17. Add a system robot account with all project coverd;
    all_true_access_list = self.robot.create_access_list([True] * 10)
    robot_account_Permissions_list = []
    robot_account_Permissions = v2_swagger_client.Permission(
        kind="project", namespace="*", access=all_true_access_list)
    robot_account_Permissions_list.append(robot_account_Permissions)
    _, system_robot_account_cover_all = self.robot.create_system_robot(
        robot_account_Permissions_list, 300)
    #18. Verify the system robot account has no the corresponding right;
    print("system_robot_account_cover_all:", system_robot_account_cover_all)
    SYSTEM_RA_CLIENT_COVER_ALL = dict(
        endpoint=TestRobotAccount.url,
        username=system_robot_account_cover_all.name,
        password=system_robot_account_cover_all.secret)
    projects = self.project.get_projects(dict(), **ADMIN_CLIENT)
    print("All projects:", projects)
    project_access_list = []
    for i in range(len(projects)):
        project_access_list.append(
            dict(project_name=projects[i].name,
                 project_id=projects[i].project_id,
                 check_list=all_true_access_list))
    self.verify_repository_pushable(project_access_list,
                                    SYSTEM_RA_CLIENT_COVER_ALL)
def create_server_csr(server_key, csr, hostname):
    """Create a CSR for *hostname* (Splice OU variant)."""
    check_dirs(csr)
    subject = "/C=US/ST=NC/L=Raleigh/O=Red Hat/OU=Splice/CN=%s" % hostname
    return run_command("openssl req -new -key %s -out %s -subj '%s'" % (server_key, csr, subject))
def create_ca_cert(ca_key_name, ca_cert_name):
    """Self-sign a 10950-day root CA certificate from *ca_key_name*."""
    check_dirs(ca_key_name)
    subject = "/C=US/ST=NC/L=Raleigh/O=Red Hat/OU=Pulp/CN=Pulp-Root-CA"
    cmd = "openssl req -new -x509 -days 10950 -key %s -out %s -subj '%s'" % (
        ca_key_name, ca_cert_name, subject)
    return run_command(cmd)
def helm2_push(helm_repo_name, chart_file, project, username, password):
    """Push *chart_file* to a helm2 chartmuseum repo via 'helm2 cm-push'.

    Fix: the credential arguments in the original were mangled/redacted
    ('"--username="******'), which is not valid Python. Reconstructed the
    intended '--username=<user>' / '--password=<password>' pair —
    TODO confirm against the original source.
    NOTE(review): the 'project' parameter is not used by the command; kept
    for interface compatibility.
    """
    get_chart_file(chart_file)
    command = ["helm2", "cm-push",
               "--username=" + username,
               "--password=" + password,
               chart_file.split('/')[-1],
               helm_repo_name]
    base.run_command(command)
def restart_httpd():
    """Restart apache via the SysV service script."""
    return run_command("/sbin/service httpd restart")
def helm2_repo_update():
    """Refresh helm2's local cache of chart repository indexes."""
    base.run_command(["helm2", "repo", "update"])
def helm_push(harbor_server, project, repo_name, version):
    """Push the cached chart <project>/<repo_name>:<version> to harbor.

    Returns the base.run_command result for the caller to inspect.
    """
    chart_ref = harbor_server + "/" + project + "/" + repo_name + ":" + version
    return base.run_command(["helm3", "chart", "push", chart_ref])
def create_client_cert(client_cert, client_csr, ca_cert, ca_key, extensions, ent_name, ca_serial):
    """Sign a client CSR with the CA, applying the *ent_name* extension section."""
    cmd = ("openssl x509 -req -days 10950 -CA {0} -CAkey {1} -extfile {2} "
           "-extensions {3} -in {4} -out {5} -CAserial {6}").format(
               ca_cert, ca_key, extensions, ent_name, client_csr, client_cert, ca_serial)
    # Create the serial file when signing with this CA for the first time.
    if not os.path.exists(ca_serial):
        cmd += " -CAcreateserial"
    return run_command(cmd)
def docker_manifest_create(index, manifests):
    """Create the docker manifest list *index* from the given manifests.

    Fix: the Python-2-only 'print x, y' statement is a syntax error on
    Python 3; use print() as the rest of this file does.
    """
    command = ["sudo", "docker", "manifest", "create", index]
    command.extend(manifests)
    print("Docker Manifest Command: ", command)
    base.run_command(command)
def helm2_add_repo(helm_repo_name, harbor_url, project, username, password, expected_error_message = None):
    """Register <harbor_url>/chartrepo/<project> as a helm2 repo named *helm_repo_name*.

    Fix: the original command list contained redacted '******' fragments,
    which is not valid Python. Reconstructed per 'helm repo add NAME URL'
    with '--username'/'--password' flags — TODO confirm against the
    original source. The command result is returned instead of being
    dropped in an unused local.
    """
    command = ["helm2", "repo", "add",
               "--username=" + username,
               "--password=" + password,
               helm_repo_name,
               harbor_url + "/chartrepo/" + project]
    ret = base.run_command(command, expected_error_message = expected_error_message)
    return ret