def setUpClass(cls):
    """Build the shared TFC client and fixtures; create a test org if needed.

    Skips the test when required TFC entitlements are missing, or when an
    admin endpoint is exercised against the Terraform Cloud SaaS.
    """
    # Bug fix: in a classmethod `cls` is already the class, so
    # `cls.__class__.__name__` was the metaclass name ("type"). Use
    # `cls.__name__` so the logger is named after the test class.
    cls._logger = logging.getLogger(cls.__name__)
    cls._logger.setLevel(logging.INFO)

    cls._tfc_url = TFC_URL
    cls._api = TFC(
        TFC_TOKEN, url=TFC_URL, verify=SSL_VERIFY, log_level=API_LOG_LEVEL)

    cls._test_username = TEST_USERNAME
    cls._test_email = TEST_EMAIL
    cls._test_team_name = TEST_TEAM_NAME
    cls._test_api_token = TFC_TOKEN
    cls._test_password = TEST_PASSWORD

    # Paths to test fixtures (uploads) and scratch targets (downloads).
    cls._test_state_path = "./test/testdata/terraform/terrasnek_unittest.tfstate"
    cls._config_version_upload_tarball_path = \
        "./test/testdata/terraform/terrasnek_unittest_config_version.tar.gz"
    cls._policy_set_upload_tarball_path = \
        "./test/testdata/sentinel/terrasnek_unittest_sentinel.tar.gz"
    cls._plan_export_tarball_target_path = \
        "/tmp/terrasnek_unittest_plan_export.tar.gz"
    cls._plan_json_tarball_target_path = \
        "/tmp/terrasnek_unittest_plan_json.tar.gz"
    cls._module_version_source_tarball_target_path = \
        "/tmp/terrasnek_unittest_module_version_export.tar.gz"
    cls._module_latest_source_tarball_target_path = \
        "/tmp/terrasnek_unittest_module_latest_export.tar.gz"

    # If a test org is specified, use the specified org, otherwise create
    # a new one to run the testing in.
    if TEST_ORG_NAME:
        cls._test_org_name = TEST_ORG_NAME
    else:
        cls._test_org_name = cls._random_name()
        org_create_payload = {
            "data": {
                "type": "organizations",
                "attributes": {
                    "name": cls._test_org_name,
                    "email": cls._test_email
                }
            }
        }
        cls._test_org = cls._api.orgs.create(org_create_payload)
    cls._api.set_org(cls._test_org_name)

    # Check to see if this test can be run with the current entitlements.
    missing_entitlements = cls._get_missing_entitlements(
        cls._endpoint_being_tested)
    if missing_entitlements:
        raise unittest.SkipTest(
            "Missing required Terraform Cloud Entitlments for test",
            cls._unittest_name, missing_entitlements)

    # Admin endpoints only exist on Terraform Enterprise installs.
    if TFC_SAAS_HOSTNAME in TFC_URL and "admin" in cls._endpoint_being_tested:
        raise unittest.SkipTest(
            "Skipping Admin Test since we're testing against Terraform Cloud.")
def setUpClass(cls):
    """Build the shared TFC API client and common test fixture paths."""
    # Bug fix: `cls` is the class itself in a classmethod, so
    # `cls.__class__.__name__` was the metaclass name ("type"); use
    # `cls.__name__` to name the logger after the test class.
    cls._logger = logging.getLogger(cls.__name__)
    cls._logger.setLevel(logging.INFO)

    cls._api = TFC(TFC_TOKEN)

    cls._test_username = TEST_USERNAME
    cls._test_email = TEST_EMAIL
    cls._test_team_name = TEST_TEAM_NAME
    cls._test_org_name = TEST_ORG_NAME

    # TODO: make these env vars?
    cls._test_state_path = "./test/testdata/terraform/terrasnek_unittest.tfstate"
    cls._config_version_upload_tarball_path = \
        "./test/testdata/terraform/terrasnek_unittest_config_version.tar.gz"
    cls._plan_export_tarball_target_path = "/tmp/terrasnek_unittest.tar.gz"

    cls._api.set_organization(cls._test_org_name)
import socket from flask import Flask, send_from_directory, jsonify from flask_cors import CORS from terrasnek.api import TFC from util import api_request_helpers app = Flask(__name__, static_folder='react_app/build') CORS(app) TFC_ORG_NAME = os.getenv("TFC_ORG_NAME", None) TFC_OAUTH_TOKEN_ID = os.getenv("TFC_OAUTH_TOKEN_ID", None) TFC_TOKEN = os.getenv("TFC_TOKEN", None) TFC_SAAS_URL = "https://app.terraform.io" TFC_URL = os.getenv("TFC_URL", TFC_SAAS_URL) api = TFC(TFC_TOKEN, url=TFC_URL) api.set_org(TFC_ORG_NAME) @app.route('/config_bundles/') def config_bundles(): return jsonify(api_request_helpers.get_config_bundles()) @app.route('/workspaces/') def list_workspaces(): all_workspaces = api.workspaces.list()["data"] return jsonify(all_workspaces) @app.route('/workspaces/plan/<workspace_id>')
from terrasnek.api import TFC
import os

###
# Documentation of the Py client libry
# https://terrasnek.readthedocs.io/en/latest/
###

TFC_TOKEN = os.getenv("TFC_TOKEN", None)
TFC_URL = os.getenv("TFC_URL", None)  # https://app.terraform.io
TFC_ORG = os.getenv("TFC_ORG")  # org: paulm

if __name__ == "__main__":
    api = TFC(TFC_TOKEN, url=TFC_URL)
    # Bug fix: the original passed the literal string "TFC_ORG" instead of
    # the value read from the environment above.
    api.set_org(TFC_ORG)

    # Bug fix: `api.workspaces.list` was missing the call parentheses, so
    # `ws` was a bound method, not the listing response.
    ws = api.workspaces.list()

    ## Get the WS ID
    # Bug fix: the workspaces endpoint object is not callable; use show()
    # and pull the ID out of the response payload.
    ws_id = api.workspaces.show(workspace_name="terraform-paulm-org")["data"]["id"]

    ## Get the run list for the workspace.
    run_show_list = api.runs.list(ws_id)

    ## Filter the last run.
    # Bug fix: list_all() returns a dict, so indexing it with [0] raised;
    # take the most recent run's ID from the "data" list instead.
    run_id = run_show_list["data"][0]["id"]

    ## Apply the plan
    applied_run = api.runs.apply(run_id)
def setUpClass(cls):
    """Create the shared TFC client, resolve the test org, and apply skips.

    Skips the test when required entitlements are missing, or when the
    endpoint under test does not exist on the backend being targeted
    (Terraform Cloud vs. Terraform Enterprise).
    """
    # Bug fix: `cls` is already the class in a classmethod, so
    # `cls.__class__.__name__` named the logger after the metaclass
    # ("type"); use `cls.__name__` instead.
    cls._logger = logging.getLogger(cls.__name__)
    cls._logger.setLevel(TERRASNEK_LOG_LEVEL)

    cls._tfc_url = TFC_URL
    cls._test_api_token = TFC_TOKEN
    cls._test_api_org_token = TFC_ORG_TOKEN
    cls._TERRASNEK_LOG_LEVEL = TERRASNEK_LOG_LEVEL
    cls._ssl_verify = SSL_VERIFY
    cls._api = TFC(
        cls._test_api_token, url=cls._tfc_url,
        verify=cls._ssl_verify, log_level=cls._TERRASNEK_LOG_LEVEL)

    cls._test_username = TEST_USERNAME
    cls._test_email = TEST_EMAIL
    cls._test_team_name = TEST_TEAM_NAME
    cls._test_password = TEST_PASSWORD

    # Paths to test fixtures (uploads) and scratch targets (downloads).
    cls._test_state_path = "./test/testdata/terraform/terrasnek_unittest.tfstate"
    cls._config_version_upload_tarball_path = \
        "./test/testdata/terraform/terrasnek_unittest_config_version.tar.gz"

    with open(
            './test/testdata/terraform/terrasnek_unittest_config_version_from_string.tf',
            'r') as tf_file:
        cls._config_version_upload_string = tf_file.read()

    cls._module_upload_tarball_path = \
        "./test/testdata/terraform/terrasnek_unittest_module.tar.gz"
    cls._policy_set_upload_tarball_path = \
        "./test/testdata/sentinel/terrasnek_unittest_sentinel.tar.gz"
    cls._plan_export_tarball_target_path = \
        "/tmp/terrasnek_unittest_plan_export.tar.gz"
    cls._plan_json_tarball_target_path = \
        "/tmp/terrasnek_unittest_plan_json.tar.gz"
    cls._module_version_source_tarball_target_path = \
        "/tmp/terrasnek_unittest_module_version_export.tar.gz"
    cls._module_latest_source_tarball_target_path = \
        "/tmp/terrasnek_unittest_module_latest_export.tar.gz"

    # If a test org is specified, use the specified org, otherwise create
    # a new one to run the testing in.
    if TEST_ORG_NAME:
        cls._test_org_name = TEST_ORG_NAME
        cls._test_org = cls._api.orgs.show(cls._test_org_name)
    else:
        cls._test_org_name = cls._random_name()
        org_create_payload = {
            "data": {
                "type": "organizations",
                "attributes": {
                    "name": cls._test_org_name,
                    "email": cls._test_email
                }
            }
        }
        cls._test_org = cls._api.orgs.create(org_create_payload)
    cls._api.set_org(cls._test_org_name)

    # Check to see if this test can be run with the current entitlements.
    missing_entitlements = cls._get_missing_entitlements(
        cls._endpoint_being_tested)
    if missing_entitlements:
        raise unittest.SkipTest(
            "Missing required Terraform Cloud Entitlments for test",
            cls._unittest_name, missing_entitlements)

    # Skip endpoints that only exist on the other backend. (The original
    # mixed an f-string prefix with %-formatting; plain f-strings produce
    # the identical message.)
    endpoint_to_test = getattr(cls._api, cls._endpoint_being_tested)
    if cls._api.is_terraform_cloud() and endpoint_to_test.terraform_enterprise_only():
        raise unittest.SkipTest(
            f"Skipping Test ({cls._endpoint_being_tested}), since we're testing against Terraform Cloud.")
    if not cls._api.is_terraform_cloud() and endpoint_to_test.terraform_cloud_only():
        raise unittest.SkipTest(
            f"Skipping Test ({cls._endpoint_being_tested}), since we're testing against Terraform Enterprise.")

    cls._purge_organization()
} run = api.runs.create(payload) if run == None: print( 'Error: Unable to queue destroy plan. The provided token probably does not have "apply" permission.' ) exit(1) run_id = run["data"]["id"] return run_id if __name__ == "__main__": if len(sys.argv) != 2: print('Usage: python3 destroy-plan.py [workspace-name]') print('') print( 'Please also ensure that the following environment variables are set to the appropriate values for your TFE install:' ) print(' * TFE_TOKEN') print(' * TFE_URL') print(' * TFE_ORG') exit(1) TFE_TOKEN = os.getenv("TFE_TOKEN", None) TFE_URL = os.getenv("TFE_URL", None) TFE_ORG = os.getenv("TFE_ORG", None) api = TFC(TFE_TOKEN, url=TFE_URL) api.set_org(TFE_ORG) destroy_run_id = queue_destroy_run(api, sys.argv[1]) print('Successfully queued destroy plan')
help="Migrate all state history workspaces. Default behavior is only current state.") parser.add_argument('--sensitive-data-file-path', dest="sensitive_data_file_path", \ default=DEFAULT_SENSITIVE_DATA_FILE, \ help="Path the the sensitive values file. Defaults to `sensitive_data.txt`.") parser.add_argument('--migrate-sensitive-data', dest="migrate_sensitive_data", action="store_true", \ help="Migrate sensitive data to the target organization.") parser.add_argument('--delete-all', dest="delete_all", action="store_true", \ help="Delete all resources from the target API.") parser.add_argument('--no-confirmation', dest="no_confirmation", action="store_true", \ help="If set, don't ask for confirmation before deleting all target resources.") parser.add_argument('--debug', dest="debug", action="store_true", \ help="If set, run the logger in debug mode.") args = parser.parse_args() api_source = TFC(TFE_TOKEN_SOURCE, url=TFE_URL_SOURCE, verify=TFE_VERIFY_SOURCE) api_source.set_org(TFE_ORG_SOURCE) api_target = TFC(TFE_TOKEN_TARGET, url=TFE_URL_TARGET, verify=TFE_VERIFY_TARGET) api_target.set_org(TFE_ORG_TARGET) if not os.path.exists(args.vcs_file_path): open(DEFAULT_VCS_FILE, "w").close() else: with open(args.vcs_file_path, "r") as f: TFE_VCS_CONNECTION_MAP = json.loads(f.read()) if not os.path.exists(args.sensitive_data_file_path):
"data": { "type": "registry-module-versions", "attributes": { "version": "0.0.1" } } } # TODO: build this into the purge logic. TFC_TOKEN = os.getenv("TFC_TOKEN", None) TFC_URL = os.getenv("TFC_URL", None) TFC_ORG = os.getenv("TFC_ORG", None) SSL_VERIFY = os.getenv("SSL_VERIFY", None) if __name__ == "__main__": api = TFC(TFC_TOKEN, url=TFC_URL, log_level=logging.DEBUG, verify=False) # logging.basicConfig(level=logging.DEBUG) api.set_org(TFC_ORG) listed_modules = api.registry_modules.list()["modules"] print(listed_modules) data = api.registry_modules.list() print(data) for module in listed_modules: module_name = module["name"] module_provider = module["provider"] if module_name in modules_to_purge: create_payload = create_module_payload create_payload["data"]["attributes"]["name"] = module_name
TFC_ORG = os.getenv("TFC_ORG", None)
GCS_BUCKET_NAME = os.getenv("GCS_BUCKET_NAME", None)

if __name__ == "__main__":
    migration_targets = []

    # Load the migration map from disk into a Python structure.
    with open("migration.json", "r") as f:
        migration_targets = json.loads(f.read())

    # GCS client used to pull statefile blobs out of the bucket.
    storage_client = storage.Client()

    # Terraform Enterprise client authenticated with TFC_TOKEN, scoped to
    # the org we are migrating into.
    api = TFC(TFC_TOKEN, url=TFC_URL)
    api.set_organization(TFC_ORG)

    for mt in migration_targets:
        # Each migration target names a blob inside the shared bucket.
        bucket = storage_client.bucket(GCS_BUCKET_NAME)
        blob = bucket.blob(mt["gcs-blob-path"])

        # The statefile name (last path segment of the blob) doubles as the
        # basis for the local download path.
        statefile_name = blob.name.split("/")[-1]
blob = bucket.blob(blob.name) filename = f"statefiles/{statefile_name}" blob.download_to_filename(filename) # And update migration targets with the information we # parsed above mt["blob-path"] = full_blob_path mt["statefile-name"] = statefile_name mt["statefile-local-path"] = filename # Write the updated version of the migration targets so we can # reference what occurred later with open('migration-enriched.json', 'w') as f: json.dump(migration_targets, f, indent=4) api = TFC(TFC_TOKEN) api.set_org(TFC_ORG) workspaces = api.workspaces.list() oauth_clients = api.oauth_clients.list()["data"] oauth_token_id = None for oac in oauth_clients: org_name = oac["relationships"]["organization"]["data"]["id"] if org_name == TFC_ORG: oauth_token_id = oac["relationships"]["oauth-tokens"]["data"][0][ "id"] for mt in migration_targets: # Configure our create payload with the data # from the migration targets JSON file
# Every re-created module gets the same initial version.
create_module_version_payload = {
    "data": {
        "type": "registry-module-versions",
        "attributes": {
            "version": "0.0.1"
        }
    }
}

# TODO: build this into the purge logic.

TFC_TOKEN = os.getenv("TFC_TOKEN", None)
TFC_URL = os.getenv("TFC_URL", None)

if __name__ == "__main__":
    api = TFC(TFC_TOKEN, url=TFC_URL)
    api.set_org("terrasnek-unittest")

    modules = api.registry_modules.list()["modules"]

    for module in modules:
        module_name = module["name"]
        module_provider = module["provider"]

        # Reuse the shared payload template, stamping in this module's name
        # before each create call.
        create_payload = create_module_payload
        create_payload["data"]["attributes"]["name"] = module_name

        created_module = api.registry_modules.create(create_payload)["data"]
        created_version = api.registry_modules.create_version(
            module_name, module_provider, create_module_version_payload)["data"]
import os

from terrasnek.api import TFC

from functions import *

# SOURCE ORG
# Client for the organization we are migrating FROM.
TFE_TOKEN_ORIGINAL = os.getenv("TFE_TOKEN_ORIGINAL", None)
TFE_URL_ORIGINAL = os.getenv("TFE_URL_ORIGINAL", None)
TFE_ORG_ORIGINAL = os.getenv("TFE_ORG_ORIGINAL", None)

api_original = TFC(TFE_TOKEN_ORIGINAL, url=TFE_URL_ORIGINAL)
api_original.set_org(TFE_ORG_ORIGINAL)

# NEW ORG
# Client for the organization we are migrating TO.
TFE_TOKEN_NEW = os.getenv("TFE_TOKEN_NEW", None)
TFE_URL_NEW = os.getenv("TFE_URL_NEW", None)
TFE_ORG_NEW = os.getenv("TFE_ORG_NEW", None)
TFE_OAUTH_NEW = os.getenv("TFE_OAUTH_NEW", None)

api_new = TFC(TFE_TOKEN_NEW, url=TFE_URL_NEW)
api_new.set_org(TFE_ORG_NEW)

if __name__ == "__main__":
    # Teams must exist in the new org before anything that references them.
    teams_map = migrate_teams(api_original, api_new)
    print('teams successfully migrated')

    # migrate_organization_memberships(api_original, api_new, teams_map)
    # print('organization memberships successfully migrated')

    ssh_keys_map, ssh_key_name_map = migrate_ssh_keys(api_original, api_new)
    print('ssh keys successfully migrated')