def get_trigger(
    name: str,
    configuration: Configuration = None,
    secrets: Secrets = None,
):
    """
    Returns information about a BuildTrigger.

    See: https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers/get

    :param name: name of the trigger
    :param configuration:
    :param secrets:
    :return:
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('cloudbuild', version='v1',
                          configuration=configuration, secrets=secrets)
    request = service.projects().triggers().get(
        projectId=ctx.project_id, triggerId=name)
    response = request.execute()
    return response

def test_context_default_values():
    """
    Allow for optional keys in the configuration, with None as the default.
    """
    ctx = get_context({}, fixtures.secrets)
    assert ctx.project_id is None
    assert ctx.zone is None
    assert ctx.cluster_name is None
    assert ctx.region is None

def create_new_nodepool(body: Dict[str, Any],
                        wait_until_complete: bool = True,
                        configuration: Configuration = None,
                        secrets: Secrets = None) -> Dict[str, Any]:
    """
    Create a new node pool in the given cluster/zone of the provided project.

    The node pool config must be passed as a mapping to the `body` parameter
    and must respect the REST API.

    If `wait_until_complete` is set to `True` (the default), the function
    will block until the node pool is ready. Otherwise, it will return
    immediately with the operation information.

    See: https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters.nodePools/create
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service(
        'container', configuration=configuration, secrets=secrets)
    np = service.projects().zones().clusters().nodePools()
    response = np.create(
        projectId=ctx.project_id,
        zone=ctx.zone,
        clusterId=ctx.cluster_name,
        body=body
    ).execute()
    logger.debug("NodePool creation: {}".format(str(response)))

    if wait_until_complete:
        ops = service.projects().zones().operations()
        response = wait_on_operation(
            ops, projectId=ctx.project_id, zone=ctx.zone,
            operationId=response["name"])

    return response

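# A minimal sketch of a `body` mapping accepted by `create_new_nodepool`,
# following the GKE NodePools REST resource. The pool name, machine type
# and node count below are hypothetical illustration values, not part of
# this module.
example_nodepool_body = {
    "nodePool": {
        "name": "default-pool-1",
        "initialNodeCount": 3,
        "config": {
            "machineType": "n1-standard-1",
            "oauthScopes": [
                "https://www.googleapis.com/auth/devstorage.read_only"
            ]
        }
    }
}
# create_new_nodepool(body=example_nodepool_body,
#                     configuration=configuration, secrets=secrets)
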
def delete_nodepool(node_pool_id: str,
                    wait_until_complete: bool = True,
                    configuration: Configuration = None,
                    secrets: Secrets = None) -> Dict[str, Any]:
    """
    Delete a node pool from the given cluster/zone of the provided project.

    If `wait_until_complete` is set to `True` (the default), the function
    will block until the node pool is deleted. Otherwise, it will return
    immediately with the operation information.

    See: https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters.nodePools/delete
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service(
        'container', configuration=configuration, secrets=secrets)
    np = service.projects().zones().clusters().nodePools()
    response = np.delete(
        projectId=ctx.project_id,
        zone=ctx.zone,
        clusterId=ctx.cluster_name,
        nodePoolId=node_pool_id
    ).execute()
    logger.debug("NodePool deletion: {}".format(str(response)))

    if wait_until_complete:
        ops = service.projects().zones().operations()
        response = wait_on_operation(
            ops, projectId=ctx.project_id, zone=ctx.zone,
            operationId=response["name"])

    return response

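# The `wait_on_operation` helper used by the functions above is defined
# elsewhere in this package. The sketch below shows one plausible shape,
# assuming the operations resource exposes `get(...)` and that completed
# operations report a "DONE" status; treat it as illustrative, not the
# actual implementation.
import time


def wait_on_operation_sketch(operation_service, frequency: int = 1,
                             **kwargs) -> Dict[str, Any]:
    """Poll the operation identified by `kwargs` until it completes."""
    while True:
        response = operation_service.get(**kwargs).execute()
        if response.get("status") == "DONE":
            return response
        time.sleep(frequency)
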
def list_instances(configuration: Configuration = None,
                   secrets: Secrets = None) -> Dict[str, Any]:
    """
    Lists Cloud SQL instances in a given project, in alphabetical order of
    the instance name.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/v1beta4/instances/list
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)

    instances = []
    request = service.instances().list(project=ctx.project_id)
    while request is not None:
        response = request.execute()
        # a project without instances returns no "items" key at all
        instances.extend(response.get("items", []))
        request = service.instances().list_next(previous_request=request,
                                                previous_response=response)

    return {"instances": instances}

def client(configuration: Configuration = None, secrets: Secrets = None):
    """
    Create a client for Google Cloud Storage.
    """
    ctx = get_context(configuration=configuration, secrets=secrets)
    credentials = load_credentials(secrets=secrets)
    return gc_storage.Client(project=ctx.project_id,
                             credentials=credentials)

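# Illustrative usage of the storage client above, using standard
# google-cloud-storage calls; the bucket name is hypothetical.
# storage_client = client(configuration=configuration, secrets=secrets)
# bucket = storage_client.bucket("my-chaos-artifacts")
# if bucket.exists():
#     for blob in storage_client.list_blobs("my-chaos-artifacts"):
#         print(blob.name)
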
def describe_database(instance_id: str, database_name: str,
                      configuration: Configuration = None,
                      secrets: Secrets = None) -> Dict[str, Any]:
    """
    Displays configuration and metadata about a Cloud SQL database.

    Information such as database name, charset, and collation will be
    displayed.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/rest/v1beta4/databases/get

    :param instance_id: Cloud SQL instance ID.
    :param database_name: Cloud SQL database name.
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)
    request = service.databases().get(project=ctx.project_id,
                                      instance=instance_id,
                                      database=database_name)
    response = request.execute()
    return response

def run_trigger(
    name: str,
    source: Dict[Any, Any],
    configuration: Configuration = None,
    secrets: Secrets = None,
):
    """
    Runs a BuildTrigger at a particular source revision.

    NB: The trigger must exist in the targeted project.

    See: https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers/run

    :param name: name of the trigger
    :param source: location of the source in a Google Cloud Source Repository
    :param configuration:
    :param secrets:
    :return:
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('cloudbuild', version='v1',
                          configuration=configuration, secrets=secrets)
    request = service.projects().triggers().run(
        projectId=ctx.project_id, triggerId=name, body=source)
    response = request.execute()
    return response

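# A minimal sketch of the `source` mapping expected by `run_trigger`,
# following the Cloud Build RepoSource shape. The project, repository and
# branch names are hypothetical.
example_source = {
    "projectId": "my-project",
    "repoName": "my-repository",
    "branchName": "master"
}
# run_trigger(name="my-trigger", source=example_source,
#             configuration=configuration, secrets=secrets)
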
def trigger_failover(instance_id: str,
                     wait_until_complete: bool = True,
                     settings_version: int = None,
                     configuration: Configuration = None,
                     secrets: Secrets = None) -> Dict[str, Any]:
    """
    Causes a high-availability Cloud SQL instance to fail over.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/v1beta4/instances/failover

    :param instance_id: Cloud SQL instance ID.
    :param wait_until_complete: wait for the operation in progress to complete.
    :param settings_version: The current settings version of this instance.
    :return:
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)

    if not settings_version:
        # dynamically fetch the value from the instance description
        instance = describe_instance(instance_id,
                                     configuration=configuration,
                                     secrets=secrets)
        settings_version = instance["settings"]["settingsVersion"]

    failover_request_body = {
        "failoverContext": {
            "kind": "sql#failoverContext",
            "settingsVersion": settings_version
        }
    }

    request = service.instances().failover(project=ctx.project_id,
                                           instance=instance_id,
                                           body=failover_request_body)
    response = request.execute()
    logger.debug('Database {db} failover: {resp}'.format(db=instance_id,
                                                         resp=response))

    if wait_until_complete:
        ops = service.operations()
        response = wait_on_operation(ops, project=ctx.project_id,
                                     operation=response["name"])

    return response

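# Illustrative chaostoolkit experiment action invoking the failover above;
# the module path and instance name are assumptions, not verified here.
# {
#     "type": "action",
#     "name": "trigger-sql-failover",
#     "provider": {
#         "type": "python",
#         "module": "chaosgcp.sql.actions",
#         "func": "trigger_failover",
#         "arguments": {"instance_id": "my-postgres-instance"}
#     }
# }
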
def list_databases(instance_id: str,
                   configuration: Configuration = None,
                   secrets: Secrets = None) -> Dict[str, Any]:
    """
    Lists databases in the specified Cloud SQL instance.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/rest/v1beta4/databases/list

    :param instance_id: Cloud SQL instance ID.
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)
    request = service.databases().list(project=ctx.project_id,
                                       instance=instance_id)
    response = request.execute()
    databases = response.get("items", [])
    return {"databases": databases}

def list_triggers(
    configuration: Configuration = None,
    secrets: Secrets = None,
):
    """
    Lists existing BuildTriggers.

    See: https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers/list

    :param configuration:
    :param secrets:
    :return:
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('cloudbuild', version='v1',
                          configuration=configuration, secrets=secrets)
    request = service.projects().triggers().list(projectId=ctx.project_id)
    response = request.execute()
    return response

def describe_instance(instance_id: str,
                      configuration: Configuration = None,
                      secrets: Secrets = None) -> Dict[str, Any]:
    """
    Displays configuration and metadata about a Cloud SQL instance.

    Information such as instance name, IP address, region, the CA
    certificate and configuration settings will be displayed.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/v1beta4/instances/get

    :param instance_id: Cloud SQL instance ID.
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)
    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)
    request = service.instances().get(project=ctx.project_id,
                                      instance=instance_id)
    response = request.execute()
    return response

def test_context_from_config():
    ctx = get_context(fixtures.configuration, fixtures.secrets)
    assert ctx.project_id == fixtures.configuration["gcp_project_id"]
    assert ctx.zone == fixtures.configuration["gcp_zone"]
    assert ctx.region == fixtures.configuration["gcp_region"]
    assert ctx.cluster_name == fixtures.configuration["gcp_gke_cluster_name"]

def export_data(instance_id: str, storage_uri: str, project_id: str = None,
                file_type: str = 'sql', databases: List[str] = None,
                tables: List[str] = None, export_schema_only: bool = False,
                wait_until_complete: bool = True,
                configuration: Configuration = None,
                secrets: Secrets = None) -> Dict[str, Any]:
    """
    Exports data from a Cloud SQL instance to a Cloud Storage bucket
    as a SQL dump or CSV file.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/v1beta4/instances/export

    If `project_id` is given, it will take precedence over the global
    project ID defined at the configuration level.
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)

    if file_type not in ['sql', 'csv']:
        raise ActivityFailed(
            "Cannot export database. "
            "File type '{ft}' is invalid.".format(ft=file_type))

    if not project_id and not ctx.project_id:
        raise ActivityFailed(
            "Cannot export data from database. "
            "The project ID must be defined in configuration or as argument.")

    if databases is None:
        databases = []

    if tables is None:
        tables = []

    if file_type == 'csv' and not databases:
        raise ActivityFailed(
            "Cannot export data from database. "
            "A CSV export requires the select query to be passed as the "
            "first element of `databases`.")

    export_request_body = {
        "exportContext": {
            "kind": "sql#exportContext",
            "fileType": file_type,
            "uri": storage_uri,
            "databases": databases,
        }
    }
    if file_type == "sql":
        # per the REST API, the export options live inside "exportContext"
        export_request_body["exportContext"]["sqlExportOptions"] = {
            "tables": tables,
            "schemaOnly": export_schema_only,
            "mysqlExportOptions": {
                "masterData": 0
            }
        }
    elif file_type == "csv":
        # the first element of `databases` is used as the CSV select query
        export_request_body["exportContext"]["csvExportOptions"] = {
            "selectQuery": databases[0]
        }

    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)
    request = service.instances().export(
        project=project_id or ctx.project_id,
        instance=instance_id,
        body=export_request_body)
    response = request.execute()
    logger.debug("Export data from database {db}[{proj}]: {resp}".format(
        proj=project_id or ctx.project_id, db=instance_id, resp=response))

    if wait_until_complete:
        ops = service.operations()
        response = wait_on_operation(
            ops, project=project_id or ctx.project_id,
            operation=response["name"])

    return response

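# Illustrative call of `export_data`; the instance name and bucket URI are
# hypothetical. A plain SQL dump of a single database:
# export_data(
#     instance_id="my-postgres-instance",
#     storage_uri="gs://my-sql-exports/dump.sql.gz",
#     databases=["mydb"],
#     configuration=configuration,
#     secrets=secrets)
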
def import_data(instance_id: str, storage_uri: str, database: str,
                project_id: str = None, file_type: str = 'sql',
                import_user: str = None, table: str = None,
                columns: List[str] = None,
                wait_until_complete: bool = True,
                configuration: Configuration = None,
                secrets: Secrets = None) -> Dict[str, Any]:
    """
    Imports data into a Cloud SQL instance from a SQL dump or CSV file
    in Cloud Storage.

    See: https://cloud.google.com/sql/docs/postgres/admin-api/v1beta4/instances/import

    If `project_id` is given, it will take precedence over the global
    project ID defined at the configuration level.
    """  # noqa: E501
    ctx = get_context(configuration=configuration, secrets=secrets)

    if file_type not in ['sql', 'csv']:
        raise ActivityFailed(
            "Cannot import data into database. "
            "File type '{ft}' is invalid.".format(ft=file_type))

    if not database:
        raise ActivityFailed("Cannot import data into database. "
                             "Database name is required.")

    if not storage_uri:
        raise ActivityFailed(
            "Cannot import data into database. "
            "Path of the import file in Cloud Storage is required.")

    if file_type == 'csv' and not table:
        raise ActivityFailed(
            "Cannot import data into database. "
            "The table to which CSV data is imported is required.")

    if not project_id and not ctx.project_id:
        raise ActivityFailed(
            "Cannot import data into database. "
            "The project ID must be defined in configuration or as argument.")

    if columns is None:
        columns = []

    import_request_body = {
        "importContext": {
            "kind": "sql#importContext",
            "fileType": file_type,
            "uri": storage_uri,
            "database": database,
            "importUser": import_user,
        }
    }
    if file_type == 'csv':
        # per the REST API, the CSV options live inside "importContext"
        import_request_body["importContext"]["csvImportOptions"] = {
            "table": table,
            "columns": columns,
        }

    service = get_service('sqladmin', version='v1beta4',
                          configuration=configuration, secrets=secrets)
    request = service.instances().import_(
        project=project_id or ctx.project_id,
        instance=instance_id,
        body=import_request_body)
    response = request.execute()
    logger.debug("Import data into database {db}[{proj}]: {resp}".format(
        proj=project_id or ctx.project_id, db=instance_id, resp=response))

    if wait_until_complete:
        ops = service.operations()
        response = wait_on_operation(
            ops, project=project_id or ctx.project_id,
            operation=response["name"])

    return response

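# Illustrative call of `import_data` for a CSV file; all names are
# hypothetical. The CSV rows are loaded into the given table of `mydb`:
# import_data(
#     instance_id="my-postgres-instance",
#     storage_uri="gs://my-sql-exports/users.csv",
#     database="mydb",
#     file_type="csv",
#     table="users",
#     configuration=configuration,
#     secrets=secrets)
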