def _get_default_bucket_name():
    """Return the default GCS bucket name ("<project>.appspot.com").

    Returns None when no project id is available.
    """
    p_id = project_id()
    if not p_id:
        return None
    return "{}.appspot.com".format(p_id)
def backup_datastore(bucket=None, kinds=None):
    """
    Using the new scheduled backup service write all required entity kinds
    to a specific GCS bucket path.

    Args:
        bucket: Optional GCS bucket name, passed through to get_backup_path
            to build the export destination prefix.
        kinds: Optional iterable of entity kind names to export; filtered
            by _get_valid_export_kinds before use.

    Does nothing (logs a warning and returns) when backups are disabled or
    no exportable kinds remain after filtering.
    """
    backup_enabled = get_backup_setting("ENABLED", False)
    if not backup_enabled:
        # NOTE: the original implicit string concatenation was missing a
        # space, logging "...not set.The datastore backup...".
        logger.warning("DJANGAE_BACKUP_ENABLED is False or not set. "
                       "The datastore backup will not be run.")
        return

    # make sure no blacklisted entity kinds are included in our export
    valid_kinds = _get_valid_export_kinds(kinds)
    if not valid_kinds:
        logger.warning("No whitelisted entity kinds to export.")
        return

    # build the service object with the necessary credentials and trigger export
    service = _get_service()
    body = {
        'outputUrlPrefix': get_backup_path(bucket),
        'entityFilter': {
            'kinds': valid_kinds,
        }
    }
    app_id = project_id()
    request = service.projects().export(projectId=app_id, body=body)
    request.execute()
def _get_storage_client():
    """Build and return a google CloudStorage ``Client``.

    Note: the google storage python library depends on env variables read
    at module import time, so those should be set before import if an
    overwrite is needed.
    """
    # Outside production, hand the client an explicit requests session;
    # in production the library's default transport is used (_http=None).
    session = requests.Session() if not is_production_environment() else None

    from google.cloud import storage

    return storage.Client(
        project=project_id(),
        _http=session,
    )
def cloud_tasks_project():
    """Return the project id to use for Cloud Tasks.

    Prefers the Django setting named by CLOUD_TASKS_PROJECT_SETTING and
    falls back to the environment's project id when the setting is absent
    or falsy.
    """
    # Renamed the local (was "project_id") to avoid shadowing the
    # module-level project_id() helper used elsewhere in this file.
    configured = getattr(settings, CLOUD_TASKS_PROJECT_SETTING, None)
    return configured or environment.project_id()