def create_volume(disk_size_gb, labels, use_existing_disk_name=None, zone=None):
    """Create a GCE persistent disk (unless an existing disk name is given) and the matching
    PersistentVolume / PersistentVolumeClaim objects, returning a volume spec for the claim."""
    disk_id = use_existing_disk_name or 'cc' + _generate_password(12)
    if use_existing_disk_name:
        logs.info(f'using existing persistent disk {disk_id}')
    else:
        logs.info(f'creating persistent disk {disk_id} with size {disk_size_gb}')
        _, zone = get_project_zone()
        # gcloud disk labels do not allow slashes, so replace them in both keys and values
        labels = ','.join([
            '{}={}'.format(k.replace('/', '_'), v.replace('/', '_'))
            for k, v in labels.items()
        ])
        gcloud_driver.check_call(
            *get_project_zone(),
            f'compute disks create {disk_id} --size={disk_size_gb}GB --zone={zone} --labels={labels}'
        )
    kubectl.apply({
        'apiVersion': 'v1',
        'kind': 'PersistentVolume',
        'metadata': {'name': disk_id, 'namespace': 'ckan-cloud'},
        'spec': {
            'storageClassName': '',
            'capacity': {'storage': f'{disk_size_gb}G'},
            'accessModes': ['ReadWriteOnce'],
            'gcePersistentDisk': {'pdName': disk_id}
        }
    })
    kubectl.apply({
        'apiVersion': 'v1',
        'kind': 'PersistentVolumeClaim',
        'metadata': {'name': disk_id, 'namespace': 'ckan-cloud'},
        'spec': {
            'storageClassName': '',
            'volumeName': disk_id,
            'accessModes': ['ReadWriteOnce'],
            'resources': {'requests': {'storage': f'{disk_size_gb}G'}}
        }
    })
    return {'persistentVolumeClaim': {'claimName': disk_id}}
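
# Illustrative usage (a sketch, not part of the module API; the size and label values are hypothetical):
#     volume_spec = create_volume(20, {'app': 'ckan'})
#     # -> {'persistentVolumeClaim': {'claimName': 'cc...'}}, ready to embed as a pod volume source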

def create_backup(database, connection_string=None, db_prefix=None):
    """Dump a database to a gzipped SQL file and upload it to the configured backups bucket."""
    filename = f'{database}_' + datetime.datetime.now().strftime('%Y%m%d%H%M') + '.gz'
    gs_url = os.path.join(
        _credentials_get(db_prefix, key='backups-gs-base-url', required=True),
        datetime.datetime.now().strftime('%Y/%m/%d/%H'),
        filename
    )
    if not connection_string:
        from ckan_cloud_operator.providers.db import manager as db_manager
        connection_string = db_manager.get_external_admin_connection_string(db_name=database)
    logs.info(f'Dumping DB: {filename}')
    # dump the public schema, comment out extension statements, and gzip the result
    subprocess.check_call([
        "bash", "-o", "pipefail", "-c",
        f"pg_dump -d {connection_string} --format=plain --no-owner --no-acl --schema=public | "
        f"sed -E 's/(DROP|CREATE|COMMENT ON) EXTENSION/-- \\1 EXTENSION/g' | "
        f"gzip -c > {filename}",
    ])
    subprocess.check_call(f'ls -lah {filename}', shell=True)
    logs.info(f'Copying to: {gs_url}')
    gcloud_driver.check_call(
        *_gcloud().get_project_zone(),
        f'cp -m ./{filename} {gs_url} && rm {filename}',
        gsutil=True
    )
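
# Illustrative usage (a sketch; the database name and prefix are hypothetical):
#     create_backup('my-instance', db_prefix='datastore')
#     # dumps the DB and uploads the .gz file under the configured backups-gs-base-url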

def gsutil_publish(filename, gsurl, duration='7d'):
    """Optionally upload a local file to cloud storage and return a signed URL valid for `duration`."""
    if gsurl:
        if filename:
            subprocess.check_call(f'ls -lah {filename}', shell=True)
            gcloud_driver.check_call(
                *cluster_manager.get_provider().get_project_zone(),
                f'cp ./{filename} {gsurl}',
                gsutil=True
            )
        # gsutil signurl needs a service account key file, so write the stored key to a temporary file
        with tempfile.NamedTemporaryFile('w') as f:
            f.write(config_manager.get(
                key='service-account-json',
                secret_name='ckan-cloud-provider-cluster-gcloud'
            ))
            f.flush()
            output = gcloud_driver.check_output(
                *cluster_manager.get_provider().get_project_zone(),
                f'signurl -d {duration} {f.name} {gsurl}',
                gsutil=True
            )
        # the signed URL is in the last tab-separated column of the signurl output table
        signed_gsurls = [
            line for line in [
                line.strip().split('\t')[-1].strip()
                for line in output.decode().splitlines()
            ]
            if len(line) > 20
        ]
        assert len(signed_gsurls) == 1
        return signed_gsurls[0]
    else:
        return None
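
# Illustrative usage (a sketch; the file name and gs:// URL are hypothetical):
#     signed_url = gsutil_publish('report.gz', 'gs://my-bucket/reports/report.gz', duration='24h')
#     # returns a time-limited signed URL, or None when no gs:// URL is given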

def check_call(cmd, gsutil=False):
    """Run a gcloud (or gsutil) command against this provider's project and zone."""
    return gcloud_driver.check_call(*get_project_zone(), cmd, gsutil=gsutil)
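
# Illustrative usage (a sketch; the commands and bucket are examples only):
#     check_call('compute disks list')
#     check_call('ls gs://my-bucket/', gsutil=True)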