def gsutil_publish(filename, gsurl, duration='7d'):
    """Upload ``filename`` to ``gsurl`` and return a time-limited signed URL.

    :param filename: local file to upload; if falsy, the upload step is
        skipped and only the signed URL for ``gsurl`` is produced.
    :param gsurl: target ``gs://`` URL; if falsy, nothing happens.
    :param duration: validity period passed to ``gsutil signurl -d``.
    :return: the single signed URL string, or None when ``gsurl`` is falsy.
    """
    if not gsurl:
        return None
    if filename:
        # Restored use of the filename argument here (the previous text had a
        # literal placeholder instead of an f-string field).
        # NOTE(review): shell=True with an interpolated filename is
        # shell-injection prone -- acceptable only for trusted callers.
        subprocess.check_call(f'ls -lah {filename}', shell=True)
        gcloud_driver.check_call(
            *cluster_manager.get_provider().get_project_zone(),
            f'cp ./{filename} {gsurl}', gsutil=True)
    # gsutil signurl needs a service-account key file on disk; materialize the
    # secret into a temp file that is removed when the with-block exits.
    with tempfile.NamedTemporaryFile('w') as f:
        f.write(
            config_manager.get(
                key='service-account-json',
                secret_name='ckan-cloud-provider-cluster-gcloud'))
        f.flush()
        output = gcloud_driver.check_output(
            *cluster_manager.get_provider().get_project_zone(),
            f'signurl -d {duration} {f.name} {gsurl}', gsutil=True)
        # signurl output is tab-separated; the signed URL is the last column.
        # The len > 20 filter drops the header and any junk lines.
        signed_gsurls = [
            line for line in [
                line.strip().split('\t')[-1].strip()
                for line in output.decode().splitlines()
            ] if len(line) > 20
        ]
        assert len(signed_gsurls) == 1
        return signed_gsurls[0]
def get_all_db_names(db_prefix=None):
    """Return the names of all databases on the (optionally prefixed) SQL instance.

    :param db_prefix: optional prefix used to resolve the instance name.
    :return: list of database name strings.
    """
    instance_name = _sql_instance_name(db_prefix)
    list_cmd = f'sql databases list --instance {instance_name} --format json'
    raw_output = gcloud_driver.check_output(
        *_gcloud().get_project_zone(), list_cmd)
    databases = json.loads(raw_output.decode())
    return [database['name'] for database in databases]
def _get_latest_backups(db_name, datastore_name):
    """Find today's most recent backup gs:// URLs for a db and its datastore.

    Lists the backup bucket directory for the current date (the backtick
    ``date`` expansion runs inside the gsutil shell command) and greps for
    each name, then picks the newest entry per database by the timestamp
    embedded in the object name (``<name>_<YYYYmmddHHMM>.<ext>``).

    :return: tuple ``(db_backup_url, datastore_backup_url)``; either element
        may be None when no matching backup exists for today.
    """
    gs_base_url = config_manager.get(
        key='backups-gs-base-url',
        secret_name='ckan-cloud-provider-db-gcloudsql-credentials')
    output = gcloud_driver.check_output(
        *cluster_manager.get_provider().get_project_zone(),
        f"ls {gs_base_url}/`date +%Y/%m/%d`/'*'/ | grep {db_name}",
        gsutil=True).decode() + '\n' + gcloud_driver.check_output(
            *cluster_manager.get_provider().get_project_zone(),
            f"ls {gs_base_url}/`date +%Y/%m/%d`/'*'/ | grep {datastore_name}",
            gsutil=True).decode()
    datastore_backup_url, datastore_backup_datetime = None, None
    db_backup_url, db_backup_datetime = None, None
    for line in output.splitlines():
        line = line.strip()
        if len(line) < 10:
            # skip blank / junk lines from the gsutil output
            continue
        # rsplit keeps backup names containing underscores intact; the
        # previous plain split('_') raised ValueError for such names.
        backup_name, backup_datetime = \
            line.split('/')[-1].split('.')[0].rsplit('_', 1)
        backup_datetime = datetime.datetime.strptime(backup_datetime,
                                                     '%Y%m%d%H%M')
        if backup_name == db_name:
            is_datastore = False
        elif backup_name == datastore_name:
            is_datastore = True
        else:
            # line matched grep but is for some other database -- ignore
            continue
        logs.info(backup_name=backup_name,
                  backup_datetime=backup_datetime,
                  is_datastore=is_datastore)
        # keep the newest timestamp seen so far for each database kind
        if is_datastore and (datastore_backup_datetime is None
                             or datastore_backup_datetime < backup_datetime):
            datastore_backup_datetime, datastore_backup_url = backup_datetime, line
        if not is_datastore and (db_backup_datetime is None
                                 or db_backup_datetime < backup_datetime):
            db_backup_datetime, db_backup_url = backup_datetime, line
    logs.info(db_backup_datetime=db_backup_datetime,
              db_backup_url=db_backup_url)
    logs.info(datastore_backup_datetime=datastore_backup_datetime,
              datastore_backup_url=datastore_backup_url)
    return db_backup_url, datastore_backup_url
def export_db(instance_id):
    """Locate the latest db and datastore backup URLs for a CKAN instance.

    Reads the instance spec from the ``ckancloudckaninstance`` custom
    resource, then scans the backups bucket for today and up to two days
    back, keeping the newest matching ``.gz`` object per database kind
    (timestamp is parsed from the ``<name>_<YYYYmmddHHMM>.gz`` object name).

    :param instance_id: name of the ckancloudckaninstance resource.
    :return: tuple ``(db_gs_url, datastore_gs_url)``; either may be None
        when no backup was found in the 3-day window.
    """
    instance_spec = kubectl.get(f'ckancloudckaninstance {instance_id}')['spec']
    db_name = instance_spec['db']['name']
    db_prefix = instance_spec['db'].get('dbPrefix')
    datastore_name = instance_spec['datastore']['name']
    datastore_prefix = instance_spec['datastore'].get('dbPrefix')
    gs_base_url = config_manager.get(
        secret_name='ckan-cloud-provider-db-gcloudsql-credentials',
        key='backups-gs-base-url')
    # Prefix paths carry their own trailing slash; empty when no prefix.
    db_prefix_path = f'{db_prefix}/' if db_prefix else ''
    datastore_prefix_path = f'{datastore_prefix}/' if datastore_prefix else ''
    latest_gs_urls = {'db': None, 'datastore': None}
    latest_gs_urls_datetimes = {'db': None, 'datastore': None}
    for dbtype in ['db', 'datastore']:
        # look back up to 2 days; stop at the first day with any match
        for minus_days in (0, 1, 2):
            dt = (datetime.datetime.now() - datetime.timedelta(days=minus_days))
            datepath = dt.strftime('%Y/%m/%d')
            datesuffix = dt.strftime('%Y%m%d')
            # NOTE(review): when a prefix is set this yields
            # '<base><prefix>//<date>' -- no slash between base and prefix
            # and a doubled slash after it; looks suspicious, verify against
            # the actual bucket layout / whether gs_base_url ends with '/'.
            if dbtype == 'datastore':
                ls_arg = f'{gs_base_url}{datastore_prefix_path}/{datepath}/*/{datastore_name}_{datesuffix}*.gz'
            else:
                ls_arg = f'{gs_base_url}{db_prefix_path}/{datepath}/*/{db_name}_{datesuffix}*.gz'
            output = gcloud_driver.check_output(
                *cluster_manager.get_provider().get_project_zone(),
                f'ls -l "{ls_arg}"', gsutil=True)
            # 'gsutil ls -l' lines end with the gs:// URL in the last column
            for line in output.decode().splitlines():
                gsurl = line.strip().split(' ')[-1].strip()
                if gsurl.startswith('gs://'):
                    # timestamp is the suffix after the last '_' in the
                    # object name, before the final extension
                    gs_url_datetime = datetime.datetime.strptime(
                        gsurl.split('/')[-1].split('.')[-2].split('_')[-1],
                        '%Y%m%d%H%M')
                    # keep the newest URL seen for this dbtype
                    if not latest_gs_urls[dbtype] or latest_gs_urls_datetimes[
                            dbtype] < gs_url_datetime:
                        latest_gs_urls[dbtype], latest_gs_urls_datetimes[
                            dbtype] = gsurl, gs_url_datetime
            if latest_gs_urls[dbtype]:
                break
    return latest_gs_urls['db'], latest_gs_urls['datastore']
def check_output(cmd, gsutil=False):
    """Run *cmd* via the gcloud driver against this provider's project/zone.

    :param cmd: gcloud (or gsutil) command string to execute.
    :param gsutil: when True, run the command through gsutil instead.
    :return: raw command output (bytes), as produced by the driver.
    """
    project_zone = get_project_zone()
    return gcloud_driver.check_output(*project_zone, cmd, gsutil=gsutil)
def get_operation_status(operation_id):
    """Return the parsed status of a Cloud SQL operation.

    :param operation_id: id accepted by ``gcloud sql operations describe``.
    :return: the describe output parsed from YAML (typically a dict).
    """
    output = gcloud_driver.check_output(
        *_gcloud().get_project_zone(),
        f'sql operations describe {operation_id}')
    # safe_load: the describe output is plain YAML; yaml.load without an
    # explicit Loader is deprecated (PyYAML >= 5.1) and can construct
    # arbitrary Python objects.
    return yaml.safe_load(output.decode())