def _sync_datafs(host_type, path):
    """Pull a remote Data.fs onto the data server and archive it as a tgz.

    Rsyncs ``<path>/var/filestorage/Data.fs`` from the current
    ``api.env.host_string`` into the project's ``current_prod`` directory
    on the first data host, tars it under a date-stamped,
    sequence-numbered filename, and moves the archive one level above
    ``current_prod`` alongside previous backups.

    :param host_type: label embedded in the archive filename identifying
        the kind of host the data came from.
    :param path: instance root on the source host that contains
        ``var/filestorage/Data.fs``.
    """
    data_host = api.env.data_hosts[0]
    src_path = os.path.join(path, 'var', 'filestorage', 'Data.fs')
    src_host_string = ':'.join([api.env.host_string, src_path])
    target_path = os.path.join(api.env.base_data_path,
                               api.env.project_name, 'data', 'current_prod')
    today = datetime.date.today().strftime('%Y-%m-%d')
    # Count today's existing archives so the new one gets the next
    # two-digit sequence number.
    filename_test = 'Data.fs-%s-%s-%s-*.tgz' % (api.env.project_name,
                                                host_type, today)
    data_path = _get_data_path()
    result = _quiet_remote_ls(data_path, filename_test)
    existing_files = result.split()
    filename = 'Data.fs-%s-%s-%s-%02d.tgz' % (api.env.project_name,
                                              host_type, today,
                                              len(existing_files) + 1)
    # Ensure the `current_prod` dir exists (return value intentionally
    # ignored — the mkdir is quiet/best-effort).
    _quiet_remote_mkdir(target_path)
    with api.settings(host_string=data_host):
        # --inplace lets rsync update the (potentially large) Data.fs in
        # place rather than building a temporary copy first.
        _sshagent_run('rsync -z --inplace %s %s' % (src_host_string,
                                                    target_path))
        with api.cd(target_path):
            result = api.run('tar czf %s Data.fs' % filename)
            if result.succeeded:
                # Move the finished archive up next to the other backups.
                api.run('mv %s ..' % filename)
            else:
                # Don't leave a partial/corrupt archive behind.
                api.run('rm -f %s' % filename)
def list_saved_data(fname_filter='*.tgz'):
    """List saved data files from the data server(s).

    Runs the filtered listing on every host in ``api.env.data_hosts``,
    printing each host's matches as it goes, and returns the combined
    list of matching filenames.

    :param fname_filter: shell glob applied to the remote listing.
    :return: list of matching filenames across all data hosts (empty
        when there are no hosts or no matches).
    """
    full_path = _get_data_path()
    path_filter = os.path.join(full_path, fname_filter)
    found = []
    for host in api.env.data_hosts:
        api.puts('%s: "%s"' % (host, path_filter))
        with api.settings(host_string=host):
            current_files = _quiet_remote_ls(full_path, fname_filter)
            api.puts(current_files)
        # Accumulate instead of discarding earlier hosts' results; the
        # original returned only the LAST host's listing and raised
        # NameError when data_hosts was empty.
        found.extend(current_files.split())
    return found