def hasSudoCapabilities():
    """Return True when the current host accepts passwordless sudo.

    The probe (`sudo -n whoami`) runs at most once per host; results are
    cached in ``env.canRoot`` keyed by ``env.host_string``.
    """
    env.setdefault('canRoot', {})
    cached = env.canRoot.get(env.host_string)
    if cached is None:
        # -n makes sudo fail instead of prompting when a password is required.
        with quiet():
            env.canRoot[env.host_string] = run(
                '/usr/bin/sudo -n /usr/bin/whoami').succeeded
    return env.canRoot[env.host_string]
def config():
    """Populate ``env`` with build/publish settings for the target build.

    Raises:
        Exception: when the build number is undefined or 'latest'.
    """
    target_build = get_build()
    if target_build is None or target_build == 'latest':
        raise Exception('BUILD_NUMBER environment variable needs to be defined')

    # target build name or number
    env['target_build'] = target_build

    # application package
    env['build_package_name'] = "%(environment)s-%(target_build)s-%(deploy_package)s" % {
        'environment': env['environment'],
        'target_build': target_build,
        'deploy_package': env['deploy_package'],
    }
    # Both keys intentionally name the same archive; build the path once
    # instead of duplicating the concatenation (the original computed it twice).
    package_path = env['build_tmp'] + "/" + env['build_package_name'] + ".tgz"
    env['build_package_file'] = package_path
    env['build_package_path'] = package_path

    if 'deploy_publish_path' in env and len(env['deploy_publish_path']) > 0:
        env['build_publish_root'] = env['environment'] + '/' + env['deploy_publish_path']
    else:
        env['build_publish_root'] = env['environment']

    # assets root directory on S3
    env['assets_publish_root'] = target_build

    env.setdefault('build_config_format', 'php')
    if env['build_config_format'] == 'php':
        env.setdefault('build_config_path',
                       os.path.join(env['build_root'], 'build.php'))
def wrapper(*args, **kwargs):
    """Memoize ``f``'s return value per host under ``env[f.__name__]``."""
    env.setdefault(f.__name__, {})
    cache = env[f.__name__]
    value = cache.get(env.host_string)
    if value is None:
        value = f(*args, **kwargs)
        cache[env.host_string] = value
    return value
def config():
    """Populate ``env`` with build/publish settings for the target build.

    Raises:
        Exception: when the build number is undefined or 'latest'.
    """
    target_build = get_build()
    if target_build is None or target_build == 'latest':
        raise Exception(
            'BUILD_NUMBER environment variable needs to be defined')

    # target build name or number
    env['target_build'] = target_build

    # application package
    env['build_package_name'] = "%(environment)s-%(target_build)s-%(deploy_package)s" % {
        'environment': env['environment'],
        'target_build': target_build,
        'deploy_package': env['deploy_package'],
    }
    # Both keys intentionally name the same archive; build the path once
    # instead of duplicating the concatenation (the original computed it twice).
    package_path = env['build_tmp'] + "/" + env['build_package_name'] + ".tgz"
    env['build_package_file'] = package_path
    env['build_package_path'] = package_path

    if 'deploy_publish_path' in env and len(env['deploy_publish_path']) > 0:
        env['build_publish_root'] = env['environment'] + '/' + env[
            'deploy_publish_path']
    else:
        env['build_publish_root'] = env['environment']

    # assets root directory on S3
    env['assets_publish_root'] = target_build

    env.setdefault('build_config_format', 'php')
    if env['build_config_format'] == 'php':
        env.setdefault('build_config_path',
                       os.path.join(env['build_root'], 'build.php'))
def decorated(*args, **kwargs):
    """Ensure AWS configuration defaults are loaded before calling ``func``."""
    # Path to the configuration file containing the AWS IAM user credentials.
    # (Was a bare string literal — a no-op statement, not a comment.)
    env.setdefault('aws_config', 'config/aws.yml')
    # Run AWS configuration
    velvet.aws.config.load()
    return func(*args, **kwargs)
def decorated(*args, **kwargs):
    """Default ``env.aws_config`` from the velvet config file, else fall back."""
    cfg = velvet.config.load()
    if cfg.has_option('aws', 'config'):
        default_path = cfg.get('aws', 'config')
    else:
        default_path = 'config/aws.yml'
    env.setdefault('aws_config', default_path)
    return func(*args, **kwargs)
def decorated(*args, **kwargs):
    """Default ``env.config`` from the loaded config file or by discovery."""
    cfg = load()
    if cfg.has_option('velvet', 'config'):
        config_path = cfg.get('velvet', 'config')
    else:
        # Only search the filesystem when the config file has no entry.
        config_path = find_config_file()
    env.setdefault('config', config_path)
    return func(*args, **kwargs)
# Resolves each oVirt connection kwarg from the caller, env, or a TTY prompt.
# NOTE(review): the prompt/err_msg literals for OVIRT_USER and OVIRT_PASS
# appear corrupted by a secrets-redaction pass ('******' fused between the
# string arguments) — restore the original argument lists before running.
def newfunc(*args, **kwargs): kwargs['ovirt_engine'] = ( kwargs.get('ovirt_engine') or get_from_env_or_input( key='OVIRT_ENGINE', prompt='oVirt engine URL: ', err_msg='Please provide OVIRT_ENGINE inside the febricrc file.' ) ) kwargs['ovirt_user'] = ( kwargs.get('ovirt_user') or get_from_env_or_input( key='OVIRT_USER', prompt='oVirt username: '******'Please provide OVIRT_USER inside the febricrc file.' ) ) kwargs['ovirt_pass'] = ( kwargs.get('ovirt_pass') or get_from_env_or_input( key='OVIRT_PASS', prompt='oVirt password: '******'Please provide OVIRT_PASS inside the febricrc file.' ) ) kwargs.setdefault( 'ovirt_insecure', env.setdefault('OVIRT_INSECURE', True) ) return func(*args, **kwargs)
def chef_query(query, api=None, hostname_attr=DEFAULT_HOSTNAME_ATTR,
               environment=_default_environment):
    """A decorator to use an arbitrary Chef search query to find nodes to
    execute on.

    Used like Fabric's ``roles()`` decorator, but accepts a Chef search
    query. Example::

        from chef.fabric import chef_query

        @chef_query('roles:web AND tags:active')
        @task
        def deploy():
            pass

    .. versionadded:: 0.2.1
    """
    api = _api(api)
    wants_environment = environment is not None
    if wants_environment and api.version_parsed < Environment.api_version_parsed:
        raise ChefAPIVersionError(
            'Environment support requires Chef API 0.10 or greater')
    rolename = 'query_' + query
    roledefs = env.setdefault('roledefs', {})
    roledefs[rolename] = Roledef(query, api, hostname_attr, environment)
    return lambda fn: roles(rolename)(fn)
def chef_query(query, api=None, hostname_attr=DEFAULT_HOSTNAME_ATTR,
               environment=_default_environment):
    """Restrict a Fabric task to hosts matching an arbitrary Chef query."""
    api = _api(api)
    if environment is not None and api.version_parsed < Environment.api_version_parsed:
        raise ChefAPIVersionError('Environment support requires Chef API 0.10 or greater')
    rolename = 'query_' + query
    roledefs = env.setdefault('roledefs', {})
    roledefs[rolename] = Roledef(query, api, hostname_attr, environment)
    return lambda fn: roles(rolename)(fn)
# Installs fabi's deployment defaults into the Fabric env exactly once
# (guarded by '_fabi_defaults'), then, when a releases directory exists on
# the remote, records current/previous release paths for RollBackTask.
# NOTE(review): statement order matters — 'domain_path' must be defaulted
# before the defaults tuple, whose entries read env.domain_path.
def set_defaults(): if '_fabi_defaults' in env: return domain_path = path.join(env.base_dir, env.app_name) # domain_path must be set separately, since it's referred in other defaults env.setdefault('domain_path', domain_path) defaults = ( ('use_sudo', True), ('git_branch', 'master'), ('svn_revision', 'HEAD'), ('svn_username', ''), ('svn_password', ''), ('python_bin', 'python'), ('remote_owner', 'www-data'), ('remote_group', 'www-data'), ('update_env', False), # Default to False, as we are removing this function. ('deploy_via', 'remote_clone'), ('current_path', path.join(env.domain_path, 'current')), ('releases_path', path.join(env.domain_path, 'releases')), ('shared_path', path.join(env.domain_path, 'shared')), # The following dirs will be created in shared_path when setting up. # During deployment, these dirs will be soft-linked # from shared_path to the current dir. ('shared_dirs', ['log', 'static']), ) for k, v in defaults: env.setdefault(k, v) if dir_exists(env.releases_path): # The current_release and previous_release set here # are for RollBackTask. For deploy, set new value # for current_release in your strategy. env.releases = sorted(run('ls -x %(releases_path)s' % {'releases_path': env.releases_path}).split()) if len(env.releases) >= 1: env.current_revision = env.releases[-1] env.current_release = '%(releases_path)s/%(current_revision)s' % \ {'releases_path': env.releases_path, 'current_revision': env.current_revision} if len(env.releases) > 1: env.previous_revision = env.releases[-2] env.previous_release = '%(releases_path)s/%(previous_revision)s' % \ {'releases_path': env.releases_path, 'previous_revision': env.previous_revision} env._fabi_defaults = True
def decorated(*args, **kwargs):
    """Seed OpsWorks/cookbook defaults in ``env``, then invoke ``func``."""
    defaults = (
        ('opsworks', False),
        ('cookbooks_root', 'cookbooks'),
        ('cookbooks_package', env.app_name + '-cookbooks'),
        ('cookbooks_publish_path', 'cookbooks'),
    )
    for key, value in defaults:
        env.setdefault(key, value)
    return func(*args, **kwargs)
def environment():
    """Configure Fabric connection settings and deployment paths, once only."""
    if '__envset__' in env:
        return
    env.__envset__ = True
    # standard fabric variables
    env.connection_attempts = 10
    env.timeout = 30
    env.use_ssh_config = True
    # custom variables
    env.local_cwd = os.path.dirname(os.path.realpath(__file__))
    for key, value in (
            ('remote_home', '/mnt/janus'),
            ('nfs_home', '/export/janus'),
            ('git_repo', '[email protected]:NYU-NEWS/janus.git'),
            ('git_revision', 'master'),
    ):
        env.setdefault(key, value)
    # Depends on nfs_home being defaulted above.
    env.setdefault('py_virtual_env', '{home}/py_venv'.format(home=env.nfs_home))
# Builds per-project vhost paths and pushes them into the Fabric env.
# NOTE(review): the 'user' value below is redacted ('******'); applying '%'
# to a placeholder-free string with two arguments raises TypeError at
# runtime — restore the original format string (likely '%s%s') before use.
def run(self): # update fabric environment for project settings env.update(self.settings) env.setdefault('python_version', 'python2.7') env.setdefault('compass_version', None) env.setdefault('use_syncdb', False) env.setdefault('project_path_name', env.project_name) # check if all required project settings are present in fabric environment [require(r) for r in self.requirements] vhost_folder = '%s%s' % (env.project_name_prefix, env.project_name) vhost_path = os.path.join(env.vhosts_path, vhost_folder) print( green('\nInitializing fabric environment for %s.' % magenta(self.name))) # project_name: example # project_name_prefix: t- # user: t-example # project_path_name: example # vhosts_path: /var/www/vhosts/ # vhost_path: /var/www/vhosts/t-example/ # current_instance_path: /var/www/vhosts/t-example/current_instance/ # previous_instance_path: /var/www/vhosts/t-example/previous_instance/ # log_path: /var/www/vhosts/t-example/log/ # media_path: /var/www/vhosts/t-example/media/ # supervisor_path: /var/www/vhosts/t-example/supervisor env.update({ 'database_name': env.project_name, 'vhost_path': vhost_path, 'current_instance_path': os.path.join(vhost_path, 'current_instance'), 'log_path': os.path.join(vhost_path, 'log'), 'media_path': os.path.join(vhost_path, 'media'), 'previous_instance_path': os.path.join(vhost_path, 'previous_instance'), 'user': '******' % (env.project_name_prefix, env.project_name), 'supervisor_path': os.path.join(vhost_path, 'supervisor'), })
def get_from_env_or_input(key, prompt, err_msg):
    """Get a value from 'env' or prompt for it if it's not there and we
    have a TTY.

    A value obtained from the user is stored in 'env' for the next call.

    :param str key: Key for the value in 'env'
    :param str prompt: Prompt to show when we have TTY
    :param str err_msg: Error to show when there is no TTY
    :returns: The value we got
    :rtype: str
    """
    value = env.get(key)
    if value:
        return value
    # Falsy/missing: prompt (TTY permitting) and cache the answer in env.
    return env.setdefault(key, input_if_tty(prompt=prompt, err_msg=err_msg))
def decorated(*args, **kwargs):
    """Seed build-related defaults in ``env``, then invoke ``func``.

    By default, include the whole project root in the build.
    """
    env.setdefault('build_root', '.')
    # Temporary directory for building.
    # (These were bare string literals — no-op statements, not comments.)
    env.setdefault('build_tmp', 'tmp')
    # Path to the file containing the list of files to exclude in the build
    env.setdefault('build_exclude_file', 'config/build-exclude.txt')
    return func(*args, **kwargs)
def chef_query(query, api=None, hostname_attr=DEFAULT_HOSTNAME_ATTR,
               environment=_default_environment):
    """A decorator to use an arbitrary Chef search query to find nodes to
    execute on.

    Used like Fabric's ``roles()`` decorator, but accepts a Chef search
    query. Example::

        from chef.fabric import chef_query

        @chef_query('roles:web AND tags:active')
        @task
        def deploy():
            pass

    .. versionadded:: 0.2.1
    """
    api = _api(api)
    if environment is not None and api.version_parsed < Environment.api_version_parsed:
        raise ChefAPIVersionError("Environment support requires Chef API 0.10 or greater")
    rolename = "query_" + query
    roledefs = env.setdefault("roledefs", {})
    roledefs[rolename] = Roledef(query, api, hostname_attr, environment)
    return lambda fn: roles(rolename)(fn)
def _set_env_defaults():
    """Install Farmboy's baseline Fabric settings and empty role map."""
    env.setdefault("farmboy_user", DEFAULT_USER)
    env.setdefault("farmboy_files", "./files")
    # Fail fast on unreachable hosts instead of hanging on them.
    env.skip_bad_hosts = True
    env.timeout = 2
    env.roledefs = {role: [] for role in ("apt", "ci", "proxy", "vcs", "web")}
def decorated(*args, **kwargs):
    """Populate deployment-path defaults and release history in ``env``,
    then invoke ``func``."""
    env.setdefault('use_sudo', True)
    env.setdefault('python_bin', 'python')
    env.setdefault('remote_owner', 'www-data')
    env.setdefault('remote_group', 'www-data')
    env.setdefault('pip_install_command', 'pip install -r requirements.txt')
    env.setdefault('domain_path', "%(base_dir)s/%(app_name)s" %
                   {'base_dir': env.base_dir, 'app_name': env.app_name})
    env.setdefault('repo_path', "%(domain_path)s/repo" %
                   {'domain_path': env.domain_path})
    env.setdefault('current_path', "%(domain_path)s/current" %
                   {'domain_path': env.domain_path})
    env.setdefault('releases_path', "%(domain_path)s/releases" %
                   {'domain_path': env.domain_path})
    env.setdefault('shared_path', "%(domain_path)s/shared" %
                   {'domain_path': env.domain_path})
    env.setdefault('revisions_log_path', "%(domain_path)s/revisions.log" %
                   {'domain_path': env.domain_path})
    # TODO(Guodong Ding) complete this in 'after_deploy()'
    env.setdefault('current_time',
                   time.strftime('%Y%m%d%H%M%S', time.localtime(time.time())))
    # Membership test directly on the mapping — 'in env.keys()' built a
    # throwaway key list for the same result.
    if 'releases' not in env:
        if dir_exists(env.releases_path):
            env.releases = sorted(run('ls -x %(releases_path)s' %
                                      {'releases_path': env.releases_path}).split())
            if len(env.releases) >= 1:
                env.current_revision = env.releases[-1]
                env.current_release = "%(releases_path)s/%(current_revision)s" % \
                    {'releases_path': env.releases_path,
                     'current_revision': env.current_revision}
            if len(env.releases) > 1:
                env.previous_revision = env.releases[-2]
                env.previous_release = "%(releases_path)s/%(previous_revision)s" % \
                    {'releases_path': env.releases_path,
                     'previous_revision': env.previous_revision}
        else:
            env.releases = []
    return func(*args, **kwargs)
# Module-level imports and default fab configuration.
# NOTE(review): env.setdefault("application") etc. rely on setdefault's
# implicit None default — the keys exist afterwards but hold None until a
# task or `--set` supplies real values.
from boto.route53.exception import DNSServerError from fabric.api import env, task from fabric.colors import green, red from fabric.utils import abort from bootstrap_cfn.cloudformation import Cloudformation from bootstrap_cfn.config import ConfigParser, ProjectConfig from bootstrap_cfn.elb import ELB from bootstrap_cfn.iam import IAM from bootstrap_cfn.r53 import R53 from bootstrap_cfn.utils import tail # Default fab config. Set via the tasks below or --set env.setdefault("application") env.setdefault("environment") env.setdefault("aws") env.setdefault("config") env.setdefault("stack_passwords") env.setdefault("blocking", True) env.setdefault("aws_region", "eu-west-1") # GLOBAL VARIABLES TIMEOUT = 3600 RETRY_INTERVAL = 10 # This is needed because pkgutil wont pick up modules # imported in a fabfile. path = env.real_fabfile or os.getcwd() sys.path.append(os.path.dirname(path))
# Module-level imports and default fab configuration.
# NOTE(review): the no-default env.setdefault(...) calls initialise keys to
# None; real values arrive from tasks or `--set`.
from bootstrap_cfn.cloudformation import Cloudformation from bootstrap_cfn.config import ConfigParser, ProjectConfig from bootstrap_cfn.elb import ELB from bootstrap_cfn.errors import ( ActiveTagExistConflictError, BootstrapCfnError, CfnConfigError, CloudResourceNotFoundError, DNSRecordNotFoundError, PublicELBNotFoundError, StackRecordNotFoundError, TagRecordExistConflictError, TagRecordNotFoundError, UpdateDNSRecordError, UpdateDeployarnRecordError, ZoneIDNotFoundError) from bootstrap_cfn.iam import IAM from bootstrap_cfn.r53 import R53 from bootstrap_cfn.utils import tail from bootstrap_cfn.vpc import VPC # Default fab config. Set via the tasks below or --set env.setdefault('application') env.setdefault('environment') env.setdefault('aws') env.setdefault('config') env.setdefault('stack_passwords') env.setdefault('blocking', True) env.setdefault('aws_region', 'eu-west-1') env.setdefault('keyname', None) # GLOBAL VARIABLES TIMEOUT = 3600 RETRY_INTERVAL = 10 # This is needed because pkgutil wont pick up modules # imported in a fabfile. path = env.real_fabfile or os.getcwd()
def decorated(*args, **kwargs):
    """Populate deployment-path defaults and release history in ``env``,
    then invoke ``func``."""
    env.setdefault('use_sudo', True)
    env.setdefault('python_bin', 'python')
    env.setdefault('remote_owner', 'www-data')
    env.setdefault('remote_group', 'www-data')
    env.setdefault('pip_install_command', 'pip install -r requirements.txt')
    env.setdefault('domain_path', "%(base_dir)s/%(app_name)s" % {
        'base_dir': env.base_dir,
        'app_name': env.app_name
    })
    env.setdefault('current_path', "%(domain_path)s/current" %
                   {'domain_path': env.domain_path})
    env.setdefault('releases_path', "%(domain_path)s/releases" %
                   {'domain_path': env.domain_path})
    env.setdefault('shared_path', "%(domain_path)s/shared" %
                   {'domain_path': env.domain_path})
    # Membership test directly on the mapping — 'in env.keys()' built a
    # throwaway key list for the same result.
    if 'releases' not in env:
        if dir_exists(env.releases_path):
            env.releases = sorted(
                run('ls -x %(releases_path)s' % {
                    'releases_path': env.releases_path
                }).split())
            if len(env.releases) >= 1:
                env.current_revision = env.releases[-1]
                env.current_release = "%(releases_path)s/%(current_revision)s" % \
                    {'releases_path': env.releases_path,
                     'current_revision': env.current_revision}
            if len(env.releases) > 1:
                env.previous_revision = env.releases[-2]
                env.previous_release = "%(releases_path)s/%(previous_revision)s" % \
                    {'releases_path': env.releases_path,
                     'previous_revision': env.previous_revision}
        else:
            env.releases = []
    return func(*args, **kwargs)
def app():
    """Target the 'app' role with the server Django settings and SSH key."""
    env.setdefault('django_settings_module',
                   '--settings=%s' % SERVER_DJANGO_SETTINGS_MODULE)
    env.roles = ['app']
    # NOTE(review): hard-coded personal key path — only works from one machine.
    env.key_filename = '/Users/jacob/Dropbox/Development/Keys/dmis_key.pem'
# fh-fablib entry point: validates the library version and keyword usage,
# refuses to run as www-data on a server, installs DEFAULTS (plus
# DEFAULTS_SYSTEMD when systemd=True), generates one Fabric task per
# configured environment, wraps cd/run/run_local/confirm with
# %-interpolation against env, installs a git pre-commit hook that runs
# 'fab check', and registers the standard sub-fabfiles.
# NOTE(review): left byte-identical — the nested closures, module-global
# rebinding via globals(), and hook side effects are too order-sensitive
# to restyle safely from this view.
def init(fabfile, sentinel=None, min_version=None, systemd=None): if sentinel is not None: abort(red( 'Pass min_version and systemd as keyword arguments to' ' fh_fablib.init() please' )) if min_version is not None: if VERSION < min_version: abort(red( 'fh-fablib update required. Have: %s. Want: %s.' % ( '.'.join(map(str, VERSION)), '.'.join(map(str, min_version)), ), )) if systemd is None: abort(red( 'fh_fablib.init() requires either systemd=True or systemd=False,' ' depending on whether you want to use systemd for process' ' supervision or not.' )) fabfile['__all__'] = ( 'check', 'deploy', 'dev', 'git', 'local', 'server', ) if pwd.getpwuid(getuid())[0] == 'www-data': abort(red('Stop fab-ing on the server.', bold=True)) # Set defaults ----------------------------------------------------------- if systemd: for key, value in DEFAULTS_SYSTEMD.items(): env.setdefault(key, value) for key, value in DEFAULTS.items(): env.setdefault(key, value) # Multi-env support ------------------------------------------------------ def _create_setup_task_for_env(environment): def _setup(): env['box_environment'] = environment for key, value in env.box_environments[environment].items(): env['box_%s' % key] = value env.hosts = env.box_servers _setup.__name__ = str(environment) _setup.__doc__ = 'Set environment to %s' % environment return _setup if env.get('box_hardwired_environment'): _create_setup_task_for_env(env.box_hardwired_environment)() else: # Create a task per environment for environment in env.box_environments: t = _create_setup_task_for_env(environment) shortcut = env.box_environments[environment].get('shortcut') aliases = (shortcut,) if shortcut else () fabfile[environment] = task(aliases=aliases)(t) fabfile['__all__'] += (environment,) # Fabric commands with environment interpolation ------------------------- def interpolate_with_env(fn): """Wrapper which extends a few Fabric API commands to fill in values from Fabric's environment dictionary""" @wraps(fn) def _dec(string, 
*args, **kwargs): return fn(string % env, *args, **kwargs) return _dec g = globals() g['cd'] = interpolate_with_env(cd) g['run'] = interpolate_with_env(run) g['run_local'] = interpolate_with_env(run_local) g['confirm'] = interpolate_with_env(confirm) # Git pre-commit hook which always runs "fab check" ---------------------- def ensure_pre_commit_hook_installed(): """ Ensures that ``git commit`` fails if ``fab check`` returns any errors. """ p = Popen('git rev-parse --git-dir'.split(), stdout=PIPE) git_dir = p.stdout.read().strip() project_dir = dirname(git_dir) if not any(exists(join(project_dir, name)) for name in ( 'fabfile.py', 'fabfile')): # Does not look like a Django project. # Additionally, "fab check" wouldn't work anyway. return pre_commit_hook_path = join(git_dir, 'hooks', 'pre-commit') if not exists(pre_commit_hook_path): with open(pre_commit_hook_path, 'w') as hook: hook.write('#!/bin/sh\nfab check\n') chmod(pre_commit_hook_path, 0o755) # Run this each time the fabfile is loaded ensure_pre_commit_hook_installed() if not exists('tmp'): mkdir('tmp') from fh_fablib import check, deploy, dev, git, local, server fabfile.update({ 'check': check, 'deploy': deploy, 'dev': dev, 'git': git, 'local': local, 'server': server, })
# Packaging command imports (Facebook-internal shipping commands preferred,
# with an open-source fallback) and global packaging defaults for the
# osmocom GSM engine. The trailing get_vagrant_conf definition is truncated
# in this view — left untouched.
from commands.python_packaging import package_python_osmocom from commands.python_packaging import package_python_sms_utilities from commands.python_packaging import package_python_freeswitch from commands.external_packaging import package_freeswitch from commands.translating import compile_lang from commands.translating import extract_pot try: from commands.fb_shipping import promote_metapackage from commands.fb_shipping import shipit except ImportError: from commands.shipping import promote_metapackage from commands.shipping import shipit # Global packaging settings # Use 'setdefault' so as not to override prior settings, e.g., --set foo=bar env.setdefault("pkgfmt", "deb") env.setdefault("gsmeng", "osmocom") env.setdefault("depmap", {}) def get_vagrant_conf(vm): vm_conf = local('vagrant ssh-config %s' % (vm, ), capture=True) params = {} for line in vm_conf.split('\n'): vm_param = re.match('^ +(HostName|User|Port|IdentityFile) (.*)', line) if vm_param: # update expects a sequence of pairs, groups() is a pair params.update((vm_param.groups(), )) return ( params["HostName"], params["Port"],
def decorated(*args, **kwargs):
    """Seed deployment-layout defaults in ``env``, then invoke ``func``.

    The former bare triple-quoted strings were no-op statements, not
    comments; they are rewritten as real comments here (same behavior).
    """
    # Python binary name
    env.setdefault('python_bin', 'python')
    # Deployment username and group
    env.setdefault('user', 'ubuntu')
    env.setdefault('group', 'ubuntu')
    # Base directory where the applications are deployed to
    env.setdefault('base_dir', '/srv/www')
    # Web server username and group
    env.setdefault('www_owner', 'www-data')
    env.setdefault('www_group', 'www-data')
    # S3 deployment bucket and package name
    env.setdefault('deploy_package', env.app_name)
    env.setdefault('deploy_bucket', env.app_name + '-deploy')
    env.setdefault('deploy_package_path', '')
    # Path to look deployment hook scripts for, relative to the project root
    env.setdefault('deploy_hook_path', 'build/deploy')
    # The path where to deploy the application to within the deployment base
    # directory. By default, deploy application to the path after it's name.
    # This can be changed in the config, if there is a need for example to
    # deploy multiple environments on the same server.
    env.setdefault('app_path', "%(app_name)s" % {'app_name': env.app_name})
    # Full path where to deploy the application to
    env.setdefault('domain_path', "%(base_dir)s/%(app_path)s" %
                   {'base_dir': env.base_dir, 'app_path': env.app_path})
    # Symlink to the current release
    env.setdefault('current_path', "%(domain_path)s/current" %
                   {'domain_path': env.domain_path})
    # Path to the releases directory
    env.setdefault('releases_path', "%(domain_path)s/releases" %
                   {'domain_path': env.domain_path})
    # Path to the shared directory maintained between releases
    env.setdefault('shared_path', "%(domain_path)s/shared" %
                   {'domain_path': env.domain_path})
    # Keep application s3cmd configuration in the application root
    env.setdefault('s3cfg', "%(domain_path)s/.s3cfg" %
                   {'domain_path': env.domain_path})
    return func(*args, **kwargs)
# Initialises the atelier project environment: root/project names, doc-build
# defaults, optional sdist dir, and — when a settings module is given —
# boots Django and derives the language list from settings.SITE.
# NOTE(review): uses `basestring`, so this is Python-2-only code; importing
# django.conf.settings here triggers Django setup as a side effect.
def setup_from_project( main_package=None, settings_module_name=None): env.ROOTDIR = Path().absolute() env.project_name = env.ROOTDIR.name env.setdefault('build_dir_name', '.build') # but ablog needs '_build' env.current_project = get_project_info(main_package) env.setdefault('long_date_format', "%Y%m%d (%A, %d %B %Y)") # env.work_root = Path(env.work_root) env.setdefault('sdist_dir', None) env.setdefault('use_dirhtml', False) if env.sdist_dir is not None: env.sdist_dir = Path(env.sdist_dir) env.main_package = main_package env.tolerate_sphinx_warnings = False env.demo_databases = [] env.use_mercurial = True env.apidoc_exclude_pathnames = [] # env.blogger_url = "http://blog.example.com/" env.setdefault('languages', None) env.setdefault('blogger_project', None) env.setdefault('blogger_url', None) # env.setdefault('doc_trees', ['docs']) if isinstance(env.languages, basestring): env.languages = env.languages.split() if env.main_package: env.SETUP_INFO = get_setup_info(Path(env.ROOTDIR)) else: env.SETUP_INFO = None if settings_module_name is not None: os.environ['DJANGO_SETTINGS_MODULE'] = settings_module_name from django.conf import settings # why was this? settings.SITE.startup() env.languages = [lng.name for lng in settings.SITE.languages] env.demo_databases.append(settings_module_name)
# Defaults the apt-cacher-ng proxy endpoint unless the caller overrode it
# (e.g. via --set farmboy_apt_proxy=...).
def _set_env_defaults(): env.setdefault('farmboy_apt_proxy', 'http://192.168.33.13:3142')
# Packaging command imports (Facebook-internal shipping commands preferred,
# with an open-source fallback) and global packaging defaults for the
# openbts GSM engine. The trailing get_vagrant_conf definition is truncated
# in this view — left untouched.
from commands.python_packaging import package_python_osmocom from commands.python_packaging import package_python_sms_utilities from commands.python_packaging import package_python_freeswitch from commands.external_packaging import package_freeswitch from commands.translating import compile_lang from commands.translating import extract_pot try: from commands.fb_shipping import promote_metapackage from commands.fb_shipping import shipit except ImportError: from commands.shipping import promote_metapackage from commands.shipping import shipit # Global packaging settings # Use 'setdefault' so as not to override prior settings, e.g., --set foo=bar env.setdefault("pkgfmt", "deb") env.setdefault("gsmeng", "openbts") env.setdefault("depmap", {}) def get_vagrant_conf(vm): vm_conf = local('vagrant ssh-config %s' % (vm, ), capture=True) params = {} for line in vm_conf.split('\n'): vm_param = re.match('^ +(HostName|User|Port|IdentityFile) (.*)', line) if vm_param: # update expects a sequence of pairs, groups() is a pair params.update((vm_param.groups(), )) return ( params["HostName"], params["Port"],
# -*- coding: utf-8 -*- __author__ = 'Kirill Yakovenko' __email__ = '*****@*****.**' import os from fabric.api import task, sudo, run, cd, settings, env, prefix, put from fabric.contrib.files import upload_template from fabric.contrib.project import rsync_project env.setdefault('destination', '/srv/') @task def install_sqlite(): pass @task def install_mysql(): pass @task def install_packages(): sudo('apt-get update') sudo( 'apt-get install -y git virtualenv build-essential python-dev supervisor' ) @task
# Provisioning fabfile: default deploy destination plus apt-based package
# installation tasks. install_sqlite/install_mysql are unimplemented stubs;
# the final bare @task decorates a definition outside this view.
# NOTE(review): __email__ appears redacted ('*****@*****.**').
# Codalab fabfile preamble: defaults DJANGO_CONFIGURATION before importing
# Django settings, then configures Django with the Dev settings class.
# NOTE(review): Python-2-only syntax (`except ... , e`, print statements);
# importing/configuring Django at module import time is a side effect. The
# trailing _print definition is truncated in this view — left untouched.
import os import sys import warnings import yaml from fabric import network from fabric.api import env, hide, local, quiet, run, warn_only, sudo from fabric.colors import red, green from fabric.state import connections from django.core.exceptions import ImproperlyConfigured env.setdefault("DJANGO_CONFIGURATION", "Dev") from django.conf import settings as django_settings try: from codalab.settings import Dev django_settings.configure(Dev) except ImproperlyConfigured, e: print 'ERROR: Configuration issue:' print '\t', e print '' CURRENT_DIRECTORY = os.path.dirname(os.path.abspath(__file__)) # Ignore annoying internal fabric depreciated stuff warnings.filterwarnings("ignore", category=DeprecationWarning) ############################################################################### # Helpers def _print(str):
In your fabfile, create a `build` task that assembles your web site as it will exist on the server into the `otto.build_dir`. Test it locally to ensure everything works. You can add the build dir to your .gitignore, you won't need to check the completed files into git. When you're ready, execute `fab otto.web.stage` to upload your site to Otto's server-side repository. Otto will create a tag and push your staged changes to the server. Otto will then rsync your built site to the staging area. When you are ready, execute `fab otto.web.deploy`. Otto will make your staged site "live" by manipulating symbolic links and, if necessary, reloading your web server. """ __version__ = '0.4.2' from fabric.api import env DEFAULT_CONFIG = { 'otto.home': '/usr/local/share/otto', 'otto.path.hooks': 'hooks', # relative to otto.home 'otto.path.repos': 'repos', # relative to otto.home 'otto.path.sites': 'sites', # relative to otto.home 'otto.path.virtualenvs': 'virtualenvs', # relative to otto.home 'otto.path.workspace': 'workspace', # relative to otto.home 'otto.requirements_file': 'requirements.txt', 'otto.httpserver': 'apache2', 'otto.git.staging_branch': 'master', } for k, v in DEFAULT_CONFIG.iteritems(): env.setdefault(k, v)
# Tail of the module docstring, followed by Otto's default configuration:
# each DEFAULT_CONFIG entry is applied with setdefault so fabfile/--set
# overrides win. NOTE(review): dict.iteritems() is Python-2-only.
# Codalab fabfile preamble: defaults DJANGO_CONFIGURATION before importing
# Django settings, then configures Django with the Dev settings class.
# NOTE(review): Python-2-only syntax (`except ... , e`, print statements);
# importing/configuring Django at module import time is a side effect.
import os import sys import warnings import yaml from fabric import network from fabric.api import env, hide, local, quiet, run, warn_only, sudo from fabric.colors import red, green from fabric.state import connections from django.core.exceptions import ImproperlyConfigured env.setdefault("DJANGO_CONFIGURATION", "Dev") from django.conf import settings as django_settings try: from codalab.settings import Dev django_settings.configure(Dev) except ImproperlyConfigured, e: print 'ERROR: Configuration issue:' print '\t', e print '' CURRENT_DIRECTORY = os.path.dirname(os.path.abspath(__file__)) # Ignore annoying internal fabric depreciated stuff warnings.filterwarnings("ignore", category=DeprecationWarning) ############################################################################### # Helpers
def decorated(*args, **kwargs):
    """Populate deployment-path defaults and release history in ``env``,
    then invoke ``func``."""
    env.setdefault('use_sudo', True)
    env.setdefault('git_branch', 'master')
    env.setdefault('python_bin', 'python')
    env.setdefault('remote_owner', 'www-data')
    env.setdefault('remote_group', 'www-data')
    env.setdefault('pip_install_command', 'pip install -r requirements.txt')
    env.setdefault('domain_path', "%(base_dir)s/%(app_name)s" %
                   {'base_dir': env.base_dir, 'app_name': env.app_name})
    env.setdefault('current_path', "%(domain_path)s/current" %
                   {'domain_path': env.domain_path})
    env.setdefault('releases_path', "%(domain_path)s/releases" %
                   {'domain_path': env.domain_path})
    env.setdefault('shared_path', "%(domain_path)s/shared" %
                   {'domain_path': env.domain_path})
    # 'in' replaces dict.has_key(), which was deprecated in Python 2 and
    # removed in Python 3; behavior is identical.
    if 'releases' not in env:
        if dir_exists(env.releases_path):
            env.releases = sorted(run('ls -x %(releases_path)s' %
                                      {'releases_path': env.releases_path}).split())
            if len(env.releases) >= 1:
                env.current_revision = env.releases[-1]
                env.current_release = "%(releases_path)s/%(current_revision)s" % \
                    {'releases_path': env.releases_path,
                     'current_revision': env.current_revision}
            if len(env.releases) > 1:
                env.previous_revision = env.releases[-2]
                env.previous_release = "%(releases_path)s/%(previous_revision)s" % \
                    {'releases_path': env.releases_path,
                     'previous_revision': env.previous_revision}
    return func(*args, **kwargs)
def decorated(*args, **kwargs):
    """Populate deployment-path defaults and release history in ``env``,
    then invoke ``func``."""
    env.setdefault('git_branch', 'master')
    env.setdefault('python_bin', 'python')
    env.setdefault('remote_owner', 'www-data')
    env.setdefault('remote_group', 'www-data')
    env.setdefault('domain_path', "%(base_dir)s/%(app_name)s" %
                   {'base_dir': env.base_dir, 'app_name': env.app_name})
    env.setdefault('current_path', "%(domain_path)s/current" %
                   {'domain_path': env.domain_path})
    env.setdefault('releases_path', "%(domain_path)s/releases" %
                   {'domain_path': env.domain_path})
    env.setdefault('shared_path', "%(domain_path)s/shared" %
                   {'domain_path': env.domain_path})
    # 'in' replaces dict.has_key(), which was deprecated in Python 2 and
    # removed in Python 3; behavior is identical.
    if 'releases' not in env:
        if dir_exists(env.releases_path):
            env.releases = sorted(
                run('ls -x %(releases_path)s' %
                    {'releases_path': env.releases_path}).split())
            if len(env.releases) >= 1:
                env.current_revision = env.releases[-1]
                env.current_release = "%(releases_path)s/%(current_revision)s" % \
                    {'releases_path': env.releases_path,
                     'current_revision': env.current_revision}
            if len(env.releases) > 1:
                env.previous_revision = env.releases[-2]
                env.previous_release = "%(releases_path)s/%(previous_revision)s" % \
                    {'releases_path': env.releases_path,
                     'previous_revision': env.previous_revision}
    return func(*args, **kwargs)
# Defaults the remote working directory and gunicorn settings; note the
# pid path default reads env.remote_workdir, so ordering matters.
def set_env_defaults(): env.setdefault('remote_workdir', '~') env.setdefault('gunicorn_pidpath', env.remote_workdir + '/gunicorn.pid') env.setdefault('gunicorn_bind', '127.0.0.1:8000')
# Initialises the atelier fablib environment from a project's fabfile:
# resolves the project root from the fabfile path, installs doc/build
# defaults, and — when a settings module is given — boots Django and
# derives the language list and doc trees from settings.SITE.
# NOTE(review): uses `basestring`, so this is Python-2-only; importing
# django.conf.settings here triggers Django setup as a side effect.
def setup_from_fabfile( globals_dict, main_package=None, settings_module_name=None): """To be called from within your project's :xfile:`fabfile.py`. Minimal example:: from atelier.fablib import * setup_from_fabfile(globals()) If this doctree is the main doctree of a Python project, then the minimal example should be:: from atelier.fablib import * setup_from_fabfile(globals(), "foobar") Where "foobar" is the Python name of your project's main package. """ if not '__file__' in globals_dict: raise Exception( "No '__file__' in %r. " "First parameter to must be `globals()`" % globals_dict) fabfile = Path(globals_dict['__file__']) if not fabfile.exists(): raise Exception("No such file: %s" % fabfile) env.root_dir = fabfile.parent.absolute() # print("20141027 %s %s " % (main_package, env.root_dir)) env.project_name = env.root_dir.name env.setdefault('build_dir_name', '.build') # but ablog needs '_build' env.setdefault('long_date_format', "%Y%m%d (%A, %d %B %Y)") # env.work_root = Path(env.work_root) env.setdefault('use_dirhtml', False) env.setdefault('blog_root', env.root_dir.child('docs')) env.setdefault('sdist_dir', None) env.setdefault('editor_command', None) if env.sdist_dir is not None: env.sdist_dir = Path(env.sdist_dir) env.main_package = main_package env.locale_dir = None env.tolerate_sphinx_warnings = False env.demo_projects = [] env.revision_control_system = None env.apidoc_exclude_pathnames = [] # env.blogger_url = "http://blog.example.com/" env.setdefault('languages', None) env.setdefault('blogger_project', None) env.setdefault('blogger_url', None) env.setdefault('cleanable_files', []) if isinstance(env.languages, basestring): env.languages = env.languages.split() # if env.main_package: # env.SETUP_INFO = get_setup_info(Path(env.root_dir)) # else: # env.SETUP_INFO = None if settings_module_name is not None: os.environ['DJANGO_SETTINGS_MODULE'] = settings_module_name from django.conf import settings # why was this? 
settings.SITE.startup() env.languages = [lng.name for lng in settings.SITE.languages] # env.demo_databases.append(settings_module_name) #~ env.userdocs_base_language = settings.SITE.languages[0].name # The following import will populate the projects from atelier.projects import get_project_info env.current_project = get_project_info(env.root_dir) env.doc_trees = env.current_project.doc_trees
# Module-level imports and default fab configuration.
# NOTE(review): env.setdefault('application') etc. rely on setdefault's
# implicit None default — the keys exist afterwards but hold None until a
# task or `--set` supplies real values.
from fabric.colors import green, red from fabric.utils import abort from bootstrap_cfn.autoscale import Autoscale from bootstrap_cfn.cloudformation import Cloudformation from bootstrap_cfn.config import ConfigParser, ProjectConfig from bootstrap_cfn.elb import ELB from bootstrap_cfn.errors import BootstrapCfnError, CfnConfigError, CloudResourceNotFoundError, DNSRecordNotFoundError, ZoneIDNotFoundError from bootstrap_cfn.iam import IAM from bootstrap_cfn.r53 import R53 from bootstrap_cfn.utils import tail from bootstrap_cfn.vpc import VPC # Default fab config. Set via the tasks below or --set env.setdefault('application') env.setdefault('environment') env.setdefault('aws') env.setdefault('config') env.setdefault('stack_passwords') env.setdefault('blocking', True) env.setdefault('aws_region', 'eu-west-1') # GLOBAL VARIABLES TIMEOUT = 3600 RETRY_INTERVAL = 10 # This is needed because pkgutil wont pick up modules # imported in a fabfile. path = env.real_fabfile or os.getcwd() sys.path.append(os.path.dirname(path))