def _setup_env(deployment_name):
    """Load the named deployment's settings into the Fabric env and derive
    the project, code checkout, docs and pip-requirements paths from them.

    Exits with status 1 when the deployment's SSH key cannot be located.
    """
    env.update(DEPLOYMENTS[deployment_name])
    if not _check_key_filename(deployment_name):
        sys.exit(1)
    project_dir = os.path.join(env.home, env.project)
    checkout_dir = os.path.join(project_dir, env.repo_name)
    env.project_directory = project_dir
    env.code_src = checkout_dir
    env.doc_src = os.path.join(checkout_dir, env.docs)
    env.pip_requirements_file = os.path.join(checkout_dir, 'requirements.pip')
def dev():
    """Target the dev.nextdayvideo.com development host."""
    env.update(
        site='dev.nextdayvideo.com',
        available='veyepar',
        hosts=['[email protected]'],
        site_environment='dev',
    )
def setenv(name):
    """Load environment configuration.

    Values already present on the command line (gateway, hosts) override
    the values from ``fabcfg.environments[name]``.  Uses ``dict.get`` /
    ``in`` instead of the Python-2-only ``has_key`` and truthiness instead
    of ``len(...) > 0`` / ``!= None``.
    """
    cli = env.copy()
    env.update(fabcfg.environments[name])
    # IF ... SPECIFIED ON CLI THEN OVERRIDE
    # gateway
    if cli.get('gateway') is not None:
        env.gateway = cli['gateway']
    # hosts
    if cli['hosts']:
        env.hosts = cli['hosts']  # TODO: allow glob* pattern on host name
    else:
        # No hosts given on the CLI: target every host of every role.
        for role_hosts in env.roledefs.values():
            env.hosts.extend(role_hosts)
    if env.ssh_config_path and os.path.isfile(os.path.expanduser(env.ssh_config_path)):
        env.use_ssh_config = True
        _annotate_hosts_with_ssh_config_info(os.path.expanduser(env.ssh_config_path))
    _populateRoledevsFromHosts()
def prod():
    """Target the DebConf15 production encoding host."""
    env.update(
        site='encoding2.dc15.debconf.org',
        available='veyepar',
        hosts=['*****@*****.**'],
        site_environment='prod',
    )
def prod():
    """Target the veyepar3.nextdayvideo.com production host."""
    settings = {
        'site': 'veyepar3.nextdayvideo.com',
        'available': 'veyepar',
        'hosts': ['*****@*****.**'],
        'site_environment': 'prod',
    }
    env.update(settings)
def prod():
    """Target the veyepar.nextdayvideo.com production host (by IP)."""
    env.update(
        site='veyepar.nextdayvideo.com',
        available='veyepar',
        hosts=['162.209.96.130'],
        site_environment='prod',
    )
def setup_env(deployment_name):
    """Merge DEFAULTS with the named deployment's settings and derive the
    project directory, code checkout, WSGI config and pip-requirements
    paths on the Fabric env."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment_name])
    project_dir = os.path.join(env.home, env.project)
    env.project_directory = project_dir
    env.code_src = os.path.join(project_dir, env.repo_name)
    env.wsgi_config_file = os.path.join(project_dir, "apache", "environment.wsgi")
    env.pip_requirements_file = os.path.join(env.code_src, "requirements.pip")
def setup_env(deployment_name):
    """Apply DEFAULTS plus the named deployment's settings, verify the SSH
    key exists (exiting with status 1 otherwise) and derive the code and
    pip-requirements locations."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment_name])
    if not check_key_filename(deployment_name):
        sys.exit(1)
    code_dir = os.path.join(env.home, env.project)
    env.code_src = code_dir
    env.pip_requirements_file = os.path.join(code_dir, 'requirements.pip')
def modify_kernel(region, instance_id):
    """
    Modify old kernel for stopped instance
    (needed for make pv-grub working)

    .. note:: install grub-legacy-ec2 and upgrades before run this.

    :param region: instance region
    :param instance_id: instance id for kernel change

    Kernels list:

        ap-southeast-1  x86_64  aki-11d5aa43
        ap-southeast-1  i386    aki-13d5aa41
        eu-west-1       x86_64  aki-4feec43b
        eu-west-1       i386    aki-4deec439
        us-east-1       x86_64  aki-427d952b
        us-east-1       i386    aki-407d9529
        us-west-1       x86_64  aki-9ba0f1de
        us-west-1       i386    aki-99a0f1dc
    """
    key_filename = config.get(region, 'KEY_FILENAME')
    conn = get_region_conn(region)
    instance = get_inst_by_id(conn.region.name, instance_id)
    # Point Fabric's remote commands at this instance.
    env.update({
        'host_string': instance.public_dns_name,
        'key_filename': key_filename,
    })
    # Upgrade the box and install the legacy pv-grub loader first.
    sudo('env DEBIAN_FRONTEND=noninteractive apt-get update && '
         'env DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade && '
         'env DEBIAN_FRONTEND=noninteractive apt-get install grub-legacy-ec2')
    # Config key is e.g. 'KERNELX86_64' / 'KERNELI386' per region.
    kernel = config.get(conn.region.name,
                        'KERNEL' + instance.architecture.upper())
    # The kernel attribute can only be modified while the instance is stopped.
    instance.stop()
    wait_for(instance, 'stopped')
    instance.modify_attribute('kernel', kernel)
    instance.start()
def project(module, domain_name, deploy_root_dir='deployment', setting_module=None, redirects=None, **kwargs):
    '''Setup django project for deployment using fabric.

    * *module* is the name of the module containing the site. It must be on
      the same directory as the fabfile used to upload.
    * *domain_name* the site domain name (for configuring web servers).
    * *setting_module* optional settings module name (default is "settings").
    * *deploy_root_dir* optional root directory where files will be
      installed (default is "deployment").
    * *redirects* optional redirect configuration stored on the env.

    Any extra keyword arguments are merged into the Fabric env.  Fixes:
    the local ``dir`` no longer shadows the builtin, the docstring names
    the real parameter, and the unused binding of the MakeSite result is
    dropped (the call is kept for its side effects).
    '''
    project_dir = os.path.join(os.getcwd(), module)
    MakeSite(project_dir, setting_module)
    env.project = module
    env.domain_name = domain_name
    # Install under deploy_root_dir when given, otherwise at the root.
    env.path = os.path.join(deploy_root_dir, module) if deploy_root_dir else ''
    if redirects:
        env.redirects = redirects
    env.setting_module = '%s.%s' % (module, setting_module or 'settings')
    os.environ['DJANGO_SETTINGS_MODULE'] = env.setting_module
    env.update(kwargs)
    # Default the port from the secure flag unless explicitly configured.
    if not env.server_port:
        env.server_port = 443 if env.secure else 80
def adduser(username, region=None, instance_ids=None, passwordless=None, sudo=None):
    """
    Creates new <username> with public SSH key on "host1;host2" list in
    <region>.

    If you want to create a passwordless account - set any value to
    <passwordless>; if you want sudo rights - set any value to <sudo>.
    The file with the public key must be in the same directory.
    If region and instance_ids are not set - the script takes hosts and
    key values from the command line (-H and -i).

    Usage:

    1. Without aws api keys and config present:
       :<username>,<passwordless=1>,<sudo=1> - in this case you have to
       specify the hosts list in -H and your own account in -u fabric
       parameters.
    2. With aws api keys and config entries:
       :<username>,<region>,"instance1;instance2",<passwordless>,<sudo>
       Extracts IP's from the instance description.
    """
    if instance_ids and region:
        # NOTE: `unicode` keeps this function Python-2-only.
        instances_ids = list(unicode(instance_ids).split(';'))
        for inst in instances_ids:
            if inst:
                _instance = get_inst_by_id(region, inst)
                # Fall back to the per-region key file unless a key was
                # already supplied (e.g. via -i).
                if not env.key_filename:
                    key_filename = config.get(_instance.region.name, 'KEY_FILENAME')
                    env.update({'key_filename': key_filename})
                env.update({'host_string': _instance.public_dns_name})
                _create_account(username, region, instance_ids, passwordless, sudo)
    else:
        # Hosts come straight from the fabric command line (-H).
        _create_account(username, region, instance_ids, passwordless, sudo)
def dev():
    """Target the dev.nextdayvideo.com development host (by IP)."""
    settings = {
        'site': 'dev.nextdayvideo.com',
        'available': 'veyepar',
        'hosts': ['192.237.240.167'],
        'site_environment': 'dev',
    }
    env.update(settings)
def set_environment(e):
    """Apply environment dict *e* to the Fabric env, filling in global and
    per-site defaults.

    Replaces the Python-2-only ``dict.has_key`` calls with ``in``
    membership tests (identical behavior, also valid on Python 3).
    """
    # Copy the environment name into each environments, even though only
    # one is being used
    for name in environments:
        environments[name]['name'] = name
    # Apply the environment
    env.update(e)
    # Ensure that the sites dict exists
    if not env.get('sites'):
        env['sites'] = {}
    # Apply any default settings
    if 'defaults' in environments:
        for setting in environments['defaults']:
            if setting not in env:
                env[setting] = environments['defaults'][setting]
    # Apply default site settings to each site
    if 'sites' in env and 'defaults' in env['sites']:
        for site in env['sites'].values():
            for setting in env['sites']['defaults']:
                if setting not in site:
                    site[setting] = env['sites']['defaults'][setting]
        del env['sites']['defaults']
    # Copy the site name into each of the sites and set the default type
    for name in env['sites']:
        env['sites'][name]['name'] = name
        if 'type' not in env['sites'][name]:
            env['sites'][name]['type'] = SiteType.DJANGO
def setup_config(port=PORT, **kwargs):
    """Build a labtest config for the local test instance and wire the
    Fabric env up to talk to it over SSH on *port*.

    Extra keyword arguments (minus ``filepath``) are merged into the env;
    ``branch_name`` additionally selects the default-env branch.
    Returns the config object.
    """
    from labtest.config import get_config
    filepath = kwargs.pop('filepath', '')
    cfg = get_config(
        filepath,
        host='127.0.0.1',
        app_name='testapp',
        code_repo_url='[email protected]:example/example.git',
        build_provider='default',
    )
    instance._setup_env_with_config(cfg)
    if 'branch_name' in kwargs:
        instance._setup_default_env('testinstance', kwargs['branch_name'])
    else:
        # Make sure a stale branch from a previous call doesn't leak in.
        if 'branch_name' in env:
            del env['branch_name']
        instance._setup_default_env('testinstance')
    target = '{}@{}:{}'.format(USER, HOST, port)
    env.hosts = [target]
    env.host_string = target
    env.quiet = False
    env.key_filename = CLIENT_PRIVKEY
    env.abort_exception = FabricException
    env.test_domain = 'test.example.com'
    env.container_build_command = cfg.container_build_command
    env.update(kwargs)
    return cfg
def _update_params(**params):
    """Merge the given keyword arguments into the Fabric env, then rebuild
    the derived parameters."""
    env.update(params)
    _build_parameters()
def environment(envName):
    """
    Load the passed environment configuration into the Fabric env and
    record which environment is active.  Invoke this task before the
    desired Fabric action.
    """
    env.update(ENVIRONMENTS[envName])
    env['environment'] = envName
def environment():
    """Set the environment where the tasks will be executed.

    The environment name comes from $PROJECT_ENV (default 'dev') and its
    settings from the project_cfg module; vagrant targets additionally get
    local SSH config generated via `vagrant ssh-config`.
    """
    name = os.environ.setdefault('PROJECT_ENV', 'dev')
    try:
        import project_cfg
    except ImportError:
        print('The project_cfg file is required but could not be imported.')
        sys.exit(1)
    if name not in project_cfg.environments:
        error(colors.red('Environment `{}` does not exist.'.format(name)))
    if hasattr(project_cfg, 'defaults'):
        env.update(project_cfg.defaults)
    env.update(project_cfg.environments[name])
    env.environment = name
    if not env.get('is_vagrant'):
        env.is_vagrant = False
    else:
        env.superuser = '******'
        env.ssh_config_path = '.ssh_config'
        env.use_ssh_config = True
        env.disable_known_hosts = True
        local('vagrant ssh-config > .ssh_config')
def hackathon():
    """Select the hackathon environment and export its remote directory
    and permission user as module globals."""
    global remote_dir
    global permission_user
    cfg = environments['hackathon']
    env.update(cfg)
    remote_dir = cfg['remote_dir']
    permission_user = '******'
def __init__(self, callable, *args, **kwargs):
    # Configure the Fabric env for either a local development run or a
    # remote target: pick the cd/run helpers, then load env settings from
    # a JSON config file.  NOTE: the octal literal 002 keeps this Python-2
    # only.
    super(CustomTask, self).__init__(callable, *args, **kwargs)
    if env.ssh_config_path and os.path.isfile(os.path.expanduser(env.ssh_config_path)):
        env.use_ssh_config = True
    if env.host_string == 'localhost' or not env.hosts:
        # Local run: use this interpreter with lcd/local helpers.
        env.pyexecutable = sys_executable
        env.cd = partial(custom_cd, lcd, 002)
        env.run = local
        conffile = 'devenv.json'
    else:
        # Remote run: cd/run execute over SSH.
        env.cd = partial(custom_cd, cd, 002)
        env.run = run
        if 'production' in env and env.production:
            # Production deploys are not implemented; error() is assumed
            # to abort here — otherwise conffile would be unbound below.
            error('TBD')
        else:
            conffile = 'testenv.json'
    # An explicit env.conffile always overrides the defaults chosen above.
    if 'conffile' in env:
        conffile = env.conffile
    with open(conffile) as f:
        d = json_load(f)
    env.update(d)
    # Shell snippet used to activate the virtualenv before remote commands.
    env.activate = ''.join(['. ', env.venvpath, '/bin/activate'])
def load_environment(environment_name):
    """Populate the Fabric env with the docker image definitions and the
    container map for *environment_name*, read from the YAML config files
    (paths overridable via env.images_file / env.map_file)."""
    # Load up the images definitions
    images_file = env.get('images_file', './docker/images.yaml')
    with open(images_file, 'r') as f:
        env.images = yaml.safe_load(f)
    env.manager = ImageManager(env.images)
    # Load up the maps definitions and store the one in `name`
    map_file = env.get('map_file', './docker/map.yaml')
    with open(map_file, 'r') as f:
        data = yaml.safe_load(f)
    prefix = data['name']
    repo = data.get('repository', None)
    environment = data[environment_name]
    # Environment-level `settings` go straight onto the Fabric env.
    env.update(environment.pop('settings', {}))
    if repo is not None:
        environment['repository'] = repo
    env.environment = environment_name
    # Resolve host volume paths to absolute paths for the container map.
    if 'host' in environment:
        for volume, path in environment['host'].items():
            environment['host'][volume] = os.path.abspath(path)
    env.container_dict = environment
    env.container_prefix = prefix
    env.container_map = ContainerMap(prefix, environment, check_integrity=True)
    # Reuse the connection settings of the already-configured docker client.
    env.container_config = ClientConfiguration(base_url=env.docker.base_url,
                                               version=env.docker._version,
                                               timeout=env.docker.timeout,
                                               client=env.docker)
def load_db_set(name):
    """
    Loads database parameters from a specific named set.

    Unknown names are a no-op (empty dict is merged).
    """
    # get_verbose() is kept for any side effects it may have.
    verbose = common.get_verbose()
    env.update(env.db_sets.get(name, {}))
def _load_config(config_file, inject=True):
    '''Loads a `build` from the json config file and, unless *inject* is
    False, merges the data into the Fabric env.  Returns the parsed data.'''
    with open(config_file) as fp:
        data = json.load(fp)
    if inject:
        env.update(data)
    return data
def vagrant(name=''):
    """Target the vagrant box *name*: pull its SSH settings into the env
    and force the vagrant user plus MySQL credentials (password from
    $MYSQL_PASSWORD, default 'password')."""
    env.update(_settings_dict(ssh_config(name)))
    env['user'] = '******'
    env['mysql_user'] = '******'
    env['mysql_password'] = os.environ.get('MYSQL_PASSWORD', 'password')
def prod():
    """Target the researchcompendia.org production host."""
    env.update(
        carbon='10.176.162.45',
        site='researchcompendia.org',
        available='researchcompendia',
        hosts=['researchcompendia.org:2222'],
        site_environment='prod_environment.sh',
    )
def staging():
    """Target the labs.researchcompendia.org staging host."""
    settings = {
        'carbon': '10.176.162.45',
        'site': 'labs.researchcompendia.org',
        'available': 'researchcompendia',
        'hosts': ['labs.researchcompendia.org:2222'],
        'site_environment': 'staging_environment.sh',
    }
    env.update(settings)
def dev():
    """Target the codersquid.com development host."""
    env.update(
        carbon='10.176.162.45',
        site='.codersquid.com',
        available='researchcompendia',
        hosts=['67.207.156.211:2222'],
        site_environment='dev_environment.sh',
    )
def setup_env(deployment):
    """Merge DEFAULTS with the named deployment's settings and derive the
    project, tmp, source, WSGI and requirements paths on the Fabric env."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment])
    project_dir = os.path.join(env.home, env.project)
    src_dir = os.path.join(project_dir, env.repo)
    env.project_directory = project_dir
    env.project_tmp_directory = os.path.join(project_dir, 'tmp')
    env.src_directory = src_dir
    env.wsgi_file = os.path.join(src_dir, 'next.wsgi')
    env.pip_requirements_file = os.path.join(src_dir, 'requirements.txt')
def setup_env(deployment_name):
    """Load the named deployment's settings, verify the SSH key exists
    (exiting with status 1 otherwise) and derive the code checkout and
    virtualenv locations."""
    env.update(DEPLOYMENTS[deployment_name])
    if not check_key_filename(deployment_name):
        sys.exit(1)
    env.virtualenv = os.path.join(env.home, '.virtualenvs')
    env.code_src = os.path.join(env.home, env.project)
def vagrant(name=""): config = ssh_config(name) extra_args = _settings_dict(config) env.update(extra_args) env["user"] = "******" env["mysql_user"] = "******" env["mysql_password"] = os.environ.get("MYSQL_PASSWORD", "password")
def _reboot_server(self):
    """Point Fabric at the management host (using its keypair) and reboot it."""
    mgmt = self.env
    env.update({
        'user': mgmt.management_user_name,
        'key_filename': get_actual_keypath(mgmt, mgmt.management_key_path),
        'host_string': mgmt.management_ip,
    })
    reboot()
def staging():
    """Configures settings for deployment to a vagrant box """
    env.hosts = ['[email protected]']
    env.update(
        SERVER_NAME='[email protected]',
        DJANGO_SETTINGS_MODULE='bugtracker.settings.production',
        REQUIREMENTS_FILE='requirements.txt',
    )
def e(name='esxi'): print "Setting environment", name env.update(environments[name]) env.environment = name env.shell = '/bin/sh -l -c' env.warn_only = True env.user = '******' env.password = '******'
def setup_env(deployment_name):
    """Merge DEFAULTS with the named deployment, verify its SSH key (exit 1
    if missing) and derive the project, code, WSGI and pip-requirements
    paths on the Fabric env."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment_name])
    if not check_key_filename(deployment_name):
        sys.exit(1)
    project_dir = os.path.join(env.home, env.project)
    env.project_directory = project_dir
    env.code_src = os.path.join(project_dir, env.repo_name)
    env.wsgi_config_file = os.path.join(project_dir, 'formhub', 'wsgi.py')
    env.pip_requirements_file = os.path.join(env.code_src, 'requirements.pip')
def staging():
    """Configures settings for deployment to a vagrant box """
    settings = {
        'SERVER_NAME': '[email protected]',
        'DJANGO_SETTINGS_MODULE': 'bugtracker.settings.production',
        'REQUIREMENTS_FILE': 'requirements.txt',
    }
    env.hosts = ['[email protected]']
    env.update(settings)
def vagrant():
    """Target the local vagrant VM, extracting its private-key path from
    `vagrant ssh-config`."""
    identity = local('vagrant ssh-config | grep IdentityFile | cut -f4 -d " "',
                     capture=True)
    env.update({
        'user': '******',
        'site': '127.0.0.1:2222',
        'available': 'veyepar',
        'hosts': ['[email protected]:2222'],
        'site_environment': 'vagrant',
        'key_filename': identity,
    })
def _setup_env(deployment_name):
    """Load the named deployment's settings into the Fabric env, verify the
    SSH key (exit 1 if missing) and derive the project, code, docs and
    pip-requirements paths."""
    env.update(DEPLOYMENTS[deployment_name])
    if not _check_key_filename(deployment_name):
        sys.exit(1)
    project_dir = os.path.join(env.home, env.project)
    checkout_dir = os.path.join(project_dir, env.repo_name)
    env.project_directory = project_dir
    env.code_src = checkout_dir
    env.doc_src = os.path.join(checkout_dir, env.docs)
    env.pip_requirements_file = os.path.join(
        checkout_dir, 'deploy/requirements/requirements.pip')
def set_on_env(properties, env):
    """Updates the Fabric env with new properties.

    Falsy *properties* are skipped entirely; a 'timestamp' entry is always
    (re)set to the current time.  Returns the same env object.
    """
    if properties:
        env.update(convert_strings_to_symbols(properties))
    env.update({'timestamp': datetime.datetime.now()})
    return env
def setup_env(deployment_name):
    """Merge DEFAULTS with the named deployment, verify its SSH key (exit 1
    if missing) and derive the project, code, Apache WSGI and
    pip-requirements paths on the Fabric env."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment_name])
    if not check_key_filename(deployment_name):
        sys.exit(1)
    project_dir = os.path.join(env.home, env.project)
    code_dir = os.path.join(project_dir, env.repo_name)
    env.project_directory = project_dir
    env.code_src = code_dir
    env.wsgi_config_file = os.path.join(project_dir, 'apache', 'environment.wsgi')
    env.pip_requirements_file = os.path.join(code_dir, 'requirements.pip')
def stage(name, new_settings={}):
    """Set working environment: staging, production.

    Usage:
        fab env:production deploy
        fab env:staging deploy
    """
    # NOTE(review): the mutable default is shared across calls; safe only
    # as long as get_settings does not mutate it — confirm.
    merged = get_settings(name, env, new_settings)
    env.update(merged)
    env.environment = name
def production(new_settings={}):
    """Work on the production environment"""
    try:
        from deploy.production import fabric
    except ImportError:
        abort("Can't load 'production' environment; is PYTHONPATH exported?")
    settings = fabric.get_settings(new_settings)
    env.update(settings)
    env.environment = 'production'
def staging(new_settings={}):
    """Work on the staging environment"""
    try:
        from deploy.staging import fabric
    except ImportError:
        abort("Can't load 'staging' environment; is PYTHONPATH exported?")
    settings = fabric.get_settings(new_settings)
    env.update(settings)
    env.environment = 'staging'
def setup_env(deployment_name):
    """Merge DEFAULTS with the named deployment's settings and derive the
    project, code, Apache WSGI and pip-requirements paths."""
    env.update(DEFAULTS)
    env.update(DEPLOYMENTS[deployment_name])
    project_dir = os.path.join(env.home, env.project)
    env.project_directory = project_dir
    # The django project is embedded within the repo checkout.
    env.code_src = os.path.join(project_dir, env.repo_name, env.django_dir)
    env.wsgi_config_file = os.path.join(project_dir, 'apache', 'environment.wsgi')
    env.pip_requirements_file = os.path.join(env.code_src, 'requirements.pip')
def fedora():
    """Work on the fedora (red-hat based) environment"""
    try:
        from deploy.fedora import fabric
    except ImportError:
        print("Can't load 'fedora' environment; is PYTHONPATH exported?")
        exit(1)
    env.update(fabric.SETTINGS)
    env.environment = 'fedora'
def ubuntu():
    """Work on the ubuntu environment"""
    try:
        from deploy.ubuntu import fabric
    except ImportError:
        # The message previously said 'production' (copy-paste from the
        # sibling task); report the environment actually being loaded.
        print("Can't load 'ubuntu' environment; is PYTHONPATH exported?")
        exit(1)
    env.update(fabric.SETTINGS)
    env.environment = 'ubuntu'
def reset_submission_tables(deployment="prod", branch="master"):
    """Check out *branch* on the remote *deployment*, clear compiled
    bytecode and rerun the submission reset command inside the
    deployment's virtualenv."""
    env.update(DEPLOYMENTS[deployment])
    virtual_env_command = get_virtual_env_command(env.virtual_env)
    with cd(env.project_dir):
        run('git fetch')
        run("git checkout {branch}".format(branch=branch))
        run("git pull origin {branch}".format(branch=branch))
        # Stale .pyc files can shadow the freshly pulled modules.
        run('find . -name "*.pyc" -exec rm -rf {} \;')
        with prefix(virtual_env_command):
            run("reset_submissions production.ini")