Example #1
def build():
	# Install the required npm packages with an increased retry count; if that fails, fall back to a backup mirror.
	local("npm install --fetch-retries 3 -g gulp || npm install --fetch-retries 3 --registry http://registry.npmjs.eu -g gulp")
	local("npm install --fetch-retries 3 || npm install --fetch-retries 3 --registry http://registry.npmjs.eu")
	if branch == "master" or branch == "staging":
		with shell_env(API_URI=api_uri, TLD=tld):
			local("gulp production")
	elif branch == "develop":
		with shell_env(API_URI=api_uri, TLD=tld):
			local("gulp develop")
	else:
		local("gulp develop")
Example #2
def run(commands, prefix=None, cd=None, shell_env=None):
    """
    This is a wrapper around the fabric run command that allows for conditional
    use of the prefix, cd & shell_env context managers.

    :param commands: A list of commands to run
    :param prefix: An optional prefix to use
    :param cd:  An optional working directory
    :param shell_env: An optional dict of shell env variables
    :return:
    """
    def _run():
        "closure to make running the commands reusable"
        for command in commands:
            fab.run(command)

    # this is so ugly, but I couldn't come up with a better way to do it
    # without making things terribly complicated
    # TODO make this better somehow
    # XXX maybe this can help: http://stackoverflow.com/a/5359988
    if prefix is not None and cd is not None and shell_env is not None:
        with fab.cd(cd):
            with fab.prefix(prefix):
                with fab.shell_env(**shell_env):
                    _run()
    elif prefix is not None and shell_env is not None:
        with fab.prefix(prefix):
            with fab.shell_env(**shell_env):
                _run()
    elif cd is not None and shell_env is not None:
        with fab.cd(cd):
            with fab.shell_env(**shell_env):
                _run()
    elif cd is not None and prefix is not None:
        with fab.cd(cd):
            with fab.prefix(prefix):
                _run()
    elif cd is not None:
        with fab.cd(cd):
            _run()
    elif prefix is not None:
        with fab.prefix(prefix):
            _run()
    elif shell_env is not None:
        with fab.shell_env(**shell_env):
            _run()
    else:
        _run()
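The TODO above can be addressed by building the list of context managers dynamically instead of enumerating every combination, which is essentially what the linked Stack Overflow answer suggests. A minimal sketch of that idea using ExitStack (stdlib contextlib on Python 3.3+; on the Python 2 interpreters Fabric 1.x targets it is available from the contextlib2 backport); `fab` is assumed to be `fabric.api`, as in the example above:

from contextlib2 import ExitStack  # stdlib contextlib on Python 3.3+

from fabric import api as fab


def run(commands, prefix=None, cd=None, shell_env=None):
    """Run commands, entering only the context managers that were given."""
    managers = []
    if cd is not None:
        managers.append(fab.cd(cd))
    if prefix is not None:
        managers.append(fab.prefix(prefix))
    if shell_env is not None:
        managers.append(fab.shell_env(**shell_env))
    with ExitStack() as stack:
        # enter each optional manager; ExitStack unwinds them in reverse order
        for manager in managers:
            stack.enter_context(manager)
        for command in commands:
            fab.run(command)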
Example #3
def gene_sqlite():
    """生成sqlite文件,并通过邮件发送"""
    env.host_string = config.HOST_STRING
    with cd('/var/www/xichuangzhu'):
        with shell_env(MODE='PRODUCTION'):
            with prefix('source venv/bin/activate'):
                run('python manage.py gene_sqlite')
Example #4
def do():
    with open(config, 'r') as ip:
        config_data = json.load(ip)
    dag_class = config_data['dag']
    # push the toast config to the remote machine
    toast_config_worker_path = os.path.join(
        eggo_config.get('worker_env', 'work_path'),
        build_dest_filename(config))
    put(local_path=config,
        remote_path=toast_config_worker_path)
    # TODO: run on central scheduler instead
    toast_cmd = ('toaster.py --local-scheduler {clazz} '
                 '--ToastConfig-config {toast_config}'.format(
                     clazz=dag_class,
                     toast_config=toast_config_worker_path))

    hadoop_bin = os.path.join(eggo_config.get('worker_env', 'hadoop_home'), 'bin')
    # toaster.py imports eggo_config on the worker, which needs EGGO_HOME and
    # an initialized EGGO_CONFIG there
    toast_env = {'EGGO_HOME': eggo_config.get('worker_env', 'eggo_home'),
                 'EGGO_CONFIG': eggo_config.get('worker_env', 'eggo_config_path'),
                 'LUIGI_CONFIG_PATH': eggo_config.get('worker_env', 'luigi_config_path'),
                 # the dataset download pushes data to S3
                 # TODO: these should only be added if the dfs is S3
                 'AWS_ACCESS_KEY_ID': eggo_config.get('aws', 'aws_access_key_id'),
                 'AWS_SECRET_ACCESS_KEY': eggo_config.get('aws', 'aws_secret_access_key'),
                 'SPARK_HOME': eggo_config.get('worker_env', 'spark_home')}
    if exec_ctx == 'local':
        # this should copy vars that maintain venv info
        env_copy = os.environ.copy()
        env_copy.update(toast_env)
        toast_env = env_copy
    with path(hadoop_bin):
        with shell_env(**toast_env):
            wrun(toast_cmd)
Example #5
def pbuilder_env(os_release, name=None):
    dist = dist_from_release(os_release)
    dist_release = "{0}-{1}".format(dist, os_release)
    if name:
        dist_release = "{0}-{1}".format(dist_release, name)
    output_dir = os.path.join(package_export_dir(), dist_release)
    return shell_env(ARCH=ARCH, DIST=dist_release, GIT_PBUILDER_OUTPUT_DIR=output_dir)
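Since pbuilder_env returns the shell_env context manager rather than entering it, callers compose it with a `with` block. A hypothetical usage sketch (the release string and build command are illustrative):

with pbuilder_env('trusty'):
    local('git-pbuilder build')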
Example #6
def create(os_release=None):
    """Create an environment for building packages."""
    if os_release is None:
        os_release = get_os_release_from_current_branch()
    dist = dist_from_release(os_release)
    path = '/var/cache/pbuilder/base-{dist}-{os_release}-{arch}.cow'.format(
        arch=ARCH, dist=dist, os_release=os_release)

    if os.path.exists(path):
        raise Exception('PBuilder base image already exists at %s' % path)

    build_trusted()
    keyring = expanduser("~/.trusted.gpg")

    mirror = ubuntu_mirrors[dist]
    other_mirrors = mirrors[os_release]
    components = "main universe"

    with shell_env(ARCH=ARCH, DIST=dist):
        local('git-pbuilder create --basepath {basepath}'
              ' --mirror {mirror}'
              ' --components "{components}"'
              ' --othermirror "{mirrors}"'
              ' --keyring {keyring}'
              ' --debootstrapopts'
              ' --keyring={keyring}'.format(
                  mirror=mirror,
                  components=components,
                  mirrors="|".join(other_mirrors),
                  keyring=keyring,
                  basepath=path))
Example #7
def run_standalone_test():
    run("service couchbase-server stop", warn_only=True)
    with shell_env(LD_LIBRARY_PATH="{}/forestdb/build".format(args.remote_workdir)):
        with cd(args.remote_workdir):
            run("rm -rf data/")
            run("mkdir data")
            run("ldd ./{}".format(prog_name))
            run("./{}".format(prog_name))
            run("cat incrementalsecondary.txt")

            # Now for internal processing and posting to showfast
            output_text = run("cat incrementalsecondary.txt")
            groups = re.search(
                r"initial index build time[^\d]*(\d*).*?seconds",
                output_text)
            initial_time = int(groups.group(1))

            groups = re.search(
                r"incrmental index build time[^\d]*(\d*).*?seconds",
                output_text)
            incremental_time = int(groups.group(1))
            logger.info("Grepped intial build time {}".format(initial_time))
            logger.info("Grepped incremental build time {}".format(
                incremental_time))
            if initial_time:
                post_initial(initial_time)
            if incremental_time:
                post_incremental(incremental_time)
Example #8
def install_adam(work_path, adam_home, maven_version, fork, branch):
    # download Maven
    mvn_path = os.path.join(work_path, 'apache-maven')
    wrun('mkdir -p {0}'.format(mvn_path))
    with wcd(mvn_path):
        wrun('wget http://apache.mesi.com.ar/maven/maven-3/{version}/binaries/'
             'apache-maven-{version}-bin.tar.gz'.format(version=maven_version))
        wrun('tar -xzf apache-maven-{0}-bin.tar.gz'.format(maven_version))
    # checkout adam
    if not exists(adam_home):
        adam_parent = os.path.dirname(adam_home)
        wrun('mkdir -p {0}'.format(adam_parent))
        with wcd(adam_parent):
            wrun('git clone https://github.com/{0}/adam.git'.format(fork))
            if branch != 'master':
                with wcd('adam'):
                    wrun('git checkout origin/{branch}'.format(branch=branch))
    # build adam
    shell_vars = {}
    shell_vars['M2_HOME'] = os.path.join(
        mvn_path, 'apache-maven-{0}'.format(maven_version))
    shell_vars['M2'] = os.path.join(shell_vars['M2_HOME'], 'bin')
    shell_vars['MAVEN_OPTS'] = '-Xmx1024m -XX:MaxPermSize=512m'
    if exec_ctx == 'director':
        shell_vars['JAVA_HOME'] = '/usr/java/jdk1.7.0_67-cloudera'
    with wcd(adam_home):
        with shell_env(**shell_vars):
            wrun('$M2/mvn clean package -DskipTests')
Example #9
    def run(self, resource, *args, **kwargs):
        log.debug('SSH: %s', args)

        executor = fabric_api.run
        if kwargs.get('use_sudo', False):
            executor = fabric_api.sudo

        managers = [
            fabric_api.settings(**self._fabric_settings(resource)),
        ]

        cwd = kwargs.get('cwd')
        if cwd:
            managers.append(fabric_api.cd(kwargs['cwd']))

        env = kwargs.get('env')
        if env:
            managers.append(fabric_api.shell_env(**kwargs['env']))

        if kwargs.get('warn_only', False):
            managers.append(fabric_api.warn_only())

        with nested(*managers):
            res = executor(' '.join(args))
            return self.get_result(res)
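`nested` here is contextlib.nested, which only exists on Python 2 (the platform Fabric 1.x targets). On Python 3, the same variable-length stack of managers would be entered with contextlib.ExitStack; a minimal sketch of the equivalent final block:

        with ExitStack() as stack:
            for manager in managers:
                stack.enter_context(manager)
            res = executor(' '.join(args))
            return self.get_result(res)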
Example #10
def deploy_web():
    """
    Installs the output of the build on the web instances.
    """
    require("configuration")
    if exists(env.deploy_dir):
        run("rm -rf %s" % env.deploy_dir)
    run("tar -xvzf %s" % env.build_archive)
    run("mv %s deploy" % env.git_tag)
    run("source /usr/local/bin/virtualenvwrapper.sh && mkvirtualenv venv")
    env.SHELL_ENV = dict(
        DJANGO_SETTINGS_MODULE=env.django_settings_module,
        DJANGO_CONFIGURATION=env.django_configuration,
        CONFIG_HTTP_PORT=env.config_http_port,
        CONFIG_SERVER_NAME=env.config_server_name,
    )
    print env.SHELL_ENV
    with cd(env.deploy_dir):
        with prefix("source /usr/local/bin/virtualenvwrapper.sh && workon venv"), shell_env(**env.SHELL_ENV):
            requirements_path = "/".join(["codalab", "requirements", "dev_azure_nix.txt"])
            pip_cmd = "pip install -r {0}".format(requirements_path)
            run(pip_cmd)
            # additional requirements for bundle service
            run("pip install SQLAlchemy simplejson")
            with cd("codalab"):
                run("python manage.py config_gen")
                run("mkdir -p ~/.codalab && cp ./config/generated/bundle_server_config.json ~/.codalab/config.json")
                run("python manage.py syncdb --migrate")
                run("python scripts/initialize.py")
                run("python manage.py collectstatic --noinput")
                sudo("ln -sf `pwd`/config/generated/nginx.conf /etc/nginx/sites-enabled/codalab.conf")
                sudo("ln -sf `pwd`/config/generated/supervisor.conf /etc/supervisor/conf.d/codalab.conf")
Example #11
    def sudorun(self, servername, commands, runas, passwd=""):
        # generic function to run one or more commands
        # as a specific remote user.  returns the results
        # of the last command run.  aborts when any
        # command fails
        env.key_filename = self.servers[servername]["ssh_key"]
        env.user = self.servers[servername]["ssh_user"]
        env.disable_known_hosts = True
        env.host_string = self.servers[servername]["hostname"]
        rundict = return_dict(True, "no commands provided", {"return_code" : None })
        if passwd is None:
            pgpasswd = ""
        else:
            pgpasswd = passwd

        for command in commands:
            try:
                with shell_env(PGPASSWORD=pgpasswd):
                    runit = sudo(command, user=runas, warn_only=True, pty=False)
                rundict.update({ "details" : runit ,
                    "return_code" : runit.return_code })
                if runit.succeeded:
                    rundict.update({"result":"SUCCESS"})
                else:
                    rundict.update({"result":"FAIL"})
                    break
            except Exception as ex:
                rundict = { "result" : "FAIL",
                    "details" : "connection failure: %s" % self.exstr(ex),
                    "return_code" : None }
                break
        
        disconnect_all()
        return rundict
Example #12
def pull():
    """更新代码"""
    env.host_string = config.HOST_STRING
    with cd('/var/www/blogbar'):
        with shell_env(MODE='PRODUCTION'):
            run('git reset --hard HEAD')
            run('git pull')
Example #13
def runvis():
    "Run the development server"
    with lcd(PROJ_ROOT), \
      shell_env(NLTK_DATA=env['nltk_data'],
                PYTHONPATH=env['pythonpath'],
                DDT_HOME=PROJ_ROOT):
        local('{python} vis/server.py'.format(**env))
Example #14
    def build(self):
        # if we're running in a virtualenv then we need to reload the defaults
        virtualenv_name = env.get("virtualenv", None)
        if (virtualenv_name is not None):
            # make a place for the virtualenv to exist
            local("{} -p {}".format(env.tools['mkdir'], env.python_virtualenv_root_dir))

            # remember where the default python installation went
            system_python_virtualenv = env.python_virtualenv

            # create the virtualenv
            with lcd(env.python_virtualenv_root_dir):
                local("{} --python={} {}".format(system_python_virtualenv, env.python, virtualenv_name))

            with settings(path("{}/{}/bin".format(env.python_virtualenv_root_dir, virtualenv_name), behavior="prepend"),
                          shell_env(VIRTUAL_ENV="{}/{}".format(env.python_virtualenv_root_dir, virtualenv_name))):
                # re-load the default paths so they use the virtualenv python
                load_defaults()

                # load requirements into virtualenv
                if (os.path.isfile("{}/requirements.txt".format(env.build_dir))):
                    local("{} install -r {}/requirements.txt".format(env.python_pip, env.build_dir))

                # really build
                self._build()

            # make it so that we can move the virtualenv
            with lcd(env.python_virtualenv_root_dir):
                local("{} --relocatable {}".format(system_python_virtualenv, virtualenv_name))
        else:
            # really build
            self._build()
Example #15
def run_standalone_test():
    run("service couchbase-server stop", warn_only=True)
    with shell_env(LD_LIBRARY_PATH="{}/forestdb/build".format(args.remote_workdir)):
        with cd(args.remote_workdir):
            run("mkdir data")
            run("ldd ./{}".format(prog_name))
            run("./{}".format(prog_name))
Example #16
def sr(*cmd):
    """
    Sudo Run - Wraps a given command around sudo and runs it as the
    www-data user
    """
    with shell_env(HOME='/srv/ifttt'):
        return sudo(' '.join(cmd), user='******')
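A hypothetical invocation (the repository path is illustrative); the arguments are joined with spaces, so this runs `git -C /srv/ifttt/src pull` via sudo as the configured user, with HOME set to /srv/ifttt:

sr('git', '-C', '/srv/ifttt/src', 'pull')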
Example #17
def locales(names):
    """
    Require the list of locales to be available.
    """

    config_file = '/var/lib/locales/supported.d/local'

    if not is_file(config_file):
        config_file = '/etc/locale.gen'

    # Regenerate locales if config file changes
    with watch(config_file, use_sudo=True) as config:

        # Add valid locale names to the config file
        supported = dict(supported_locales())
        for name in names:
            if name in supported:
                charset = supported[name]
                locale = "%s %s" % (name, charset)
                with shell_env():
                    uncomment(config_file, escape(locale), use_sudo=True)
                    append(config_file, locale, use_sudo=True)
            else:
                warn('Unsupported locale name "%s"' % name)

    if config.changed:
        run_as_root('dpkg-reconfigure --frontend=noninteractive locales')
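A hypothetical call requiring two UTF-8 locales (the names are illustrative); names missing from supported_locales() only trigger a warning:

locales(['en_US.UTF-8', 'de_DE.UTF-8'])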
Example #18
def deploy_web():
    """
    Installs the output of the build on the web instances.
    """
    require('configuration')
    if exists(env.deploy_dir):
        run('rm -rf %s' % env.deploy_dir)
    run('tar -xvzf %s' % env.build_archive)
    run('mv %s deploy' % env.git_tag)
    run('source /usr/local/bin/virtualenvwrapper.sh && mkvirtualenv venv')
    env.SHELL_ENV = dict(
        DJANGO_SETTINGS_MODULE=env.django_settings_module,
        DJANGO_CONFIGURATION=env.django_configuration,
        CONFIG_HTTP_PORT=env.config_http_port,
        CONFIG_SERVER_NAME=env.config_server_name)
    print env.SHELL_ENV
    with cd(env.deploy_dir):
        with prefix('source /usr/local/bin/virtualenvwrapper.sh && workon venv'), shell_env(**env.SHELL_ENV):
            requirements_path = "/".join(['codalab', 'requirements', 'dev_azure_nix.txt'])
            #pip_cmd = 'pip install --use-wheel --no-index --find-links=wheel_packages -r {0}'.format(requirements_path)
            pip_cmd = 'pip install -r {0}'.format(requirements_path)
            run(pip_cmd)
            with cd('codalab'):
                run('python manage.py config_gen')
                run('python manage.py syncdb --migrate')
                run('python scripts/initialize.py')
                run('python manage.py collectstatic --noinput')
                sudo('ln -sf `pwd`/config/generated/nginx.conf /etc/nginx/sites-enabled/codalab.conf')
Example #19
def setup_environment():
    """Setup users, groups, supervisor, etc."""
    # FIXME: When `fabtools v0.21.0` gets released, remove this...
    with shell_env(SYSTEMD_PAGER=''):
        require.users.user(
            name=env.app_user,
            group=env.app_user,
            system=True,
            shell='/bin/bash',
        )

        for path in (env.app_path, env.etc_path):
            require.directory(
                path=path,
                owner=env.app_user,
                group=env.app_user,
                use_sudo=True,
            )

        require.python.virtualenv(
            directory=env.venv_path,
            venv_python='python3',
            user=env.app_user,
            use_sudo=True,
        )

        require.supervisor.process(
            name=env.app_name,
            command='{} stream --verbose'.format(env.hadroid_botctl),
            user=env.app_user,
            directory=env.app_path,
            stdout_logfile='/var/log/hadroid.log',
            stderr_logfile='/var/log/hadroid-err.log',
            environment='HADROID_CONFIG={}'.format(env.hadroid_config),
        )
Example #20
def deploy():
    """Deploy to production."""
    _require_root()

    if not confirm("This will apply any available migrations to the database. Has the database been backed up?"):
        abort("Aborted.")
    if not confirm("Are you sure you want to deploy?"):
        abort("Aborted.")

    with lcd(PRODUCTION_DOCUMENT_ROOT):
        with shell_env(PRODUCTION="TRUE"):
            local("git pull")
            with open("requirements.txt", "r") as req_file:
                requirements = req_file.read().strip().split()
                try:
                    pkg_resources.require(requirements)
                except pkg_resources.ResolutionError:
                    local("pip install -U -r requirements.txt")
                else:
                    puts("Python requirements already satisfied.")
            with prefix("source /usr/local/virtualenvs/ion/bin/activate"):
                local("./manage.py collectstatic --noinput")
                local("./manage.py migrate")
            restart_production_gunicorn(True)

    puts("Deploy complete.")
Example #21
def deploy(refspec):
    """ A Git refspec such as a commit code or branch. Branches names need
    to start with `origin/` (e.g. origin/1.x instead of 1.x). """
    p = env.app_path
    if not files.exists(p + '/repo'):
        run('cd %s && git clone -q %s repo' % (p, env.repo))   # clone
    else:
        run('cd %s/repo && git fetch' % p)                  # or fetch
    with(cd(p)):
        refspec = run('cd %s/repo && git rev-parse %s' % (p, refspec))
        run('cd repo && git reset --hard %s && git submodule -q update '
            % refspec + '--init --recursive')
        run('rm -rf current')
        run('cp -r repo/redmine current')
    files.upload_template('private/database.yml', p +
                          '/current/config/database.yml', env)
    files.upload_template('private/configuration.yml', p +
                          '/current/config/configuration.yml', env)

    with shell_env(GEM_HOME=env.gem_home, RAILS_ENV='production'):
        with(cd(p + '/current')):
            execute(install_plugins)
            env.bundle_bin = "%s/bin/bundle" % env.gem_home
            run('%(bundle_bin)s install --path %(gem_home)s '
                '--without="development test"' % env)
            run('%(bundle_bin)s exec rake db:migrate' % env)
            run('%(bundle_bin)s exec rake redmine:plugins:migrate' %
                env)
            run('%(bundle_bin)s exec rake tmp:cache:clear' % env)
            run('%(bundle_bin)s exec rake tmp:sessions:clear' % env)
            run('%(bundle_bin)s exec rake generate_secret_token' % env)

    run('sudo /etc/init.d/apache2 restart')
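With Fabric 1.x task-argument syntax, the refspec would be passed on the command line when invoking the task, e.g. (branch name illustrative):

fab deploy:origin/1.x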
Example #22
def find_authors_wiki():
    """寻找作者wiki url"""
    env.host_string = config.HOST_STRING
    with cd('/var/www/xichuangzhu'):
        with shell_env(MODE='PRODUCTION'):
            with prefix('source venv/bin/activate'):
                run('python manage.py find_authors_wiki')
Example #23
def restart():
    env.host_string = HOST_STRING
    with cd('/home/flask/source/codingpy'):
        with shell_env(MODE='PRODUCTION'):
            run('git reset --hard HEAD')
            run('git pull')
        run('supervisorctl restart codingpy')
Example #24
def convert_title():
    """转换标题"""
    env.host_string = config.HOST_STRING
    with cd('/var/www/xichuangzhu'):
        with shell_env(MODE='PRODUCTION'):
            with prefix('source venv/bin/activate'):
                run('python manage.py convert_title')
Example #25
    def run(solard_context, cmd, **kwargs):
        # return check_output(shlex.split(cmd))
        executor = fabric_api.local
        # if kwargs.get('use_sudo', False):
        #     cmd = 'sudo ' + cmd

        managers = []

        cwd = kwargs.get('cwd')
        if cwd:
            managers.append(fabric_api.cd(kwargs['cwd']))

        env = kwargs.get('env')
        if env:
            managers.append(fabric_api.shell_env(**kwargs['env']))

        # we just warn, don't exit on solard
        # correct data is returned
        managers.append(fabric_api.warn_only())

        with nested(*managers):
            out = executor(cmd, capture=True)
            result = {}
            for name in ('failed', 'return_code', 'stdout', 'stderr',
                         'succeeded', 'command', 'real_command'):
                result[name] = getattr(out, name)
            return result
Example #26
    def _ssh_command(resource, *args, **kwargs):
        log.debug('SSH: %s', args)

        executor = fabric_api.run
        if kwargs.get('use_sudo', False):
            executor = fabric_api.sudo

        managers = [
            fabric_api.settings(**ResourceSSHMixin._fabric_settings(resource)),
        ]

        if 'cwd' in kwargs:
            managers.append(
                fabric_api.cd(kwargs['cwd'])
            )

        if 'env' in kwargs:
            managers.append(
                fabric_api.shell_env(**kwargs['env'])
            )

        if 'warn_only' in kwargs:
            managers.append(
                fabric_api.warn_only())

        with nested(*managers):
            return executor(' '.join(args))
Example #27
def pbuilder_env(os_release, name=None, ubuntu_release=None):
    dist_release = get_build_env(os_release, ubuntu_release)
    if name:
        dist_release = '{0}-{1}'.format(dist_release, name)
    output_dir = os.path.join(package_export_dir(), dist_release)
    return shell_env(ARCH=ARCH, DIST=dist_release,
                     GIT_PBUILDER_OUTPUT_DIR=output_dir)
Example #28
def deploy():
    """with quiet():
        rv = local('git status', capture=True).stdout.strip()
        if 'nothing to commit' not in rv:
            _abort('Commit all changes before deployment.')
        print blue('* Updating git branch `deploy`...'),
        branch = local('git rev-parse --abbrev-ref HEAD', capture=True).stdout
        local('git checkout deploy')
        local('git rebase %s' % branch)
        local('git push')
        local('git checkout %s' % branch)
        print blue('Done')"""

    run('git config --global credential.helper "cache --timeout=3600"')
    rv = run('[ -d myimagebot ] && (cd myimagebot && git checkout master && '
             'git stash save --keep-index && git pull --ff) || '
             'git clone https://github.com/jaechang/myimagebot.git')
    if rv.failed:
        _abort(rv.stderr)

    # check `venv` directory exists
    run('[ -d venv ] || virtualenv venv')
    run('. venv/bin/activate')

    # check `var` directory exists
    run('[ -d var ] || mkdir var var/log var/log/nginx var/run var/upload')

    # Path for Boost.
    with shell_env(LD_LIBRARY_PATH='/opt/local/lib'):
        with cd('myimagebot'):
            #run('pip install -r requirements.txt')
            rv = run('fab conf nginx stop start')
            if rv.failed:
                _abort(rv.stderr)
Example #29
def runserver():
    "Run the development server"
    with lcd(PROJ_ROOT), \
      shell_env(NLTK_DATA=env['nltk_data'],
                PYTHONPATH=env['pythonpath'],
                MEMEX_HOME=MEMEX_ROOT):
        local('{python} models/seed_crawler_model.py'.format(**env))
Example #30
def dump(backup_path, database_name, user='******', host=None,
         password=None):
    """
    Backs up the given database to the given file as a PostgreSQL archive. If
    host is set to None, a local connection will be used, so you'll need to be
    able to sudo to the given user. Otherwise, a standard password connection
    will be used and the user will be asked for a password.
    """
    if host is None:
        api.sudo('pg_dump -Fc {database_name} > {backup_path}'
                 .format(
                     database_name=database_name,
                     backup_path=backup_path,
                 ), user=user)
    else:
        if password is None:
            password = getpass('Enter database password for {user}: '
                                  .format(user=user))

        with api.shell_env(PGPASSWORD=password):
            api.run('pg_dump -Fc -U {user} -h {host} {database_name} >'
                    ' {backup_path}'
                    .format(
                        user=user,
                        host=host,
                        database_name=database_name,
                        backup_path=backup_path,
                    ))
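Two hypothetical invocations covering both branches described in the docstring (paths, database, user, and host are all illustrative):

# local connection: sudo to the given system user
dump('/backups/app.dump', 'appdb', user='postgres')
# remote connection: asks for the password since none is given
dump('/backups/app.dump', 'appdb', user='appuser', host='db.example.com')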
Example #31
def runapiworker():
    manage_py = project_relative("manage.py")
    with shell_env(HELTOUR_APP="API_WORKER"):
        local("python %s runserver 0.0.0.0:8880" % manage_py)
Example #32
    def handle(self, *args, **options):  # pylint: disable=too-many-locals,too-many-statements
        # Load server config from project
        config, remote = load_config(env,
                                     options.get('remote', ''),
                                     debug=options.get('debug', False))

        # Set remote server name
        self.remote = config.get('remote_name')

        # Set local project path
        local_project_path = django_settings.SITE_ROOT

        # Get our python version - we'll need this while rebuilding the
        # virtualenv.
        python_version = remote['server'].get('python_version', '3')

        # Change into the local project folder
        with hide('output', 'running', 'warnings'), lcd(local_project_path):
            project_folder = local(
                f"basename $( find {local_project_path} -name 'wsgi.py' -not -path '*/.venv/*' -not -path '*/venv/*' | xargs -0 -n1 dirname )",
                capture=True)

        with settings(
                sudo_user=project_folder), cd(f'/var/www/{project_folder}'):
            initial_git_hash = run('git rev-parse --short HEAD')
            old_venv = f'/var/www/{project_folder}/.venv-{initial_git_hash}'

            settings_module = '{}.settings.{}'.format(
                project_folder,
                remote['server'].get('settings_file', 'production'),
            )

            sudo(
                'git config --global user.email "*****@*****.**"'
            )
            sudo('git config --global user.name "Onespacemedia Developers"')
            sudo('git config --global rebase.autoStash true')

            sudo('git fetch')

            if options.get('commit', False):
                print('Pulling to specific commit.')
                sudo('git reset --hard {}'.format(options.get('commit',
                                                              False), ))
            else:
                print('Pulling to HEAD')
                sudo('git reset --hard HEAD')

            new_git_hash = run('git rev-parse --short HEAD')
            new_venv = f'/var/www/{project_folder}/.venv-{new_git_hash}'

            if initial_git_hash == new_git_hash and not options['force_update']:
                print('Server is already up to date.')
                exit()

            # Does the new venv folder already exist?
            with settings(warn_only=True):
                venv_folder = run(f'test -d {new_venv}')

            # Build the virtualenv.
            if venv_folder.return_code == 0:
                print('Using existing venv for this commit hash')

            if venv_folder.return_code > 0:
                print('Creating venv for this commit hash')

                # Check if we have PyPy
                with settings(warn_only=True):
                    pypy = run('test -x /usr/bin/pypy')

                if pypy.return_code == 0:
                    sudo(f'virtualenv -p /usr/bin/pypy {new_venv}')
                else:
                    sudo(f'virtualenv -p python{python_version} {new_venv}')

                with virtualenv(new_venv), shell_env(
                        DJANGO_SETTINGS_MODULE=settings_module):
                    sudo(
                        '[[ -e requirements.txt ]] && pip install -r requirements.txt'
                    )
                    sudo('pip install gunicorn')

            # Things which need to happen regardless of whether there was a venv already.
            with virtualenv(new_venv), shell_env(
                    DJANGO_SETTINGS_MODULE=settings_module):
                if remote['server'].get('build_system', 'npm') == 'npm':
                    sudo('. ~/.nvm/nvm.sh && yarn', shell='/bin/bash')
                    sudo('. ~/.nvm/nvm.sh && yarn run build',
                         shell='/bin/bash')

                sudo('python manage.py collectstatic --noinput -l')

                sudo('yes yes | python manage.py migrate')

                requirements = sudo('pip freeze')

                for line in requirements.split('\n'):
                    if line.startswith('django-watson'):
                        sudo('python manage.py buildwatson')

        # Point the application to the new venv
        sudo(f'rm -rf /var/www/{project_folder}/.venv')
        sudo(f'ln -sf {new_venv} /var/www/{project_folder}/.venv')
        sudo(f'rm -rf {old_venv}')
        sudo(f'supervisorctl signal HUP {project_folder}')

        # Register the release with Opbeat.
        if 'opbeat' in config and config['opbeat']['app_id'] and config[
                'opbeat']['secret_token']:
            with (lcd(local_project_path)):
                local(
                    'curl https://intake.opbeat.com/api/v1/organizations/{}/apps/{}/releases/'
                    ' -H "Authorization: Bearer {}"'
                    ' -d rev=`git log -n 1 --pretty=format:%H`'
                    ' -d branch=`git rev-parse --abbrev-ref HEAD`'
                    ' -d status=completed'.format(
                        config['opbeat']['organization_id'],
                        config['opbeat']['app_id'],
                        config['opbeat']['secret_token'],
                    ))
Example #33
def buildRpms(env, htmItSha, releaseVersion,
              artifactsDir, logger, config, htmitRemote):
  """
  Builds an rpm for htm-it

  Takes the sha according to htm-it and checks that the sha.json file
  is present (also checks if the rpm is present on rpmbuild and in S3), if
  not it creates the rpm.

  :param env: The environment variables which is set.
  :param htmItSha: The htm-it sha.
  :param releaseVersion: The product version which will be used
                         in the name of RPM
  :param artifactsDir: In this directory the artifacts will be stored.
  :param config: This is a dict of configuration data here we are using
                   AWS secret and access.
  :returns: syncRpm (a flag which indicates whether the RPM
            should be synced) and rpmNameDetails (a dict which contains the
            RPM name of HTM-IT)
  :raises: infrastructure.utilities.exceptions.MissingRPMError,
           when RPM is not found.
           infrastructure.utilities.exceptions.FailedToMoveRPM,
           if there is some error while moving RPM's to
           rpmbuild.groksolutions.com
  """

  rpmNameDetails = {}
  rpmName = "htm-it"
  try:
    syncRpm = False
    sha = htmItSha
    rpmExists = checkRpmExists(rpmName, sha, rpmNameDetails, config, logger)
    with shell_env(**env):
      if not rpmExists:
        logger.info("Creating %s rpm.", rpmName)

        # Clean stale rpms
        with changeToWorkingDir(OPERATIONS_SCRIPTS):
          try:
            # Delete any previously created rpm
            for name in glob.glob("nta-products-htm-it-*.rpm"):
              os.remove(name)
            log.printEnv(env, logger)
            infrastructureCommonPath = os.path.join(PRODUCTS_PATH,
                                                    "infrastructure",
                                                    "infrastructure")

            command = ("%s/create-numenta-rpm" % infrastructureCommonPath +
                       " --rpm-flavor htm-it" +
                       " --debug" +
                       " --cleanup-script htm.it/htm/it/pipeline/scripts/rpm-creator" +
                       "/clean-htm-it-tree-for-packaging" +
                       " --whitelist htm-it" +
                       " --whitelist nta.utils" +
                       " --whitelist htmengine" +
                       " --whitelist infrastructure" +
                       " --whitelist install-htm-it.sh" +
                       " --base-version " + releaseVersion +
                       " --description HTM-IT-installed-from-products-repo" +
                       " --rpm-name nta-products-htm-it" +
                       " --tempdir /tmp/htm-itbuild" +
                       " --setup-py-arguments develop" +
                       " --log-level debug" +
                       " --setup-py-dir nta.utils" +
                       " --setup-py-dir htmengine" +
                       " --setup-py-dir infrastructure" +
                       " --extend-pythonpath htm-it/lib/python2.7/site-packages" +
                       " --sha " + htmItSha +
                       " --artifact opt" +
                       " --git-url " + htmitRemote)
            # Due to some environment issue's I have used local here,
            # we can change this later.
            # fixme https://jira.numenta.com/browse/TAUR-797
            from fabric.api import local
            local(command)
            # getting name of the RPM created
            nameOfRpmCreated = glob.glob("nta-products-htm-it-*.rpm").pop()
            if not nameOfRpmCreated:
              raise exceptions.MissingRPMError("%s rpm name not found exiting"
                                               % rpmName)
            # Creating artifact
            with open("%s.txt" % rpmName, "w") as fp:
              fp.write(nameOfRpmCreated)

            logger.info("\n\n######### %s RPM created #########\n\n"
                        % rpmName)
          except:
            raise exceptions.RPMBuildingError("Failed while creating %s RPM."
                                              % rpmName)
          else:
            syncRpm = True

        filename = os.path.join(OPERATIONS_SCRIPTS, "%s.txt" % rpmName)
        # updating rpm details
        rpmNameDetails.update({rpmName:nameOfRpmCreated})
        # moving the rpms name to artifacts directory
        move(filename, artifactsDir)
        shaFileName = createShaFile(nameOfRpmCreated, sha)
        # move rpmname to rpmbuild
        status = moveRpmsToRpmbuild(nameOfRpmCreated, config, logger)
        if status:
          uploadShaFiletoBucket(rpmName, shaFileName, logger)
          # deleting the rpm after copying to rpmbuild
          os.remove("%s/%s" %  (OPERATIONS_SCRIPTS, nameOfRpmCreated))
        else:
          raise exceptions.FailedToMoveRPM("Failed to move rpms to "
                                           "rpmbuilder machine")
      else:
        logger.info("RPM for %s with %s sha already exists,"
                    "skipping creation of rpm!!", rpmName, sha)
    return syncRpm, rpmNameDetails
  except Exception:
    logger.exception("RPM building failed.")
    raise
Example #34
def _remote_bake(recipe):
    with prefix('source ~/bdg-recipes/bdg-recipes-ec2-variables.sh'), \
            shell_env(ACCESS_KEY=os.environ["AWS_ACCESS_KEY_ID"],
                      SECRET_KEY=os.environ["AWS_SECRET_ACCESS_KEY"]):
        run('. ~/bdg-recipes/%s/run.sh' % recipe)
Example #35
def apt_update():
    with shell_env(DEBIAN_FRONTEND='noninteractive'):
        sudo('apt-get update -y -qq')
        sudo('apt-get upgrade -y -qq')
Example #36
def deploy():
    ''' Zero-Downtime deployment for the web. '''
    stage = shell.get_stage()
    user = get_stage_config(stage)['user']

    # Get the current branch and commit (locally).
    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch, commit=commit, stage=stage))

    tmp_path = fs.get_temp_filename()
    build_dir = buildman.resolve_local_build_dir()

    deploy_dir = buildman.get_deploy_dir()
    deployer_user = shell.get_user()

    notif.send(notif.DEPLOYMENT_STARTED, {
        'user': deployer_user,
        'branch': branch,
        'stage': stage
    })

    (release_dir, current_path) = buildman.setup_remote()

    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    build_compressed = build_name + '.tar.gz'
    release_path = release_dir + '/' + build_name

    info('Getting the build ready for deployment')

    # Trigger the install script
    runner.run_script(constants.SCRIPT_INSTALL, remote=False)

    # Trigger the build script.
    #
    # The stage for which the build script is being run is passed
    # via an environment variable STAGE.
    # This could be useful for creating specific builds for
    # different environments.
    with shell_env(STAGE=stage):
        runner.run_script(constants.SCRIPT_BUILD, remote=False)

    info('Compressing the build')
    fs.tar_archive(build_compressed, build_dir, remote=False)

    info('Uploading the build {} to {}'.format(build_compressed, tmp_path))
    fs.upload(build_compressed, tmp_path)

    # Remove the compressed build from the local directory.
    fs.rm(build_compressed, remote=False)

    # Once, the build is uploaded to the remote,
    # set things up in the remote server.
    with cd(release_dir):
        remote_info('Extracting the build {}'.format(build_compressed))
        # Create a new directory for the build in the remote.
        fs.mkdir(build_name)

        # Extract the build.
        fs.tar_extract(tmp_path, build_name)

        # Remove the uploaded archive from the temp path.
        fs.rm_rf(tmp_path)

        remote_info('Changing ownership of {} to user {}'.format(
            deploy_dir, user))
        fs.chown(release_path, user, user)

        remote_info('Pointing the current symlink to the latest build')
        fs.update_symlink(release_path, current_path)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    # Send deployment finished notification.
    notif.send(notif.DEPLOYMENT_FINISHED, {'branch': branch, 'stage': stage})

    remote_info('Deployment Completed')
Example #37
def start():
    with cd(DIR):
        with shell_env(
                PATH='/home/felipe/.nvm/versions/node/v6.10.3/bin:$PATH'):
            with prefix(VENV):
                run('pm2 start backend.js > start.log')
Example #38
def deploy(app, with_blog=None, with_alembic=False):
    """deploy the app"""
    assert system.distrib_id() == 'Ubuntu'
    lsb_codename = system.distrib_codename()
    if lsb_codename != 'xenial':
        raise ValueError('unsupported platform: %s' % lsb_codename)

    # See whether the local appconfig clone is up-to-date with the remote master:
    remote_repo = local('git ls-remote git@github.com:shh-dlce/appconfig.git HEAD | awk \'{ print $1}\'', capture=True)
    local_clone = local('git rev-parse HEAD', capture=True)

    if remote_repo != local_clone:
        if confirm('Local appconfig clone is not up-to-date '
                   'with remote master, continue?', default=False):
            print("Continuing deployment.")
        else:
            print("Deployment aborted.")
            return

    require.deb.packages(getattr(app, 'require_deb_%s' % lsb_codename) + app.require_deb)
    require.users.user(app.name, create_home=True, shell='/bin/bash')
    require.directory(str(app.www_dir), use_sudo=True)
    require.directory(str(app.www_dir / 'files'), use_sudo=True)
    require_logging(app.log_dir,
                    logrotate=app.logrotate,
                    access_log=app.access_log, error_log=app.error_log)

    workers = 3 if app.workers > 3 and env.environment == 'test' else app.workers
    with_blog = with_blog if with_blog is not None else app.with_blog

    if env.environment != 'staging':
        # Test and production instances are publicly accessible over HTTPS.
        letsencrypt.require_certbot()
        letsencrypt.require_cert(env.host)
        if env.environment == 'production':
            letsencrypt.require_cert(app)

    ctx = template_context(app, workers=workers, with_blog=with_blog)

    if app.stack == 'soundcomparisons':  # pragma: no cover
        require.git.working_copy(
            'https://github.com/{0}/{1}.git'.format(app.github_org, app.github_repos),
            path=str(app.home_dir / app.name),
            use_sudo=True,
            user=app.name)
        require_bower(app, d=app.home_dir / app.name / 'site' / 'js')
        require_grunt(app, d=app.home_dir / app.name / 'site' / 'js')
        require_php(app)
        require_mysql(app)

        with shell_env(SYSTEMD_PAGER=''):
            require.nginx.server()

        sudo_upload_template('nginx-php-fpm-app.conf', str(app.nginx_site), app=app, env=env)
        nginx.enable(app.name)
        if env.environment == 'production':
            # We only enable systemd services when deploying to production, because we don't want
            # to start and run things like backup to CDSTAR from non-production systems.
            systemd.enable(app, pathlib.Path(os.getcwd()) / 'systemd')
        service.reload('nginx')
        return

    #
    # Create a virtualenv for the app and install the app package in development mode, i.e. with
    # repository working copy in /usr/venvs/<APP>/src
    #
    require_venv(
        app.venv_dir,
        require_packages=[app.app_pkg] + app.require_pip,
        assets_name=app.name if app.stack == 'clld' else None)

    #
    # If some of the static assets are managed via bower, update them.
    #
    require_bower(app)
    require_grunt(app)

    require_nginx(ctx)

    if app.stack == 'clld':
        require_bibutils()

    require_postgres(app)

    require_config(app.config, app, ctx)

    # if gunicorn runs, make it gracefully reload the app by sending HUP
    # TODO: consider 'supervisorctl signal HUP $name' instead (xenial+)
    sudo('( [ -f {0} ] && kill -0 $(cat {0}) 2> /dev/null '
         '&& kill -HUP $(cat {0}) ) || echo no reload '.format(app.gunicorn_pid))

    if not with_alembic and confirm('Recreate database?', default=False):
        stop.execute_inner(app)
        upload_sqldump(app)
    elif exists(str(app.src_dir / 'alembic.ini')) and confirm('Upgrade database?', default=False):
        # Note: stopping the app is not strictly necessary, because
        #       the alembic revisions run in separate transactions!
        stop.execute_inner(app, maintenance_hours=app.deploy_duration)
        alembic_upgrade_head(app, ctx)

    pip_freeze(app)

    start.execute_inner(app)
    check(app)
    if env.environment == 'production':
        systemd.enable(app, pathlib.Path(os.getcwd()) / 'systemd')
Example #39
def start():
    with cd(DIR):
        with shell_env(
                PATH='/home/tanweer/.nvm/versions/node/v6.10.3/bin:$PATH'):
            with prefix(VENV):
                run('pm2 start todo.js > start.log')
Example #40
    def update_conf_file(self):
        self.environment_id.server_id.get_env()
        self.stop_service()
        # TODO: check whether the service is up and stop it;
        # if it was up, start it again afterwards
        # self.stop_service()
        if not exists(self.environment_id.path, use_sudo=True):
            raise except_orm(
                _('No Environment Path!'),
                _("Environment path '%s' does not exists. \
                                Please create it first!") %
                (self.environment_id.path))

        command = self.environment_id.path + '/bin/' + self.run_server_command
        command += ' --stop-after-init -s -c ' + self.conf_file_path

        # Remove the file if it already exists, so we can put back some
        # boolean values such as unaccent
        if exists(self.conf_file_path, use_sudo=True):
            sudo('rm ' + self.conf_file_path)

        addons_path = False
        for addon_path in literal_eval(self.addons_path):
            if not exists(addon_path, use_sudo=True):
                raise except_orm(
                    _('Addons path does not exist!'),
                    _("Addons path '%s' does not exists. \
                                    Please create it first!") % (addon_path))
            if not addons_path:
                addons_path = addon_path
            else:
                addons_path += ',' + addon_path

        if addons_path:
            command += ' --addons-path=' + addons_path
        command += ' --db-filter=' + self.db_filter.rule
        command += ' --xmlrpc-port=' + str(self.xml_rpc_port)
        command += ' --logfile=' + self.logfile
        command += ' --limit-time-real=' + str(self.limit_time_real)
        command += ' --db_maxconn=' + str(self.db_maxconn)

        if self.environment_id.environment_version_id.name in ('8.0',
                                                               'master'):
            if self.data_dir:
                command += ' --data-dir=' + self.data_dir
            if self.longpolling_port:
                command += ' --longpolling-port=' + str(self.longpolling_port)

        if self.module_load:
            command += ' --load=' + self.module_load

        if self.unaccent:
            command += ' --unaccent'

        if self.proxy_mode:
            command += ' --proxy-mode'

        if self.workers:
            command += ' --workers=' + str(self.workers)

        if self.type == 'secure':
            command += ' --xmlrpcs-port=' + str(self.xml_rpcs_port)
        else:
            command += ' --no-xmlrpcs'

        # TODO --cert-file and --pkey-file
        # TODO: consider adding --log-db=LOG_DB
        # TODO check that user exists
        # TODO: maybe -r -w for database data
        try:
            sudo('chown ' + self.user + ':odoo -R ' + self.environment_id.path)
            # TODO: replace these prints with log entries
            print
            print command
            print
            eggs_dir = '/home/%s/.python-eggs' % self.user
            if not exists(eggs_dir, use_sudo=True):
                sudo('mkdir %s' % eggs_dir, user=self.user)
            with shell_env(PYTHON_EGG_CACHE=eggs_dir):
                sudo('chmod g+rw -R ' + self.environment_id.path)
                sudo(command, user=self.user)
        except Exception, e:
            raise Warning(
                _("Can not create/update configuration file, this is what we get: \n %s"
                  ) % (e))
Example #41
def build_interface():
    """ have grunt perform a deployment build for us """
    with cd(CONFIG['interface_dir']), \
            shell_env(PHANTOMJS_BIN='/usr/local/bin/phantomjs'), \
            prefix('source {0}/bin/activate'.format(CONFIG['virt_env'])):
        run('grunt --force')
Example #42
def start_celery_worker(queue: str):
    with shell_env(PYTHONOPTIMIZE='1', PYTHONWARNINGS='ignore', C_FORCE_ROOT='1'):
        local('nohup env/bin/celery worker '
              '-A perfrunner.helpers.worker -Q {} > worker.log &'.format(queue))
Example #43
def deploy():
    require('environment')

    yell(
        magenta(
            "Create a directory on a remote server, if it doesn't already exists"
        ))
    if not exists(env.code_root):
        sudo('mkdir -p %(code_root)s' % env)

    if not exists(env.logs_root):
        sudo('mkdir -p %(logs_root)s' % env)

    if not exists(env.run_root):
        sudo('mkdir -p %(run_root)s' % env)

    yell(magenta("Create a virtualenv, if it doesn't already exists..."))
    if not exists(env.virtualenv_root):
        with cd(env.root):
            sudo('mkdir env')
            sudo('virtualenv -p python3 env')

    local('git archive --format=tar %(branch)s | gzip > release.tar.gz' % env)
    put('release.tar.gz', env.code_root, use_sudo=True)

    with cd(env.code_root):
        sudo('tar zxf release.tar.gz', pty=True)
        local('rm release.tar.gz')

        yell(magenta("Activate the environment and install requirements..."))
        # run('source %(remote_env_path)s/bin/activate' % env)
        sudo(
            'source %(virtualenv_root)s/bin/activate && pip install --upgrade -r requirements.txt'
            % env)

        with shell_env(
                DJANGO_SETTINGS_MODULE='config.settings.production',
                DATABASE_URL=
                'postgres://%(db_user)s:%(db_pass)s@localhost:5432/%(db_name)s'
                % env,
                DJANGO_SECRET_KEY=env.django_secret_key,
                DJANGO_ADMIN_URL='admin',
                PYTHONPATH='.',
                DJANGO_DEBUG=str(env.django_debug)):
            yell(magenta("Collect all the static files..."))
            sudo(
                '%(virtualenv_root)s/bin/python manage.py collectstatic --noinput'
                % env)

            yell(magenta("Compiling translations..."))
            sudo(
                '%(virtualenv_root)s/bin/python manage.py compilemessages --use-fuzzy'
                % env)

            yell(magenta("Give deploy access to logs and run directories..."))
            sudo('chown -R deploy:deploy %(logs_root)s' % env)
            sudo('chown -R deploy:deploy %(run_root)s' % env)

            yell(magenta("Migrate and Update the database..."))
            run('%(virtualenv_root)s/bin/python manage.py migrate --noinput' %
                env)

        yell(magenta("bootstrap environment..."))
        put(get_and_render_template('template.env', env),
            os.path.join(env.run_root, '.env'),
            use_sudo=True)

        yell(magenta("gunicorn entry script..."))
        put(get_and_render_template('gunicorn_run.sh', env),
            os.path.join(env.run_root, 'gunicorn_run.sh'),
            use_sudo=True)
        sudo('chmod u+x %(run_root)s/gunicorn_run.sh' % env)

        yell(magenta("put supervisor conf..."))
        put(get_and_render_template('pycon2017.conf', env),
            '/etc/supervisor/conf.d/pycon2017_%(environment)s.conf' % env,
            use_sudo=True)

        yell(magenta("restart supervisor..."))
        sudo('supervisorctl reread && supervisorctl update')
        sudo('supervisorctl restart pycon2017_%(environment)s' % env)

    yell(magenta("Draw a ship..."))
    yell(white("               |    |    |               "))  # NOQA
    yell(white("              )_)  )_)  )_)              "))  # NOQA
    yell(white("             )___))___))___)\            "))  # NOQA
    yell(white("            )____)____)_____)\\          "))  # NOQA
    yell(magenta("          _____|____|____|____\\\__      "))
    yell(magenta(" ---------\                   /--------- "))
    yell(blue("   ^^^^^ ^^^^^^^^^^^^^^^^^^^^^           "))  # NOQA
    yell(blue("     ^^^^      ^^^^     ^^^    ^^        "))  # NOQA
    yell(blue("          ^^^^      ^^^                  "))  # NOQA
Example #44
def docker_compose(command):
    with cd(PATH):
        with shell_env(CI_BUILD_REF_NAME=os.getenv(
                'CI_BUILD_REF_NAME', 'master')):
            run('set -o pipefail; docker-compose %s | tee' % command)
Example #45
def django(command):
    with shell_env(DJANGO_SETTINGS_MODULE=env.django_settings_module):
        with cd(os.path.join(env.project_dir, 'django')):
            run(os.path.join(env.env_path, 'bin/python') + ' manage.py ' +
                command,
                pty=False)
Example #46
def checkout_branch(branch_name, cwd):
    with (shell_env(GIT_COMMITTER_EMAIL='travis@travis',
                    GIT_COMMITTER_NAME='Travis CI')):
        print('checkout branch: {}'.format(branch_name))
        run_command('git checkout {}'.format(branch_name), cwd)