Example #1
def install_homebrew(remote_configuration=None):
    """ Install Homebrew on OSX from http://mxcl.github.com/homebrew/ """

    if not remote_configuration.is_osx:
        return

    if exists("/usr/local/bin/brew"):

        if confirm("Update Brew Formulæ?", default=False):
            pkg.brew_update()

        if confirm("Upgrade outdated Brew packages?", default=False):
            pkg.brew_upgrade()

    else:
        sudo('ruby -e "$(curl -fsSL https://raw.github.com/mxcl/homebrew/go)"')

        sudo("brew doctor")
        pkg.brew_add(("git",))

        # TODO: implement this.
        info("Please install OSX CLI tools for Xcode manually.")

        if confirm("Is the installation OK?"):
            pkg.brew_update()

    LOGGER.warning("You still have to install Xcode and its CLI tools.")
Example #2
def build(treeish='head'):
    """Build a release."""
    version = local("git describe {}".format(treeish), capture=True)

    with settings(hide('warnings'), warn_only=True):
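        # "git diff-index --quiet" exits non-zero when the working tree has
        # uncommitted changes relative to the given treeish.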
        cmd = "git diff-index --quiet {} --".format(treeish)
        is_committed = local(cmd).succeeded
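        # A non-empty "git branch -r --contains" listing means the commit is
        # already present on at least one remote branch.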
        cmd = "git branch -r --contains {}".format(version)
        is_pushed = local(cmd, capture=True)

    if not is_committed:
        prompt = "Uncommitted changes. Continue?"
        if not confirm(prompt, default=False):
            abort("Canceled.")

    if not is_pushed:
        prompt = "Commit not pushed. Continue?"
        if not confirm(question=prompt, default=False):
            abort("Canceled.")

    output = "/tmp/{}.tar.gz".format(version)
    prefix = "{}/".format(version)
    cmd = "git archive --prefix={prefix} --format=tar.gz --output={output} {version}:src"
    local(cmd.format(prefix=prefix, output=output, version=version))
    puts("\nBuilt: {} at: {}".format(version, output))
    return output
Example #3
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require('environment', provided_by=[production, staging])

    if ((isfile(dumpfile) and confirm('\n%s already exists locally. Do you '
        'want to overwrite it?' % dumpfile, default=False))
        or not isfile(dumpfile)):

        remote_filename = '%s/%s' % (env['project_path'], dumpfile)

        if ((exists(remote_filename) and confirm('\n%s already exists. Do you '
            'want to overwrite it?' % remote_filename, default=False))
            or not exists(remote_filename)):

            print('\nDumping DB...')

            with settings(hide('stderr')):
                sudo('mysqldump -u %s -p %s > %s' % (env['db_user'],
                                                     env['db_name'],
                                                     remote_filename))
                get(remote_filename, '.')
        else:
            print('\nAborting.')
    else:
        print('\nAborting.')
Example #4
def release():
    """Create a tag release for an revision"""
    print yellow(">>> Creating a tag release")
    local("git tag")
    tagname = prompt("Enter a new tag name (from the list above): ")

    print red('.... updating tag release at setup.py')
    _replace_in_file("version='.*'", "version='%s'" % tagname, 'setup.py')

    print red('.... versioning tag release')
    diff_ = local('git diff', capture=True)
    comment = prompt('Enter a brief comment for this release:')
    if diff_:
        print diff_
        if confirm("It's ok?", default=False):
            local('git add setup.py')
            local("git ci -m 'version %s - %s'" % (tagname, comment))
    local("git lg1 -n5")
    rev = prompt("Which revision do you want to release?")

    cmd_tag = "git tag -f %s %s -m '%s'" % (tagname, rev, comment)

    if confirm('%s # Create tag?' % cmd_tag, default=False):
        local(cmd_tag)

    if confirm('push to github?', default=False):
        local('git push origin master --tags -f')
Example #5
def deploy(project = "", restart=False):
    ## if no project is specified deploy all of them
    if project == "":
        for p in PROJECTS:
            print(p)
            prepare_deploy(p)
            if confirm(red("Do you wish to proceed?")):
                upload(p)
         
        deploy_shared_static()
        # Restart jetty
        if confirm(red("Do you wish to restart jetty?")):
            sudo('service jetty restart')

    else:
        ## deploy only the specified project
        print(project)
        prepare_deploy(project)
        if confirm(("Do you wish to proceed?")):
            upload(project)
            deploy_shared_static()


            # Restart jetty
            if confirm(red("Do you wish to restart jetty?")):
                sudo('service jetty restart')
Example #6
def hotfix_deploy():
    """
    deploy ONLY the code with no extra cleanup or syncing

    for small python-only hotfixes

    """
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False) or \
       not console.confirm('HEY!!!! YOU ARE ONLY DEPLOYING CODE. THIS IS NOT A NORMAL DEPLOY. COOL???', default=False):
        utils.abort('Deployment aborted.')

    _require_target()
    run('echo ping!')  # workaround for delayed console response

    try:
        execute(update_code)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        execute(services_restart)
        execute(record_successful_deploy)
Example #7
def deploy():
    """deploy code to remote host by checking out the latest via git"""
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False):
        utils.abort('Deployment aborted.')

    _require_target()
    run('echo ping!')  # workaround for delayed console response

    try:
        execute(update_code)
        execute(update_virtualenv)
        execute(clear_services_dir)
        set_supervisor_config()
        if env.should_migrate:
            execute(stop_pillows)
            execute(stop_celery_tasks)
            execute(migrate)
        execute(_do_collectstatic)
        execute(do_update_django_locales)
        execute(version_static)
        if env.should_migrate:
            execute(flip_es_aliases)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        execute(services_restart)
        execute(record_successful_deploy)
Example #8
def setup():
    #  test configuration start
    if not test_configuration():
        if not console.confirm("Configuration test %s! Do you want to continue?" % red_bg('failed'), default=False):
            abort("Aborting at user request.")
    #  test configuration end
    if env.ask_confirmation:
        if not console.confirm("Are you sure you want to setup %s?" % red_bg(env.project.upper()), default=False):
            abort("Aborting at user request.")
    puts(green_bg('Start setup...'))
    start_time = datetime.now()

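    # Provision the host step by step; the helpers below are assumed to be
    # defined elsewhere in this fabfile.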
    _verify_sudo()
    _install_dependencies()
    _create_django_user()
    _setup_directories()
    _git_clone()
    _install_virtualenv()
    _create_virtualenv()
    _install_gunicorn()
    _install_requirements()
    _upload_nginx_conf()
    _upload_rungunicorn_script()
    _upload_supervisord_conf()

    end_time = datetime.now()
    finish_message = '[%s] Correctly finished in %i seconds' % \
    (green_bg(end_time.strftime('%H:%M:%S')), (end_time - start_time).seconds)
    puts(finish_message)
Example #9
def update():
    #  test configuration start
    if not test_configuration():
        if not console.confirm("Configuration test %s! Do you want to continue?" % red_bg('failed'), default=False):
            abort("Aborting at user request.")
    #  test configuration end
    _verify_sudo()
    if env.ask_confirmation:
        if not console.confirm("Are you sure you want to deploy in %s?" % red_bg(env.project.upper()), default=False):
            abort("Aborting at user request.")
    puts(green_bg('Start deploy...'))
    start_time = datetime.now()

    if 'synced_projectdir' not in env or not env.synced_projectdir:
        git_pull()
    # _install_requirements()
    # _upload_nginx_conf()
    # _upload_rungunicorn_script()
    # _upload_supervisord_conf()
    _prepare_django_project()
    _prepare_media_path()
    _supervisor_restart()

    end_time = datetime.now()
    finish_message = '[%s] Correctly deployed in %i seconds' % \
    (green_bg(end_time.strftime('%H:%M:%S')), (end_time - start_time).seconds)
    puts(finish_message)
Example #10
def dump_db(dumpfile="pathagarh_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require('environment', provided_by=[production, staging])

    if isdir(dumpfile):
        print("dumpfile '%s' is a directory! Aborting." % dumpfile)

    elif (not isfile(dumpfile) or
          confirm('\n%s already exists locally. Do you want to overwrite it?'
                  % dumpfile, default=False)):

        remote_filename = '%s/%s' % (env['project_path'], dumpfile)

        if (not exists(remote_filename) or
                confirm('\n%s already exists. Do you want to overwrite it?'
                        % remote_filename, default=False)):

            print('\nDumping DB...')

            with settings(hide('stderr')):
                run('mysqldump -u %s %s %s > %s' %
                    (env['db_user'], env['db_password_opt'],
                     env['db_name'], remote_filename))
                get(remote_filename, '.')
                run('rm %s' % (remote_filename))
        else:
            print('\nAborting.')
    else:
        print('\nAborting.')
Example #11
def initial_setup():

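    # Create the project directory, clone the repository into it and build
    # the Docker images.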
    sudo("mkdir -p /code/%s" % env.projectname)

    with cd("/code/" + env.projectname):
        with settings(warn_only=True):
            sudo("git clone %s ." % env.git_url)
        transfer_secrets()
        sudo("docker-compose build")

    # installing supervisor
    sudo("apt-get install -y supervisor")

    # add this projects supervisord.conf
    sudo("ln -sfn /code/{0}/supervisord.conf /etc/supervisor/conf.d/{0}.conf".format(env.projectname))

    # restart supervisord gracefully
    sudo("supervisorctl reread")
    sudo("supervisorctl update")
    sudo("supervisorctl start " + env.projectname)

    if confirm("Run database migration?", default=True):
        sudo("docker-compose run django python manage.py migrate")

    if confirm("Create super user?", default=False):
        sudo("docker-compose run django python manage.py createsuperuser")
Example #12
def repo_setup(repo, ref):
    """ Clone repository """
    puts(cyan(">>> Setting up repository %s with ref %s..." % (repo, ref)))

    topsrcdir = repo_check(repo, check_path=False, workdir=False)
    workdir = repo_check(repo, check_path=False, workdir=True)
    gitdir = os.path.join(topsrcdir, '.git')

    if not os.path.exists(env.CFG_SRCDIR):
        res = confirm("Create repository root %s?" % env.CFG_SRCDIR)
        if not res:
            abort(red("Cannot continue") % env)
        else:
            local("mkdir -p %s" % env.CFG_SRCDIR)

    if not os.path.exists(gitdir) and os.path.exists(topsrcdir):
        res = confirm("Remove %s (it does not seem to be a git repository)?" % topsrcdir)
        if not res:
            abort(red("Cannot continue") % env)
        else:
            local("rm -Rf %s" % topsrcdir)

    if not os.path.exists(gitdir):
        git_clone(repo)
    if not os.path.exists(workdir):
        git_newworkdir(repo)
    git_checkout(repo, ref)
    repo_prepare(repo)
Example #13
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require("environment", provided_by=[production, staging])

    if isdir(dumpfile):
        abort("dumpfile '%s' is a directory! Aborting." % dumpfile)

    elif not isfile(dumpfile) or confirm(
        "\n%s already exists locally. Do you want to overwrite it?" % dumpfile, default=False
    ):

        remote_filename = "%s/%s" % (env["project_path"], dumpfile)

        if not exists(remote_filename) or confirm(
            "\n%s already exists. Do you want to overwrite it?" % remote_filename, default=False
        ):

            print("\nDumping DB...")

            with settings(hide("stderr")):
                run(
                    "mysqldump -u %s %s %s > %s"
                    % (env["db_user"], env["db_password_opt"], env["db_name"], remote_filename)
                )
                get(remote_filename, ".")
                run("rm %s" % (remote_filename))
        else:
            abort("\nAborting.")
    else:
        abort("\nAborting.")
Example #14
def wait_for_health(status):
    """
    Waits for the cluster's health to match what we want

    Continually poll the elasticsearch cluster health API for health
    to match what we want
    """

    # wait at most (DEFAULT_TRIES * DEFAULT_INTERVAL) seconds
    tries = DEFAULT_TRIES
    while tries > 0:
        st = get_cluster_health()
        utils.puts(
            'Waiting for cluster health to be {}, currently {}'.format(
                getattr(colors, status)(status),
                getattr(colors, st)(st),
            )
        )
        if st == status:
            return
        else:
            tries -= 1
            time.sleep(DEFAULT_INTERVAL)
    console.confirm(
        'Cluster status never got {}! Press Enter to continue, '
        'CTRL+C to abort (check output of {}/_cluster/health?pretty)'.
        format(status, env.apis[0])
    )
Example #15
def deploy():
    """ deploy code to some remote environment """
    require('config', provided_by=('test', 'staging', 'production', 'malawi'))
    if env.stop_start:
        sudo("supervisorctl stop all")
    if env.config == 'production':
        if not console.confirm('Are you sure you want to deploy production?',
                               default=False):
            utils.abort('Production deployment aborted.')
    if env.code_cleanup:
        if not console.confirm('Are you sure you want to wipe out the "logistics" folder?',
                               default=False):
            utils.abort('Deployment aborted.')
        run('rm -rf logistics')
        run('git clone git://github.com/dimagi/logistics.git')
    else:
        with cd(env.code_dir):
            run('git fetch')
            run('git checkout %(branch)s' % {"branch": env.branch})
            run('git pull %(repo)s %(branch)s' % {"repo": env.remote, "branch": env.branch})
    if env.db_cleanup:
        if not console.confirm('Are you sure you want to wipe out the database?',
                               default=False):
            utils.abort('Deployment aborted.')
        sudo('dropdb %(dbname)s' % {"dbname": env.db_name}, user="******")
        sudo('createdb %(dbname)s' % {"dbname": env.db_name}, user="******")
        
    bootstrap(subdir='logistics_project')
    if env.stop_start:
        sudo("/etc/init.d/apache2 reload")
        sudo("supervisorctl start all")
Example #16
    def run(self, host, server_name=None, postgres_uri=None,
            solr_url=None, solr_shards=None, create_schema=None,
            solr_import=None):

        if server_name is None:
            server_name = host
        if postgres_uri is None:
            postgres_uri = 'postgres://*****:*****@localhost/fluidinfo'
        if solr_url is None:
            solr_url = 'http://localhost:8080/solr'
        if solr_shards is None:
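            # Derive the shard list from the Solr URL; shards are specified
            # without the URL scheme.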
            solr_shards = solr_url.replace('https://', '')
            solr_shards = solr_shards.replace('http://', '')
        if create_schema is None:
            if not confirm("You didn't set the --create-schema option. "
                           "The database won't be bootstrapped.\n"
                           'Continue anyway?'.format(postgres_uri)):
                return

        if solr_import is None:
            if not confirm("You didn't set the --solr-import option. "
                           "The solr index won't be imported for the first"
                           'time.\nContinue anyway?'):
                return

        with settings(host_string=host):
            bootstrapFluidDB(server_name, postgres_uri, solr_url, solr_shards,
                             createSchema=create_schema,
                             solrImport=solr_import)
Example #17
def wait_for_node(node, leave=False):
    """
    Waits for a node to leave or join the cluster

    Continually poll the elasticsearch cluster status API for the node
    """

    tries = DEFAULT_TRIES
    while tries > 0:
        utils.puts(
            'Waiting for node {} to {}'.format(
                node, 'leave' if leave else 'come back',
            )
        )
        data = curl_and_json(env.apis, '/_nodes/_all/info')
        for node_id, node_conf in data['nodes'].items():
            if 'host' in node_conf and node_conf['host'] == node:
                if not leave:
                    # the node has come back
                    return
                # the node is still present; stop scanning and keep waiting
                break
        else:
            # the node was not found in the cluster
            if leave:
                return
        tries -= 1
        time.sleep(DEFAULT_INTERVAL)
    console.confirm(
        'Node {} never {}! Press Enter to continue, '
        'CTRL+C to abort (check output of {}/_nodes/_all/info?pretty)'.
        format(
            node,
            'left' if leave else 'came back',
            env.apis[0],
        )
    )
Example #18
def bootstrap():
    with cd(env.path):
        print("\nStep 1: Install required PHP extensions/apps")

        if confirm('Continue installing requirements? Can skip if already installed.'):
            env.run('sudo setup_env.sh')

        print("\nStep 2: Database and basic Wordpress setup")

        with settings(warn_only=True):
            env.run('rm wp-config.php')
        env.run(env.prefix + './manage.sh setup_wp-config')

        create_db()
        env.run(env.prefix + './manage.sh install')
        env.run(env.prefix + './manage.sh install_network')

        with settings(warn_only=True):
            env.run('rm wp-config.php')
        env.run(env.prefix + './manage.sh setup_wp-config --finish')

        print("\nStep 3: Setup plugins")

        env.run(env.prefix + './manage.sh setup_plugins')

        print("\nStep 4: Cleanup, create blogs")

        env.run(env.prefix + './manage.sh set_root_blog_defaults')

    if confirm("Create child blogs?"): create_blogs()

    with cd(env.path):
        env.run(env.prefix + './manage.sh setup_upload_dirs')
Example #19
def init_belmiro_local_customer( server_name,
                                 cluster_name,
                                 customer_name,
                                 yes_to_all='f',
                                 init_db='t',
                                 test_docs='f',
                                 prompt='t' ):
    '''
    init_belmiro_local_customer( server_name, cluster_name, customer_name )
    '''
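    # fab passes task arguments from the command line as strings, so the
    # boolean-ish flags above arrive as 't'/'f' values.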
    master_app = _get_current_app( server_name, cluster_name, 'belmiro' )
    term.printLog( repr( 'clear_local_instance' ) )
    belmiro_app.clear_local_instance( master_app, customer_name )
    term.printLog( repr( 'create_local_instance' ) )
    belmiro_app.create_local_instance( master_app, customer_name )
    curr_app = _get_current_app( server_name, cluster_name, customer_name )
    if prompt[0].lower() == 't' \
    or yes_to_all[0].lower() == 't':
        if yes_to_all[0].lower() == 't' \
        or console.confirm( 'Initialize DB?', default=True ):
            belmiro_app.drop_local_db( curr_app )
            belmiro_app.create_local_db( curr_app, customer_name )
            term.printLog( repr( 'initialize_local_app' ) )
            belmiro_app.initialize_local_app( curr_app )
        if yes_to_all[0].lower() == 't' \
        or console.confirm( 'Test docs?', default=True ):
            belmiro_app.local_test_docs( curr_app )
Example #20
def upload_blobs(prod_user=None, path=None):
    """Upload BLOB part of Zope's data to the server."""
    opts = dict(
        prod_user=prod_user or env.get('prod_user'),
        path=path or env.get('path') or os.getcwd()
    )

    if not env.get('confirm'):
        confirm("This will destroy all current BLOB files on the server. " \
        "Are you sure you want to continue?")

    with cd('/home/%(prod_user)s/niteoweb.%(shortname)s/var' % opts):

        # backup current BLOBs
        if exists('blobstorage'):
            sudo('mv blobstorage blobstorage.bak')

        # remove temporary BLOBs from previous uploads
        if exists('/tmp/blobstorage'):
            sudo('rm -rf /tmp/blobstorage')

        # upload BLOBs to the server and move them to their place
        rsync_project('/tmp', local_dir='%(path)s/var/blobstorage' % opts)
        sudo('mv /tmp/blobstorage ./')
        sudo('chown -R %(prod_user)s:%(prod_user)s blobstorage' % opts)
        sudo('chmod -R 700 blobstorage')
Example #21
def download_data():
    """Download Zope's Data.fs from the server."""

    if not env.get('confirm'):
        confirm("This will destroy all current Zope data on your local machine. " \
                "Are you sure you want to continue?")

    with cd('/home/%(prod_user)s/niteoweb.%(shortname)s/var' % env):

        ### Download Data.fs ###
        # backup current Data.fs
        if os.path.exists('filestorage/Data.fs'):
            local('mv %(path)s/var/filestorage/Data.fs %(path)s/var/filestorage/Data.fs.bak' % env)

        # remove temporary Data.fs file from previous downloads
        if exists('/tmp/Data.fs', use_sudo=True):
            sudo('rm -rf /tmp/Data.fs')

        # download Data.fs from server
        sudo('rsync -a filestorage/Data.fs /tmp/Data.fs')
        get('/tmp/Data.fs', '%(path)s/var/filestorage/Data.fs' % env)

        ### Download Blobs ###
        # backup current Blobs
        if os.path.exists('%(path)s/var/blobstorage' % env):
            local('mv %(path)s/var/blobstorage %(path)s/var/blobstorage_bak' % env)

        # remove temporary Blobs from previous downloads
        if exists('/tmp/blobstorage', use_sudo=True):
            sudo('rm -rf /tmp/blobstorage')

        # download Blobs from server -> use maintenance user for transfer
        sudo('rsync -a blobstorage /tmp/')
        sudo('chown -R %(user)s /tmp/blobstorage' % env)
        local('rsync -az %(user)s@%(server)s:/tmp/blobstorage %(path)s/var/' % env)
Example #22
def install_mysql(default_password=None):
    """Install MySQL database server."""
    opts = dict(
        default_password=default_password or env.get('default_password') or 'secret'
    )

    # first set root password in advance so we don't get the package
    # configuration dialog
    sudo('echo "mysql-server-5.0 mysql-server/root_password password %(default_password)s" | debconf-set-selections' % opts)
    sudo('echo "mysql-server-5.0 mysql-server/root_password_again password %(default_password)s" | debconf-set-selections' % opts)

    # install MySQL along with php drivers for it
    sudo('sudo apt-get -yq install mysql-server mysql-client')

    if not env.get('confirm'):
        confirm("You will now start with interactive MySQL secure installation."
                " Current root password is '%(default_password)s'. Change it "
                "and save the new one to your password managere. Then answer "
                "with default answers to all other questions. Ready?" % opts)
    sudo('/usr/bin/mysql_secure_installation')

    # restart mysql and php-fastcgi
    sudo('service mysql restart')
    sudo('/etc/init.d/php-fastcgi restart')

    # configure daily dumps of all databases
    sudo('mkdir /var/backups/mysql')
    password = prompt('Please enter your mysql root password so I can configure daily backups:')
    sudo("echo '0 7 * * * mysqldump -u root -p%s --all-databases | gzip > /var/backups/mysql/mysqldump_$(date +%%Y-%%m-%%d).sql.gz' > /etc/cron.d/mysqldump" % password)
Example #23
def deploy_dev(branch='dev', username=None, skip_pip=False):
    #local('open /Applications/Safari.app http://media.tumblr.com/tumblr_maljispYL31ruql5i.gif')
    env.hosts = ['timmy.area59.se']
    env.roledefs = {
        'www': ['timmy.area59.se'],
        'cdn': ['timmy.area59.se'],
        'db': ['timmy.area59.se']
    }

    if branch is None:
        branch = "dev"

        if not confirm("Deploy the %s branch on server?" % branch):
            abort("Aborting.")

    if username is None:
        if not confirm("Are you %s?" % env.user):
            env.user = prompt("Username:"******"I dont know who you are.")

    execute(_git_pull, branch=branch)
    if not skip_pip:
        execute(_ensure_pip_packages)
    else:
        print "skipping pip"
    execute(_ensure_deb_packages)
    execute(_install_crontab)
    execute(_migrate_db)
    execute(_compress_static)
    execute(_collect_static)
    execute(_restart_uwsgi)
Example #24
def deploy(new_apps=True, new_app_conf=True):
    #  test configuration start
    puts_green('Starting DEPLOY...', bg=107)
    if not test_configuration():
        if not console.confirm("Configuration test %s! Do you want to continue?" % red('failed'), default=False):
            abort("Aborting at user request.")
    #  test configuration end
    _verify_sudo()
    if env.ask_confirmation:
        if not console.confirm("Are you sure you want to deploy in %s?" % red(env.project.upper()), default=False):
            abort("Aborting at user request.")
    puts_green('Start deploy...')
    start_time = datetime.now()

    if env.repository_type == 'hg':
        hg_pull()
    else:
        git_pull()
    if new_apps:
        _install_requirements()
    if new_app_conf:
        _upload_nginx_conf()
        _upload_rungunicorn_script()
        _upload_supervisord_conf()

    _deploy_django_project()
    _collect_static()

    _prepare_media_path()  # kept because it may be changed in a code review
    _supervisor_restart()

    end_time = datetime.now()
    finish_message = '[%s] Correctly deployed in %i seconds' % \
                     (green(end_time.strftime('%H:%M:%S')), (end_time - start_time).seconds)
    puts(finish_message)
Example #25
def configure_virtualenvwrapper_for_user(username, python_ver='2'):
    """
    Configure virtualenvwrapper for user
    """
    assert python_ver in ('2', '3')
    python_ver = int(python_ver)
    check_sudo()
    check_os()
    if not confirm('Do you want to configure python {} virtualenvwrapper for user "{}"?'.format(python_ver, username)):
        return
    if not exists('/usr/local/bin/virtualenvwrapper.sh', use_sudo=True):
        abort('virtualenvwrapper is not installed.')
    print_green('INFO: Configuring virtualenvwrapper for user {}...'.format(username))
    user_dir = '/home/{}'.format(username)
    if not exists(user_dir, use_sudo=True):
        print_red("Directory {} doesn't exists :(".format(user_dir))
        if not confirm('Do you want to ignore this error and continue? (else will be abort)', default=False):
            abort("Directory {} doesn't exists :(".format(user_dir))
    else:
        bashrc = '{}/.bashrc'.format(user_dir)
        python_path = '/usr/bin/python2.7'
        if python_ver == 3:
            python_path = '/usr/bin/python{}'.format(get_python3_version())
        append(bashrc, 'export WORKON_HOME=$HOME/.virtualenvs', use_sudo=True)
        append(bashrc, 'export VIRTUALENVWRAPPER_HOOK_DIR=$WORKON_HOME', use_sudo=True)
        append(bashrc, 'export VIRTUALENVWRAPPER_PYTHON={}'.format(python_path), use_sudo=True)
        append(bashrc, 'source /usr/local/bin/virtualenvwrapper.sh', use_sudo=True)
    print_green('INFO: Configuring python {} virtualenvwrapper for user {}... OK'.format(python_ver, username))
Example #26
def deploy():
    """ deploy code to remote host by checking out the latest via git """
    if not console.confirm('Are you sure you want to deploy {env.environment}?'.format(env=env), default=False) or \
       not console.confirm('Did you run "fab {env.environment} preindex_views"? '.format(env=env), default=False):
        utils.abort('Deployment aborted.')

    require('root', provided_by=('staging', 'preview', 'production', 'india'))
    run('echo ping!') #hack/workaround for delayed console response

    try:
        execute(update_code)
        execute(update_virtualenv)
        execute(clear_services_dir)
        set_supervisor_config()
        if env.should_migrate:
            execute(migrate)
        execute(_do_collectstatic)
        execute(version_static)
        if env.should_migrate:
            execute(flip_es_aliases)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        raise
    else:
        execute(record_successful_deploy)
    finally:
        # hopefully bring the server back to life if anything goes wrong
        execute(services_restart)
Example #27
def invenio_conf():
    """ Upload and update Invenio configuration """
    puts(cyan(">>> Configuring Invenio..." % env))

    invenio_local = env.get('CFG_INVENIO_CONF', None)
    invenio_local_remote = os.path.join(env.CFG_INVENIO_PREFIX, 'etc/invenio-local.conf')

    if not invenio_local:
        puts(red(">>> CFG_INVENIO_CONF not specified, using built-in template for invenio-local.conf..."))

    puts(">>> Writing invenio-local.conf to %s ..." % invenio_local_remote)
    if not invenio_local:
        write_template(invenio_local_remote, env, tpl_str=INVENIO_LOCAL_TPL, use_sudo=True)
    else:
        try:
            write_template(invenio_local_remote, env, tpl_file=invenio_local, use_sudo=True)
        except TemplateNotFound:
            puts(red("Could not find template %s" % invenio_local))
            if not confirm("Use built-in template for invenio-local.conf?"):
                abort("User aborted")
            else:
                write_template(invenio_local_remote, env, tpl_str=INVENIO_LOCAL_TPL, use_sudo=True)

    if confirm(cyan("Run config update")):
        inveniomanage("config update")
        inveniomanage("bibfield config load")
Example #28
def create_individual_database(dbname):
    """Create a single database. Used by initial_create_databases
    and copy_databases."""

    dbinfo = config('databases')[dbname]

    if 'postgis' in dbinfo['ENGINE']:
        if confirm("Create database %s on %s with template postgis?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'." +
                        " Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres ' +
                   '--template=template_postgis --owner={user} {database}')
            # Use warn-only so that the script doesn't halt if the db
            # exists already
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'], user=dbinfo['USER'],
                                 database=dbinfo['NAME']))
    else:
        if confirm("Create database %s on %s?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'." +
                        " Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres ' +
                   '--owner={user} {database}')
            # Use warn-only so that the script doesn't halt if the db
            # exists already
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'], user=dbinfo['USER'],
                                 database=dbinfo['NAME']))
Example #29
def set_bootmachine_servers(roles=None, ip_type="public", append_port=True):
    """
    Internal bootmachine method to set the Fabric env.bootmachine_servers variable.
    """
    env.bootmachine_servers = list_servers(as_list=True)
    ips = []

    for server in env.bootmachine_servers:
        if server.status != "ACTIVE":
            console.confirm("The server `{0}` is not `ACTIVE` and is in the `{1}` phase. \
Continue?".format(server.name, server.status))
    for server in env.bootmachine_servers:
        # Verify (by name) that the live server was defined in the settings.
        try:
            [n for n in settings.SERVERS if n["servername"] == server.name][0]
        except IndexError:
            continue
        # If a ``roles`` list was passed in, verify it identically matches the server's roles.
        if roles and sorted(roles) != sorted(server["roles"]):
            continue
        if append_port:
            try:
                Telnet(server.public_ip, 22)
                server.port = 22
                ips.append(server.public_ip + ":" + "22")
            except IOError:
                Telnet(server.public_ip, int(settings.SSH_PORT))
                server.port = int(settings.SSH_PORT)
                ips.append(server.public_ip + ":" + str(settings.SSH_PORT))
        else:
            ips.append(server.addresses[ip_type][0])
        server.distro_module = [n["distro_module"] for n in settings.SERVERS
                                if n["servername"] == server.name][0]
    return ips
Example #30
def upload_zodb(prod_user=None, path=None):
    """Upload ZODB part of Zope's data to the server."""
    opts = dict(
        prod_user=prod_user or env.get('prod_user'),
        path=path or env.get('path') or os.getcwd()
    )

    # _verify_env(['prod_user', 'path', ])

    if not env.get('confirm'):
        confirm("This will destroy the current Data.fs file on the server. " \
        "Are you sure you want to continue?")

    with cd('/home/%(prod_user)s/niteoweb.%(shortname)s/var/filestorage' % opts):

        # remove temporary Data.fs files from previous uploads
        if exists('/tmp/Data.fs'):
            sudo('rm -rf /tmp/Data.fs')

        # upload Data.fs to the server and set the production user as its owner
        upload_template(
            filename='%(path)s/var/filestorage/Data.fs' % opts,
            destination='Data.fs',
            use_sudo=True
        )
        sudo('chown -R %(prod_user)s:%(prod_user)s Data.fs' % opts)
Example #31
def deploy():
    """
    Deploy latest version of the project.
    Backup current version of the project, push latest version of the project
    via version control or rsync, install new requirements, sync and migrate
    the database, collect any new static assets, and restart gunicorn's worker
    processes for the project.
    """
    if not exists(env.proj_path):
        if confirm("Project does not exist in host server: %s"
                   "\nWould you like to create it?" % env.proj_name):
            create()
        else:
            abort("Aborted at user request")

    # Backup current version of the project
    _print(blue("Backing up static files and database...", bold=True))
    with cd(env.proj_path):
        backup("last.db")
    if env.deploy_tool in env.vcs_tools:
        with cd(env.repo_path):
            if env.deploy_tool == "git":
                run("git rev-parse HEAD > %s/last.commit" % env.proj_path)
            elif env.deploy_tool == "hg":
                run("hg id -i > last.commit")
        with project():
            static_dir = static()
            if exists(static_dir):
                run("tar -cf static.tar --exclude='*.thumbnails' %s" %
                    static_dir)
    else:
        with cd(join(env.proj_path, "..")):
            excludes = ["*.pyc", "*.pio", "*.thumbnails"]
            exclude_arg = " ".join("--exclude='%s'" % e for e in excludes)
            run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg))

    # Update requirements and migrate the DB
    _print(blue("Deploying the latest version of the project...", bold=True))
    with update_changed_requirements():
        if env.deploy_tool in env.vcs_tools:
            vcs_upload()
        else:
            rsync_upload()
    manage("migrate --noinput")

    # Upload and collect compiled static resources
    run("mkdir -p %s" % static())  # Create the STATIC_ROOT
    remote_path = static() + "/.htaccess"
    upload_template("deploy/htaccess", remote_path, backup=False)
    excludes = ["*~", "*.old", "*.map"]
    local_dir = os.path.join(os.getcwd(), "theme", "static", "build")
    remote_dir = os.path.join(env.proj_path, "theme", "static")
    rsync_project(remote_dir=remote_dir, local_dir=local_dir, exclude=excludes)
    manage("collectstatic -v 0 --noinput")

    # Upload templated config files
    _print(blue("Uploading configuration files...", bold=True))
    # Get the application port we saved on create() into the context
    with tempfile.TemporaryFile() as temp:
        get("%s/app.port" % env.proj_path, temp)
        temp.seek(0)
        port = temp.read()
        env.gunicorn_port = port.strip()
    for name in get_templates():
        upload_template_and_reload(name)
    restart()
    return True
Example #32
    def handle(self, *args, **options):
        # Load server config from project
        _, remote = load_config(env,
                                options.get('remote', ''),
                                config_user='******',
                                debug=options.get('debug', False))

        if django_settings.DEBUG:
            abort(
                "You're currently using your local settings file, you need use production instead.\n"
                "To use production settings pass `--settings={}` to the deploy command."
                .format(
                    os.getenv('DJANGO_SETTINGS_MODULE').replace(
                        '.local', '.production')))

        # Compress the domain names for nginx
        domain_names = " ".join(django_settings.ALLOWED_HOSTS)

        # Use the site domain as a fallback domain
        fallback_domain_name = django_settings.SITE_DOMAIN

        if not options['noinput']:
            fallback_domain_name = prompt('What should the default domain be?',
                                          default=fallback_domain_name)
            domain_names = prompt(
                'Which domains would you like to enable in nginx?',
                default=domain_names)
        else:
            print(f'Default domain: {fallback_domain_name}')
            print(f'Domains to be enabled in nginx: {domain_names}')

        # If the domain is pointing to the droplet already, we can setup SSL.
        setup_ssl_for = [
            domain_name for domain_name in domain_names.split(' ')
            if local(f'dig +short {domain_name}', capture=True) ==
            remote['server']['ip']
        ]

        if not setup_ssl_for:
            abort(
                "Sorry, it's $CURRENT_YEAR, you need to use SSL. Please update the domain DNS to point to {}."
                .format(remote['server']['ip']))

        for domain_name in domain_names.split(' '):
            if domain_name not in setup_ssl_for:
                print(f'SSL will not be configured for {domain_name}')

        if not options['noinput']:
            if not confirm('Do you want to continue?'):
                exit()

        # Define nginx tasks
        nginx_tasks = [
            {
                'title':
                'Ensure Nginx service is stopped',  # This allows Certbot to run.
                'command': 'service nginx stop',
            },
            {
                'title':
                'Run certbot',
                'command':
                'certbot certonly --standalone -n --agree-tos --email [email protected] --cert-name {} --domains {}'
                .format(fallback_domain_name, ','.join(setup_ssl_for)),
            },
            {
                'title': 'Ensure Nginx service is started',
                'command': 'service nginx start',
            },
        ]
        run_tasks(env, nginx_tasks)
Example #33
def request_confirm(action):
    require('confirm_required')

    if env.confirm_required:
        if not confirm("Are you sure you want to run task: %s on servers %s?" % (action, env.hosts)):
            abort('Deployment aborted.')
Example #34
def letsencrypt_configure(reconfigure_nginx=True):
    require('code_dir')

    domains = set()

    # Collect all the domains that need a certificate
    with cd(env.code_dir):
        # construct a configparser object
        config = ConfigParser.ConfigParser()

        for filename in get_config_repo_paths('letsencrypt'):
            buf = StringIO()

            # Add the actual config file data to the buffer
            get(filename, buf)

            # Here we prepend a section header to the in-memory buffer. This
            #  allows us to easily read the letsencrypt config file using stdlib configparser
            #
            # see: http://stackoverflow.com/questions/2819696/parsing-properties-file-in-python/25493615#25493615
            buf = StringIO('[DEFAULT]\n' + buf.getvalue())

            # read config from buf
            config.readfp(buf)

            # get domains from the config file
            for domain in config.get('DEFAULT', 'domains').split(','):
                domains.add(domain.strip())

    # Create a temporary nginx config file
    temporary_nginx_conf = """
        server {
            listen 80;
            server_name %(domains)s;
            location /.well-known/acme-challenge/ {
                root /etc/letsencrypt/www;
                break;
            }
        }
    """ % {
        "domains": " ".join(domains),
    }

    # Notify the user that the dns MUST be configured for all the domains as of this point
    print(" ")
    print(colors.blue('Preparing to request certificate using letsencrypt. The DNS for '
                      'following domains MUST be configured to point to the remote host: %s' % " ".join(domains)))

    if not confirm(colors.yellow("Is the DNS configured? (see above)")):
        abort('Deployment aborted.')

    # Upload it to the app nginx config path
    put(local_path=StringIO(temporary_nginx_conf), remote_path=get_nginx_app_target_path(), use_sudo=True)

    # Reload nginx
    sudo('docker exec nginx nginx -s reload')

    # use letsencrypt_update to obtain the certificate
    letsencrypt_update(dry_run=True)

    # restore nginx config if requested
    if reconfigure_nginx:
        nginx_update()
Example #35
def setup_server(id=None):
    """ Perform initial deploy on the target """
    require('hosts')
    require('code_dir')

    # Clone code repository
    vcs.clone(id or None)

    # Create password for DB, secret key and the local settings
    db_password = generate_password()
    secret_key = generate_password()

    # Create site settings for this env
    allowed_hosts = [env.node_site]

    if env.django_site not in allowed_hosts:
        allowed_hosts.append(env.django_site)

    allowed_hosts = ','.join(allowed_hosts)

    site_settings = string.Template(BASE_LOCAL_SETTINGS).substitute(
        node_site=env.node_site, django_site=env.django_site, allowed_hosts=allowed_hosts,
    )
    local_settings = string.Template(DJANGO_LOCAL_SETTINGS).substitute(
        db_password=db_password, secret_key=secret_key, site_settings=site_settings,
    )

    # Create database
    sudo('echo "CREATE DATABASE {{cookiecutter.repo_name}}; '
         '      CREATE USER {{cookiecutter.repo_name}} WITH password \'{db_password}\'; '
         '      GRANT ALL PRIVILEGES ON DATABASE {{cookiecutter.repo_name}} to {{cookiecutter.repo_name}};" '
         '| docker exec -i postgres-10 psql -U postgres'.format(db_password=db_password))

    # Upload local settings / env files
    node_settings_file = env.code_dir + '/app/env/node.env'
    django_settings_file = env.code_dir + '/the_platform/django.env'

    put(local_path=StringIO(site_settings), remote_path=node_settings_file, use_sudo=True)
    put(local_path=StringIO(local_settings), remote_path=django_settings_file, use_sudo=True)

    if confirm(colors.yellow("Separate Node Sentry keys?"), default=False):
        print('Enter Django RAVEN_PUBLIC_DSN:')
        add_secret_key('RAVEN_PUBLIC_DSN', [django_settings_file])

        print('Enter Django RAVEN_BACKEND_DSN:')
        add_secret_key('RAVEN_BACKEND_DSN', [django_settings_file])

        print('Enter Node RAVEN_PUBLIC_DSN:')
        add_secret_key('RAVEN_PUBLIC_DSN', [node_settings_file])

        print('Enter Node RAVEN_BACKEND_DSN:')
        add_secret_key('RAVEN_BACKEND_DSN', [node_settings_file])
    else:
        print('Enter RAVEN_PUBLIC_DSN:')
        add_secret_key('RAVEN_PUBLIC_DSN', [node_settings_file, django_settings_file])

        print('Enter RAVEN_BACKEND_DSN:')
        add_secret_key('RAVEN_BACKEND_DSN', [node_settings_file, django_settings_file])

    # Create log dir
    sudo('mkdir -p /var/log/{{cookiecutter.repo_name}}/')

    ensure_docker_networks()

    docker_compose('build')

    # migrations, collectstatic (both django & node)
    migrate(silent=True)
    collectstatic()

    # Copy logrotate conf
    with cd(env.code_dir):
        sudo('cp deploy/logrotate.conf /etc/logrotate.d/{{cookiecutter.repo_name}}')

    # (Re)start services
    docker_up(silent=True)

    # Run deploy systemchecks
    check()

    # Configure letsencrypt
    letsencrypt_configure(reconfigure_nginx=False)

    # Install nginx config
    nginx_update()
Example #36
def create():
    """
    Creates the environment needed to host the project.
    The environment consists of: system locales, virtualenv, database, project
    files, SSL certificate, and project-specific Python requirements.
    """
    # Generate project locale
    locale = env.locale.replace("UTF-8", "utf8")
    with hide("stdout"):
        if locale not in run("locale -a"):
            sudo("locale-gen %s" % env.locale)
            sudo("update-locale %s" % env.locale)
            sudo("service postgresql restart")
            run("exit")

    # Create project path
    run("mkdir -p %s" % env.proj_path)

    # Set up virtual env
    run("mkdir -p %s" % env.venv_home)
    with cd(env.venv_home):
        if exists(env.proj_name):
            if confirm("Virtualenv already exists in host server: %s"
                       "\nWould you like to replace it?" % env.proj_name):
                run("rm -rf %s" % env.proj_name)
            else:
                abort()
        run("virtualenv %s" % env.proj_name)

    # Upload project files
    if env.deploy_tool in env.vcs_tools:
        vcs_upload()
    else:
        rsync_upload()

    # Create DB and DB user
    pw = db_pass()
    user_sql_args = (env.proj_name, pw.replace("'", "\'"))
    user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
    psql(user_sql, show=False)
    shadowed = "*" * len(pw)
    print_command(user_sql.replace("'%s'" % pw, "'%s'" % shadowed))
    psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
         "LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
         (env.proj_name, env.proj_name, env.locale, env.locale))

    # Set up SSL certificate
    if not env.ssl_disabled:
        conf_path = "/etc/nginx/conf"
        if not exists(conf_path):
            sudo("mkdir %s" % conf_path)
        with cd(conf_path):
            crt_file = env.proj_name + ".crt"
            key_file = env.proj_name + ".key"
            if not exists(crt_file) and not exists(key_file):
                try:
                    crt_local, = glob(join("deploy", "*.crt"))
                    key_local, = glob(join("deploy", "*.key"))
                except ValueError:
                    parts = (crt_file, key_file, env.domains[0])
                    sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
                         "-subj '/CN=%s' -days 3650" % parts)
                else:
                    upload_template(crt_local, crt_file, use_sudo=True)
                    upload_template(key_local, key_file, use_sudo=True)

    # Install project-specific requirements
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("-r %s/%s" % (env.proj_path, env.reqs_path))
        pip("gunicorn setproctitle psycopg2 "
            "django-compressor python-memcached")
        # Bootstrap the DB
        manage("createdb --noinput --nodata")
        python(
            "from django.conf import settings;"
            "from django.contrib.sites.models import Site;"
            "Site.objects.filter(id=settings.SITE_ID).update(domain='%s');" %
            env.domains[0])
        for domain in env.domains:
            python("from django.contrib.sites.models import Site;"
                   "Site.objects.get_or_create(domain='%s');" % domain)
        if env.admin_pass:
            pw = env.admin_pass
            user_py = ("from django.contrib.auth import get_user_model;"
                       "User = get_user_model();"
                       "u, _ = User.objects.get_or_create(username='******');"
                       "u.is_staff = u.is_superuser = True;"
                       "u.set_password('%s');"
                       "u.save();" % pw)
            python(user_py, show=False)
            shadowed = "*" * len(pw)
            print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    return True
Example #37
def production():
    """www.commcarehq.org"""
    env.sudo_user = '******'
    env.environment = 'production'
    env.django_bind = '0.0.0.0'
    env.django_port = '9010'
    env.should_migrate = True
    env.sms_queue_enabled = True
    env.pillow_retry_queue_enabled = True

    if env.code_branch != 'master':
        branch_message = (
            "Woah there bud! You're using branch {env.code_branch}. "
            "ARE YOU DOING SOMETHING EXCEPTIONAL THAT WARRANTS THIS?").format(
                env=env)
        if not console.confirm(branch_message, default=False):
            utils.abort('Action aborted.')

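    # Logical groups of hosts used to build the role definitions below.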
    class Servers(object):
        db = ['hqdb0.internal.commcarehq.org']
        celery = ['hqcelery0.internal.commcarehq.org']
        touch = ['hqtouch0.internal.commcarehq.org']
        django = [
            'hqdjango3.internal.commcarehq.org',
            'hqdjango4.internal.commcarehq.org',
            'hqdjango5.internal.commcarehq.org'
        ]

    env.roledefs = {
        'couch': Servers.db,
        'pg': Servers.db,
        'rabbitmq': Servers.db,
        'django_celery': Servers.celery,
        'sms_queue': Servers.celery,
        'pillow_retry_queue': Servers.celery,
        'django_app': Servers.django,
        'django_pillowtop': Servers.db,

        # for now, we'll have touchforms run on both hqdb0 and hqdjango0
        # will remove hqdjango0 once we verify it works well on hqdb0
        'formsplayer': Servers.touch,
        'lb': [],
        'staticfiles': PROD_PROXIES,
        # having deploy here makes it so that
        # we don't get prompted for a host or run deploy too many times
        'deploy': Servers.db,
        # fab complains if this doesn't exist
        'django_monolith': []
    }

    env.server_name = 'commcare-hq-production'
    env.settings = '%(project)s.localsettings' % env
    # e.g. 'ubuntu' or 'redhat'.
    # Gets auto-populated by what_os()
    # if you don't know what it is or don't want to specify.
    env.host_os_map = None
    env.roles = [
        'deploy'
    ]  # this line should be commented out when running bootstrap on a new machine
    env.es_endpoint = 'hqes0.internal.commcarehq.org'
    env.flower_port = 5555

    _setup_path()
Example #38
def letsencrypt(real_cert=False):
    domain = "lichess4545.com"
    domain2 = "lichess4545.tv"
    domain3 = "staging.lichess4545.com"
    domains = [
        domain,
        "www.{0}".format(domain),
        domain2,
        "www.{0}".format(domain2),
        domain3,
    ]
    country = "CA"
    state = "Alberta"
    town = "Calgary"
    email = "*****@*****.**"

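    # Keep the key, CSR and certbot output in a dated certs/YYYY-MM directory
    # so earlier certificates are never overwritten.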
    now = datetime.datetime.now()
    outdir = project_relative(now.strftime("certs/%Y-%m"))
    if os.path.exists(outdir):
        print colors.red(
            "{0} exists, bailing to avoid overwriting files".format(outdir))
        return
    key = "{0}/privkey1.pem".format(outdir)
    csr = "{0}/signreq.der".format(outdir)
    tmpdir = "{0}/tmp".format(outdir)
    ssl_conf = "{0}/openssl.cnf".format(tmpdir)
    local("mkdir -p {0}".format(tmpdir))
    with lcd(outdir):
        # Create an openssl.cnf that we can use.
        sans = ",".join(["DNS:{0}".format(d) for d in domains])
        local('cat /etc/ssl/openssl.cnf > "{0}"'.format(ssl_conf))
        local('echo "[SAN]" >> "{0}"'.format(ssl_conf))
        local('echo "subjectAltName={1}" >> "{0}"'.format(ssl_conf, sans))
        # Create the signing request.
        local(
            'openssl req -new -newkey rsa:2048 -sha256 -nodes -keyout "{key}" -out "{csr}" -outform der -subj "/C={country}/ST={state}/L={town}/O={domain}/emailAddress={email}/CN={domain}" -reqexts SAN -config "{ssl_conf}"'
            .format(
                key=key,
                csr=csr,
                country=country,
                state=state,
                town=town,
                domain=domain,
                email=email,
                ssl_conf=ssl_conf,
            ))

        domain_args = " ".join(["-d {0}".format(d) for d in domains])
        log_dir = "{0}/log".format(outdir)
        lib_dir = "{0}/lib".format(outdir)
        etc_dir = "{0}/etc".format(outdir)
        test_cert = "--test-cert"
        if real_cert:
            test_cert = ""
        local(
            'letsencrypt certonly --text {test_cert} --manual {domain_args} --config-dir {etc_dir} --logs-dir {log_dir} --work-dir {lib_dir} --email "{email}" --csr "{csr}"'
            .format(domain_args=domain_args,
                    log_dir=log_dir,
                    lib_dir=lib_dir,
                    etc_dir=etc_dir,
                    email=email,
                    csr=csr,
                    test_cert=test_cert))
    if real_cert and confirm("Install cert?"):
        privkey = os.path.join(outdir, "privkey1.pem")
        chain = os.path.join(outdir, "0001_chain.pem")
        privkey_target = "/var/ssl/lichess4545.com.key"
        chain_target = "/var/ssl/lichess4545.com.pem"
        put(privkey, privkey_target)
        put(chain, chain_target)
Example #39
def disable_root():
    """
    Disables root and creates a new sudo user as specified by HOST_USER in your
    settings or your host_string
    
    The normal pattern for hosting is to get a root account which is then disabled.
    
    returns True on success
    """
    
    def enter_password():
        password1 = getpass.getpass(prompt='Enter the password for %s:'% sudo_user)
        password2 = getpass.getpass(prompt='Re-enter the password:')
        if password1 != password2:
            print 'The passwords do not match'
            enter_password()
        return password1

    (olduser,host,port) = normalize(env.host_string)
 
    if env.verbosity and not (env.HOST_USER or env.ROLEDEFS):
    
        print "\nWOVEN will now walk through setting up your node (host).\n"

        if env.INTERACTIVE:
            root_user = prompt("\nWhat is the default administrator account for your node?", default=env.ROOT_USER)
        else: root_user = env.ROOT_USER
        if env.user != 'root': sudo_user = env.user
        else: sudo_user = ''
        if env.INTERACTIVE:
            sudo_user = prompt("What is the new or existing account you wish to use to setup and deploy to your node?", default=sudo_user)
           
    else:
        root_user = env.ROOT_USER
        sudo_user = env.user
        

    original_password = env.get('HOST_PASSWORD','')
    
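    # First connect as the existing administrator account on the default SSH
    # port to create (or update) the sudo user.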
    host_string=join_host_strings(root_user,host,str(env.DEFAULT_SSH_PORT))
    with settings(host_string=host_string, key_filename=env.key_filename, password=env.ROOT_PASSWORD):
        if not contains('/etc/group','sudo',use_sudo=True):
            sudo('groupadd sudo')

        home_path = '/home/%s'% sudo_user
        if not exists(home_path):
            if env.verbosity:
                print env.host, 'CREATING A NEW ACCOUNT WITH SUDO PRIVILEGE: %s'% sudo_user
            if not original_password:

                original_password = enter_password()
            
            add_user(username=sudo_user, password=original_password,group='sudo')

        #Add existing user to sudo group
        else:
            sudo('adduser %s sudo'% sudo_user)
        #adm group used by Ubuntu logs
        sudo('usermod -a -G adm %s'% sudo_user)
        #add user to /etc/sudoers
        if not exists('/etc/sudoers.wovenbak',use_sudo=True):
            sudo('cp -f /etc/sudoers /etc/sudoers.wovenbak')
        sudo('cp -f /etc/sudoers /tmp/sudoers.tmp')
        append('/tmp/sudoers.tmp', "# Members of the sudo group may gain root privileges", use_sudo=True)
        append('/tmp/sudoers.tmp', "%sudo ALL=(ALL) NOPASSWD:ALL",  use_sudo=True)
        sudo('visudo -c -f /tmp/sudoers.tmp')
        sudo('cp -f /tmp/sudoers.tmp /etc/sudoers')
        sudo('rm -rf /tmp/sudoers.tmp')
        if env.key_filename:
            sudo('mkdir -p /home/%s/.ssh'% sudo_user)
            sudo('cp -f ~/.ssh/authorized_keys /home/%s/.ssh/authorized_keys'% sudo_user)
            sudo('chown -R %s:sudo /home/%s/.ssh'% (sudo_user,sudo_user))
            
    env.password = original_password

    #finally disable root
    host_string=join_host_strings(sudo_user,host,str(env.DEFAULT_SSH_PORT))
    with settings(host_string=host_string):
        if sudo_user != root_user and root_user == 'root':
            if env.INTERACTIVE:
                d_root = confirm("Disable the root account", default=True)
            else: d_root = env.DISABLE_ROOT
            if d_root:
                if env.verbosity:
                    print env.host, 'DISABLING ROOT'
                sudo("usermod -L %s"% 'root')

    return True
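Before root is locked with `usermod -L`, it can be worth confirming that the new account really has working sudo access. A minimal sketch using the same Fabric 1.x helpers assumed above; the task name verify_sudo_access is illustrative, not part of Woven.

from fabric.api import settings, sudo
from fabric.network import join_host_strings

def verify_sudo_access(sudo_user, host, port='22'):
    """Run a harmless command through sudo as the new user; fails loudly otherwise."""
    with settings(host_string=join_host_strings(sudo_user, host, port)):
        sudo('true')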
def create():
    """
    Creates the environment needed to host the project.
    The environment consists of: virtualenv, database, project
    files, project-specific Python requirements, and Webfaction API objects.
    """
    # Set up virtualenv
    run("mkdir -p %s" % env.venv_home)
    with cd(env.venv_home):
        if exists(env.proj_name):
            if confirm("Virtualenv already exists in host server: %s"
                       "\nWould you like to replace it?" % env.proj_name):
                run("rm -rf %s" % env.proj_name)
            else:
                abort("Aborted at user request")
        run("virtualenv %s" % env.proj_name)
        # Make sure we don't inherit anything from the system's Python
        run("touch %s/lib/python2.7/sitecustomize.py" % env.proj_name)

    # Create elements with the Webfaction API
    _print(
        blue(
            "Creating database and website records in the Webfaction "
            "control panel...",
            bold=True))
    srv, ssn, acn = get_webf_session()

    # Database
    db_user = get_webf_obj(srv, ssn, "db_user", env.proj_name)
    if db_user:
        abort("Database user %s already exists." % db_user["username"])
    db = get_webf_obj(srv, ssn, "db", env.proj_name)
    if db:
        abort("Databse %s already exists." % db["name"])
    if env.db_pass is None:
        env.db_pass = db_pass()
    srv.create_db(ssn, env.proj_name, "postgresql", env.db_pass)

    # Custom app
    app = get_webf_obj(srv, ssn, "app", env.proj_name)
    if app:
        abort("App %s already exists." % app["name"])
    app = srv.create_app(ssn, env.proj_name, "custom_app_with_port", True, "")
    # Save the application port to a file for later deployments
    run("echo '%s' > %s/app.port" % (app["port"], env.proj_path))

    # Static app
    static_app = get_webf_obj(srv, ssn, "app", "%s_static" % env.proj_name)
    if static_app:
        abort("Static app %s already exists." % static_app["name"])
    static_app_name = "%s_static" % env.proj_name
    static_dir = "%s/static" % env.proj_path
    srv.create_app(ssn, static_app_name, "symlink54", False, static_dir)

    # Domain and subdomain
    dom = get_webf_obj(srv, ssn, "domain", env.live_domain, env.live_subdomain)
    if dom:
        abort("Domain %s already exists." % env.live_host)
    srv.create_domain(ssn, env.live_domain, env.live_subdomain)

    # Site record
    site = get_webf_obj(srv, ssn, "website", env.proj_name)
    if site:
        abort("Website: %s already exists." % site["name"])
    main_app, static_app = [env.proj_name, "/"], [static_app_name, "/static"]
    site = srv.create_website(ssn, env.proj_name, env.host_string, False,
                              [env.live_host], main_app, static_app)

    # Upload project files
    _print(blue("Uploading project files...", bold=True))
    if env.deploy_tool in env.vcs_tools:
        vcs_upload()
    else:
        rsync_upload()

    # Install project-specific requirements
    _print(blue("Installing project requirements...", bold=True))
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("-r %s/%s" % (env.proj_path, env.reqs_path), show=False)
        pip(
            "gunicorn setproctitle psycopg2 "
            "django-compressor python-memcached",
            show=False)
        # Bootstrap the DB
        _print(blue("Initializing the database...", bold=True))
        manage("createdb --noinput --nodata")
        python("from django.conf import settings;"
               "from django.contrib.sites.models import Site;"
               "site, _ = Site.objects.get_or_create(id=settings.SITE_ID);"
               "site.domain = '" + env.live_host + "';"
               "site.save();")
        if env.admin_pass:
            pw = env.admin_pass
            user_py = ("from django.contrib.auth import get_user_model;"
                       "User = get_user_model();"
                       "u, _ = User.objects.get_or_create(username='******');"
                       "u.is_staff = u.is_superuser = True;"
                       "u.set_password('%s');"
                       "u.save();" % (env.admin_user, pw))
            python(user_py, show=False)
            shadowed = "*" * len(pw)
            print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    return True
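create() reads a number of env settings that are defined elsewhere in the fabfile. Below is a hedged sketch of the settings it touches; only the key names come from the call sites above, and every value is a placeholder.

from fabric.api import env

env.proj_name = "myproject"                      # virtualenv, DB, and app name
env.venv_home = "/home/webfaction_user/.virtualenvs"
env.proj_path = "/home/webfaction_user/webapps/myproject"
env.live_domain = "example.com"
env.live_subdomain = "www"
env.live_host = "www.example.com"
env.db_pass = None                               # generated/prompted if left unset
env.admin_user = "admin"
env.admin_pass = None
env.deploy_tool = "git"
env.vcs_tools = ["git", "hg"]
env.reqs_path = "requirements.txt"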
Example #41
0
def stop():
    "Stop neo4j-service."
    # NB: This doesn't use sudo() directly because it insists on asking
    # for a password, even though we should have NOPASSWD in visudo.
    if confirm("Stop Neo4j server?"):
        run('sudo service %(service_name)s stop' % env)
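The comment above refers to a NOPASSWD rule in sudoers. A hedged sketch of one way such a rule could be added with Fabric; the group name, the sudoers.d filename, and the service path are assumptions.

from fabric.api import sudo
from fabric.contrib.files import append

def allow_service_control(group='sudo'):
    """Let members of `group` run the service command without a password."""
    line = '%%%s ALL=(ALL) NOPASSWD: /usr/sbin/service' % group
    append('/etc/sudoers.d/neo4j-service', line, use_sudo=True)
    sudo('chmod 440 /etc/sudoers.d/neo4j-service')
    sudo('visudo -c -f /etc/sudoers.d/neo4j-service')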
Example #42
0
def test():
    with settings(warn_only=True):
        result = local("nosetests -v", capture=True)
    if result.failed and not confirm("Tests failed. Continue?"):
        abort("Aborted at user request.")
Example #43
0
def install_addon(name,
                  settings=PROD_SETTINGS,
                  install_dir=INSTALL_DIR,
                  verbose=False,
                  interactive=True):
    """Install a user add-on.

    The name is supplied on the command line. The version, manufacturer,
    url_root, notes, and celery tasks are supplied from the add-on.

    :param name: The add-on's installation name
    :type name: str
    :keyword settings: The path of the Django settings file to use.
    :type settings: str
    :keyword install_dir: The path to the Goldstone installation directory.
    :type install_dir: str
    :keyword verbose: Display more informational messages?
    :type verbose: bool or str, depending on whether we are called directly or
                   from manage_addon.sh

    """

    # Normalize verbose and interactive to booleans.
    verbose = verbose in ["True", "true", True]
    interactive = interactive in ["True", "true", True]

    # Switch to the right environment, because we'll access the database.
    with _django_env(settings, install_dir):
        from goldstone.addons.models import Addon
        from rest_framework.authtoken.models import Token

        # Gather the package installation information from the package or user.
        # (The package has already been installed into Python's execution
        # environment.)
        addon_db, addon_install = \
            _install_addon_info(name, install_dir, verbose)

        # Get permission to proceed.
        if not interactive or \
                confirm(cyan('Proceed?'), default=False):
            if addon_install["replacement"]:
                # Replacing an existing add-on.
                row = Addon.objects.get(name=name)
                row.version = addon_db["version"]
                row.manufacturer = addon_db["manufacturer"]
                row.url_root = addon_db["url_root"]
                row.notes = addon_db["notes"]
                row.save()
            else:
                # Installing a new add-on. We'll track where we are, in case an
                # exception occurs.
                try:
                    # Add an Addon table row for this add-on.
                    error = "updating the Addon table. It's probably OK, " \
                            "but check it."

                    row = Addon.objects.create(**addon_db)

                    # Now add the add-on to INSTALLED_APPS and
                    # CELERYBEAT_SCHEDULE. SED is scary, so we'll use Python
                    # instead.
                    error = "reading base.py. The Addon table was " \
                            "modified. You must edit settings/base.py and " \
                            "urls.py, and copy the static files."

                    filepath = os.path.join(install_dir,
                                            "goldstone/settings/base.py")

                    with open(filepath) as f:
                        filedata = f.read()

                    # Find the end of the INSTALLED_APPS tuple and insert the
                    # line there.
                    insert = filedata.index(INSTALLED_APPS_START)
                    insert = filedata.index(')', insert)

                    filedata = \
                        filedata[:insert] + \
                        addon_install["installedapp"] + \
                        filedata[insert:]

                    # Now find CELERYBEAT_SCHEDULE, and the start of the
                    # user-installed apps section. We do both to maximize the
                    # probability of doing this correctly.
                    insert = filedata.index(CELERYBEAT_SCHEDULE)
                    insert = filedata.index(CELERYBEAT_APPS, insert)

                    # Insert at the start of the next line.
                    insert = filedata.index('\n', insert) + 1
                    filedata = \
                        filedata[:insert] + \
                        CELERYBEAT_APP_INCLUDE.format(name) + \
                        filedata[insert:]

                    # Update the file.
                    error = "writing base.py. The Addon table was " \
                            "modified. You must edit settings/base.py and " \
                            "urls.py, and copy the static files."

                    with open(filepath, 'w') as f:
                        f.write(filedata)

                    # Do a syncdb, to add the add-on's models. (This can't be
                    # done before INSTALLED_APPS is updated.)
                    error = "doing a syncdb."

                    _django_manage("syncdb --noinput --migrate",
                                   proj_settings=settings,
                                   install_dir=install_dir)

                    # Add this add-on's root node to the persistent resource
                    # graph. (Can't be done before the syncdb.)
                    error = "updating the persistent resource graph."

                    _add_root_node(row.name)

                    # Now add the add-on to the end of the URLconf.
                    error = "writing urls.py. The Addon table and " \
                            "settings/base.py were updated. You must edit " \
                            "urls.py, and copy the static files."

                    filepath = os.path.join(install_dir, "goldstone/urls.py")

                    with open(filepath, 'a') as f:
                        f.write(addon_install["urlpatterns"])

                    # Now move the add-on's JavaScript and CSS files, and
                    # insert the script and link tags.
                    error = "copying the static files. You best check them, " \
                            "and base.html's script tag."

                    _install_addon_static(name, addon_install, install_dir)

                    # Finally, expire all user tokens to force users to
                    # re-login, which will reset their client-side localStorage
                    # 'addons' object.
                    error = "trying to invalidate user tokens. You " \
                            "must clear the Token table."

                    Token.objects.all().delete()

                except Exception as exc:  # pylint: disable=W0703
                    # Oops!  Tell the user what happened, because they'll have
                    # to unwind things manually.
                    error = "%s while " + error
                    abort(red(error % exc))
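install_addon() edits settings/base.py by searching for module-level marker strings (INSTALLED_APPS_START, CELERYBEAT_SCHEDULE, and so on) that are defined outside this excerpt. A hedged sketch of what they might look like, inferred only from how they are used with str.index(), str.format(), and str.count():

INSTALLED_APPS_START = "INSTALLED_APPS = ("
INSTALLED_APP = "    '%s',"                      # one line per installed add-on
CELERYBEAT_SCHEDULE = "CELERYBEAT_SCHEDULE = {"
CELERYBEAT_APPS = "    # User-installed add-on tasks are inserted below."
CELERYBEAT_APP_INCLUDE = "    # {0} tasks\n"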
Example #44
0
def restart():
    "Restart neo4j-service."
    # NB: This doesn't use sudo() directly because it insists on asking
    # for a password, even though we should have NOPASSWD in visudo.
    if confirm("Restart Neo4j server?"):
        run('sudo service %(service_name)s restart' % env)
Example #45
0
def test():
    with settings(warn_only=True):
        ## result = local('./manage.py test {0}'.format(' '.join(env.django_apps)), capture=True)
        result = local('pipenv run python -Wall ./manage.py test')
    if result.failed and not confirm("Tests failed. Continue anyway?"):
        abort("Aborting at user request.")
Example #46
0
def remove_addon(
        name,  # pylint: disable=R0914,R0915
        settings=PROD_SETTINGS,
        install_dir=INSTALL_DIR,
        interactive=True):
    """Remove a user add-on.

    :param name: The add-on's installation name
    :type name: str
    :keyword settings: The path of the Django settings file to use.
    :type settings: str
    :keyword install_dir: The path to the Goldstone installation directory.
    :type install_dir: str

    """
    from django.core.exceptions import ObjectDoesNotExist

    # Switch to the right environment because we're going to access the
    # database.
    with _django_env(settings, install_dir):
        from goldstone.addons.models import Addon
        from rest_framework.authtoken.models import Token

        # Get the add-on's row.
        try:
            row = Addon.objects.get(name=name)
        except ObjectDoesNotExist:
            fastprint(red("The add-on \"%s\" isn't in the table.\n" % name))
            sys.exit()

        if not interactive or \
                confirm(cyan('We will remove the %s add-on. Proceed?' % name),
                        default=False):
            try:
                # First, delete the row.
                error = "updating the Addon table. Check it."
                row.delete()

                # Now remove its root node, and any inferior nodes, from the
                # resource graph.
                error = "importing %s" % name

                the_app = import_module("%s.models" % name)
                remove_nodes = next((x[1]
                                     for x in getmembers(the_app, isfunction)
                                     if x[0] == "remove_nodes"), None)
                if not remove_nodes:
                    error = "looking for remove_nodes()"
                    raise Exception

                error = "calling remove_nodes()"
                remove_nodes()

                # Now remove the add-on from INSTALLED_APPS. SED is scary, so
                # we'll use Python instead.
                error = "reading base.py. The Addon table was " \
                        "modified. You must manually edit settings/base.py " \
                        "and urls.py, and remove the base.html script tag, " \
                        "and delete the add-on's static directory."

                filepath = os.path.join(install_dir,
                                        "goldstone/settings/base.py")

                with open(filepath) as f:
                    filedata = f.read()

                # Find the INSTALLED_APPS tuple. Then find the start of the
                # line for this add-on, and the line after it.
                insert = filedata.index(INSTALLED_APPS_START)
                insert = filedata.index(INSTALLED_APP % name, insert)
                end = filedata.index('\n', insert) + 1

                # Delete the line.
                filedata = filedata[:insert] + filedata[end:]

                # Now find CELERYBEAT_SCHEDULE, and the start of the
                # user-installed apps section. We do both to maximize the
                # probability of doing this correctly. Then, find the beginning
                # of the line that starts this add-on's task entries, and the
                # beginning of the line after the end of the task entries.
                insert = filedata.index(CELERYBEAT_SCHEDULE)
                insert = filedata.index(CELERYBEAT_APPS, insert)

                insert = filedata.index(CELERYBEAT_APP_INCLUDE.format(name),
                                        insert)
                end = insert
                for _ in range(CELERYBEAT_APP_INCLUDE.count('\n')):
                    end = filedata.index('\n', end) + 1

                filedata = filedata[:insert] + filedata[end:]

                # Update the file.
                error = "writing base.py. The Addon table was " \
                        "modified. You must manually edit settings/base.py " \
                        "and urls.py, and remove the base.html script tag, " \
                        "and delete the add-on's static directory."

                with open(filepath, 'w') as f:
                    f.write(filedata)

                # Now delete the add-on from the URLconf.
                error = "writing urls.py. The Addon table and " \
                        "settings/base.py were updated. You must edit " \
                        "urls.py, and remove the base.html script tag, and " \
                        "delete the add-on's static directory."

                filepath = os.path.join(install_dir, "goldstone/urls.py")

                with open(filepath, 'r') as f:
                    filedata = f.read()

                insert = filedata.index(URLS_PY.format(name, row.url_root))

                end = insert
                for _ in range(URLS_PY.count('\n')):
                    end = filedata.index('\n', end) + 1

                filedata = filedata[:insert] + filedata[end:]

                with open(filepath, 'w') as f:
                    f.write(filedata)

                # Now remove the client's static files, and its base.html
                # script and link tags.
                error = "removing static files. You must delete the " \
                        "add-on's static directory."

                _remove_addon_static(name, install_dir)

                # Finally, expire all user tokens to force users to re-login,
                # which will reset their client-side localStorage 'addons'
                # object.
                error = "trying to invalidate user tokens. You " \
                        "must clear the Token table."

                Token.objects.all().delete()

            except Exception as exc:  # pylint: disable=W0703
                # Oops! Tell the user what happened, because they'll have to
                # unwind things manually.
                error = "%s while " + error
                abort(red(error % exc))
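remove_addon() likewise locates the add-on's URLconf entry through a URLS_PY template defined elsewhere. A hedged sketch of its possible shape, inferred only from URLS_PY.format(name, row.url_root) and URLS_PY.count('\n'); the exact pattern is an assumption.

URLS_PY = (
    "\n# URLs for the {0} add-on.\n"
    "urlpatterns += patterns('', url(r'^{1}/', include('{0}.urls')))\n"
)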
Example #47
0
def build_war():
    with lcd('%s/threadfix' % local_working_folder_loc):
        res = local('mvn package')
    if res.failed and confirm('Maven failed to build the WAR file. Abort recommended. Abort?'):
        abort('Aborting because Maven failed.')
Example #48
0
def restartchesster():
    if confirm(colors.red("Would you like to restart chesster?")):
        sudo("/usr/sbin/service chesster restart", shell=False)
def deploy(apps, tag='HEAD', version=None):
    """
    Deploy to one or more applications
    (semicolon-separated values or 'all').
    """
    # get app definitions
    app_defns = get_app_definitions(apps.split(';'))

    # before doing anything, check if *all* apps are ok to deploy to
    for app_defn in app_defns:
        ok_to_deploy(app_defn, tag)

    # check/rewrite tag
    available_tags = local('git tag -l', capture=True).splitlines()
    if tag == 'HEAD':
        # rewrite HEAD to a tag if available
        head_tags = local('git tag -l --contains=HEAD', capture=True)
        if head_tags:
            tag = head_tags.splitlines()[0]
        else:
            warn("HEAD is not tagged with a version.")
    elif tag not in available_tags:
        abort("Unknown tag '%s'" % tag)

    # get commit hash
    commit = local('git rev-parse %s' % tag, capture=True)

    # decide GAE app version to use
    if version:
        gae_app_version = version
    else:
        current_branch = get_current_branch()
        if current_branch == env.master_branch:
            gae_app_version = env.default_gae_app_version
        else:
            gae_app_version = current_branch

    # state options
    print "\nSelected deployment options:\n"
    print "Tag:             %s" % tag
    print "GAE app version: %s" % gae_app_version
    print "Apps:            %s" % ', '.join(
        app_defn['application'] for app_defn in app_defns
    )
    print

    # if deploying to all, check
    if apps == 'all':
        if not confirm("Deploy to ALL apps, excluding sandbox? Are you sure?", default=False):
            abort("Deploy to all except sandbox unconfirmed")

    # clear old build dirs
    clear_build_dirs()

    # build to deployment, using git archive
    build_dir = mkdtemp(prefix=BUILD_DIR_PREFIX)
    print "Building to %s ..." % build_dir
    local("git archive %s | tar -x -C %s" % (tag, build_dir))
    print "Changing pwd to %s ..." % build_dir
    os.chdir(build_dir)

    # deploy to all specified apps
    for app_defn in app_defns:
        print "\nDeploying to %s...\n" % app_defn['application']
        print "Writing app.yaml..."
        write_app_yaml(app_defn, gae_app_version=gae_app_version)
        perform_overwrites(app_defn)
        write_deployment_version(app_defn, tag, commit)
        print "Starting GAE update..."
        update()  # call GAE appcfg
        delete_app_yaml()

    # output success message
    print "\nSuccessfully deployed to %s." % ', '.join(
        app_defn['application'] for app_defn in app_defns
    )
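deploy() calls a get_current_branch() helper that is not shown in this excerpt. A minimal sketch of such a helper; only the name comes from the call site.

from fabric.api import local

def get_current_branch():
    """Return the name of the branch currently checked out locally."""
    return local('git rev-parse --abbrev-ref HEAD', capture=True).strip()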
Example #50
0
def make_sphinx_branch():
    """
    Create a new branch with Sphinx documentation ready to be published
    using GitHub's Pages system.
    
    Example usage:
    
        $ fab make_sphinx_branch
    
    Before you can publish your docs, you need to commit them to the repo.
    
        $ git add .
        $ git commit -am "First commit"
    
    Then publish the files by pushing them up to GitHub.
    
        $ git push origin gh-pages
    
    Then the docs will appear on GitHub at:
    
        http://<your_account_name>.github.com/<your_repo_name>/
    
    """
    # Create the new branch
    local("git branch gh-pages")
    # Move into it
    local("git checkout gh-pages")
    # Install sphinx
    local("pip install sphinx")
    # Save the dependencies to the requirements file
    local("pip freeze > requirements.txt")
    # Warn the user of a quirk before configuring with Sphinx
    confirm(""".    ___ ___ _     ___ ___  _       
  /\  |   | |_ |\ | |   |  / \ |\ | 
 /--\ |   | |_ | \| |  _|_ \_/ | \| 
                                    
Sphinx is about to start configuring your project.

You can accept the default settings it offers, EXCEPT ONE.

The second question it will ask is:

'Separate source and build directories (y/N) [n]:'

YOU MUST ANSWER YES. THAT MEANS YOU TYPE 'Y' AND PRESS ENTER.

DO YOU UNDERSTAND?""")
    # Start up a Sphinx project
    local("sphinx-quickstart")
    # Create the .nojekyll file GitHub requires
    local("touch .nojekyll")
    # Make the patches to Sphinx's Makefile we need
    local("echo '' >> Makefile")
    local("echo 'BUILDDIR      = ./' >> Makefile")
    local("echo '' >> Makefile")
    local("echo 'html:' >> Makefile")
    local(
        "echo '	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)' >> Makefile"
    )
    local("echo '	@echo' >> Makefile")
    local(
        "echo '	@echo \"Build finished. The HTML pages are in $(BUILDDIR)\"' >> Makefile"
    )
    # Make the branch for the first time
    local("make html")
Example #51
0
 def install_virtualenv(self):
     result = run(self.env['workon_remote'] + 'pip install -r %s/d/hot/requirements.txt' % self.env['rpath'])
     if result.failed and not confirm('Virtualenv sync failure! Continue?'):
         abort('Aborting')
Example #52
0
def clone_code():
    with settings(warn_only=True):
        result = local('git clone %s' % source_code_loc)
    if result.failed and confirm('Source code could not be found. Abort recommended. Abort?'):
        abort('Aborting because source code not found.')
Example #53
0
def get_file():
    print(yellow('get file ...'))
    with settings(warn_only=True):
        result = get('/home/jinrong', './')
    if result.failed and not confirm('get file failed, Continue[Y/N]?'):
        abort('Aborting file get task!')
Example #54
0
 def prepare_local(self):
     with settings(warn_only=True):
         result = local('git diff-index --quiet HEAD --', capture=False)
         if result.failed and not confirm("Uncommitted changes. Deploy anyway?"):
             abort("Aborting at user request.")
Example #55
0
 def _confirm_step(func, *args, **kwargs):
     if quite or confirm(cyan("Run step %s?" % func.__name__)):
         func(*args, **kwargs)
Example #56
0
 def update_virtualenv(self):
     result = run(self.env['workon_remote'] + 'pip install -r %s/d/current/requirements.txt -U' % self.env['rpath'])
     if result.failed and not confirm('Virtualenv sync failure! Continue?'):
         abort('Aborting')
     self.reload_remote()
def backup_db(stage, database, name):
    """
    Backups and pg db and restores it locally (fab settings:stage=staging,database=db_1 backup_db)
    """

    # Generate Filename
    timestamp = current_milli_time()
    backup_file = "{databaseName}-{stage}-snapshot-{timestamp}".format(
        databaseName=name, stage=stage, timestamp=timestamp)

    # Generate local Backup Folder
    local(
        'mkdir -p {backupFolder}'.format(backupFolder=database['backup_dir']))

    # Remote Backup Folder
    _run('mkdir -p /tmp/backups/database')

    # Backup Command
    backup_command = " ".join(
        map(str, [
            "PGPASSWORD={remotePassword}".format(
                remotePassword=database['remote_password']), "pg_dump",
            "-p {port}".format(port=database['remote_port']),
            "-h {host}".format(host=database['remote_host']),
            "-U {user}".format(user=database['remote_user']), "-F c -b -v",
            "-f /backups/{backup_file}".format(backup_file=backup_file),
            "{databaseName}".format(databaseName=database['remote_database'])
        ]))

    # Docker Backup Command
    command = " ".join(
        map(str, [
            "docker", "run", "-v /tmp/backups/database:/backups", "-it",
            database['image'], "sh", "-c",
            "\"{backup_command}\"".format(backup_command=backup_command)
        ]))

    # Run Command
    _run(command)

    # Get the Backup
    if stage != 'local':
        get(
            '/tmp/backups/database/{backup_file}'.format(
                backup_file=backup_file), database['backup_dir'])

    # Restore the local database
    if console.confirm(
            "Do you want to replace your local '{databaseName}' databases".
            format(databaseName=database['local_database'])):
        local("dropdb -U {user} {databaseName}".format(
            user=database['local_user'],
            databaseName=database['local_database']))
        local("createdb -U {user} {databaseName}".format(
            user=database['local_user'],
            databaseName=database['local_database']))
        restore_command = " ".join(
            map(str, [
                "PGPASSWORD={remotePassword}".format(
                    remotePassword=database['local_password']), "pg_restore",
                "-p {port}".format(port=database['local_port']),
                "-U {user}".format(user=database['local_user']),
                "-d {databaseName}".format(
                    databaseName=database['local_database']),
                "-v {backupFolder}/{backup_file}".format(
                    backupFolder=database['backup_dir'],
                    backup_file=backup_file)
            ]))

        local(restore_command)
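backup_db() expects each database entry to be a mapping with remote and local connection details. A hedged sketch of the shape it assumes; the key names come from the lookups above, the values are placeholders.

DATABASES = {
    'db_1': {
        'image': 'postgres:9.6',                 # docker image used for pg_dump
        'backup_dir': '/tmp/backups/database',   # local download directory
        'remote_host': 'db.example.com',
        'remote_port': 5432,
        'remote_user': 'app',
        'remote_password': 'secret',
        'remote_database': 'app_production',
        'local_port': 5432,
        'local_user': 'postgres',
        'local_password': 'postgres',
        'local_database': 'app_dev',
    },
}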
Example #58
0
def putfile():
    print(blue('put file...'))
    with settings(warn_only=True):
        result = put('jinrong.py', '/home')
    if result.failed and not confirm('put file failed, Continue[Y/N]?'):
        abort('Aborting file put task!')
Example #59
0
def put_task():
    run("mkdir -p /tmp/install")
    with settings(warn_only=True):
        result = put(lpackpath, rpackpath)  # upload the installation package
    if result.failed and not confirm("put file failed, Continue[Y/N]?"):
        abort("Aborting file put task!")
Example #60
0
def test():
    with settings(warn_only=True):
        result = local('reboot', capture=True)
        if result.failed and not confirm("Tests failed. Continue anyway?"):
            abort("Aborting at user request.")