Example #1
0
def status():
    """Check the status of the mongo cluster.

    Runs the replication-info shell commands on the primary, then prints a
    per-member summary of ``rs.status()`` fields.
    """
    with hide('output'), settings(host_string=_find_primary()):
        print(colors.blue(
            "Primary replication info - db.printReplicationInfo()",
            bold=True))
        print(run(mongo_command("db.printReplicationInfo()")))

        print(colors.blue(
            "Slave replication info - db.printSlaveReplicationInfo()",
            bold=True))
        print(run(mongo_command("db.printSlaveReplicationInfo()")))

        print(colors.blue(
            "Replication status - rs.status()",
            bold=True))
        # Loop variable renamed from ``status`` so it no longer shadows this
        # function's own name.
        for member in get_cluster_status():
            print(colors.cyan(
                "{} - {}".format(member['name'], member['state'])))

            keys = ['health', 'uptime', 'optime', 'optimeDate',
                    'lastHeartbeat', 'lastHeartbeatRecv',
                    'lastHeartbeatMessage']
            for key in keys:
                # .get(): some members (e.g. the one we are connected to)
                # may omit heartbeat fields; a plain lookup would KeyError.
                if member.get(key):
                    print("{0:<22} {1}".format(key, member[key]))
Example #2
0
File: tools.py Project: gipi/parcel
def rsync(sources, dest, rsync_ignore=None, color_files=True):
    """Push *sources* to *dest* on the remote host via rsync.

    sources: path or list of paths to copy.
    dest: remote destination directory (created if missing).
    rsync_ignore: optional exclude-from file.
    color_files: when True, capture output and print the header in blue.
    """
    if isinstance(sources, str):
        sources = [sources]
    run('mkdir -p "%s"' % dest)

    command = ['rsync', '-av']
    command.extend("'%s'" % s for s in sources)
    command.append("'%s@%s:%s'" % (env.user, env.host, dest))

    if rsync_ignore and os.path.isfile(rsync_ignore):
        command.append('--exclude-from=%s' % rsync_ignore)

    if not color_files:
        return local(" ".join(command))

    data = local(" ".join(command), capture=True)
    # Drop the "sending incremental file list" header line.
    lines = data.splitlines()[1:]
    i = 0
    # Bounds check added: the original `while lines[i]` raised IndexError
    # when the output contained no blank separator line (or was empty).
    while i < len(lines) and lines[i]:
        print(blue(lines[i]))
        i += 1
    for line in lines[i:]:
        print(line)
Example #3
0
def deploy():
    """Update the remote checkout from master and reboot the box."""
    print(green("Begining update..."))
    print("")

    print(blue('Checking pre-requisites...'))

    print(cyan("Checking for local changes..."))
    has_changes = local("git status --porcelain", capture=True)
    if REQUIRE_CLEAN and has_changes:
        abort(red("Your working directory is not clean."))

    print(cyan("Ensuring remote working area is clean..."))
    git_cmd = "git --work-tree={0} --git-dir={0}/.git".format(DEPLOY_PATH)
    if run(git_cmd + " status --porcelain"):
        abort(red("Remote working directory is not clean."))

    print(blue("Finished checking pre-requisites."))
    print("")

    print(green("Starting deployment..."))
    print("")

    print(green("Updating environment..."))
    with cd(DEPLOY_PATH):
        print(cyan("Pulling from master"))
        run('git pull')
        # Reboot applies the update.
        sudo('reboot')
Example #4
0
def list_disk_usage(server_name):
    """Print disk usage for *server_name*: free space, then the top
    consumers in ~/tmp, /tmp and ~web/tmp."""
    ctx = _get_server(server_name)
    server = CurrentServer(ctx)
    usage = server.get_disk_usage()

    print(blue('free space:'))
    term.printDebug('usage.total: %s' % (usage.total))
    print(blue(usage.total))
    print('')

    print(magenta('used in ~/tmp:'))
    print(magenta(get_top_du(usage.user_tmp)))

    print(magenta('used in /tmp:'))
    print(magenta(get_top_du(usage.root_tmp)))

    print()
    print('web:')
    print(magenta('used in ~web/tmp:'))
    print(magenta(get_top_du(usage.web_tmp)))
Example #5
0
def get_previous_version_tag():
    """
    Get the version of the previous release
    """
    # We try, probably too hard, to portably get the number of the previous
    # release of SymPy. Our strategy is to look at the git tags.  The
    # following assumptions are made about the git tags:

    # - The only tags are for releases
    # - The tags are given the consistent naming:
    #    sympy-major.minor.micro[.rcnumber]
    #    (e.g., sympy-0.7.2 or sympy-0.7.2.rc1)
    # In particular, it goes back in the tag history and finds the most recent
    # tag that doesn't contain the current short version number as a substring.
    shortversion = get_sympy_short_version()
    curcommit = "HEAD"
    with cd("/home/vagrant/repos/sympy"):
        while True:
            # Most recent tag reachable from curcommit.
            curtag = run("git describe --abbrev=0 --tags " + curcommit).strip()
            if shortversion in curtag:
                # If the tagged commit is a merge commit, we cannot be sure
                # that it will go back in the right direction. This almost
                # never happens, so just error
                parents = local("git rev-list --parents -n 1 " + curtag,
                    capture=True).strip().split()
                # rev-list prints the current commit and then all its parents
                # (a merge commit yields 3+ entries, tripping the assert).
                assert len(parents) == 2, curtag
                curcommit = curtag + "^" # The parent of the tagged commit
            else:
                print blue("Using {tag} as the tag for the previous "
                    "release.".format(tag=curtag), bold=True)
                return curtag
        # NOTE(review): unreachable — the `while True` above only exits via
        # the `return`; this error line can never execute as written.
        error("Could not find the tag for the previous release.")
Example #6
0
def build():
    """
    Compile JS sources, skipping the build when the output is newer
    than the input listed in plovr-config.js.
    """
    from os import stat
    from json import load

    with open('plovr-config.js') as fp:
        cfg = load(fp)

    src = cfg['inputs']
    out = cfg['output-file']

    src_stat = stat(src)

    # Rebuild when there is no output yet, or the input is newer.
    if exists(out):
        needs_build = src_stat.st_mtime > stat(out).st_mtime
    else:
        needs_build = True

    if not needs_build:
        print(clr.blue(src),
              clr.green('is older than'),
              clr.blue(out),
              clr.green('Assuming it has not changed and skipping closure-build!'))
        return

    print(clr.blue(src),
          clr.green('has changed'),
          clr.green('recompiling to'),
          clr.blue(out))

    fab.local('java -jar __libs__/{} build plovr-config.js'.format(PLOVR))
Example #7
0
def rsync(sources, dest, rsync_ignore=None, color_files=True):
    """Push *sources* to *dest* on the remote host via rsync.

    Honors a non-default SSH port from ``env.port`` (useful with Vagrant).

    sources: path or list of paths to copy.
    dest: remote destination directory (created if missing).
    rsync_ignore: optional exclude-from file.
    color_files: when True, capture output and print the header in blue.
    """
    if isinstance(sources, str):
        sources = [sources]
    run('mkdir -p "%s"' % dest)

    command = ['rsync', '-av']
    # Support ports other than the default. Useful for when
    # working with Vagrant.
    if env.port:
        command.append('-e "ssh -p {}"'.format(env.port))
    command.extend("'%s'" % s for s in sources)
    command.append("'%s@%s:%s'" % (env.user, env.host, dest))

    if rsync_ignore and os.path.isfile(rsync_ignore):
        command.append('--exclude-from=%s' % rsync_ignore)

    if not color_files:
        return local(" ".join(command))

    data = local(" ".join(command), capture=True)
    # Drop the "sending incremental file list" header line.
    lines = data.splitlines()[1:]
    i = 0
    # Bounds check added: the original `while lines[i]` raised IndexError
    # when the output contained no blank separator line (or was empty).
    while i < len(lines) and lines[i]:
        print(blue(lines[i]))
        i += 1
    for line in lines[i:]:
        print(line)
Example #8
0
def env_init(site_name=SITE_NAME):
    '''Initialize with this site hostname.

    Generates a random SECRET_KEY and rewrites the SECRET_KEY and SITE_NAME
    lines in <APP_NAME>/config.py in place via sed. Exits with status 1 if
    either sed invocation fails.
    '''
    print(green("Initializing new site configuration..."))

    #
    # Generate secret key and update config file
    #
    import random
    import string

    # 50-character key drawn from letters and digits.
    # NOTE(review): random is not a CSPRNG; secrets.choice would be the
    # security-correct source for a secret key — confirm whether that matters
    # for this deployment.
    CHARS = string.ascii_letters + string.digits
    SECRET_KEY = "".join([random.choice(CHARS) for i in range(50)])

    print(blue("Configuring the secret key..."))
    os.chdir(PROJ_DIR)
    try:
        # NOTE(review): "-i " carries a trailing space — GNU sed may treat
        # that as an in-place backup suffix; verify the intended sed flavor.
        sh.sed("-i ",
               "s/SECRET_KEY *=.*/SECRET_KEY = '{0}'/g".format(SECRET_KEY),
               "{0}/config.py".format(APP_NAME))
    except sh.ErrorReturnCode:
        print(red("Could not configure SECRET_KEY for config.py"))
        exit(1)

    #
    # Set the site name, the user defined site hostname
    #
    print(blue("Configuring the SITE_NAME '{0}'.".format(site_name)))
    try:
        sh.sed("-i ",
               "s/SITE_NAME *=.*/SITE_NAME = '{0}'/g".format(site_name),
               "{0}/config.py".format(APP_NAME))
    except sh.ErrorReturnCode:
        print(red("Could not configure SITE_NAME for config.py"))
        exit(1)
def sync():
    """Fast-forward the remote checkout to origin/<env.branch>."""
    print blue('* Sync')
    with cd(env.project_dir):
        with hide('running', 'stdout', 'stderr'):
            # Trailing comma (Python 2) suppresses the newline so "DONE"
            # is appended to this line once the pull succeeds.
            print green("  Sync code from remote repository into branch `{}`...".format(env.branch)),
            run('git pull --ff-only -u origin {}'.format(env.branch))
            print green("DONE")
def setup():
    """
        Installs flask, mako and sqlalchemy using pip
    """
    # Flask itself, straight from PyPI.
    print(yellow("Installing latest Flask using pip... "))
    local("pip install flask")
    print(blue("Finished installing flask"))

    # Flask-Mako comes from github rather than PyPI.
    print(yellow("Downloading Mako templates for Flask from github... "))
    local("git clone https://github.com/tzellman/flask-mako.git")
    print(blue("Finished downloading Mako templates for Flask"))

    print(yellow("Installing Mako templates for Flask using tzellman's project on github... "))
    with lcd("flask-mako"):
        local("python setup.py install")

    # Clean up the clone once installed.
    print(yellow("Clearing setup files for Mako-Flask... "))
    local("rm -rf flask-mako")
    print(blue("Finished installing Mako templates for Flask"))

    # SQLAlchemy integration.
    print(yellow("Installing Flask-SQLAlchemy... "))
    local("pip install flask-sqlalchemy")
    print(blue("Finished installing Flask-SQLAlchemy"))

    print(blue("Finished setup"))
Example #11
0
def voidtx(onetx, status):
    """Void a transaction on the database host.

    onetx: the transaction id to void.
    status: 'progress' routes through sqlplus + void_txn.sql; anything else
    goes through void_xt_pending.sh. Outcome is reported as PASS/FAIL.
    """
    #with settings(hide('warnings','running','stderr','status'),warn_only=True):
        #because GI and GY difference
    if status == 'progress':
        '''
        with shell_env(ORACLE_BASE='/usr/pkg/oracle',
                       ORACLE_DB='rgsgib',
                       ORACLE_HOME='/usr/pkg/oracle/product/10.2.0/db',
                       ORACLE_SID='rgsgib2',
                       ORA_CRS_HOME='/usr/pkg/oracle/product/10.2.0/crs',
                       ORA_NLS10='/usr/pkg/oracle/product/10.2.0/db/nls/data',
                       PATH='/usr/pkg/oracle/product/10.2.0/db/bin:/usr/kerberos/bin:/usr/local/bin:/bin:/usr/bin:/oracle/scripts:/usr/pkg/oracle/product/10.2.0/db/OPatch'):
        '''
        with cd('/oracle/scripts'):
            try:
                sudo('/usr/pkg/oracle/product/10.2.0/db/bin/sqlplus rgsapp/rgsapp @/oracle/scripts/void_txn.sql %s' % onetx, user=db_user)
                print('\t Voiding IN PROGRESS Transaction ID %s [%s]' % (onetx, blue("PASS")))
            # Bare except is deliberate: fabric's abort() raises SystemExit,
            # which `except Exception` would not catch.
            except:
                print('\t Voiding IN PROGRESS Transaction ID %s [%s]' % (onetx, red("FAIL")))
    else:
        with cd('/oracle/scripts'):
            try:
                sudo('./void_xt_pending.sh %s' % onetx, user=db_user)
                # Fixed copy-paste: this branch voids PENDING transactions,
                # not IN PROGRESS ones.
                print('\t Voiding PENDING Transaction ID %s [%s]' % (onetx, blue("PASS")))
            except:
                print('\t Voiding PENDING Transaction ID %s [%s]' % (onetx, red("FAIL")))
def backup(label=None):
    """Snapshot the git revision and database into a timestamped directory.

    label: optional human-readable name; when given, a symlink under
    `labels/` is created pointing at the exact backup directory.
    """
    print blue('* Backup')
    now = datetime.datetime.now()
    # e.g. <backup_dir>/2024/01/31/14-05
    backup_dir_exact = os.path.join(env.backup_dir, '{}'.format(now.strftime('%Y/%m/%d/%H-%M')))

    def backup_path(filename):
        # Absolute path of a file inside this backup's directory.
        return os.path.join(backup_dir_exact, filename)

    with hide('stderr'):
        with cd(env.backup_dir):
            run('mkdir -p {}'.format(backup_dir_exact))
            if label:
                run('mkdir -p labels')
                run('ln -s {} labels/{}-{}'.format(backup_dir_exact, label.replace(' ', '-'), now.strftime('%Y-%m-%d-%H%M')))

        with cd(env.project_dir):
            # IMPORTANT: If new files will be added to backup, it is necessary to increase
            # `BACKUP_FILES` constant in `backup_maintenance` function.
            # Trailing commas on the prints below are Python 2 syntax that
            # keeps "DONE" on the same line as the progress message.
            print green("  Git revision... "),
            run('git rev-parse HEAD > {}'.format(backup_path('git.id')))
            print green("DONE")

            print green("  Database... "),
            if env.name == 'production':
                run('pg_dump -O -c {} | gzip -4 > {}'.format(env.db_name, backup_path('database.dump.gz')))
            # elif env.name == 'development':
            #     pass
            else:
                print red("Unknown environment")
            print green("DONE")

    run('ls -lah {}'.format(backup_dir_exact))
Example #13
0
def deploy():
    """
    Push to Heroku to deploy.
    If migrations need to be run, put the app in maintenance mode
    and run migrations.

    Running collectstatic during the build is disabled, since we need to use
    different settings (staging with S3PipelineStorage) to deploy static files
    to S3. We manually do this on a one-off dyno instead.
    """

    puts(blue("Deploying to Heroku."))
    local('git push heroku HEAD:master')

    # Run collectstatic on Heroku with the staging environment
    set_staging = 'DJANGO_SETTINGS_MODULE=coursereviews.settings.staging'
    collectstatic_cmd = 'python manage.py collectstatic --noinput'
    local('heroku run {0} {1}'.format(set_staging, collectstatic_cmd))

    puts(blue('Checking for migrations.'))
    migrations = local('heroku run python manage.py showmigrations --plan', capture=True)
    # An unapplied migration is listed as "[ ]  app.0001_...". The previous
    # check ("'[X]' not in line") also matched blank/banner lines, so it
    # always entered maintenance mode even with nothing to migrate.
    if any('[ ]' in line for line in migrations.splitlines()):
        local('heroku maintenance:on')
        local('heroku run python manage.py migrate')
        local('heroku maintenance:off')
    else:
        puts(blue('Nothing to migrate.'))

    local('heroku ps')
Example #14
0
def capture_db():
    """
    Create a backup of the production database using Heroku PG Backups.
    Download the backup to a PostgreSQL dump file.
    Restore the backup to the locally running PostgreSQL instance.
    Optionally remove the dump file.
    """

    # We follow the flow outlined here:
    # https://devcenter.heroku.com/articles/heroku-postgres-import-export#export
    stamp = datetime.now().replace(microsecond=0)
    dump_name = 'middcourses-{0}'.format(stamp.isoformat())

    puts(blue('Creating a backup.'))
    local('heroku pg:backups capture')

    puts(blue('Downloading the backup.'))
    local('curl -o {0}.dump `heroku pg:backups public-url`'.format(dump_name))

    puts(blue('Restoring the database to PostgreSQL.'))
    local('createdb {0}'.format(dump_name))
    local('pg_restore --no-acl --no-owner -h localhost -U {0} -d {1} "{1}.dump"'.format(env.user, dump_name))  # noqa

    puts(green('PostgreSQL database {0} created.'.format(dump_name)))

    # Offer to clean up the local dump file.
    if confirm('Remove "{0}.dump"?'.format(dump_name)):
        local('rm {0}.dump'.format(dump_name))
Example #15
0
def install(target='all'):
    """Install development tooling on the local machine.

    target: 'all' runs every installer defined below; otherwise the name of
    one nested installer ('rbenv', 'nodebrew', 'python', 'my_setting').
    """
    def rbenv():
        # Ruby 2.0.0-p247 via rbenv, plus bundler.
        local('rbenv install 2.0.0-p247')
        local('rbenv rehash')
        local('rbenv local 2.0.0-p247')
        local('sudo gem install bundler')

    def nodebrew():
        local('curl -L git.io/nodebrew | perl - setup')
        # NOTE(review): `source` runs in local()'s subshell, so it cannot
        # affect this process or subsequent local() calls — verify intent.
        local('source ~/.bash_profile')
        local('nodebrew install v0.10.x')

    def python():
        # pip + virtualenv tooling, only when pip is missing.
        if not is_command_exist('pip'):
            local('sudo easy_install pip')
            local('sudo pip install virtualenv virtualenvwrapper')
            local('sudo pip install sphinx sphinxcontrib-blockdiag')

    def my_setting():
        local('git clone [email protected]:satoshun/utility.git {0}/utility'.format(git_dir))

    if target == 'all':
        # Reflection dispatch: every callable local at this point is an
        # installer (the `target` string fails the callable() check).
        # Renaming or adding callables here changes what gets run.
        for name, func in locals().items():
            if callable(func):
                print(blue('call: ' + name))
                func()
    else:
        # Raises KeyError when `target` names no nested installer.
        print(blue('call: ' + target))
        locals()[target]()
Example #16
0
def deploy():
    """ updates the chosen environment """

    # Show what the remote checkout looks like before touching it.
    print(red(" * checking remote status..."))
    run("cd %s && git status" % env.path)
    if prompt(blue("Commit and push changes (y/n)?"), default="y") == "y":
        print(red(" * commiting and pushing provise code..."))
        run('cd %s && git commit -am "translations" && git pull && git push' % env.path)

    print(red(" * pushing commits..."))
    local("git pull && git push")

    print(red(" * updating code..."))
    run("cd %s && git pull" % env.path)

    if prompt(blue("Update packages (y/n)?"), default="y") == "y":
        print(red(" * updating packages..."))
        run("cd %s && source venv/bin/activate && pip install -r requirements.txt --upgrade && pip install -r server_requirements.txt --upgrade" % env.path)

    if prompt(blue("Migrate database schema (y/n)?"), default="y") == "y":
        print(red(" * migrating database schema..."))
        run("cd %s && source venv/bin/activate && python manage.py migrate" % env.path)

    # Asset pipeline only matters outside debug mode.
    if not env.debug:
        print(red(" * compiling less files..."))
        run("cd %s && lessc --clean-css website/static/less/style.less website/static/css/style.min.css" % env.path)

        print(red(" * collecting static files..."))
        run("cd %s && source venv/bin/activate && python manage.py collectstatic --noinput" % env.path)
Example #17
0
def fix_solr():
    """Regenerate the solr schema and bounce jetty (work in progress)."""
    with cd('/var/tmp'):
        print(blue('pulling new code...'))
        sudo('/etc/init.d/jetty stop')
        sleep(5)
        # run('rm -rf /opt/deploy/solr/collection1')

        print(blue('copying new code...'))
        # run('mkdir -p /opt/deploy/solr/collection1')
        # run("cp -r oclapi/solr/collection1/conf /opt/deploy/solr/collection1")

    with cd("/opt/deploy/ocl_api/ocl"):
        # there is no need for this, settings.py.eploy is actually wrong?
        # run("cp settings.py.deploy settings.py")
        with prefix('source /opt/virtualenvs/ocl_api/bin/activate'):
            with prefix('export DJANGO_CONFIGURATION="Production"'):
                with prefix('export DJANGO_SECRET_KEY="blah"'):
                    # Slow step: this pulls down django-norel.
                    run('./manage.py build_solr_schema > ' +
                        '/opt/deploy/solr/collection1/conf/schema.xml')
    sleep(5)
    sudo('/etc/init.d/jetty start')
Example #18
0
def muni_setup():
    """ Installs utilities on the target VPS with a muni flavor"""

    # SSH access first so the remaining steps can reach the host.
    print(blue('Add your own public key to authorized hosts'))
    ssh.add_authorized_key()

    print(blue('Generate the ssh config file to connect to all magnet hosts'))
    ssh.generate_config_file()

    # Base system: package upgrade + NTP before anything else.
    print(blue('upgrade installed packages'))
    admin.full_upgrade(ask_confirmation=False)

    print(blue('NTP installation and configuration'))
    admin.install_ntp()

    # Shared utility set for the remote user we are running as.
    install_utils(run('whoami'))

    print(blue('install vim-gtk'))
    utils.deb.install('vim-gtk')

    print(blue('install zsh theme: powerline'))
    zsh.install_theme('powerline')

    # Ruby toolchain, then rails on top of it.
    print(blue('install ruby'))
    ruby.install()

    print(blue('install rails'))
    ruby.install_rails()

    print(blue('install git-smart'))
    run('gem install git-smart')

    print(blue('install zsh muni flavor'))
    zsh.install_flavor('muni')
Example #19
0
def start_clientx(x = 5):
    """Launch *x* client processes on the host, pointed at the master.

    NOTE(review): invoked from the fab CLI, `x` arrives as a string; both
    uses below only interpolate it into the shell command, so that works.
    """
    if not exists("/usr/bin/xargs"):
        # NOTE(review): xargs is normally provided by the findutils package;
        # `apt-get install xargs` may fail — confirm the package name.
        sudo("apt-get install xargs")
    print blue("Client %s with  Master %s" % (env.host, maddr))
    with cd(work_dir):
        # Shell brace expansion {1..x} fans numbers out to xargs -P x.
        # Assumes cmd_client contains one more %s for the master address
        # (cmd % (x, x, maddr) supplies three values) — TODO confirm.
        cmd = "echo {1..%s} | xargs -P %s -n 1 -d ' ' -I* " + cmd_client
        run(cmd  % (x, x, maddr))
Example #20
0
def list_clusters(server_name):
    """Print a padded table of the server's clusters: name, installer,
    VCS app and application list."""
    from bismarck_cli.utils.misc import get_padded_str

    srv_def = _get_server(server_name)
    name_w, installer_w, vcs_w = 12, 10, 6

    # Header row.
    header = ' '.join([
        get_padded_str('Cluster', name_w),
        get_padded_str('Installer', installer_w),
        get_padded_str('VCS', vcs_w),
    ]) + ' App. list'
    print(blue(header))

    # One padded row per cluster.
    for cluster_name, cluster in srv_def.clusters.clusters.items():
        row = ' '.join([
            get_padded_str(cluster_name, name_w),
            get_padded_str(cluster.installer.installer_name, installer_w),
            get_padded_str(cluster.vcs_server.vcs_app_name, vcs_w),
        ]) + ' ' + ', '.join(cluster.apps)
        print(blue(row))
Example #21
0
def php_version():
    """Interactively pick a PHP version (5.4/5.5/5.6), add the matching
    PPA, and install php5 with the apache module."""
    choosing = True
    while choosing:
        print(blue("Select version:"))
        option = raw_input(blue("0) PHP 5.4\n1) PHP 5.5\n2) PHP 5.6\n>>"))

        if option == "0":
            choosing = False
            print("Installing php 5.4...")
            state.output['stdout'] = True
            run('sudo apt-get remove -y libapache2-mod-php5')
            run('sudo add-apt-repository ppa:ondrej/php5-oldstable')
        elif option == "1":
            choosing = False
            print("Installing php 5.5...")
            state.output['stdout'] = True
            run('sudo apt-get remove -y libapache2-mod-php5')
            run('sudo add-apt-repository ppa:ondrej/php5')
        elif option == "2":
            choosing = False
            print("Installing php 5.6...")
            state.output['stdout'] = True
            run('sudo apt-get remove -y libapache2-mod-php5')
            run('sudo add-apt-repository ppa:ondrej/php5-5.6')

    run('sudo apt-get update')
    run('sudo apt-get install php5')
    run('sudo apt-get install libapache2-mod-php5')
Example #22
0
def delete(name):
    """Delete supervisor config file"""
    supervisor_config = '/etc/supervisor/conf.d/%s.conf' % name
    if files.exists(supervisor_config):
        print blue("Delete supervisor config file ... \n")
        sudo('rm %s' % supervisor_config)
        sudo('supervisorctl update')
Example #23
0
def rsync(sources, dest, rsync_ignore=None, color_files=True):
    """Copy *sources* into *dest* under the user's home, then rsync them.

    sources: path or list of paths to copy.
    dest: destination directory, resolved relative to ~/.
    rsync_ignore: optional exclude-from file.
    color_files: when True, capture output and print the header in blue.
    """
    if isinstance(sources, str):
        sources = [sources]

    dest = os.path.join(os.path.expanduser('~/'), dest)
    local('mkdir -p "%s"' % dest)

    # Pre-seed the destination with a plain copy of each source.
    for s in sources:
        local('cp -R {0} {1}/'.format(s, dest))

    command = ['rsync', '-av']
    command.extend("'%s'" % s for s in sources)
    command.append("'%s'" % (dest))

    if rsync_ignore and os.path.isfile(rsync_ignore):
        command.append('--exclude-from=%s' % rsync_ignore)

    if not color_files:
        return local(" ".join(command))

    print(" ".join(command))

    data = local(" ".join(command), capture=True)
    # Drop the "sending incremental file list" header line.
    lines = data.splitlines()[1:]
    i = 0
    # Bounds check added: the original `while lines[i]` raised IndexError
    # when the output contained no blank separator line (or was empty).
    while i < len(lines) and lines[i]:
        print(blue(lines[i]))
        i += 1
    for line in lines[i:]:
        print(line)
Example #24
0
def RollOverControlAppServers(service="stop"):
    ''' Stop/Start/Restart/State weblogic admin servers '''
    print(service)
    action = service.lower()
    bin_dir = os.path.join(g_weblogic_dir, 'bin')
    with cd(bin_dir):
        if action == "stop":
            sudo('./weblogic.admin.managed stop', user=wworks_User)
            sudo('./weblogic.platform.managed stop', user=wworks_User)
            print('[%s]' % blue("PASS"))
        elif action == "start":
            sudo('nohup ./weblogic.admin.managed start | tee ../logs/nohup.out', user=wworks_User)
            sudo('nohup ./weblogic.platform.managed start | tee ../logs/nohup.out', user=wworks_User)
            print('[%s]' % blue("PASS"))
        elif action == "restart":
            # Restart = full stop of both managed servers, then start.
            sudo('./weblogic.admin.managed stop', user=wworks_User)
            sudo('./weblogic.platform.managed stop', user=wworks_User)
            sudo('nohup ./weblogic.admin.managed start | tee ../logs/nohup.out', user=wworks_User)
            sudo('nohup ./weblogic.platform.managed start | tee ../logs/nohup.out', user=wworks_User)
            print('[%s]' % blue("PASS"))
        elif action == "state":
            sudo('./weblogic.admin.managed state', user=wworks_User)
            sudo('./weblogic.platform.managed state', user=wworks_User)
            print('[%s]' % blue("PASS"))
        else:
            error('Invalid service type')
Example #25
0
def deploy_on_shared_storage(source_path=None, shared_path=None):
    """
    Install full distribution on a shared storage (i.e. dispatcher, worker, API and tools)

    source_path/shared_path default to env.source_path/env.shared_path,
    resolved at call time. (Evaluating env.* in the signature froze the
    values at import time — the classic early-bound default pitfall.)
    """
    if source_path is None:
        source_path = env.source_path
    if shared_path is None:
        shared_path = env.shared_path

    print("")
    print(green("Deploy sources, API and tools on network path", bold=True))
    print(green(" - source path = %s" % source_path, bold=True))
    print(green(" - shared path = %s" % shared_path, bold=True))
    print(green(" - steps:", bold=True))
    print(green("   1. install core apps", bold=True))
    print(green("   2. install API files", bold=True))
    print(green("   3. install scripts", bold=True))

    result = prompt(green("\nContinue ?", bold=True), default='y')
    if result != 'y':
        abort("Interrupted by user.")

    local("mkdir -p %s" % shared_path)
    print(blue("Install core apps", bold=True))
    local("rsync -r %s/src/octopus %s" % (source_path, shared_path))
    local("rsync -r %s/src/pulitools %s" % (source_path, shared_path))

    print(blue("Install API", bold=True))
    local("mkdir -p %s/puliclient" % shared_path)
    local("rsync -r %s/src/puliclient/__init__.py %s/puliclient" % (source_path, shared_path))
    local("rsync -r %s/src/puliclient/jobs.py %s/puliclient" % (source_path, shared_path))

    print(blue("Install scripts", bold=True))
    local("mkdir -p %s/scripts" % shared_path)
    local("rsync -r %s/scripts/dispatcherd.py %s/scripts" % (source_path, shared_path))
    local("rsync -r %s/scripts/workerd.py %s/scripts" % (source_path, shared_path))
    local("rsync -r %s/scripts/util/jobcleaner.py %s/scripts" % (source_path, shared_path))
def reset():
    """Discard uncommitted changes in the remote checkout (git reset --hard)."""
    print blue('* Reset')
    with cd(env.project_dir):
        with hide('running', 'stdout', 'stderr'):
            # Trailing comma (Python 2) suppresses the newline so "DONE"
            # is appended to this line once the reset completes.
            print green("  Reset uncommited changes made to source code..."),
            run('git reset --hard')
            print green("DONE")
Example #27
0
def reload():
    """Gracefully reload the uwsgi instance via its PID file.

    Aborts with a diagnostic when the running state and the PID file
    disagree (stale PID file, or running without one).
    """
    running = _running()
    pid = _pid()

    if running and pid:
        # Normal case: ask uwsgi to reload using the recorded PID file.
        cmd = 'uwsgi --reload %(path_var)s/run/%(project_name)s.pid' % config
        try:
            if config['debug']:
                cmd += ' --catch-exceptions'
                print green("*** DEBUG MODE ENABLED ***")
        except:
            # 'debug' may be absent from config; treat as disabled.
            pass
        # Trailing comma (Python 2) keeps 'Done' on the same line.
        print blue("* Reloading..."),
        local(cmd)
        print blue('Done')

    elif running and not pid:
        _abort("%(project_name)s running, but has no PID file. "
               "Use 'start' instead." % config)

    elif not running and pid:
        _abort("%(project_name)s is not running, but has PID file. "
               "Use 'start' instead." % config)

    elif not running and not pid:
        _abort("%(project_name)s is not running." % config)
Example #28
0
def createRing(typeRing,port,IP,deviceName,deviceWeight):
    # ASSUMES A SINGLE DEVICE ON STORAGE NODE

    port = str(port)

    with cd('/etc/swift/'):
        # verify if ring is already there
        out = run("swift-ring-builder %s.builder" % (typeRing),quiet=True)
        if 'does not exist' in out:
            # ring is not created yet

            # Create the base *.builder file
            run("swift-ring-builder %s.builder create 10 3 1" % (typeRing))

            # Add node to the ring
            run("swift-ring-builder %s.builder add r1z1-%s:%s/%s %s" % 
                    (typeRing,IP,port,deviceName,deviceWeight))

            # rebalance ring
            run("swift-ring-builder %s.builder rebalance" % (typeRing))
        else:
            print blue("Ring {} already exists. Nothing done".format(typeRing))

        run("ls")

    msg = 'Restart proxy server service'
    runCheck(msg, 'systemctl restart openstack-swift-proxy.service')
Example #29
0
def skeletonize():
    '''Update Skeleton HTML5-Boilerplate.

    Pulls the Skeleton submodule, rsyncs its assets into the app's static
    and templates dirs, patches the base template, and fetches jQuery.
    '''
    print(green("Skeletonizing the project directory..."))

    # Skeleton
    print(blue("Installing skeleton HTML5 Boilerplate."))
    os.chdir(PROJ_DIR)
    sh.git.submodule.update(init=True)

    os.chdir(PROJ_DIR + "/Skeleton")
    sh.git.pull("origin", "master")
    sh.rsync("-av", "images", "{0}/{1}/static/".format(PROJ_DIR, APP_NAME))
    sh.rsync("-av", "css", "{0}/{1}/static/".format(PROJ_DIR, APP_NAME))
    sh.rsync("-av", "index.html",
             "{0}/{1}/templates/base_t.html".format(PROJ_DIR, APP_NAME))
    os.chdir(PROJ_DIR)

    # Patch the base template with templating tags
    print(blue("Patching the base template."))
    os.chdir(PROJ_DIR + "/{0}/templates/".format(APP_NAME))
    # `with` guarantees the patch file is closed even if sh.patch fails;
    # also dropped a no-op .format() on a literal with no placeholders.
    with open("base_t.patch") as template_patch:
        sh.patch(strip=0, _in=template_patch)
    os.chdir(PROJ_DIR)

    # jQuery
    print(blue("Installing jquery 1.9.0."))
    os.chdir(PROJ_DIR + "/" + APP_NAME + "/static/js")
    sh.curl("http://code.jquery.com/jquery-1.9.0.min.js", O=True)
    os.chdir(PROJ_DIR)
Example #30
0
def color_test():
    """Demo every fabric color twice: all colors non-bold, then all bold,
    pausing 0.2s between lines."""
    palette = [
        ('Blue text', colors.blue),
        ('cyan text', colors.cyan),
        ('green text', colors.green),
        ('magenta text', colors.magenta),
        ('red text', colors.red),
        ('white text', colors.white),
        ('yellow text', colors.yellow),
    ]
    for _ in range(2):
        for bold in (False, True):
            for label, paint in palette:
                text = (label + ' bold') if bold else label
                rendered = paint(text, bold=bold)
                # The original appended an extra newline only after the
                # very first (non-bold blue) entry of each pass.
                if paint is colors.blue and not bold:
                    rendered += '\n'
                print(rendered)
                time.sleep(0.2)
Example #31
0
def remove():
    """
    Blow away the current project.

    Deletes the Webfaction API records (website, domain, apps, database,
    db user, optional twitter cronjob), then the remote virtualenv, repo
    and rendered templates, and finally refreshes supervisor.
    """
    # Delete Webfaction API objects
    _print(blue("Removing database and website records from the Webfaction "
                "control panel...", bold=True))
    srv, ssn, acn = get_webf_session()
    # Each object is looked up first so a missing record is skipped
    # rather than triggering a delete error.
    website = get_webf_obj(srv, ssn, "website", env.proj_name)
    if website:
        del_webf_obj(srv, ssn, "website", env.proj_name, env.host_string)
    domain = get_webf_obj(srv, ssn, "domain", env.live_domain, env.live_subdomain)
    if domain:
        del_webf_obj(srv, ssn, "domain", env.live_domain, env.live_subdomain)
    main_app = get_webf_obj(srv, ssn, "app", env.proj_name)
    if main_app:
        del_webf_obj(srv, ssn, "app", main_app["name"])
    static_app = get_webf_obj(srv, ssn, "app", "%s_static" % env.proj_name)
    if static_app:
        del_webf_obj(srv, ssn, "app", "%s_static" % env.proj_name)
    db = get_webf_obj(srv, ssn, "db", env.proj_name)
    if db:
        del_webf_obj(srv, ssn, "db", env.proj_name, "postgresql")
    db_user = get_webf_obj(srv, ssn, "db_user", env.proj_name)
    if db_user:
        del_webf_obj(srv, ssn, "db_user", env.proj_name, "postgresql")
    # Only remove the poll_twitter cronjob when a polling period was set.
    if isinstance(env.twitter_period, int):
        srv.delete_cronjob(ssn, "*/%s * * * * %s poll_twitter" % (
            env.twitter_period, env.manage))

    # Delete files/folders
    if exists(env.venv_path):
        run("rm -rf %s" % env.venv_path)
    if exists(env.repo_path):
        run("rm -rf %s" % env.repo_path)
    for template in get_templates().values():
        remote_path = template["remote_path"]
        if exists(remote_path):
            run("rm %s" % remote_path)

    # Update supervisor
    run("supervisorctl update")
Example #32
0
def copy_secrets():
    """
    Copies secrets from local to remote.
    :return:
    """
    secret_files = (".env",)

    for filename in secret_files:
        destination = "/".join([env.project_dir, filename])
        message = "Copying {secret} to {remote_path} on {host}".format(
            secret=filename, remote_path=destination, host=env.host)
        print(blue(message))
        put(filename, destination)

        # Append the deployment timestamp into the secret file itself.
        stamp = datetime.datetime.utcnow().isoformat()
        with cd(env.project_dir):
            run("echo 'DEPLOYMENT_DATETIME=%s' >> %s" % (stamp, filename))
Example #33
0
def checkdns():
    """
    Checks if DNS lookup matches hosts IP.
    """
    puts(blue('Checking DNS records for all demo servers.'))
    for role_name, role in env.roledefs.items():
        assert len(role['hosts']) == 1, 'Multiple hosts found for role'
        expected_ip = role['hosts'][0]
        fqdn = role['hostname']
        try:
            # Resolve all A records and normalize them to bare IP strings.
            answers = [rdata for rdata in dns.resolver.query(fqdn, 'A')]
            answer_ips = [answer.to_text().rstrip('.') for answer in answers]
            if expected_ip in answer_ips:
                print('DNS for', role_name, 'OK')
            else:
                print('WRONG DNS for', role_name, 'Hostname:', fqdn, 'Expected:', expected_ip, 'Got:', answer_ips)
        except dns.resolver.NoAnswer:
            print('MISSING DNS for', role_name, 'Hostname:', fqdn, 'Expected:', expected_ip)
Example #34
0
def change_domain():
    """
    Changes the project's domain according to the url configuration from
    environment.json
    """
    require('url')

    # Announce the target url before touching anything.
    print("Making actions to change project's url to: "
          + blue(env.url, bold=True) + "...")

    with cd(env.public_dir):
        print("Reloading vagrant virtual machine...")
        local("vagrant halt")
        local("vagrant up")

        print("Changing project url configuration...")
        wp_cmd = """
            wp option update home http://{url} &&\
            wp option update siteurl http://{url}
            """.format(**env)
        run(wp_cmd)
Example #35
0
def seek_apt_pip(default="apt", grep=None):
    """
    Show the remote apt and/or pip configuration sources.

    :param default: which configuration to inspect: "apt", "pip", or
        "all" for both (default "apt").
    :param grep: optional pattern; when given, only matching lines of the
        config file(s) are shown.

    Runs with warn_only so a missing config file does not abort the task.
    """
    with settings(warn_only=True):
        # Resolve the selector into two independent flags.
        show_apt = default in ("apt", "all")
        show_pip = default in ("pip", "all")

        if show_apt:
            if not grep:
                sudo("cat /etc/apt/sources.list ")
            else:
                sudo(
                    "grep '{}' /etc/apt/sources.list 2>/dev/null".format(grep))
        if show_pip:
            # BUG FIX: this command used to be wrapped in blue(), which sent
            # ANSI escape codes to the remote shell as part of the command.
            # Also honor `grep` here, consistent with the apt branch.
            if not grep:
                sudo("cat ~/.pip/pip.conf 2>/dev/null")
            else:
                sudo("grep '{}' ~/.pip/pip.conf 2>/dev/null".format(grep))
Example #36
0
def GitHub_release_text():
    """
    Generate text to put in the GitHub release Markdown box
    """
    shortversion = get_sympy_short_version()
    htmltable = table()
    # Template for the release body; filled in with the version and the
    # HTML download table.
    template = """\
See https://github.com/sympy/sympy/wiki/release-notes-for-{shortversion} for the release notes.

{htmltable}

**Note**: Do not download the **Source code (zip)** or the **Source code (tar.gz)**
files below.
"""
    release_text = template.format(shortversion=shortversion,
                                   htmltable=htmltable)
    print(blue("Here are the release notes to copy into the GitHub release "
               "Markdown form:", bold=True))
    print()
    print(release_text)
    return release_text
Example #37
0
def sync_docs_to_server(remote_dir='/home/web/fabgis'):
    """Synchronize docs with webserver.

    :param remote_dir: The directory to sync the docs to. After syncing it will
        match the content of your local docs/build/html dir.
    :type remote_dir: str
    """
    project_root = os.path.dirname(__file__)
    docs_dir = os.path.join(project_root, 'docs')
    html_dir = os.path.join(docs_dir, 'build', 'html')

    with lcd(docs_dir):
        local('make clean')
        # Make sure to set pythonpath in case there is already a system
        # installed copy of fabgis overriding our local copy.
        local('PYTHONPATH=%s make html' % project_root)

    rsync_project(remote_dir=remote_dir, local_dir=html_dir)
    # Make the synced tree world-readable so the webserver can serve it.
    run('chmod o+rX -R %s' % remote_dir)
    fastprint(
        blue('Your server docs are now synchronised to your local project\n'))
    def list_exports(self):
        """
        List exports.

        Fetches every CloudFormation export (following NextToken pagination),
        maps each back to the stack that exported it, and prints a table of
        those whose exporting stack belongs to this deployment's stack defs.
        """
        def get_exported_stack_name(export):
            # Match by substring: ExportingStackId is a full ARN that
            # contains the stack name.
            exporting_stack_id = export['ExportingStackId']
            for stack_def in self.stack_defs.values():
                stack_name = stack_def.actual_stack_name()
                if stack_name in exporting_stack_id:
                    return stack_name
            return None

        def list_all_exports():
            # FIX: iterate the pagination instead of recursing; the old
            # recursive helper used a mutable default argument and could hit
            # the recursion limit on deployments with many pages.
            exports = []
            response = self.cfn_client().list_exports()
            while True:
                exports.extend(response.get("Exports", []))
                next_token = response.get("NextToken", None)
                if next_token is None or len(next_token) == 0:
                    return exports
                response = self.cfn_client().list_exports(NextToken=next_token)

        print('Fetching exports...')
        exports = list_all_exports()

        table = PrettyTable(['ExportedStackName', 'ExportName', 'ExportValue'])
        table.align['ExportedStackName'] = 'l'
        table.align['ExportName'] = 'l'
        table.align['ExportValue'] = 'l'
        for export in exports:
            exported_stack_name = get_exported_stack_name(export)
            if exported_stack_name is not None:
                table.add_row([
                    exported_stack_name,
                    export['Name'],
                    export['Value']
                ])
        print(blue('Exports:', bold = True))
        print(table)
Example #39
0
def setup_vundle(homeDir=None):
  """Clones the Vundle vim plugin (https://github.com/gmarik/Vundle.vim) to the
  server (if it hasn't been cloned), pulls updates, checkout v0.10.2, and
  installs vim plugins managed by Vundle.

  Args:
    homeDir(str, optional): home directory for the server. If not supplied or if
      `None` is supplied, the return value of the `get_home_dir` function is
      used

  >>> setup_vundle()
  """
  homeDir = get_home_dir() if homeDir is None else homeDir
  repoPath = os.path.join(homeDir, ".vim", "bundle", "Vundle.vim")
  if not exists(repoPath):
    print(yellow(
      'Vundle.vim not found. Cloning it to `{}`'.format(repoPath)
    ))
    run('git clone https://github.com/gmarik/Vundle.vim.git {}'.format(
      repoPath
    ))
  else:
    if not is_dir(repoPath):
      abort(red(
        ("Error: `{}` is not a directory. Please remove it manually (it is used"
         " for storing Vundle)."
        ).format(repoPath)
      ))
    # repo already present; just fetch the latest refs
    with cd(repoPath):
      print(blue('Vundle git repo exists. Updating it...'))
      run('git remote update')

  # Pin to a known-good Vundle release, then install the plugins.
  with cd(repoPath):
    run('git checkout v0.10.2')
  print(yellow('Installing vim plugins managed by Vundle...'))
  with settings(hide('stdout')):
    run('vim +PluginInstall +qall')
Example #40
0
def launch_rebinarization(instance, use_temp=False):
    """ Re-launch binarization of previously processed input data
        During upgrade, we need to regenerate data.nav.lz4 file because of
        serialization objects changes; we have to find the last input file
        processed
        "cd" command is executed manually (not in a context manager)
        because it is not good to use global variable with parallel

        :param instance: name of the instance to rebinarize
        :param use_temp: when True, write output to a temporary directory
        :returns: True on success, False on failure
    """
    with shell_env(TYR_CONFIG_FILE=env.tyr_settings_file), settings(
            user=env.KRAKEN_USER):
        print(
            blue("NOTICE: launching binarization on {} @{}".format(
                instance, time.strftime('%H:%M:%S'))))
        try:
            run("cd " + env.tyr_basedir +
                " && python manage.py import_last_dataset --background {}{}".
                format(instance,
                       ' --custom_output_dir temp' if use_temp else ''))
            return True
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; also return an explicit False (the implicit
            # None was the old failure value -- both are falsy).
            print(red("ERROR: failed binarization on {}".format(instance)))
            return False
Example #41
0
def print_instance(inst):
    # Pretty-print a summary of one EC2 instance (boto instance object):
    # id, type, state, tags, ssh/terminate hints and launch time.
    inst_id    = inst.id
    inst_state = inst.state
    inst_type  = inst.instance_type
    pub_name   = inst.public_dns_name
    tagdict    = inst.tags
    l_time     = inst.launch_time
    key_name   = inst.key_name
    nuser = None  # value of the 'NGAS User' tag, if present
    puts('Instance {0} ({1}) is {2}'.format(inst_id, inst_type, color_ec2state(inst_state)))
    for k, val in tagdict.items():
        if k == 'Name':
            val = blue(val)
        elif k == 'NGAS User':
            nuser = val
        puts('{0}: {1}'.format(k,val))
    if inst_state == 'running':
        # NOTE(review): `'******' % (nuser)` looks like a redacted/corrupted
        # format string; with a non-None nuser it raises TypeError ("not all
        # arguments converted during string formatting"). Restore the original
        # template (presumably something like '%s@' % nuser) before relying
        # on this branch.
        ssh_user = '******' % (nuser) if nuser else ''
        puts("Connect:   ssh -i ~/.ssh/{0}.pem {1}{2}".format(key_name, pub_name, ssh_user))
        puts("Terminate: fab aws.terminate:instance_id={0}".format(inst_id))
    puts('Launch time: {0}'.format(l_time))
Example #42
0
def compare_tar_against_git(release):
    """
    Compare the contents of the tarball against git ls-files

    release should be one of '2' or '3'.

    Prints three reports (whitelisted files that leaked into the tarball,
    files tracked by git but missing from the tarball, and tarball files not
    tracked by git) and calls error() if any discrepancy is found.
    """
    # Build two sets of repo-relative paths: files tracked by git, and files
    # actually shipped in the tarball.
    with hide("commands"):
        with cd("/home/vagrant/repos/sympy"):
            git_lsfiles = set(
                [i.strip() for i in run("git ls-files").split("\n")])
        tar_output_orig = set(show_files(release, print_=False).split("\n"))
        tar_output = set()
    for file in tar_output_orig:
        # The tar files are like sympy-0.7.3/sympy/__init__.py, and the git
        # files are like sympy/__init__.py.
        split_path = _full_path_split(file)
        if split_path[-1]:
            # Exclude directories, as git ls-files does not include them
            tar_output.add(os.path.join(*split_path[1:]))
    # print tar_output
    # print git_lsfiles
    fail = False
    print
    print blue("Files in the tarball from git that should not be there:",
               bold=True)
    print
    for line in sorted(tar_output.intersection(git_whitelist)):
        # Just special case this for now, since this file will be removed. It
        # is only in the Python 2 source, not Python 3.
        if line == 'sympy/parsing/ast_parser_python25.py':
            continue
        fail = True
        print line
    print
    print blue("Files in git but not in the tarball:", bold=True)
    print
    for line in sorted(git_lsfiles - tar_output - git_whitelist):
        fail = True
        print line
    print
    print blue("Files in the tarball but not in git:", bold=True)
    print
    for line in sorted(tar_output - git_lsfiles - tarball_whitelist):
        fail = True
        print line

    # Any non-whitelisted mismatch in either direction fails the check.
    if fail:
        error("Non-whitelisted files found or not found in the tarball")
Example #43
0
def mongo_create_db(dbname, dbuser=None, **kwargs):
    """
    Create a MongoDB database plus a dedicated user and print the MONGO_URLs.

    :param dbname: name of the database to create.
    :param dbuser: db user name; defaults to ``dbname`` when omitted.
    :returns: (dbuser, dbpassword) tuple for the newly created user.

    Aborts when the db/user combination already exists.
    """
    print(blue('Create MongoDB DB : %s' % dbname))

    # Make sure an admin account exists, then load its credentials.
    # FIX: the original only read mongousr/mongopwd in the `else` branch, so a
    # fresh setup raised NameError when they were used below.
    # NOTE(review): assumes mongo_setup_admin_user() writes
    # /root/.starbase/mongo/env -- confirm.
    if not exists('/root/.starbase/mongo'):
        mongo_setup_admin_user()
    mongousr = dotenv_get('/root/.starbase/mongo/env', "MONGO_ADMIN_USER")
    mongopwd = dotenv_get('/root/.starbase/mongo/env', "MONGO_ADMIN_PWD")

    env.dbname = dbname

    if dbuser: env.dbuser = dbuser
    else: env.dbuser = dbname

    script = "%(dbname)s.%(dbuser)s.js" % env

    if exists('/root/.starbase/mongo/%s' % script):
        abort(red('DB %(dbname)s with user %(dbuser)s already exists' % env))

    env.dbpassword = generate_password(16)

    template('mongodbcreateuser.js', "/root/.starbase/mongo/" + script)

    sudo('mongo admin -u %s -p %s /root/.starbase/mongo/%s' %
         (mongousr, mongopwd, script),
         quiet=True)

    # FIX: the success-message literal was corrupted (unbalanced quotes that
    # made this module un-importable); reconstructed as separate prints.
    print(green('Create MongoDB DB %(dbname)s with associated user :' % env))
    print(
        green(
            '   =>  MONGO_URL="mongodb://%(dbuser)s:%(dbpassword)s@localhost/%(dbname)s"'
            % env))
    print(
        green(
            '   => (remote) MONGO_URL="mongodb://%(dbuser)s:%(dbpassword)s@%(host_string)s/%(dbname)s"'
            % env))

    return (env.dbuser, env.dbpassword)
Example #44
0
def handle_build_docs(args):
    """
    `build-docs` command handler.

    Builds Sphinx docs (gettext + html) for each comma-separated app in
    ``args.apps`` and uploads them via ``sscp`` as ``args.user`` (defaulting
    to the current system user).

    Returns
    -------
    None

    """
    # FIX: removed dead `app_list = []` / `user = str()` pre-initializations;
    # both names are unconditionally assigned below.
    if not args.user:
        user = os.getlogin()
        # FIX message typo: "ommited" -> "omitted".
        print_info(
            'option --user omitted, so using system user ({})'.format(
                green(user)))
    else:
        user = args.user

    app_list = args.apps.split(',')

    for app in app_list:
        print_info('Starting to build docs for {}'.format(green(app)))

        pkg_abs = os.path.abspath(app)
        print_info('Searching in {}'.format(blue(pkg_abs)))
        if not is_package(pkg_abs):
            return print_err('{} is not a package'.format(green(app)))

        if not os.path.exists('{}/docs'.format(app)):
            return print_err(
                'Application {} does not exist'.format(green(app)))

        # in docs subdir
        with lcd('./{}/docs'.format(app)):
            local('make gettext; make html;', capture=False)
        # back to pwd of command was run
        sscp(app, user, args)
Example #45
0
def pull_docker_image_from_registry(dockerImageName,
    dockerImageTag="latest"):
  """Pulls a tagged Docker image from the Docker registry.

  Rationale: While a `docker run` command for a missing image will pull the
  image from the Docker registry, it requires any running Docker container with
  the same name to be stopped before the newly pulled Docker container
  eventually runs. This usually means stopping any running Docker container
  with the same name before a time consuming `docker pull`. Pulling the desired
  Docker image before a `docker stop` `docker run` will minimize the downtime
  of the Docker container.

  Args:
    dockerImageName(str): Name of the Docker image in `namespace/image` format

    dockerImageTag(str, optional): Tag of the Docker image to pull, defaults to
      the string `latest`
  """
  taggedImage = construct_tagged_docker_image_name(dockerImageName,
    dockerImageTag
  )
  pullCommand = "docker pull {}".format(taggedImage)
  print(blue(
    ("Pulling `{}` from the Docker registry"
     " (http://index.docker.io) ...").format(taggedImage)
  ))
  # First try pulling anonymously; `docker login` prompts for input even when
  # already authenticated, so only fall back to it when the plain pull fails.
  with settings(warn_only=True):
    anonymousPullOk = run(pullCommand).succeeded
  if not anonymousPullOk:
    print(yellow(
      "The previous `docker pull` failed most probably due to a lack of"
      " credentials. Running `docker login` followed by `docker pull`..."
    ))
    run("docker login")
    run(pullCommand)
Example #46
0
def setup_glance_keystone():
    """
    Set up Keystone credentials for Glance

    Create (a) a user and a service called 'glance', and 
    (b) an endpoint for the 'glance' service
    """

    GLANCE_PASS = passwd['GLANCE_PASS']
    # get admin credentials to run the CLI commands
    credentials = env_config.admin_openrc

    with prefix(credentials):
        # before each creation, we check a list to avoid duplicates
        if 'glance' not in run("keystone user-list"):
            msg = "Create user glance"
            runCheck(msg, "keystone user-create --name glance --pass {}"\
                    .format(GLANCE_PASS))

            msg = "Give the user glance the role of admin"
            runCheck(
                msg, "keystone user-role-add --user glance "
                "--tenant service --role admin")
        else:
            print blue("User glance already created. Do nothing")

        if 'glance' not in run("keystone service-list"):
            msg = "Create service glance"
            runCheck(
                msg, "keystone service-create --name glance --type image "
                "--description 'OpenStack Image Service'")
        else:
            print blue("Service glance already created. Do nothing")

        if 'http://controller:9292' not in run("keystone endpoint-list"):
            msg = "Create endpoint for service glance"
            runCheck(
                msg, "keystone endpoint-create "
                "--service-id $(keystone service-list "
                "| awk '/ image / {print $2}') "
                "--publicurl http://controller:9292 "
                "--internalurl http://controller:9292 "
                "--adminurl http://controller:9292 "
                "--region regionOne")
        else:
            print blue("Enpoint for service glance already created. "
                       "Nothing done")
        def wait_for_drift_to_detected(_stack_name, drift_id):
            """Poll CloudFormation until the given drift detection completes.

            NOTE(review): polls every 10s with no timeout -- a detection that
            never leaves DETECTION_IN_PROGRESS would loop forever.
            """
            def get_drift_status():
                # Single describe call for the detection operation's state.
                return self.cfn_client().describe_stack_drift_detection_status(
                    StackDriftDetectionId = drift_id
                )

            result = get_drift_status()

            # Show the detection id and timestamp so the operation can be
            # tracked in the AWS console while we wait.
            table = PrettyTable()
            table.add_column('StackName', [_stack_name])
            table.align['StackName'] = 'l'
            table.add_column('DriftDetectionId', [result['StackDriftDetectionId']])
            table.align['DriftDetectionId'] = 'l'
            table.add_column('DetectionStatedTime', [result['Timestamp']])
            print(blue('DriftDetection:', bold = True))
            print(table)

            print('')
            print('Waiting for detection to complete...')
            while result['DetectionStatus'] == 'DETECTION_IN_PROGRESS':
                time.sleep(10)
                result = get_drift_status()
Example #48
0
def setup_inasafe():
    """Setup requirements for InaSAFE.

    Installs the Qt/QGIS toolchain plus python dev dependencies, then checks
    out the inasafe-dev, inasafe_data and inasafe-doc repositories under
    ~/dev/python.
    """
    fastprint(blue('Setting up InaSAFE dependencies\n'))
    setup_qt4_developer_tools()
    setup_ccache()
    install_qgis2()
    # FIX: 'python-nose' was listed twice in this package list.
    fabtools.require.deb.packages([
        'pep8',
        'pylint',
        'python-nose',
        'python-nosexcover',
        'python-pip',
        'python-numpy',
        'python-qt4',
        'gdal-bin',
        'rsync',
        'python-coverage',
        'python-gdal',
        'pyqt4-dev-tools',
        'pyflakes',
    ])
    code_path = os.path.join('/home', env.user, 'dev', 'python')

    update_git_checkout(code_path=code_path,
                        url='git://github.com/AIFDR/inasafe.git',
                        repo_alias='inasafe-dev',
                        branch='master')
    update_git_checkout(code_path=code_path,
                        url='git://github.com/AIFDR/inasafe_data.git',
                        repo_alias='inasafe_data',
                        branch='master')
    update_git_checkout(code_path=code_path,
                        url='git://github.com/AIFDR/inasafe-doc.git',
                        repo_alias='inasafe-doc',
                        branch='develop')
    fastprint(green('Setting up InaSAFE dependencies completed.\n'))
    fastprint(green('You should now have checkouts of inasafe-dev, \n'))
    # FIX message typo: "insafe-doc" -> "inasafe-doc".
    fastprint(green('inasafe_data and inasafe-doc in your dev/python dir.\n'))
Example #49
0
def color_rotate(s):
    """Write *s* to stdout with each character colored, cycling through a
    fixed 7-color ANSI palette.

    Note: despite the old docstring, nothing is returned -- the colored text
    is written directly to ``sys.stdout`` (behavior unchanged).
    """
    # Palette in rotation order; index characters modulo its length.
    # (Replaces the old 7-way if/elif chain with a dispatch tuple.)
    palette = (blue, cyan, green, magenta, red, white, yellow)
    for index, char in enumerate(s):
        sys.stdout.write(palette[index % len(palette)](char))
Example #50
0
def set_webserver(webserver="nginx"):
    """
    Changes project's web server, nginx or apache2 available, nginx by default.
    """
    require('public_dir')

    if webserver == "apache2":
        sudo("service nginx stop")
        sudo("a2enmod rewrite")
        with open('wordpress-workflow/defaults/htaccess') as htaccess:
            urun(" echo '{0}' > {1}.htaccess".format(htaccess.read(),
                                                     env.public_dir))

        sudo("service apache2 start", pty=False)

    else:
        sudo("service apache2 stop")
        if exists("{0}.htaccess".format(env.public_dir)):
            urun("rm {0}.htaccess".format(env.public_dir))
        sudo("service nginx start")

    print "Web server switched to " + blue(webserver, bold=True) + "."
Example #51
0
def get_previous_version_tag():
    """
    Get the version of the previous release
    """
    # We try, probably too hard, to portably get the number of the previous
    # release of SymPy. Our strategy is to look at the git tags.  The
    # following assumptions are made about the git tags:

    # - The only tags are for releases
    # - The tags are given the consistent naming:
    #    sympy-major.minor.micro[.rcnumber]
    #    (e.g., sympy-0.7.2 or sympy-0.7.2.rc1)
    # In particular, it goes back in the tag history and finds the most recent
    # tag that doesn't contain the current short version number as a substring.
    shortversion = get_sympy_short_version()
    curcommit = "HEAD"
    with cd("/home/vagrant/repos/sympy"):
        while True:
            # Most recent tag reachable from curcommit.
            curtag = run("git describe --abbrev=0 --tags " + curcommit).strip()
            if shortversion in curtag:
                # If the tagged commit is a merge commit, we cannot be sure
                # that it will go back in the right direction. This almost
                # never happens, so just error
                parents = (local("git rev-list --parents -n 1 " + curtag,
                                 capture=True).strip().split())
                # rev-list prints the current commit and then all its parents
                # If the tagged commit *is* a merge commit, just comment this
                # out, and make sure `fab vagrant get_previous_version_tag` is correct
                assert len(parents) == 2, curtag
                curcommit = curtag + "^"  # The parent of the tagged commit
            else:
                print(
                    blue(
                        "Using {tag} as the tag for the previous "
                        "release.".format(tag=curtag),
                        bold=True,
                    ))
                return curtag
        # NOTE(review): unreachable -- the `while True` above only exits via
        # the `return` (or an exception); kept for historical intent.
        error("Could not find the tag for the previous release.")
Example #52
0
def get_no_data_instances():
    """ Get instances that have no data loaded ("status": null)"""
    for instance in env.instances.values():
        for host in instance.kraken_engines:
            has_data = test_kraken(instance,
                                   fail_if_error=False,
                                   hosts=[host])
            if has_data:
                continue
            database_file = instance.kraken_database
            if exists(database_file):
                # Data file present but the engine is down: needs a human.
                print(
                    red("CRITICAL: instance {} is not available but *has* a "
                        "{}, please inspect manually".format(
                            instance.name, database_file)))
            else:
                print(
                    blue(
                        "NOTICE: no data for {}, append it to exclude list"
                        .format(instance.name)))
                # we need to add a property to instances
                env.excluded_instances.append(instance.name)
            # One failing engine is enough to classify this instance.
            break
Example #53
0
def quickstart(project_name):
    """Scaffold a new streamparse project directory named *project_name*."""
    # TODO: alternate way maybe to do all of this is do something like
    # glob.glob('project/**/*') and then we copy everything that's doesn't have
    # jinja2 in filename, generate the jinja2 stuff
    if os.path.exists(project_name):
        print '{}: folder "{}" already exists'.format(red('error'),
                                                      project_name)
        sys.exit(1)

    print '\nCreating your {} streamparse project...'\
          .format(blue(project_name))
    # Expose the project name to the jinja2 templates rendered by _generate.
    _env.globals['project_name'] = project_name

    # Lay out the project skeleton: config, build files, example topology.
    _mkdir(project_name)
    with _cd(project_name):
        _cp(_here('project', 'gitignore'), '.gitignore')
        _generate('config.jinja2.json', 'config.json')
        _cp(_here('project', 'fabfile.py'), 'fabfile.py')
        _generate('project.jinja2.clj', 'project.clj')
        _touch('README.md')
        _mkdir('src')
        with _cd('src'):
            _cp(_here('project', 'src', 'wordcount.py'), 'wordcount.py')
            _cp(_here('project', 'src', 'words.py'), 'words.py')
        _cp(_here('project', 'tasks.py'), 'tasks.py')
        _mkdir('topologies')
        with _cd('topologies'):
            _cp(_here('project', 'topologies', 'wordcount.clj'),
                'wordcount.clj')
        _mkdir('virtualenvs')
        with _cd('virtualenvs'):
            _cp(_here('project', 'virtualenvs', 'wordcount.txt'),
                'wordcount.txt')

    print 'Done.\n'
    print(
        'Try running your topology locally with:\n\n'
        '\tcd {}\n'
        '\tsparse run'.format(project_name))
Example #54
0
    def warn_of_migrations(self):
        """Print the PRs merged since the last deploy and highlight any that
        are labelled as triggering a reindex or migration."""
        # Requires GitHub auth plus both commit bounds to do anything useful.
        if not (_github_auth_provided() and self.last_commit
                and self.deploy_commit):
            return

        pr_numbers = self._get_pr_numbers()
        if len(pr_numbers) > 500:
            print(red("There are too many PRs to display"))
            return

        # Fetch PR metadata concurrently; drop falsy (failed) lookups.
        pool = Pool(5)
        pr_infos = [_f for _f in pool.map(self._get_pr_info, pr_numbers) if _f]

        print(blue("\nList of PRs since last deploy:"))
        self._print_prs_formatted(pr_infos)

        prs_by_label = self._get_prs_by_label(pr_infos)
        if prs_by_label:
            print(
                red('You are about to deploy the following PR(s), which will trigger a reindex or migration. Click the URL for additional context.'
                    ))
            # NOTE(review): assumes a truthy mapping always has the
            # 'reindex/migration' key; if _get_prs_by_label can return other
            # labels only, this raises KeyError -- confirm against its impl.
            self._print_prs_formatted(prs_by_label['reindex/migration'])
Example #55
0
def config(name, command, virtualenv, **kwargs):
    """
    add supervisor configuration
    """
    print blue("Configuring supervisor ... \n")

    temp_path = tempfile.mktemp('.conf')
    parser = ConfigParser()
    conf_path = '/etc/supervisor/conf.d/%s.conf' % name

    if files.exists(conf_path):
        get(conf_path, temp_path)
        parser.read(temp_path)

    section = 'program:%s' % name
    if not parser.has_section(section):
        parser.add_section(section)

    defaults = {
        'user': '******',
        'autostart': 'true',
        'autorestart': 'true',
        'redirect_stderr': 'true',
    }

    defaults.update(kwargs)

    if virtualenv:
        if not command.startswith('/'):
            if files.exists(os.path.join(virtualenv, "bin",
                                         command.split()[0])):
                command = os.path.join(virtualenv, "bin", command)

        # defaults['environment'] = 'PATH="%s/bin"' % virtualenv

    parser.set(section, 'command', command)
    for attribute, value in defaults.iteritems():
        parser.set(section, attribute, value)

    parser.write(open(temp_path, 'w+'))

    print blue("Write supervisor config ... \n"),
    put(temp_path, conf_path, use_sudo=True)

    print blue("Reloading supervisor ... \n")
    sudo('supervisorctl update')
Example #56
0
def sg_sync(name=None, description=None, rules=[], vpc=None, conn=None):
    """ http://boto.readthedocs.org/en/latest/security_groups.html """
    logger.debug("Synchronizing security group: {0} -- {1}".format(
        name, description))
    assert rules and name
    conn = conn or get_conn()
    description = description or name
    csg_kargs = {}
    if vpc is not None:
        conn = boto.vpc.connect_to_region('us-east-1')
        csg_kargs['vpc_id'] = vpc
    sg = get_or_create_security_group(conn, name, description)
    current_rules = sg_rules(sg)
    rules = set([tuple(map(str, r)) for r in rules])

    new_rules = rules - current_rules
    stale_rules = current_rules - rules

    if not stale_rules and not new_rules:
        print colors.blue("rules already synchronized:") + \
            " nothing to do."
    if stale_rules:
        print colors.blue("stale rules: ") + \
            "{0} total".format(len(stale_rules))
    if new_rules:
        print colors.blue("new rules: ") + \
            "{0} total".format(len(new_rules))

    for rule in new_rules:
        print colors.blue('authorizing') + ' new rule: {0}'.format(rule),
        catch_ec2_error(lambda rule=rule: sg.authorize(*rule))

    for rule in stale_rules:
        print colors.red('revoking:') + \
            ' old rule: {0}'.format(rule)
        catch_ec2_error(lambda rule=rule: sg.revoke(*rule))
Example #57
0
def create(instance_name,
           region=GCP_REGION,
           zone=GCP_ZONE,
           disk_size=GCP_BOOT_DISK_SIZE,
           address_name=None):
    """
    Create a GCP instance `instance_name` and associate a new static IP with it.
    If `address_name` is given (an existing static IP) it will be used.
    """
    # puts(green('You may need to run `gcloud init` before running this command.'))
    # STEP 1: reserve a static IP address
    if address_name is None:
        address_name = instance_name
        reserve_ip_cmd = ' '.join([
            'gcloud compute addresses create ' + address_name,
            '--project ' + GCP_PROJECT,
            '--region ' + region,
        ])
        local(reserve_ip_cmd)
    # STEP 2: provision instance
    create_cmd = ' '.join([
        'gcloud compute instances create ' + instance_name,
        '--project ' + GCP_PROJECT,
        '--zone ' + zone,
        '--machine-type f1-micro',
        '--boot-disk-size ' + disk_size,
        '--image-project ' + GCP_IMAGE_PROJECT,
        '--image ' + GCP_IMAGE_NAME,
        '--address ' + address_name,
        '--tags http-server,https-server',
        '--format json',
    ])
    cmd_out = local(create_cmd, capture=True)
    instance_info = json.loads(cmd_out)
    new_ip = instance_info[0]['networkInterfaces'][0]['accessConfigs'][0]['natIP']
    puts(green('Created demo instance ' + instance_name + ' with IP ' +
               new_ip))
    puts(
        green(
            'Add this paragraph to the dict `inventory` in `fabfiles/gcp.py`:')
    )
    # Emit a ready-to-paste inventory entry for the new host.
    inventory_lines = [
        "    '%s': {" % instance_name,
        "        'hosts':['%s']," % new_ip,
        "        'channels_to_import': [],",
        "        'facility_name': '" + instance_name.replace('-', ' ') + "',",
        "        'hostname': '%s.learningequality.org'," % instance_name,
        "    },",
    ]
    for line in inventory_lines:
        puts(blue(line))
Example #58
0
def setup_keystone_controller():
    """
    Set up Keystone credentials for Neutron

    Create (a) a user and a service called 'neutron', and
    (b) an endpoint for the 'neutron' service

    Each creation is guarded by a keystone list check, so the task is
    idempotent and safe to re-run.
    """

    # get credentials
    with prefix(env_config.admin_openrc):

        # check if user neutron has been created and if not, create it
        if 'neutron' not in run("keystone user-list"):
            # create the neutron user in keystone
            msg = "Create neutron user"
            runCheck(
                msg, 'keystone user-create --name neutron --pass {}'.format(
                    passwd['NEUTRON_PASS']))
            msg = "Add the admin role to the neutron user"
            runCheck(
                msg,
                'keystone user-role-add --user neutron --tenant service --role admin'
            )
        else:
            print blue('\t\tneutron is already a user. Do nothing')

        # check if service neutron has been created and if not, create it
        if 'neutron' not in run("keystone service-list"):
            msg = "Create the neutron service entity"
            runCheck(
                msg, 'keystone service-create --name neutron --type network '
                '--description "OpenStack Networking"')
        else:
            print blue('\t\tneutron is already a service. Do nothing')

        # check if a 9696 endpoint already exists and if not, create one
        # (the service id is resolved inline by the awk subshell)
        if 'http://controller:9696' not in run("keystone endpoint-list"):
            msg = "Create the networking service API endpoints"
            runCheck(msg, 'keystone endpoint-create ' + \
                    "--service-id $(keystone service-list | awk '/ network / {print $2}') " + \
                    "--publicurl http://controller:9696 " + \
                    "--adminurl http://controller:9696 " + \
                    "--internalurl http://controller:9696 " + \
                    "--region regionOne")
        else:
            print blue('\t\t9696 is already an endpoint. Do nothing')
Example #59
0
def validate(release=None, pull=None):
    """
    Sanity-check a deployed host: virtualenv, Python version, git tag,
    web gateway, static files, and media folders.

        fab -H mmathethe utils.validate:release=0.1.24 --user=django

    :param release: expected git tag; when given and not found on the host,
        warn with the host's latest tag (and ``git pull`` if ``pull`` is set)
    :param pull: when truthy, run ``git pull`` if the release tag is missing
    """
    # `workon` is silent on success, so any output means the env is broken
    result = run('workon ambition', warn_only=True)
    if result:
        warn(yellow(f'{env.host}: {result}'))
        return

    result = run('workon ambition && python --version', warn_only=True)
    if result != 'Python 3.6.2':
        warn(yellow(f'{env.host}: {result}'))
        return

    with cd('~/source/ambition'):
        result = run('git tag')
        # guard against release=None: `None not in <str>` raises TypeError,
        # which crashed `fab utils.validate` when no release was supplied
        if release is not None and release not in result:
            result = run('git describe --abbrev=0 --tags')
            warn(
                yellow(
                    f'{env.host}: ambition tag not found. Got {result}'
                ))
            if pull:
                run('git pull')

    # a missing tag is only a warning; the remaining checks still run
    result = run('curl http://localhost')
    if 'Bad Gateway' in result:
        warn(yellow(f'{env.host}: bad gateway'))
        return

    result = run(
        'curl http://localhost/static/ambition/label_templates/aliquot.lbl'
    )
    if '404 Not Found' in result:
        warn(yellow(f'{env.host}: 404 Not Found'))
        return

    if not exists('~/media/edc_map') or not exists('~/media/transactions'):
        warn(yellow(f'{env.host}: Media folder not ready'))
        return

    warn(blue(f'{env.host}: OK'))
Example #60
0
def _deploy_without_asking():
    """
    Run the full deploy pipeline with no confirmation prompt.

    Sequence: code + virtualenv update, static-asset handling, supervisor
    config, optional DB migration, then a restart of all services.  On any
    failure the admins are mailed, a restart is attempted to bring the
    server back up, and the exception is re-raised.
    """
    try:
        execute(update_code)
        execute(update_virtualenv)

        # handle static files
        execute(version_static)
        execute(_do_collectstatic)
        execute(_do_compress)
        # initial update of manifest to make sure we have no
        # Offline Compression Issues as services restart
        execute(update_manifest, soft=True)

        execute(clear_services_dir)
        set_supervisor_config()

        # migrate only when configured to AND migrations are pending
        do_migrate = env.should_migrate and needs_to_migrate()
        if do_migrate:
            # stop background workers before migrating — presumably so they
            # don't run against a half-migrated schema (NOTE(review): confirm)
            execute(stop_pillows)
            execute(stop_celery_tasks)
            execute(_migrate)
        else:
            print(blue("No migration required, skipping."))
        execute(do_update_django_locales)
        if do_migrate:
            # post-migration alias flip; only meaningful if we migrated
            execute(flip_es_aliases)

        # hard update of manifest.json since we're about to force restart
        # all services
        execute(update_manifest)
    except Exception:
        execute(mail_admins, "Deploy failed", "You had better check the logs.")
        # hopefully bring the server back to life
        execute(services_restart)
        raise
    else:
        # success path: restart everything, then record the deploy
        execute(services_restart)
        execute(record_successful_deploy)