Example #1
def run_test(print_output, my_configs):
    lcd(base_path)  # note: a bare lcd() call has no effect in Fabric; it only changes directory when used as "with lcd(...):"
    with lcd('./strategy'):
        java_cmd = 'java -cp "../backtest/TradingFramework4j/lib/*:../backtest/TradingFramework4j/bin/" backtester.Backtester '
        cmd_args = "../quotes/%s %s \"%s\"" % (quote_file, strategy_file, my_configs)
        output = local(java_cmd + cmd_args, capture=True)

        if print_output:
            print output

        match = re.search(r'.*Closed PnL \(no fees\):\s*(.*)$', output, re.MULTILINE)
        if match:
            pnl = float(convert_dollars(match.group(1)))
        else:
            print "Error getting PnL."
            sys.exit(1)

        match = re.search(r'.*Total Cost:\s*(.*)$', output, re.MULTILINE)
        if match:
            cost = float(convert_dollars(match.group(1)))
        else:
            print "Error getting cost."
            sys.exit(1)

        score = pnl - cost
        return score
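The helper convert_dollars is not shown in this example; a minimal sketch of what it presumably does (hypothetical, not taken from the original project) is to strip currency formatting so that float() can parse the value:

def convert_dollars(value):
    # e.g. "$1,234.56" -> "1234.56"; negative amounts in parentheses are not handled here
    return value.strip().replace('$', '').replace(',', '')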
Example #2
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME

    project_root = os.getcwd()

    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
            project_root, filename)).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))

    # copy the dump into the media root folder
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))

    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
Example #3
def make_docs(src_dir=None, build_dir=None):
    """
    Generate Indico docs
    """
    _check_present('pdflatex')

    src_dir = src_dir or env.src_dir

    if build_dir is None:
        target_dir = os.path.join(src_dir, 'indico', 'htdocs', 'ihelp')
    else:
        target_dir = os.path.join(build_dir or env.build_dir, 'indico', 'htdocs', 'ihelp')

    print green('Generating documentation')
    with lcd(os.path.join(src_dir, 'doc')):
        for d in DOC_DIRS:
            with lcd(d):
                local('make html')
                local('make latex')
                local('rm -rf {0}/*'.format(os.path.join(target_dir, 'html')))
                local('mv build/html/* {0}'.format(os.path.join(target_dir, 'html')))

        with lcd(os.path.join('guides', 'build', 'latex')):
            local('make all-pdf')
            local('mv *.pdf {0}'.format(os.path.join(target_dir, 'pdf')))

        print green('Cleaning up')
        for d in DOC_DIRS:
            with lcd(d):
                local('make clean')
Example #4
def deploy_project(local_dir, remote_dir, exclude=[]):
    """Deploy the entire project at local_dir to remote_dir, excluding the given paths."""
    export()

    if USE_RSYNC_PROJECT:
        sync = partial(rsync_project, remote_dir=remote_dir, exclude=exclude, delete=True,
                       extra_opts="-e 'ssh -l {}'".format(conf['user']))
    else:
        exclude = ['.git', 'fabfile.py', 'cache', 'config', '*.log', 'js', 'image']
        cmd = "rsync -pthrvz --delete"
        cmd = cmd + " {exclude} --rsh='ssh  -p 22 ' -e 'ssh -l {user}' {local_dir} {host}:{remote_dir}"
        cmd_params = {'user': conf['user'],
                      'host': conf['host'],
                      'remote_dir': remote_dir,
                      'exclude': ' '.join("--exclude '{}'".format(x) for x in exclude)}

        def sync(local_dir):
            cmd_params['local_dir'] = local_dir
            local(cmd.format(**cmd_params))

    try:
        with lcd(workspace):
            with lcd(local_dir):
                sync(local_dir='.')
            sync(local_dir='resources')
    except:
        log.exception('deployment error:')
        raise
    finally:
        rmtree(workspace)
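A call mirroring the docstring might look like this (the directory names and exclude patterns below are illustrative only):

deploy_project('mysite', '/var/www/mysite/', exclude=['*.pyc', 'cache', 'local_settings.py'])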
Example #5
def setup_venv(build_top = "../../../../build"):
    venv_base = "%s/debug/cfgm/schema-transformer" %(build_top)
    with lcd(venv_base):
        venv_base = local("pwd", capture = True)
        local("virtualenv ut-venv")

    venv_dir = "%s/ut-venv" %(venv_base)
    with lcd(venv_dir):
        with prefix("source %s/bin/activate" %(venv_dir)):
            local("pip install --upgrade ../../common/dist/cfgm_common-0.1dev.tar.gz")
            local("pip install --upgrade ../dist/schema_transformer-0.1dev.tar.gz")
            local("pip install --upgrade ../../svc-monitor/dist/svc_monitor-0.1dev.tar.gz")
            local("pip install --upgrade ../../api-server/dist/vnc_cfg_api_server-0.1dev.tar.gz")
            local("pip install --upgrade ../../../api-lib/dist/vnc_api-0.1dev.tar.gz")
            local("pip install --upgrade ../../../sandesh/library/python/dist/sandesh-0.1dev.tar.gz")
            local("pip install paramiko==1.9.0")
            with lcd("../../../../../third_party/kazoo"):
                local("python setup.py install")
            with lcd("../../../../../third_party/ncclient"):
                local("python setup.py install")

            local("pip install fixtures==0.3.12")
            local("pip install testtools==0.9.32")
            local("pip install flexmock==0.9.7")
            local("pip install python-novaclient==2.13.0")
            # 2.6 requirements
            local("pip install ordereddict")
            local("pip install importlib")
Example #6
def nonbower():
    if not os.path.exists(components_dir):
        components()
    
    with open("nonbower.json") as f:
        cfg = json.load(f)
    for name, repo in cfg.get('dependencies', {}).items():
        
        clone = "git clone"
        if '#' in repo:
            repo, tag = repo.split('#')
        else:
            tag = None
            clone += " --depth 1"
        
        with lcd(components_dir):
            
            local("{clone} {repo} {name}".format(**locals()))
            
            if tag:
                with lcd(pjoin(components_dir, name)):
                    local("git checkout -b {0} tags/{0}".format(tag))
        
        # remove the git tree, so we don't get submodules
        shutil.rmtree(pjoin(components_dir, name, '.git'))
Example #7
    def build(self):
        # if we're running a virtualenv then we need to reload the defaults
        virtualenv_name = env.get("virtualenv", None)
        if (virtualenv_name is not None):
            # make a place for the virtualenv to exist
            local("{} -p {}".format(env.tools['mkdir'], env.python_virtualenv_root_dir))

            # remember where the default python installation went
            system_python_virtualenv = env.python_virtualenv

            # create the virtualenv
            with lcd(env.python_virtualenv_root_dir):
                local("{} --python={} {}".format(system_python_virtualenv, env.python, virtualenv_name))

            with settings(path("{}/{}/bin".format(env.python_virtualenv_root_dir, virtualenv_name), behavior="prepend"),
                          shell_env(VIRTUAL_ENV="{}/{}".format(env.python_virtualenv_root_dir, virtualenv_name))):
                # re-load the default paths to make sure it uses the virtualenv python
                load_defaults()

                # load requirements into virtualenv
                if (os.path.isfile("{}/requirements.txt".format(env.build_dir))):
                    local("{} install -r {}/requirements.txt".format(env.python_pip, env.build_dir))

                # really build
                self._build()

            # make it so that we can move the virtualenv
            with lcd(env.python_virtualenv_root_dir):
                local("{} --relocatable {}".format(system_python_virtualenv, virtualenv_name))
        else:
            # really build
            self._build()
Example #8
def build(config=_DEF_CFG):
    "Build all of winsparkle."
    strs = {"devenv": _DEVENV, "cfg": config}
    with lcd(_DEPS_PATH):
        local("%(devenv)s WinSparkleDeps.sln /build %(cfg)s" % strs)
    with lcd(_TOP_DIR):
        local("%(devenv)s WinSparkle.sln /build %(cfg)s /project WinSparkle.vcproj" % strs)
Example #9
def package(skin=None, user=None, version=None,
            app_name="git-snapshot", app_ver="git-snapshot", app_id="com.company.otm"):
    """
    Package a skinned up implementation for deployment based on
    the current commit in this directory.

    This method creates a fresh clone, installs the skin, and
    generates a tarball. In particular, it *ignores* any extra
    files in the directory (such as private keys or config files)
    """
    temp_dir = tempfile.mkdtemp(prefix='fios_tmp')
    clone_dir = os.path.join(temp_dir, 'otmios')
    skin_clone_dir = os.path.join(temp_dir, 'skins')

    git_path = os.path.dirname(__file__)

    git_rev = local('git rev-parse HEAD', capture=True)

    with lcd(temp_dir):
        local('git clone "%s" "%s"' % (git_path, clone_dir))

    with lcd(clone_dir):
        local('git reset --hard %s' % git_rev)
        install_skin(skin, user, version, skin_clone_dir,
                     otm_dir=os.path.join(clone_dir,"OpenTreeMap"),
                     copy_instead_of_link=True)

    create_info_plist(app_name, app_id, app_ver,path=clone_dir)

    with lcd(temp_dir):
        local('tar -czf otmios.tar.gz otmios')

    local('mv "%s" "%s"' % (os.path.join(temp_dir,'otmios.tar.gz'), git_path))
    local('rm -rf "%s"' % temp_dir)
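Run as a Fabric task, an invocation along these lines (the skin, user, and identifiers are placeholders) produces otmios.tar.gz next to the fabfile:

fab package:skin=acme,user=deploy,version=1.2.0,app_name=AcmeTrees,app_ver=1.2.0,app_id=com.acme.trees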
Example #10
def local_update():
    '''Update the local Django project (running required management commands)'''
    require('project_root')
    require('virtualenv_dir')
    with lcd(env.project_root), lcd('caire_web'), \
            prefix('. {}bin/activate'.format(env.virtualenv_dir)):
        project_manage_upgrade(exec_cmd=local)
Example #11
    def fab_patch(self):
        '''
        Walk over 'diffs' directory and patch every application
        '''
        vcs_commands = {
            'git': 'git checkout .',
            'svn': 'svn revert -R .',
            'hg': 'hg revert .',
        }
        for patch in listdir(self.project_path_join(self.diff_dir)):
            app_name, vcs = '.'.join(patch.split('.')[:-2]), patch.split('.')[-2]
            if vcs == 'lib':
                app_dir = self._site_packages_path()
                if isdir(self._site_package_path(app_name)) and isdir(self._site_package_path('%s.original' % app_name)):
                    # delete patched app and move original app to the normal place
                    with lcd(app_dir):
                        local('rm -R %(app)s && mv %(app)s.original %(app)s' % {'app': app_name}, capture=False)
                # copy app from normal to reserve place for keeping
                with lcd(app_dir):
                    local('cp -R %(app)s %(app)s.original' % {'app': app_name}, capture=False)
            else:
                app_dir = self._source_package_path(app_name)
                # revert files from repo
                with lcd(app_dir):
                    local(vcs_commands[vcs], capture=False)

            with lcd(app_dir):
                patch_command = 'patch -p1' if vcs == 'hg' else 'patch -p0'
                local('%s < %s' % (patch_command, self.project_path_join(self.diff_dir, patch)), capture=False)
Example #12
def git(force=False):
    _apt_get_install('git')
    if force or _can_overwrite('~/.gitconfig'):
        with lcd('git'):
            local('ln -sf $PWD/gitconfig ~/.gitconfig')
    if force or _can_overwrite('~/.gitignore'):
        local('rm -f ~/.gitignore')
        git_repo = 'https://github.com/github/gitignore'
        git_repo_dir = 'git/gitignore'
        _git_pull_or_clone(git_repo, git_repo_dir)
        with lcd(git_repo_dir):
            templates = [
                'Global/Linux',
                'Global/Vim',
                'Global/Emacs',
                'Global/JetBrains',
                'Python',
                'R',
                'C',
                'C++',
                'Java',
                'Scala',
                'TeX',
            ]
            for template in templates:
                local('cat {}.gitignore >> ~/.gitignore'.format(template))
Example #13
def create_project():
    """
    Создает новый проект
    """
    # спрашиваем у пользователя название папки с проектом
    prompt('project root name: ', 'project_root',
           validate='^([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$')

    # спрашиваем у пользователя название проекта
    prompt('project name: ', 'project',
           validate='^([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$')
    puts('create project: {0}'.format(env.project))

    with lcd(PROJECTS_ROOT):
        # Создаем директорию с root проектом
        local('mkdir %s' % env.project_root)

        with lcd(env.project_root):
            # Создаем директорию с django проектом и переходим в нее
            local('mkdir {0}-django'.format(env.project))

            with lcd('{0}-django'.format(env.project)):
                # Создаем виртуальное окружение
                local('virtualenv -p {0} .venv'.format(PYTHON))

                with prefix('.venv/bin/activate'):
                    # Устанавливаем django
                    if DJANGO_VERSION:
                        local('pip install django=={0}'.format(DJANGO_VERSION))
                    else:
                        local('pip install django')

                # Создаем проект из шаблона
                local('django-admin startproject --template={0} {1}'.format(PROJECT_TEMPLATE_PATH,
                                                                            env.project))
Example #14
def runfcgi_local():
    with prefix('source /home/adw0rd/work/django-lictor/venv/bin/activate'):
        lcd('/home/adw0rd/work/django-lictor/test_project')  # no-op: lcd() only takes effect as "with lcd(...):"; workdir is passed to runfcgi below instead
        local('./manage.py runfcgi host=localhost port=8005 method=prefork '
              'pidfile=/tmp/test_project.pid minspare=2 maxspare=10 maxchildren=3 maxrequests=100 '
              'outlog=/tmp/test_project.out errlog=/tmp/test_project.err umask=000 '
              'workdir=/home/adw0rd/work/django-lictor/test_project')
Example #15
def terminal(force=False):
    _apt_get_install('zsh')
    local('chsh -s /usr/bin/zsh')
    git_repo = 'https://github.com/sorin-ionescu/prezto'
    git_repo_dir = 'zsh/prezto'
    _git_pull_or_clone(git_repo, git_repo_dir)
    with lcd('zsh'):
        if force or _can_overwrite('~/.zprezto'):
            local('ln -sf $PWD/prezto ~/.zprezto')
            local('ln -sf $PWD/prompt_yasserglez_setup ~/.zprezto/modules/prompt/functions/')
        for dotfile in ('zlogin', 'zlogout', 'zpreztorc', 'zprofile', 'profile', 'zshenv', 'zshrc'):
            if force or _can_overwrite('~/.{}'.format(dotfile)):
                local('rm -f ~/.{}'.format(dotfile))
                local('ln -sf $PWD/{0} ~/.{0}'.format(dotfile))

    # gnome-terminal:
    _apt_get_install('gnome-terminal', 'dconf-cli')
    git_repo = 'https://github.com/anthony25/gnome-terminal-colors-solarized'
    git_repo_dir = 'solarized/gnome-terminal-colors-solarized'
    _git_pull_or_clone(git_repo, git_repo_dir)
    with lcd(git_repo_dir):
        local('./install.sh -s dark -p Default')

    # dircolors:
    _apt_get_install('coreutils')
    git_repo = 'https://github.com/seebi/dircolors-solarized'
    git_repo_dir = 'solarized/dircolors-solarized'
    _git_pull_or_clone(git_repo, git_repo_dir)
    if force or _can_overwrite('~/.dir_colors'):
        with lcd(git_repo_dir):
            local('ln -sf $PWD/dircolors.ansi-dark ~/.dir_colors')
Example #16
def setup_venv(build_top = "../../../../build"):
    venv_base = "%s/debug/cfgm/api-server" %(build_top)
    with lcd(venv_base):
        local("virtualenv ut-venv")

    venv_dir = "%s/ut-venv" %(venv_base)
    with lcd(venv_dir):
        with prefix("source bin/activate"):
            local("pip install --upgrade ../../common/dist/cfgm_common-0.1dev.tar.gz")
            local("pip install --upgrade ../dist/vnc_cfg_api_server-0.1dev.tar.gz")
            local("pip install --upgrade ../../../api-lib/dist/vnc_api-0.1dev.tar.gz")
            local("pip install --upgrade ../../../sandesh/library/python/dist/sandesh-0.1dev.tar.gz")
            local("pip install --upgrade ../../../discovery/dist/discovery-0.1dev.tar.gz")
            local("pip install xmltodict")
            local("pip install fixtures==0.3.12")
            local("pip install testtools==0.9.32")
            local("pip install flexmock==0.9.7")
            local("pip install python-novaclient==2.13.0")
            pyver = "%s.%s" %(sys.version_info[0], sys.version_info[1])
            # 2.6 requirements
            local("pip install ordereddict")
            if pyver == '2.6':
                local("pip install importlib")

            local("cp ../../../../../src/cfgm/api-server/tests/test_common.py lib/python%s/site-packages/vnc_cfg_api_server/" %(pyver))
Example #17
def deploy(product,product_root,repo,tag,user):
    deploy_path = os.path.join(product_root, VERSION_DIR)
    sudo('mkdir -pv %s' % deploy_path, user=user)
    if not is_valid_path(deploy_path):
        print "[ERROR] could not create %s." % deploy_path
        raise RuntimeError("could not create %s" % deploy_path)

    # git url
    url = GIT_URL + ':' + repo + '/' + product.lower() + '.git'
    clone_path = os.path.join(TMP_PATH, product)
    local('mkdir -pv %s' % TMP_PATH)
    try:
        local('git clone %s %s' % (url, clone_path))
        with lcd(clone_path):
            if tag == '':
                tag = local('git for-each-ref --sort=taggerdate | tail -1 \
                             | sed -e "s/.*refs\/tags\///g"', capture=True)
            local('git checkout -b %s' % tag)
        with lcd(TMP_PATH):
            local('mv -v %s %s' % (product, tag))
            local('tar czvf %s.tar.gz %s --exclude-vcs' % (tag, tag))
        local_path = TMP_PATH + '/' + tag + '.tar.gz'
        put(local_path, deploy_path, use_sudo=True)
        with cd(deploy_path):
            run('pwd')
            tar_file = tag + '.tar.gz'
            sudo('tar zxvf %s' % tar_file)
            sudo('chown %s %s' % (user, tag))
            sudo('rm %s' % tar_file)

    except:
        print "[ERROR] %s could not be deployed." % product
        exit(1)
    finally:
        local('rm -rf %s' % TMP_PATH)
Example #18
def parse_projects():
    repos = json.loads(open('config.json').read())
    repos = repos['repos']

    if not os.path.exists(WORKSPACE_DIR):
        local('mkdir %s' % WORKSPACE_DIR)

    if not os.path.exists(os.path.join(WORKSPACE_DIR, DUMPS_DIR)):
        local('mkdir %s' % os.path.join(WORKSPACE_DIR, DUMPS_DIR))

    for repo in repos:
        name = repo.split('/')[-1].split('.')[0]
        path = os.path.join(WORKSPACE_DIR, name)
        with lcd(WORKSPACE_DIR):
            if os.path.exists(path):
                with lcd(name):
                    local('git pull')
            else:
                local('git clone %s' % repo)
        local('python testsparser.py %s > %s.json' % (path,
                                                      os.path.join(WORKSPACE_DIR, DUMPS_DIR, name)))

    local("git checkout gh-pages")
    local("git rebase master")
    with settings(warn_only=True):
        local("cp -rvf %s/* %s" % (os.path.join(WORKSPACE_DIR, DUMPS_DIR), DUMPS_DIR))
        local("git add dumps/* && git commit -m \'dump on %s\'" % datetime.now().strftime("%B %d, %Y at %H:%M:%S"))
        local("git checkout master")
    local("git push -f origin gh-pages")
Example #19
def sphinx_build(builder, docs_dir,
                 cmdline_args=[], language=None, build_dir_cmd=None):
    args = ['sphinx-build', '-b', builder]
    args += cmdline_args
    # ~ args += ['-a'] # all files, not only outdated
    # ~ args += ['-P'] # no postmortem
    # ~ args += ['-Q'] # no output
    # build_dir = docs_dir.child(env.build_dir_name)
    build_dir = Path(env.build_dir_name)
    if language is not None:
        args += ['-D', 'language=' + language]
        # needed in select_lang.html template
        args += ['-A', 'language=' + language]
        if language != env.languages[0]:
            build_dir = build_dir.child(language)
            #~ print 20130726, build_dir
    if env.tolerate_sphinx_warnings:
        args += ['-w', 'warnings_%s.txt' % builder]
    else:
        args += ['-W']  # consider warnings as errors
        # args += ['-vvv']  # increase verbosity
    #~ args += ['-w'+Path(env.root_dir,'sphinx_doctest_warnings.txt')]
    args += ['.', build_dir]
    cmd = ' '.join(args)
    with lcd(docs_dir):
        local(cmd)
    if build_dir_cmd is not None:
        with lcd(build_dir):
            local(build_dir_cmd)
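An illustrative call (the builder, directory, and options here are assumptions, not from the original project) that builds English HTML docs and then runs a follow-up command inside the build directory:

sphinx_build('html', 'docs',
             cmdline_args=['-a'],
             language='en',
             build_dir_cmd='touch .nojekyll')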
Example #20
def dist_package():
  execute(default_path)
  with lcd('%(deploy_dir)s/' % { 'deploy_dir':env.deploy_dir }):
    local('if [ -d %(app)s ]; then rm -rf %(app)s ; fi' % { 'app':env.app })
    local('git clone %(app_repo)s' % { 'app_repo': env.app_repo })
  with lcd('%(deploy_dir)s/%(app)s' % { 'deploy_dir':env.deploy_dir, 'app':env.app }):
    local('play dist')
Example #21
def setup_web2py():
    with lcd("/opt"):
        if os.path.isdir('/opt/web2py'):
            with lcd("/opt/web2py"):
                local("git pull")
        else:
            local("git clone https://github.com/web2py/web2py.git web2py")
Example #22
def recreate():
    """Recreate new migrations from start and remove database"""

    project_prefixed_apps = []
    for app in PROJECT_APPS:
        project_prefixed_apps.append('{}.{}'.format(PROJECT_DIR, app))

    apps = APPS + project_prefixed_apps

    for app in apps:
        with lcd(os.path.join(*app.split('.'))):
            if django.get_version() >= '1.7':
                local('rm -f ./migrations/*.py')
                local('touch ./migrations/__init__.py')
            else:
                local('rm -f ./south_migrations/*.py')
                local('touch ./south_migrations/__init__.py')
    with lcd(PROJECT_DIR):
        if django.get_version() >= '1.7':
            local('rm -f db.sqlite3')
        else:
            local('rm -rf olddb.sqlite3')

    migrate()

    manage('createsuperuser --username app --email [email protected] --noinput')
    manage('changepassword app')
Example #23
def __run(name, **kwargs):
    """
    Run an experiment.

    :param name: experiment name
    :param path: expanded path of the experiment (dynamically filled in through 'command' decorator with 'expand')
    """
    path = kwargs['path']
    check_structure(path, remove=True)
    with hide(*HIDDEN_ALL):
        for sim in ["without", "with"]:
            sim_path = join(path, "{}-malicious".format(sim))
            data, results = join(sim_path, 'data'), join(sim_path, 'results')
            # the Makefile is at experiment's root ('path')
            with lcd(sim_path):
                logger.debug(" > Running simulation {} the malicious mote...".format(sim))
                local("make run", capture=True)
            # simulations are in their respective folders ('sim_path')
            remove_files(sim_path, 'COOJA.log', 'COOJA.testlog')
            # once the execution is over, gather the screenshots into a single GIF and keep the first and
            #  the last screenshots ; move these to the results folder
            with lcd(data):
                local('convert -delay 10 -loop 0 network*.png wsn-{}-malicious.gif'.format(sim))
            network_images = {int(fn.split('.')[0].split('_')[-1]): fn for fn in listdir(data)
                              if fn.startswith('network_')}
            move_files(data, results, 'wsn-{}-malicious.gif'.format(sim))
            net_start_old = network_images[min(network_images.keys())]
            net_start, ext = splitext(net_start_old)
            net_start_new = 'wsn-{}-malicious_start{}'.format(sim, ext)
            net_end_old = network_images[max(network_images.keys())]
            net_end, ext = splitext(net_end_old)
            net_end_new = 'wsn-{}-malicious_end{}'.format(sim, ext)
            move_files(data, results, (net_start_old, net_start_new), (net_end_old, net_end_new))
            remove_files(data, *network_images.values())
            parsing_chain(sim_path)
Example #24
def build_tar(project):
    """Build the tar for the project."""

    project_dir = os.path.join('/tmp', project)
    with lcd(project_dir):
        version = local('python setup.py --version', capture=True)
        version = version.replace(' ', '')
    project_version = '{project}-{version}'.format(project=project,
                                                       version=version)
    new_project_dir = os.path.join('/tmp', project_version)
    local('rm -rf "{new_project_dir}"'.format(new_project_dir=new_project_dir))
    local('svn export "{project_dir}" "{new_project_dir}"'.format(
        project_dir=project_dir, new_project_dir=new_project_dir))
    with lcd(new_project_dir.replace('(','\(').replace(')','\)')):
        # Clean up development left overs
        local('rm -f .project')
        local('rm -f .pydevproject')

    #Tar
    project_tar = project_version + '.tar.gz'
    local('tar -C /tmp -czvf "{project_tar}" '
          '"{project_version}"'.format(project_version=project_version,
                                       project_tar=project_tar))
    local('sha256sum "{project_tar}" |awk \'{{printf "SHA256 (%s) = %s\\n", $2, $1 }}\' > distinfo'.format(project_tar=project_tar))
    local('ls -l "{project_tar}" |awk \'{{printf "SIZE (%s) = %s\\n", $8, $5 }}\' >> distinfo'.format(project_tar=project_tar))
Example #25
def checkout(package_name_version):
    with lcd(fabfile_dir):
        with lcd(".."):
            existing_packages = local("ls", capture=True).split("\n")
            uncommitted = []

            for package_name, version in package_name_version.iteritems():
                if package_name in existing_packages:
                    with lcd(package_name):
                        if check_for_uncommitted():
                            uncommitted.append(package_name)

            if len(uncommitted):
                print "we have uncommited changes in {} quitting".format(
                    ", ".join(uncommitted)
                )
                return

            for package_name, version in package_name_version.iteritems():
                if package_name not in existing_packages:
                    print "cloning {}".format(package_name)
                    local("git clone {}".format(package_name))
                    with lcd(package_name):
                        print "checking out {0} to {1}".format(
                            package_name, version
                        )
                else:
                    local("git fetch")

                with lcd(package_name):
                    local("git checkout {}".format(version))
                    local("git pull origin {}".format(version))
                    local("python setup.py develop")
Example #26
def clean():
    """
    Tidy up app. This should be run before switching projects.
    """
    root, project, src = _get_source()

    def delete_repo(repo):
        if os.path.exists(repo):
            with lcd(repo):
                out = local('git status', capture=True)
                if len(out.splitlines()) > 2:
                    print "\nWon't delete {0} until there a no uncommitted changes".format(repo)
                    exit(-1)
                else:
                    local('rm -rf {0}'.format(repo))

    with settings(warn_only=True):
        www = os.sep.join((src, 'www'))
        local('rm {0}*.html'.format(os.sep.join((www, ''))))
        local('rm {0}'.format(os.sep.join((www, 'theme'))))
        local('rm {0}'.format(os.sep.join((root, 'etc', 'config.ini'))))

    with lcd(root):
        if os.path.exists('project'):
            proj_repo = local('readlink project', capture=True)
            print proj_repo
            delete_repo(os.sep.join((root, proj_repo)))
            local('rm project')

    plugins = os.sep.join((root, 'plugins'))
    if os.path.exists(plugins):
        with lcd(plugins):
            for plugin in os.listdir(plugins):
                delete_repo(os.sep.join((plugins, plugin)))
        local('rmdir plugins')
Example #27
def run_tests(branch, project, repo_path):
    with lcd(repo_path):
        if local('git symbolic-ref HEAD', capture=True).rsplit('/', 1)[1] != branch:
            local('git branch -f {0} origin/{0}'.format(branch))
        local('git reset --hard HEAD~2')
        local('git checkout {0}'.format(branch))
        local('git pull')

    mflags = 'MAKEFLAGS=-r --no-print-directory'
    if sys.platform.startswith('linux'):
        mflags += ' -j'
    elif sys.platform.startswith('darwin'):
        mflags += ' -j16'

    with lcd(repo_path):
        local('python bootstrap.py')
        puts('[test]: repository bootstrapped in branch: {0}'.format(branch))
        puts('------------------------------------------------------------------------')

        if project == 'manual':
            pre_builders = 'json dirhtml texinfo'
            local('make {0} {1}'.format(mflags, pre_builders))
            puts('[test]: targets rebuilt: {0}.'.format(pre_builders))
            puts('------------------------------------------------------------------------')

        local('make {0} publish'.format(mflags))
        puts('[test]: repository build publish target in branch: {0}'.format(branch))
        puts('------------------------------------------------------------------------')

        local('fab stage.package'.format(mflags))
        puts('[test]: building package for: {0}'.format(branch))
        puts('------------------------------------------------------------------------')
Example #28
def test():
    with lcd('puppet/modules'):
        with lcd('nginx'):
            local('rspec')

        with lcd('apache2'):
            local('rspec')
Example #29
def scm_get_info(scm_type, scm_ref=None, directory=False):

    scm_info = None

    if not scm_ref:
        scm_ref = scm_get_ref(scm_type, True)

    if not directory:
        directory = "."

    if scm_type.lower() == "svn":
        with lcd(directory):
            with hide("running"):
                xml = local("svn info --xml", capture=True)
                dom = minidom.parseString(xml)
                scm_info = {
                    "type": scm_type,
                    "rev": dom.getElementsByTagName("entry")[0].getAttribute("revision"),
                    "url": dom.getElementsByTagName("url")[0].firstChild.wholeText,
                    "branch": scm_ref,
                }

    elif scm_type.lower() == "git":
        with lcd(directory):
            with hide("running"):
                revision = local("git describe --always", capture=True)
                repo = local("git remote -v | grep fetch", capture=True)
                scm_info = {"type": scm_type, "rev": revision, "url": repo, "branch": scm_ref}

    return scm_info
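For a git working copy, the returned dictionary has roughly this shape (values below are purely illustrative; note that "url" holds the raw "git remote -v" fetch line):

{
    "type": "git",
    "rev": "a1b2c3d",
    "url": "origin\thttps://example.com/project.git (fetch)",
    "branch": "master",
}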
Example #30
def install_django():
    with lcd(env.virtualenv_path):
        local("%s/bin/pip install django" % env.virtualenv_path)
        #if env.db_type!='sqlite3':
        local("%s/bin/pip install south" % env.virtualenv_path)
        if env.db_type=="mysql":
            local("%s/bin/pip install mysql-python" % env.virtualenv_path)
        if env.db_type=="postgresql_psycopg2":
            local("%s/bin/pip install psycopg2" % env.virtualenv_path)
        if env.db_type=="oracle":
            local("%s/bin/pip install cx_oracle" % env.virtualenv_path)
        local("%s/bin/pip install fabric" % env.virtualenv_path)

        prompt("Project Template?: \n    1)1.6 Base\n    2)BadA$$\n    3)TwoScoops\n", key='proj_template', default="0")
        if env.proj_template == '1':
            # django 1.6 Base-Template
            env.proj_template="--template https://github.com/xenith/django-base-template/zipball/master --extension py,md,rst "
        elif env.proj_template == '2':
            # BadA$$-Template
            env.proj_template="--template https://github.com/bruth/badass-django-template/zipball/master -e py,ini,gitignore,in,conf,md,sample,json -n Gruntfile.coffee "
        elif env.proj_template == '3':
            # TwoScoops-Template
            env.proj_template="--template=https://github.com/twoscoops/django-twoscoops-project/archive/master.zip --extension=py,rst,html "
        else:
            env.proj_template=""
        local("%s/bin/python %s/bin/django-admin.py startproject %s %s" % (env.virtualenv_path, env.virtualenv_path, env.proj_template, env.project_name))

    # create the projects app
    with lcd(env.django_path):
        local("%s/bin/python ../manage.py startapp %s" % (env.virtualenv_path, env.app_name))
Example #31
File: api.py Project: ddnn55/riker
def deploy_static(app_name, env_name, domain, force):
    app = App(env_name, app_name)
    bucket_name = domain or '{}-{}'.format(
        config.get('system_name',
                   uuid.uuid1().hex), app.repo.name)

    app.repo.fetch()

    version = app.repo.head_commit_id()

    s3 = boto.connect_s3()
    b = s3.lookup(bucket_name)

    if b is not None:
        version_key = b.get_key('__VERSION__')
        if version_key is not None:
            current_version = version_key.get_metadata('git-version')
            if version == current_version:
                if force:
                    print '-----> Version {} already deployed, but re-deploying anyway'.format(
                        version)
                else:
                    print '-----> Version {} already deployed!'.format(version)
                    return

    with lcd(app.repo.path):
        build_cmd = app.config.get('build_script')
        if build_cmd:
            print '-----> Building'
            local(build_cmd)

    if b is None:
        print '-----> Creating bucket {}'.format(bucket_name)
        b = s3.create_bucket(bucket_name)

    # TODO: this policy allows all users read access to all objects.
    # Need to find a way to limit access to __VERSION__ to only authenticated
    # users.
    public_access_policy = json.dumps({
        "Version":
        "2012-10-17",
        "Statement": [{
            "Sid": "PublicReadForGetBucketObjects",
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": ["arn:aws:s3:::{}/*".format(bucket_name)]
        }]
    })
    b.set_policy(public_access_policy)
    #b.configure_versioning(versioning=False)
    b.configure_website(suffix="index.html", error_key="error.html")

    def map_key_to_obj(m, obj):
        if obj.key != '__VERSION__':
            m[obj.key] = obj
        return m

    existing_keys = reduce(map_key_to_obj, b.get_all_keys(), {})

    root = normpath(join(app.repo.path, app.config.get('root_dir', '')))

    app_redirects = app.config.get('redirects', {})
    for key_name in app_redirects.keys():
        existing_keys.pop(key_name, None)

    print '-----> Uploading {} to {} bucket'.format(root, bucket_name)
    new_keys = []
    updated_keys = []
    for dirname, dirnames, filenames in walk(root):
        reldirname = relpath(dirname, root)
        reldirname = '' if reldirname == '.' else reldirname
        if os.path.commonprefix(['.git', reldirname]) == '.git':
            continue
        for filename in filenames:
            full_filename = join(reldirname, filename)
            if full_filename == '.s3':
                continue
            new_or_update = '        '
            if existing_keys.has_key(full_filename):
                new_or_update = '[UPDATE]'
                updated_keys.append(full_filename)
                key = existing_keys.pop(full_filename)
            else:
                new_or_update = '[NEW]   '
                new_keys.append(full_filename)
                key = b.new_key(full_filename)
            print '       {} Uploading {}'.format(new_or_update, full_filename)
            key.set_contents_from_filename(join(dirname, filename))
    if len(existing_keys) > 0:
        print '-----> WARNING: the following files are still present but no'
        print '       longer part of the website:'
        for k, v in existing_keys.iteritems():
            print '       {}'.format(k)

    print '-----> Tagging bucket with git version {}'.format(version)
    version_key = b.get_key('__VERSION__')
    if version_key:
        version_key.delete()
    version_key = b.new_key('__VERSION__')
    version_key.set_metadata('git-version', version)
    version_key.set_contents_from_string('')

    print '-----> Setting up redirects'
    app_redirects = app.config.get('redirects', {})
    if len(app_redirects) == 0:
        print '       No redirects.'
    else:

        def get_or_new_key(bucket, name):
            key = bucket.get_key(name)
            if key is not None:
                key.delete()
            return bucket.new_key(name)

        elb = boto.connect_elb()
        pybars_compiler = pybars.Compiler()
        for key_name, redirect_source in app_redirects.iteritems():
            redirect_template = pybars_compiler.compile(redirect_source)
            app_redirects[key_name] = redirect_template
        data = {
            'webui_dns':
            elb.get_all_load_balancers(
                load_balancer_names=['{}-web-ui'.format(env_name)])[0].dns_name
        }
        for key_name, redirect_template in app_redirects.iteritems():
            k = get_or_new_key(b, key_name)
            redirect = unicode(redirect_template(data))
            print '       Redirect {} to {}'.format(key_name, redirect)
            k.set_redirect(redirect)

    print '=====> Deployed to {}!'.format(b.get_website_endpoint())

    if domain is not None:

        # TODO: support redirection from www.<domain>
        # b_www = 'www.{}'.format(bucket_name)

        ec2 = boto.connect_ec2()
        region_name = first([
            z.region.name for z in ec2.get_all_zones()
            if z.name == config['availability_zone']
        ])
        s3_website_region = s3_website_regions[region_name]

        route53 = boto.connect_route53()
        zone_name = "{}.".format(get_tld("http://{}".format(domain)))
        zone = route53.get_zone(zone_name)
        if zone is None:
            raise Exception("Cannot find zone {}".format(zone_name))
        full_domain = "{}.".format(domain)
        a_record = zone.get_a(full_domain)
        if not a_record:
            print '-----> Creating ALIAS for {} to S3'.format(full_domain)
            changes = ResourceRecordSets(route53, zone.id)
            change_a = changes.add_change('CREATE', full_domain, 'A')
            change_a.set_alias(alias_hosted_zone_id=s3_website_region[1],
                               alias_dns_name=s3_website_region[0])
            #change_cname = records.add_change('CREATE', 'www.' + full_domain, 'CNAME')
            #change_cname.add_value(b_www.get_website_endpoint())
            changes.commit()
        else:
            print '-----> ALIAS for {} to S3 already exists'.format(
                full_domain)
            print '       {}'.format(a_record)
            if a_record.alias_dns_name != s3_website_region[0]:
                print '       WARNING: Alias DNS name is {}, but should be {}'.format(
                    a_record.alias_dns_name, s3_website_region[0])
            if a_record.alias_hosted_zone_id != s3_website_region[1]:
                print '       WARNING: Alias hosted zone ID is {}, but should be {}'.format(
                    a_record.alias_hosted_zone_id, s3_website_region[1])
            if a_record.name != full_domain:
                print '       WARNING: Domain is {}, but should be {}'.format(
                    a_record.name, full_domain)
            if a_record.type != 'A':
                print '       WARNING: Record type is {}, but should be {}'.format(
                    a_record.type, 'A')

    print '=====> DONE!'
Example #32
def make():
    with lcd(path.dirname(__file__)):
        local(
            'export PYTHONPATH=`pwd` && source .env/bin/activate && python setup.py build_ext --inplace',
            shell='/bin/bash')
Example #33
def test():
    with virtualenv(VENV_DIR) as venv_local:
        with lcd(path.dirname(__file__)):
            venv_local('pytest -x spacy/tests')
Example #34
def serve_docs(options=''):
    """Start a local server to view documentation changes."""
    with fab.lcd(HERE), virtualenv():
        local('mkdocs serve {}'.format(options))
Example #35
if 'version' not in yamlconfig:
    abort('need version')

if 'dockerfile_extensions' not in yamlconfig:
    abort('need dockerfile_extensions')

if 'network_bridge' not in yamlconfig:
    abort('need network_bridge')

if 'dev' not in yamlconfig:
    abort('need dev')
else:
    if 'build_path' not in yamlconfig['dev']:
        abort('dev need build_path')
    else:
        with hide('running'):
            with lcd(yamlconfig['project_path']):
                path = join(yamlconfig['project_path'],
                            yamlconfig['dev']['build_path'])
                local('if [[ ! -d %s ]]; then mkdir ./%s; fi' %
                      (path, yamlconfig['dev']['build_path']))

if 'prod' not in yamlconfig:
    abort('need prod')
else:
    if 'build_path' not in yamlconfig['prod']:
        abort('prod need build_path')
    else:
        with hide('running'):
            with lcd(yamlconfig['project_path']):
                path = join(yamlconfig['project_path'],
                            yamlconfig['prod']['build_path'])
Example #36
def lvirtualenv(name):
    INPUT_PATH = os.path.join(cwd, name)
    with lcd(INPUT_PATH):
        with prefix('source venv/bin/activate'):
            yield
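Assuming the original source decorates this generator with contextlib.contextmanager (the decorator is not shown above), it would be used as a context manager, e.g.:

with lvirtualenv('myproject'):
    local('pip install -r requirements.txt')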
Example #37
File: api.py Project: ddnn55/riker
 def head_commit_id(self):
     with lcd(self.path):
         return git.get_head_commit_sha1()
Example #38
def test():
    "Run the test suite locally."
    with lcd(PROJ_ROOT):
        local('{python} manage.py test'.format(**env))
Example #39
def make_virtual_env():
    "Make a virtual environment for local dev use"
    with lcd(PROJ_ROOT):
        local('virtualenv .')
        local('./bin/pip install -r requirements.txt')
Example #40
def runserver():
    "Run the development server"
    with lcd(PROJ_ROOT):
        local('{python} manage.py runserver --traceback'.format(**env))
Example #41
def deploy():
    local("curl http://zc.hehanlin.cn:6800/delproject.json -d project=spider")
    with lcd('..'):
        local("scrapyd-deploy")
        local("rm -rf build project.egg-info setup.py")
Example #42
def collect_static():
    with lcd(PROJ_ROOT):
        local('{python} manage.py collectstatic --noinput -v0 --traceback'.
              format(**env))
Example #43
def dev():
    with api.lcd('../client'):
        client_fabfile.dev()
Example #44
def build_api_docs():
    """Build the HTML API docs."""
    puts(c.magenta("Building HTML API docs..."))
    with settings(hide('running', 'stdout', 'stderr')):
        with lcd('docs'):
            local('make html')
Example #45
def doc():
    from os.path import abspath
    opts = {'builddir': '_build', 'sphinx': abspath('env/bin/sphinx-build')}
    cmd = ('{sphinx} -b html ' '-d {builddir}/doctrees . {builddir}/html')
    with fab.lcd('doc'):
        fab.local(cmd.format(**opts))
Example #46
def integ_test(gateway_host=None,
               test_host=None,
               trf_host=None,
               destroy_vm="False"):
    """
    Run the integration tests. This defaults to running on local vagrant
    machines, but can also be pointed to an arbitrary host (e.g. amazon) by
    passing "address:port" as arguments

    gateway_host: The ssh address string of the machine to run the gateway
        services on. Formatted as "host:port". If not specified, defaults to
        the `cwag` vagrant box.

    test_host: The ssh address string of the machine to run the tests on.
        Formatted as "host:port". If not specified, defaults to the
        `cwag_test` vagrant box.

    trf_host: The ssh address string of the machine to run the traffic
        server on. Formatted as "host:port". If not specified, defaults to
        the `magma_trfserver` vagrant box.
    """

    destroy_vm = bool(strtobool(destroy_vm))

    # Setup the gateway: use the provided gateway if given, else default to the
    # vagrant machine
    if not gateway_host:
        vagrant_setup("cwag", destroy_vm)
    else:
        ansible_setup(gateway_host, "cwag", "cwag_dev.yml")

    execute(_run_unit_tests)
    execute(_set_cwag_configs)
    cwag_host_to_mac = execute(_get_cwag_br_mac)
    host = env.hosts[0]
    cwag_br_mac = cwag_host_to_mac[host]

    # Transfer built images from local machine to CWAG host
    if gateway_host:
        execute(_transfer_docker_images)
    else:
        execute(_build_gateway)
    execute(_run_gateway)

    # Setup the trfserver: use the provided trfserver if given, else default to the
    # vagrant machine
    with lcd(LTE_AGW_ROOT):
        if not trf_host:
            vagrant_setup("magma_trfserver", destroy_vm)
        else:
            ansible_setup(trf_host, "trfserver", "magma_trfserver.yml")

    execute(_start_trfserver)

    # Run the tests: use the provided test machine if given, else default to
    # the vagrant machine
    if not test_host:
        vagrant_setup("cwag_test", destroy_vm)
    else:
        ansible_setup(test_host, "cwag_test", "cwag_test.yml")

    execute(_set_cwag_test_configs)
    execute(_set_cwag_test_networking, cwag_br_mac)
    execute(_start_ue_simulator)
    execute(_run_integ_tests)
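Per the docstring, the task defaults to the local vagrant boxes but can be pointed at remote hosts; the addresses below are placeholders:

fab integ_test
fab integ_test:gateway_host=192.168.60.142:22,test_host=192.168.60.143:22,trf_host=192.168.60.144:22,destroy_vm=True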
Example #47
def pex():
    with virtualenv(VENV_DIR) as venv_local:
        with lcd(path.dirname(__file__)):
            venv_local('pex . -e spacy -o dist/spacy', direct=True)
Example #48
def generate_static_files():
    with lcd(env.web_dir):
        local("rm -rf public/static/*")
        local("grunt --env=dist")
        local("./manage.py collectstatic --noinput")
Example #49
def sdist():
    with virtualenv(VENV_DIR) as venv_local:
        with lcd(path.dirname(__file__)):
            local('python setup.py sdist')
Example #50
def clean():
    with lcd(path.dirname(__file__)):
        with virtualenv(VENV_DIR) as venv_local:
            venv_local('python setup.py clean --all')
Example #51
def schematic(run_dir=OLYMPIA):
    with lcd(run_dir):
        local('../venv/bin/python ../venv/bin/schematic migrations')
Example #52
def wheel():
    with virtualenv(VENV_DIR) as venv_local:
        with lcd(path.dirname(__file__)):
            venv_local('python setup.py bdist_wheel')
Example #53
def managecmd(cmd, run_dir=OLYMPIA):
    with lcd(run_dir):
        local('../venv/bin/python manage.py %s' % cmd)
Example #54
def make():
    with virtualenv(VENV_DIR) as venv_local:
        with lcd(path.dirname(__file__)):
            venv_local('pip install -r requirements.txt')
            venv_local('pip install pex')
            venv_local('python setup.py build_ext --inplace', env_vars=['PYTHONPATH=`pwd`'])
Example #55
def main(configPath):
    errors = {}
    with open(configPath) as envFile:
        envData = json.load(envFile)
        for key, val in envData.items():
            env[key] = val

    local('mkdir -p ' + env.tmpFolder)
    with lcd(env.tmpFolder):
        recursiveClone(env["source-repository"], env["source-commit"])
        print("Checking canRun functions...")
        for repoTuple in sorted(clonedRepositories.items(),
                                key=lambda x: x[1].id,
                                reverse=True):
            oldEnv = deepcopy(env)
            repo = repoTuple[1]
            for key, value in repo.dependencies.items():
                env[key] = value
            env['source-repository'] = repo.repository
            env['source-commit'] = repo.commit
            try:
                with lcd(path.join(repo.cloneFolder, 'deploy')):
                    sys.path.append(local('pwd', capture=True))
                    import deploy
                    if 'canRun' not in dir(deploy):
                        print("No function canRun for deploy script in {}!".
                              format(repo.repository))
                    else:
                        print("Function canRun exist for deploy script in {}!".
                              format(repo.repository))
                        from deploy import canRun
                        try:
                            ret_value = execute(canRun)
                        except Exception as e:
                            print(e)
                            ret_value = {'all': False}

                        for host, value in ret_value.items():
                            if value:
                                print("Deploy can run!")
                            else:
                                raise EnvironmentError(
                                    "Can not continue, missing requirements for deploy script in {}! Aborting..."
                                    .format(repo.repository))
                    sys.path.remove(local('pwd', capture=True))
            except ImportError:
                print("No module deploy for {}!".format(repo.repository))
                pass
            except EnvironmentError:
                local('rm -rf ../{}'.format(env.tmpFolder))
                raise
            finally:
                del sys.modules['deploy']
                env.clear()
                for key, value in oldEnv.items():
                    env[key] = value
        print("Check done!")
        print("Running deploy functions...")
        for repoTuple in sorted(clonedRepositories.items(),
                                key=lambda x: x[1].id,
                                reverse=True):
            oldEnv = deepcopy(env)
            repo = repoTuple[1]
            for key, value in repo.dependencies.items():
                env[key] = value
            env['source-repository'] = repo.repository
            env['source-commit'] = repo.commit
            try:
                with lcd(repo.cloneFolder):
                    with lcd('deploy'):
                        sys.path.append(local('pwd', capture=True))
                        from deploy import runDeploy
                        sys.path.remove(local('pwd', capture=True))
                        print("Running Deploy for {}".format(repo.repository))
                        error = execute(runDeploy)
                        for host, returnValue in error.items():
                            if returnValue is None:
                                continue
                            if host not in errors:
                                errors[host] = {}
                            errors[host][repo.repository] = returnValue

                del sys.modules['deploy']
                env.clear()
                for key, value in oldEnv.items():
                    env[key] = value
            except Exception:
                raise
            finally:
                local('rm -rf ' + repo.cloneFolder)
        print("Run done!")
        if len(errors) != 0:
            print("Got following errors:")
            for host, errorItem in errors.items():
                print(host)
                for repo, errorArray in errorItem.items():
                    print(f"\t{repo}")
                    for error in errorArray:
                        print(f"\t\t- {error}")
Example #56
def update_locales():
    with lcd(pjoin(OLYMPIA, 'locale')):
        local("VENV=%s ./compile-mo.sh ." % VIRTUALENV)
Example #57
    def handle(self, *args, **options):
        # Stealth option to disable stdout
        if options.get('silent', False):
            self.stdout = open(os.devnull, 'w')

        # Stealth option that assigns where to pipe initial output
        pipe = options.get('pipe', None)
        if pipe is None:
            pipe = '> /dev/null 2>&1'
            try:
                if 'celery' in settings.LOGGING['loggers']['celery'][
                        'handlers']:
                    logfile = settings.LOGGING['handlers']['celery'][
                        'filename']
                    pipe = '>> {} 2>&1'.format(logfile)
            except KeyError:
                pass

        loglevel = options['loglevel'] or ('DEBUG'
                                           if settings.DEBUG else 'INFO')
        celery_options = [
            '--loglevel={}'.format(loglevel),
            '--pool={}'.format(options['pool']),
            '--pidfile={}'.format(options['celery_pidfile']),
        ] + self._parse_suboptions(options['celery_options'])
        celerybeat_options = [
            '--loglevel={}'.format(loglevel),
            '--pidfile={}'.format(options['celerybeat_pidfile']),
        ] + self._parse_suboptions(options['celerybeat_options'])

        with lcd(settings.PROJECT_ROOT), hide('commands'):  # pylint: disable=not-context-manager
            if not options['celerybeat_only']:
                local(
                    self.celery_cmd(cmd='celery worker',
                                    opts=' '.join(celery_options),
                                    pipe=pipe))

            if not options['celery_only']:
                local(
                    self.celery_cmd(cmd='celerybeat',
                                    opts=' '.join(celerybeat_options),
                                    pipe=pipe))

        pidfiles = [options['celery_pidfile'], options['celerybeat_pidfile']]
        wait_sec = 0

        while wait_sec < self.max_wait_sec and len(pidfiles) > 0:
            time.sleep(1)
            wait_sec += 1

            for i in range(len(pidfiles))[::-1]:
                if os.path.exists(pidfiles[i]):
                    pidfiles.pop(i)

        for name in ('celery', 'celerybeat'):
            if os.path.exists(options[name + '_pidfile']):
                self.stdout.write(
                    self.style.SUCCESS(
                        "Successfully started '{}' in {} seconds.".format(
                            name, wait_sec)))
            else:
                self.stdout.write(
                    self.style.NOTICE("Failed to start '{}'.".format(name)))
Example #58
def tojson():
    with lcd('scripts'):
        local('./continents.py')
        local('./countries.py')
        local('./cities.py')
Example #59
def setup(silent=False, **kwargs):
    """
    Setup the framework.
    """
    recompile = False
    # install Cooja modifications
    if not check_cooja(COOJA_FOLDER):
        logger.debug(" > Installing Cooja add-ons...")
        # modify Cooja.java and adapt build.xml and ~/.cooja.user.properties
        modify_cooja(COOJA_FOLDER)
        update_cooja_build(COOJA_FOLDER)
        update_cooja_user_properties()
        recompile = True
    # install VisualizerScreenshot plugin in Cooja
    visualizer = join(COOJA_FOLDER, 'apps', 'visualizer_screenshot')
    if not exists(visualizer):
        logger.debug(" > Installing VisualizerScreenshot Cooja plugin...")
        copy_folder('src/visualizer_screenshot', visualizer)
        recompile = True
    # recompile Cooja for making the changes take effect
    if recompile:
        with lcd(COOJA_FOLDER):
            logger.debug(" > Recompiling Cooja...")
            with settings(warn_only=True):
                local("ant clean")
                local("ant jar")
    else:
        logger.debug(" > Cooja is up-to-date")
    # install imagemagick
    with hide(*HIDDEN_ALL):
        imagemagick_apt_output = local('apt-cache policy imagemagick',
                                       capture=True)
        if 'Unable to locate package' in imagemagick_apt_output:
            logger.debug(" > Installing imagemagick package...")
            sudo("apt-get install imagemagick -y &")
        else:
            logger.debug(" > Imagemagick is installed")
    # install msp430 (GCC) upgrade
    with hide(*HIDDEN_ALL):
        msp430_version_output = local('msp430-gcc --version', capture=True)
    if 'msp430-gcc (GCC) 4.7.0 20120322' not in msp430_version_output:
        txt = "In order to extend msp430x memory support, it is necessary to upgrade msp430-gcc.\n" \
              "Would you like to upgrade it now ? (yes|no) [default: no] "
        answer = std_input(txt, 'yellow')
        if answer == "yes":
            logger.debug(
                " > Upgrading msp430-gcc from version 4.6.3 to 4.7.0...")
            logger.warning(
                "If you encounter problems with this upgrade, please refer to:\n"
                "https://github.com/contiki-os/contiki/wiki/MSP430X")
            with lcd('src/'):
                logger.warning(
                    " > Upgrade now starts, this may take up to 30 minutes...")
                sudo('./upgrade-msp430.sh')
                sudo('rm -r tmp/')
                local('export PATH=/usr/local/msp430/bin:$PATH')
                register_new_path_in_profile()
        else:
            logger.warning("Upgrade of library msp430-gcc aborted")
            logger.warning(
                "You may experience problems of mote memory size at compilation"
            )
    else:
        logger.debug(" > Library msp430-gcc is up-to-date (version 4.7.0)")
Example #60
def bootstrap(**kwargs):
    """call mr.awsome.ezjail's bootstrap """
    with fab.lcd(_local_path('setup/vm-master')):
        _bootstrap(**kwargs)