def pack(app):
    """Package the named monitoring component into a deployable tar.gz.

    Stages the component's run script and sources into a scratch directory,
    archives it, then removes the staging directory again.
    """
    if app == 'clientroot' or app == 'clientzhangnu':
        with lcd('../client'):
            local('rm -f monclient.tar.gz')
            local('mkdir bin')
            local('cp ../deploy/run_monclient.sh bin')
            local('cp mon_client.py util.py bin')
            local('tar -zcf monclient.tar.gz bin/')
            local('rm -rf bin')
    elif app == 'supervisor':
        with lcd('../supervisor'):
            local('rm -f monsupervisor.tar.gz')
            local('mkdir monsupervisor')
            local('cp ../deploy/run_monsupervisor.sh monsupervisor')
            local('cp mon_supervisor.py config.py util.py monsupervisor')
            local('tar -zcf monsupervisor.tar.gz monsupervisor')
            local('rm -rf monsupervisor')
    elif app == 'webserver':
        with lcd('../webserver'):
            local('rm -f monwebserver.tar.gz')
            local('mkdir monwebserver')
            local('cp ../deploy/run_webserver.sh monwebserver')
            local('cp mon_webserver.py config.py util.py monwebserver')
            # the webserver also ships its static assets and templates
            local('cp -r static/ monwebserver')
            local('cp -r templates/ monwebserver')
            local('tar -zcf monwebserver.tar.gz monwebserver')
            local('rm -rf monwebserver')
def setupGoDeps(comp=None, gitProto='http'):
    """Clone the external Golang dependency repos into the external src dir.

    comp: optional component name to restrict which dependencies are fetched.
    gitProto: 'ssh' clones via git@; anything else uses https.
    """
    print 'Fetching external Golang repos ....'
    info = setupHandler().getGoDeps(comp)
    extSrcDir = setupHandler().getExtSrcDir()
    org = setupHandler().getOrg()
    for rp in info:
        with lcd(extSrcDir):
            if gitProto == "ssh":
                repoUrl = '[email protected]:%s/%s' %(org , rp['repo'])
            else:
                repoUrl = 'https://github.com/%s/%s' %(org , rp['repo'])
            # Optional destination the cloned repo gets moved/renamed to.
            dstDir = rp['renamedst'] if rp.has_key('renamedst') else ''
            dirToMake = dstDir
            cloned = False
            if not (os.path.exists(extSrcDir+ dstDir + '/' + rp['repo'])):
                cmd = 'git clone '+ repoUrl
                local(cmd)
                cloned = True
            # Pin the checkout to a release tag when one is specified.
            if rp.has_key('reltag'):
                cmd = 'git checkout tags/'+ rp['reltag']
                with lcd(extSrcDir+rp['repo']):
                    local(cmd)
            # When dstDir names a leaf (no trailing '/'), create only its parent.
            if not dstDir.endswith('/'):
                dirToMake = dstDir[0:dstDir.rfind('/')]
            if dirToMake:
                cmd = 'mkdir -p ' + dirToMake
                local(cmd)
            # Relocate a freshly cloned repo when a rename is requested;
            # skipped for pre-existing checkouts (cloned is False then).
            if rp.has_key('renamesrc') and cloned:
                cmd = 'mv ' + extSrcDir+ rp['renamesrc']+ ' ' + extSrcDir+ rp['renamedst']
                local(cmd)
def install(keystone_path=settings.KEYSTONE_ROOT):
    """Download and install the Back-end and its dependencies."""
    # keystone_path is expected to carry a trailing slash; strip it for the
    # directory-existence check.
    if os.path.isdir(keystone_path[:-1]):
        print 'Already downloaded.'
    else:
        lrun(('git clone https://github.com/ging/keystone.git '
              '{0}').format(keystone_path))
    with lcd(keystone_path):
        dependencies = ' '.join(settings.UBUNTU_DEPENDENCIES['keystone'])
        lrun('sudo apt-get install -y {0}'.format(dependencies))
        lrun('sudo cp etc/keystone.conf.sample etc/keystone.conf')
        lrun('sudo python tools/install_venv.py')
        # Uncomment config file: enable the admin token and override the
        # admin/public ports with the configured values.
        with lcd('etc/'):
            lrun(("sudo sed -i "
                  "'s/#admin_token=ADMIN/admin_token={0}/g' "
                  "keystone.conf").format(settings.KEYSTONE_ADMIN_TOKEN))
            lrun(("sudo sed -i "
                  "'s/#admin_port=35357/admin_port={0}/g' "
                  "keystone.conf").format(settings.KEYSTONE_ADMIN_PORT))
            lrun(("sudo sed -i "
                  "'s/#public_port=5000/public_port={0}/g' "
                  "keystone.conf").format(settings.KEYSTONE_PUBLIC_PORT))
    print 'Done!'
    check(keystone_path)  # run check
def create_project(harvest_version, project_name):
    """Create a new Harvest project from the project template archive.

    Downloads (or reuses) the template zip for harvest_version, unpacks it,
    renames the template directory and package to project_name, and records
    the package name/version in the project's harvest rc file.
    """
    package_name = project_dir = project_name
    if os.path.exists(project_dir):
        print(red('Error: project directory already exists'))
        sys.exit(1)
    # NOTE(review): `template` is read from enclosing/module scope — a custom
    # template URL takes precedence over the configured release archive.
    if template:
        archive_url = '{0}/archive/HEAD.zip'.format(template)
        archive = 'custom-template.zip'
    else:
        archive_url = config.TEMPLATE_ARCHIVE_URL.format(harvest_version)
        archive = config.TEMPLATE_ARCHIVE.format(harvest_version)
    download = True
    if os.path.exists(archive):
        download = prompt('{0} archive already exists. Redownload? '.format(archive), default='n', validate=r'^[YyNn]$').lower()
        if download == 'n':
            download = False
        else:
            os.remove(archive)
    if download:
        print(green('- Downloading Harvest @ {0}'.format(harvest_version)))
        local('wget -O "{0}" "{1}"'.format(archive, archive_url), shell='/bin/bash')
    # Expected directory name of template
    template_dir = zipfile.ZipFile(archive).namelist()[0].rstrip('/')
    # Remove existing unarchived directory
    if os.path.exists(template_dir):
        local('rm -rf {0}'.format(template_dir), shell='/bin/bash')
    with hide(*hidden_output):
        local('unzip {0}'.format(archive), shell='/bin/bash')
        local('rm -rf {0}'.format(archive), shell='/bin/bash')
    # Rename template to project name
    local('mv {0} {1}'.format(template_dir, project_dir), shell='/bin/bash')
    # Get the template's main package name
    cparser = ConfigParser()
    cparser.read(os.path.join(project_dir, config.HARVESTRC_PATH))
    old_package_name = cparser.get('harvest', 'package')
    # Replace old package name with new one
    find_replace(project_dir, old_package_name, package_name)
    # Rename package to new name
    with lcd(project_dir):
        local('mv {0} {1}'.format(old_package_name, package_name), shell='/bin/bash')
    # Set the new package name and version (version is taken from the
    # archive directory suffix, e.g. 'template-1.2.3' -> '1.2.3')
    cparser.set('harvest', 'package', package_name)
    cparser.set('harvest', 'version', template_dir.split('-')[-1])
    # NOTE(review): lcd() only affects Fabric commands, not Python's cwd, so
    # this open() resolves HARVESTRC_PATH relative to the fabfile's cwd,
    # not project_dir — confirm this is intentional.
    with lcd(project_dir):
        with open(config.HARVESTRC_PATH, 'w') as rc:
            cparser.write(rc)
def setUp(self):
    """Create a scratch directory under /vagrant holding a fresh git repo."""
    super(LocalDirectoryAlreadyExistsWithGitRepoTest, self).setUp()
    quiet = hide('warnings', 'running', 'stdout', 'stderr')
    with settings(quiet):
        with lcd('/vagrant'):
            local('mkdir scratch')
        with lcd('/vagrant/scratch'):
            local('git init')
def pull():
    """Pull changes from both repositories"""
    for repo_dir in (MAIN_DIR, STATIC_DIR):
        with lcd(repo_dir):
            local("git pull")
def install_development_deps():
    """ Installs all development dependencies.

    Installs apt packages, the Heroku toolbelt, and a pathogen-based vim
    setup with a few plugins and a color scheme.
    """
    # deps via apt-get — note: the Ubuntu meta-package is 'build-essential';
    # the original 'build-essentials' does not exist and made apt-get fail.
    deps = ' '.join([
        'build-essential',
        'bzr',
        'libc++1',
        'git',
        'python-dev',
        'python-pip',
        'vim'
    ])
    local('sudo apt-get -y install %s' % deps)

    # install heroku-toolbelt
    local('wget -qO- https://toolbelt.heroku.com/install-ubuntu.sh | sh')

    # install/setup vim stuff — 'mkdirk' was a typo that broke the whole
    # vim setup; -p also makes the step idempotent.
    local('mkdir -p ~/.vim')
    with lcd('~/.vim'):
        local('mkdir autoload bundle colors')
        with lcd('./autoload'):
            local('wget https://tpo.pe/pathogen.vim')
        with lcd('./bundle'):
            local('git clone [email protected]:kien/ctrlp.vim.git')
            local('git clone [email protected]:digitaltoad/vim-jade.git')
            local('git clone [email protected]:plasticboy/vim-markdown.git')
        with lcd('./colors'):
            local('wget https://raw.githubusercontent.com/nanotech/jellybeans.vim/master/colors/jellybeans.vim')
def install_node_stuff():
    """ Install a more current version of node than what the ubuntu ppa has.

    I ran into an issue where I needed some sync functions in the fs package
    that only the newer versions of node had.
    """
    # TODO possibly merge some functionality with below node_stuff task into a
    # general node_stuff function/task
    with lcd('/tmp'):
        # download and untar node
        local('wget -O node-v0.12.2.tar.gz http://nodejs.org/dist/v0.12.2/node-v0.12.2-linux-x64.tar.gz')
        local('tar xzvf node-v0.12.2.tar.gz')
        # copy bin to /usr/bin
        with lcd('./node-v0.12.2/bin'):
            local('sudo cp ./node /usr/bin/node-v0.12.2')
        # add node alias to .bashrc — bug fix: the original appended to a
        # relative '.bashrc' while inside lcd('/tmp'), writing /tmp/.bashrc;
        # target the user's ~/.bashrc explicitly (same emitted alias line).
        local('echo "# add node alias" >> ~/.bashrc')
        local('echo "alias node=/usr/bin/node-v0.12.2" >> ~/.bashrc')
    # globally install npm packages
    packages = ' '.join([
        'bower',
        'express-generator'
    ])
    local('npm install -g %s' % packages)
def update_virtualenv():
    """Ensure the build's virtualenv exists, then install requirements."""
    virtualenv_dir = os.path.join(env.root_dir, 'environments')
    create_directory(virtualenv_dir)
    build_env_path = os.path.join(virtualenv_dir, env.build)
    if not os.path.exists(build_env_path):
        with lcd(virtualenv_dir):
            local('sudo virtualenv %s' % env.build)
    with lcd(env.code_dir):
        source_cmd('pip install -r deploy/requirements.txt')
def checkout_branch(cls):
    """Clone the release checkout if missing, then sync env.branch_name."""
    with lcd(env.home_dir):
        if not lexists(env.release_name):
            local("git clone {0} {1}".format(env.github_url, env.release_name))
        with lcd(env.release_name):
            sync_cmds = ("git fetch",
                         "git checkout {0}".format(env.branch_name),
                         "git pull origin {}".format(env.branch_name))
            for cmd in sync_cmds:
                local(cmd)
def zip_packages():
    """Zip every package directory under PKG_DIR and remove the source dir."""
    with lcd(PKG_DIR):
        for entry in listdir(PKG_DIR):
            pkg_path = join(PKG_DIR, entry)
            if not isdir(pkg_path):
                continue
            with lcd(pkg_path):
                local("zip -r -0 %s.zip . -i \*" % pkg_path)
            rmtree(pkg_path)
def compile_opencv():
    """Configure, build and install OpenCV from SRC_DIR via CMake."""
    with lcd(SRC_DIR):
        local("rm -rf release")
        local(" mkdir release")
    build_steps = (
        "cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D BUILD_PYTHON_SUPPORT=ON ..",
        "make",
        "sudo make install",
    )
    with lcd(os.path.join(SRC_DIR, "release")):
        for step in build_steps:
            local(step)
def sdk_release(is_upload_archives=True):
    """
    Build library into public/card.io-Android-SDK.

    Tags both the internal and public repos with the release version,
    builds the aar, copies it plus the sdk/sample-app files into the
    public repo, and commits the result there.
    """
    execute(sdk_setup)
    version_str = _get_release_version()
    # Tag the internal repo first (after confirming any overwrite).
    _confirm_tag_overwrite(env.top_root, version_str)
    local("git tag -f {0}".format(version_str))
    with settings(hide(*env.to_hide)):
        print(colors.blue("building sdk {version_str} ".format(**locals())))
        build(is_upload_archives)
        print(colors.blue("extracting sdk {version_str} to public repo".format(**locals())))
        release_path = os.path.join(env.top_root, "card.io", "build", "outputs", "aar", "card.io-release.aar")
        dest_file_name = "card.io-{version_str}.aar".format(**locals())
        with lcd(env.public_repo_path):
            # remove old everything
            local("rm -rf *")
            local("mkdir aars")
            local("cp {release_path} aars/{dest_file_name}".format(**locals()))
            # update all sdk files (the second cp picks up dotfiles too)
            local("cp -r " + os.path.join(env.top_root, "sdk") + "/* .")
            local("cp -r " + os.path.join(env.top_root, "sdk") + "/.[!.]* .")
            # update sample app and point it at the released artifact
            local("cp -R " + os.path.join(env.top_root, "SampleApp") + " .")
            local("sed -i '' 's/io.card:android-sdk:REPLACE_VERSION/io.card:android-sdk:{version_str}/g' ./SampleApp/build.gradle".format(**locals()))
            local("sed -i '' 's/io.card:android-sdk:REPLACE_VERSION/io.card:android-sdk:{version_str}/g' ./README.md".format(**locals()))
            # add everything to git and commit
            local("git add .")
            local("git add -u .")
            local("git commit -am \"Update library to {version_str}\"".format(**locals()))
    # Tag the public repo as well.
    _confirm_tag_overwrite(env.public_repo_path, version_str)
    with lcd(env.public_repo_path):
        local("git tag -f {0}".format(version_str))
    print
    print(colors.white("Success!"))
    print "The distribution files are now available in {public_repo_path}".format(**env)
    print
    if is_upload_archives == True:
        print "The aar file has been published to sonatype's mavenCentral staging repo. Promote it!"
        print
    print "Commit proguard-data"
    print "Verify and merge back to master"
    print
def tarball():
    """ Generates tarball """
    local('rm -f build.tgz nodejs.tgz')
    archives = (
        ('build', 'tar czf ../build.tgz .'),
        ('nodejs', 'tar czf ../nodejs.tgz $(git ls-files)'),
    )
    for src_dir, tar_cmd in archives:
        with lcd(src_dir):
            local(tar_cmd, capture=False)
def runserver():
    """Migrate and start the dev server on :9000, yield, then clean up."""
    with lcd("backendfail"):
        for cmd in (r'python manage.py migrate',
                    r'python manage.py runserver 0.0.0.0:9000 &'):
            local(cmd)
    time.sleep(3)
    yield
    # SystemError from killall (no process left) is deliberately ignored.
    with lcd("backendfail"), suppress(SystemError):
        local(r'killall python')
        local(r'rm -f db.sqlite3')
def create_virtualenv():
    """ Create empty virtualenv in var/virtualenv folder """
    with lcd(PROJECT_DIR):
        # os.mkdir is unaffected by lcd, so VIRTUALENV_DIR must resolve
        # correctly from the process cwd as well.
        if not p.isdir(VIRTUALENV_DIR):
            os.mkdir(VIRTUALENV_DIR)
        with lcd(VIRTUALENV_DIR):
            local('virtualenv --no-site-packages {}'.format(PROJECT_NAME))
def setUp(self):
    """Stub fabric's lcd() so the test can assert it is used for working_dir."""
    super(TestProjectBuilder, self).setUp()
    self.working_dir = 'a directory'
    # Record-phase: build a mock context manager that expects exactly one
    # __enter__ and one __exit__ (with any exception triple).
    lcd_mock = self.mox.CreateMockAnything()
    lcd_mock.__enter__()
    lcd_mock.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
    # Replace context_managers.lcd and expect it to be called with
    # self.working_dir, returning the mock context manager above.
    self.mox.StubOutWithMock(context_managers, 'lcd')
    context_managers.lcd(self.working_dir).AndReturn(lcd_mock)
def _create_directory_layout(developers):
    """Create each developer's projects dir with per-project uwsgi/nginx configs."""
    for dev in developers:
        with lcd('/home/{0}'.format(dev)):
            local('sudo mkdir -p projects')
            with lcd('projects'):
                for project in PROJECTS:
                    _create_project(project)
                    _create_uwsgi_config(project, dev)
                    _create_nginx_config(project, dev)
                    local('sudo chown -R {user}:{group} /home/{user}/projects/{project}/'.format(
                        user=dev, group=DEVELOPERS_USERGROUP, project=project))
                    local('sudo chmod -R 755 /home/{user}/projects/{project}/'.format(
                        user=dev, project=project))
def build_ui():
    """Rebuild the splice static UI bundles from both frontend trees."""
    local('rm -rf splice/static/build')
    steps = (
        ('ui', ('npm install', 'npm run bundle')),
        ('front_end', ('npm install',
                       'npm run build:dist',
                       'mv dist ../splice/static/build/campaign-manager')),
    )
    for subdir, commands in steps:
        with lcd(subdir):
            for command in commands:
                local(command)
def build():
    """Build frontend assets and install gameserver/api dependencies."""
    build_steps = (
        ('frontend/', ('npm install',
                       'bower install --config.interactive=false --allow-root',
                       'brunch build --production')),
        ('gameserver/', ('npm install',)),
        ('api/', ('composer install --prefer-source --no-interaction --optimize-autoloader --no-scripts',)),
    )
    for subdir, commands in build_steps:
        with lcd(env.local_dir + subdir):
            for command in commands:
                local(command)
def setup():
    """Clone REPOSITORY into the build dir and switch it to gh-pages."""
    clean()
    target_dir = os.path.join(BUILD_DIR, MAIN_TARGET)
    with lcd(BASE_DIR):
        local('mkdir -p %s' % target_dir)
        local('git clone %s %s' % (REPOSITORY, target_dir))
        with lcd(target_dir):
            branch_setup = ('git symbolic-ref HEAD refs/heads/gh-pages',
                            'rm -rf .git/index',
                            'git clean -fdx',
                            'git pull origin gh-pages',
                            'touch .nojekyll')
            for cmd in branch_setup:
                local(cmd)
def setup():
    """Clone the django-101 repo into the build dir and switch to gh-pages."""
    clean()
    target_dir = os.path.join(BUILD_DIR, MAIN_TARGET)
    with lcd(BASE_DIR):
        local('mkdir -p %s' % target_dir)
        local('git clone [email protected]:OpenTechSchool/django-101.git %s' % target_dir)
        with lcd(target_dir):
            branch_setup = ('git symbolic-ref HEAD refs/heads/gh-pages',
                            'rm .git/index',
                            'git clean -fdx',
                            'git pull origin gh-pages',
                            'touch .nojekyll')
            for cmd in branch_setup:
                local(cmd)
def create_local_db( curr_app, customer_name ):
    """Create a customer database from the belmiro3 SQL template."""
    abs_repo_folder = curr_app.get_abs_local_folder()
    term.printLog(repr(abs_repo_folder))
    # base_folder is the parent of the app's repo folder
    base_folder = '/'.join(abs_repo_folder.split('/')[:-1])
    term.printLog('base_folder: %s' % repr(base_folder))
    subst = {'c': customer_name}
    with lcd('%s/%s' % (base_folder, customer_name)):
        with lcd('private/resources/sql/dbs'):
            local('cp belmiro3-template.sql %s.sql' % customer_name)
            local("sed -i 's/belmiro3_template/%(c)s/g' %(c)s.sql" % subst)
            local('psql -f %(c)s.sql -v ON_ERROR_STOP=1' % subst)
def _setupGitRepo (repo, srcDir, userRepoPrefix, remoteRepoPrefix):
    """Clone the user's fork of `repo` into srcDir and sync it from upstream.

    userRepoPrefix: URL prefix of the user's fork.
    remoteRepoPrefix: optional URL prefix of the canonical upstream repo.
    """
    with lcd(srcDir):
        if not (os.path.exists(srcDir + repo) and os.path.isdir(srcDir+ repo)):
            cmd = 'git clone '+ userRepoPrefix + repo
            local(cmd)
            # NOTE(review): upstream wiring reconstructed as nested under the
            # fresh-clone branch, since `git remote add` would fail on an
            # already-configured checkout — confirm against callers.
            if remoteRepoPrefix:
                with lcd(srcDir +repo):
                    cmd = 'git remote add upstream ' + remoteRepoPrefix + repo + '.git'
                    local(cmd)
                    # Bring master up to date with the canonical upstream.
                    commandsToSync = ['git fetch upstream', 'git checkout master', 'git merge upstream/master']
                    for cmd in commandsToSync:
                        local(cmd)
def backup():
    """Backup DB and Media files to Dropbox (require dropbox-uploader installed)"""
    backup_folder = os.path.join(BASE_DIR,"backup")
    backup_name = time.strftime("%Y%m%d") + ".zip"
    # Start from a clean backup folder every run.
    local("rm -r {}".format(backup_folder))
    if not os.path.exists(backup_folder):
        os.mkdir(backup_folder)
    with lcd("/var/www"):
        media_zip = os.path.join(backup_folder, "media.zip")
        local("zip -r {} {}".format(media_zip, "wox_media"))
    with lcd(backup_folder):
        local("PGPASSWORD=scott pg_dump -U wox wox > {}".format("woxdb.dump"))
        local("zip {} woxdb.dump media.zip".format(backup_name))
        local("/root/github/Dropbox-Uploader/dropbox_uploader.sh upload {} {}".format(backup_name, backup_name))
def git_prepare():
    """Create the project's shared bare repo and seed it from the template."""
    # Create an empty bare repository for the new project
    with lcd('/var/git/'):
        local('mkdir {0}.git'.format(env.project_name))
        with lcd('{0}.git/'.format(env.project_name)):
            local('git init --bare')
            # Group-writable permissions and allow pushes from the group
            local('sudo chmod -R g+ws *')
            local('git repo-config core.sharedRepository true')
    # Create a local repository by cloning the template project's files
    with lcd('/tmp/'):
        local('git clone {repo} --branch={branch} {dir}'.format(repo=BLANK_PROJECT_REPO, branch=BLANK_PROJECT_BRANCH_NAME, dir=env.project_name))
        env.working_dir = '/tmp/{0}/{1}/'.format(env.project_name, BLANK_PROJECT_NAME)
        # Drop the template's git history before re-initialising below
        local('rm -rf {0}/.git'.format(env.project_name))
        config_writer.write_db_settings('production')
        config_writer.write_db_settings('staging')
        config_writer.write_db_settings('remote_testing')
        config_writer.write_fabfile()
        config_writer.write_project_name_in_css()
        config_writer.write_secret_key()
        config_writer.write_managepy()
        config_writer.create_settings_per_developer(DEVELOPERS, DEV_DB_PASSWORD)
    with lcd('/tmp/{0}/{1}'.format(env.project_name, BLANK_PROJECT_NAME)):
        # Configure the local identity used for the commit
        local('git config --global user.email "{0}@fc"'.format(env.user))
        local('git config --global user.name "{0}"'.format(env.user))
        local('git init')
        # Commit the changes into the local repository
        local('git add .')
        local('git commit -m "INITIAL"')
        # Link the local and the shared repositories
        local('git remote add origin /var/git/{0}.git/'.format(env.project_name))
        # Create the dev branch
        local('git checkout -b dev')
        # Push all branches from the local repository to the shared one
        local('git push --all')
        # Remove the temporary repository
        local('rm -rf /tmp/{0}/'.format(env.project_name))
def fetchRepos (comp=None): global gSrRepos global gBranches print 'Fetching Snaproute repositories dependencies....' srRepos = gSrRepos if comp != None : srRepos = [comp] local('mkdir -p tmp') with lcd('tmp'): for repo in srRepos: local('git clone '+ 'https://github.com/OpenSnaproute/' + repo + '.git') with lcd(repo): local('git remote add upstream https://github.com/SnapRoute/' + repo + '.git') local('git fetch upstream')
def deploy(commit_msg=None):
    """Publish the octopress blog: push local changes, rebuild the site on
    the remote host, and install it into /srv/keyonly.com behind nginx."""
    localpath = os.path.dirname(os.path.realpath(__file__))
    # Commit local changes first (best effort, hence warn_only).
    if commit_msg:
        with lcd(localpath):
            with settings(warn_only=True):
                local('git commit -am "{commit_msg}"'.format(commit_msg=commit_msg))
    with lcd(localpath):
        with settings(warn_only=True):
            local('git push')
    # Bootstrap the remote blogging workspace on first run.
    with cd('~'):
        if not dir_exists('blogging'):
            run('mkdir blogging')
            with cd('blogging'):
                run('git clone git://github.com/imathis/octopress.git')
                run('git clone git://github.com/tly1980/my_blog.git')
            with cd('~/blogging/octopress'):
                with prefix('source ~/.bash_profile'):
                    # install the desire ruby version
                    run('bundle install')
    with cd('~/blogging/my_blog'):
        run('git pull')
    # Re-link the blog sources into octopress and regenerate the site.
    with cd('~/blogging/octopress'):
        with settings(warn_only=True):
            run('rm Rakefile _config.yml config.rb source')
        run('ln -s ../my_blog/Rakefile .')
        run('ln -s ../my_blog/_config.yml .')
        run('ln -s ../my_blog/config.rb .')
        run('ln -s ../my_blog/source .')
        run('rake generate')
    # Install the generated site and (re)configure nginx.
    with cd('~'):
        with settings(warn_only=True):
            sudo('rm -rvf /srv/keyonly.com')
        sudo('cp -r blogging/octopress/public /srv/keyonly.com')
        sudo('chmod -R 0755 /srv/keyonly.com')
    file_write('/etc/nginx/sites-available/keyonly.com', site_cfg, sudo=True)
    if not file_exists('/etc/nginx/sites-enabled/keyonly.com'):
        sudo('ln -s /etc/nginx/sites-available/keyonly.com /etc/nginx/sites-enabled/keyonly.com')
    upstart_ensure('nginx')
def install_dependencies_pip():
    """ Install all dependencies available from pip """
    require('environment', provided_by=[dev, prod])
    create_virtualenv()
    # if this is a development install then filter out anything we have a
    # git repo for.
    pips_ = PIP_INSTALL.copy()
    if env.environment == 'development':
        # Python 2 map(): drop every pip requirement also managed via git.
        map(pips_.pop, [k for k in GIT_INSTALL if k in PIP_INSTALL])
    if not pips_:
        print 'No git repos to install'
        return
    with lcd(env.doc_root):
        #XXX create temp requirements file text from list of requirements
        # it will be destroyed after install is complete
        requirements = '\n'.join([''.join(p) for p in pips_.items()])
        with settings(hide('running')):
            local("echo '%s' > requirements.txt" % requirements)
            local('pip install -E %(virtualenv)s -r requirements.txt' % env)
            local('rm requirements.txt')
def undeploy(env_type):
    """Delete website from S3 bucket. Specify stg|prd as argument."""
    _setup_env()
    # Activate local virtual environment (for render_templates+flask?)
    local('. %s' % env.activate_path)
    if not os.path.exists(env.s3cmd_cfg):
        abort("Could not find 's3cmd.cfg' repository at '%(s3cmd_cfg)s'.")
    # Validate the deploy configuration before doing anything destructive.
    if not env_type in _config['deploy']:
        abort('Could not find "%s" in "deploy" in config file' % env_type)
    if not "bucket" in _config['deploy'][env_type]:
        abort('Could not find "bucket" in deploy.%s" in config file' % env_type)
    bucket = _config['deploy'][env_type]['bucket']
    warn('YOU ARE ABOUT TO DELETE EVERYTHING IN %s' % bucket)
    # Require an explicit interactive confirmation before wiping the bucket.
    if not do(prompt("Are you ABSOLUTELY sure you want to do this? (y/n): ").strip()):
        abort('Aborting.')
    with lcd(env.sites_path):
        local('fablib/bin/s3cmd --config=%s del -r --force s3://%s/' \
            % (env.s3cmd_cfg, bucket))
def deploy(version):
    """Tag the release, build an sdist, upload it and install it remotely."""
    clean()
    with lcd('..'):
        for cmd in ('git tag -a {0} -m "version {0}"'.format(version),
                    'git push --tags',
                    'python setup.py sdist'):
            local(cmd)
        #local('python setup.py bdist_wheel')
        put('./dist/pyscenic*', '~/data/downloads')
    with cd('~/data/downloads/'):
        run('. activate pyscenic')
        run('pip install --upgrade pyscenic-*.tar.gz')
def tags():
    """Get list of current tags"""
    with lcd(env.project_path):
        tags = local('cd %(project_path)s;git tag' % env, capture=True)
        if tags:
            stripped = [x.strip() for x in tags.strip().split('\n')]
            # Sort tags numerically (newest first): strip any non-numeric
            # characters, split on '.', and compare the integer tuples.
            re_num = re.compile('[^0-9.]')
            sorted_tags = reversed(
                sorted([map(int, re_num.sub('', t).split('.')) for t in stripped]))
            # Reassemble each version list back into a dotted string.
            rebuilt = ['.'.join(map(str, t)) for t in sorted_tags]
            return rebuilt
        return []
def execute_ansible_playbook(playbook, playbook_timeout=None, **kwargs):
    """Run an ansible playbook from ./ansible, passing kwargs as extra vars.

    Returns True when the playbook run succeeds, False otherwise.
    """
    extra_vars = ''
    for key, value in kwargs.iteritems():
        extra_vars += ' -e "%s=%s"' % (key, value)
    cmd = 'ansible-playbook %s %s' % (extra_vars, playbook)
    if playbook_timeout:
        cmd = 'timeout %s %s' % (playbook_timeout, cmd)
    cwd = os.path.dirname(os.path.realpath(__file__))
    with lcd(cwd + '/ansible'):
        with settings(warn_only=True):
            output = local(cmd, capture=True)
    return bool(output and output.succeeded)
def test_settings_with_other_context_managers():
    """
    settings() should take other context managers, and use them with other
    overrided key/value pairs.
    """
    env.testval1 = "outer 1"
    prev_lcwd = env.lcwd
    with settings(lcd("here"), testval1="inner 1"):
        eq_(env.testval1, "inner 1")
        ok_(env.lcwd.endswith("here"))  # Should be the side-effect of adding cd to settings
    # Bug fix: this previously used ok_(env.testval1, "outer 1"), whose second
    # argument is only a failure message — it passed for any truthy value and
    # never verified that the override was actually restored.
    eq_(env.testval1, "outer 1")
    eq_(env.lcwd, prev_lcwd)
def git_push(copyFiles=False):
    """Build the frontend (prod), then commit and push the working tree."""
    #Corre local django server automatd tests
    #local("../web/restapi/manage.py test restapi")
    print ("commit message:")
    commit_msg = raw_input(prompt)
    # Choose the build flavour before touching the frontend tree.
    if copyFiles == False:
        print ("buildings...just prod NO IMAGES !")
        build_cmd = "npm run build-prod-nofiles"
    else:
        print ("buildings...just prod")
        build_cmd = "npm run build-prod"
    with lcd(WORKING_DIR + "/web/frontend/"), settings(warn_only=True):
        local(build_cmd)
        #local("npm run build-dev")
    with lcd(WORKING_DIR), settings(warn_only=True):
        local("git add .")
        local("git commit -m '" + str(commit_msg) + "'")
        local("git push")
def create_env():
    """
    so we assume the branch name is something like v0.6.0
    we strip off the first letter and the .s so we expect
    the virtualenv name to be 060
    """
    Pip.create_virtual_env()
    Git.checkout_branch()
    Pip.set_project_directory()
    with lcd(env.project_path):
        # Provisioning steps run inside the project checkout, in order.
        for step in (Pip.install_requirements,
                     symlink_nginx,
                     Postgres.create_user_and_database,
                     symlink_upstart):
            step()
def mergeIntoMaster(branch=None, comp=None):
    """Merge `branch` from the upstream snaproute repos into master and push.

    comp: optional single repository to operate on instead of all gSrRepos.
    """
    global gSrRepos
    print 'Fetching Snaproute repositories dependencies....'
    srRepos = gSrRepos
    if comp != None:
        srRepos = [comp]
    for repo in srRepos:
        # Commands to run inside each freshly cloned checkout below.
        cmds = ['git checkout %s' % ('master'),
                'git remote add upstream https://github.com/snaproute/%s.git' % (repo),
                'git pull',
                'git fetch upstream',
                'git merge upstream/%s' % (branch),
                'git push origin']
        local('mkdir -p tmp')
        with lcd('tmp'):
            local('git clone ' + 'https://github.com/snaproute/' + repo + '.git')
            with lcd(repo):
                print '## Merging repo %s' % (repo)
                for cmd in cmds:
                    print 'Executing Command %s' % (cmd)
                    local(cmd)
def test_docker_e2e():
    """Seed e2e fixture files and run the end-to-end suite inside Docker."""
    fixtures = ('git_example', 'svn_example', 'file_example')
    local('mkdir -p e2e_test')
    for name in fixtures:
        local('mkdir -p ./e2e_test/{0}/'.format(name))
    for name in fixtures:
        with lcd('./e2e_test/{0}/'.format(name)):
            local('echo "indextest {0}" > "{0}.py"'.format(name))
    local(
        '''docker build -t searchcode-server-test -f ./assets/docker/Dockerfile.test.e2e .'''
    )
    local('''docker run --rm \
        -v ~/.m2/:/root/.m2/ \
        -v $(PWD):/opt/app/ \
        -w /opt/app/ \
        searchcode-server-test mvn test''')
def server_deploy():
    """Production - run the deployment Ansible playbook"""
    db_conf = base.DATABASES.get('default')
    extra_vars = json.dumps({
        "base_dir": BASE_DIR,
        "database_name": db_conf.get('NAME'),
        "database_user": db_conf.get('USER'),
        "database_password": db_conf.get('PASSWORD'),
    })
    with lcd(os.path.join(BASE_DIR, 'playbooks')):
        local("ansible-playbook site.yml -v --extra-vars '{}'".format(extra_vars))
def stage():
    """Build/commit/tag/push lib version, copy to local cdn repo"""
    _setup_env()
    if not 'stage' in _config:
        abort('Could not find "stage" in config file')
    # Make sure cdn exists
    exists(dirname(env.cdn_path), required=True)
    # Ask user for a new version
    _config['version'] = git.prompt_tag('Enter a new version number', unique=True)
    # Build version
    # use execute to allow for other implementations of 'build'
    execute('build')
    # Commit/push/tag
    with lcd(env.project_path):
        with settings(warn_only=True):
            local('git add build')
        # support builds where there's no change; sometimes comes up when
        # reusing a tag because of an unexpected problem
        with settings(warn_only=True):
            msg = local('git commit -m "Release %(version)s"' % _config, capture=True)
        if 'nothing to commit' in msg:
            warn(msg)
            warn('continuing anyway')
        elif not msg.startswith('[master'):
            # Anything other than a clean commit on master is unexpected.
            abort("Unexpected result: %s" % msg)
        local('git push')
        git.push_tag(_config['version'])
    # Copy to local CDN repository (one versioned directory per release)
    cdn_path = join(env.cdn_path, _config['version'])
    clean(cdn_path)
    for r in _config['stage']:
        static.copy(_config, [{"src": r['src'], "dst": cdn_path, "regex": r['regex']}])
    # Create zip file in local CDN repository
    _make_zip(join(cdn_path, '%(name)s.zip' % _config))
def get_revision_by_tag(self, app_name, tag_name):
    """Return the revision recorded for `tag_name` in the app's tag list.

    Runs self.cmd_tags inside the application directory and scans each line
    for the tag label; the revision is the text after the first ':'.

    Raises:
        VersionError: when no tag output is produced or the label is missing.
    """
    with lcd(os.path.join(os.path.abspath('.'), app_name)):
        tags = local(self.cmd_tags, capture=True)
        if not tags:
            # Bug fix: corrected the 'dirrectory' typo in the user-facing
            # error message.
            print_err(
                'Can not find revision data for application [{0}].\n'
                'Try to run deployer from directory above current. $cd '
                '.. '.format(app_name))
            raise VersionError
        for tagline in tags.splitlines():
            if tagline.startswith(tag_name):
                # NOTE(review): str.strip(tag_name) removes any characters in
                # the tag_name *set* from both ends, not the literal prefix —
                # kept as-is, but confirm the tag format tolerates this.
                return tagline.strip(tag_name).strip().split(':')[1]
        raise VersionError(
            'Can not find defined tag label: {}'.format(tag_name))
def sdk_reset(warn_opt='warn'):
    """Hard-reset the public repo checkout to public/master (destructive)."""
    if warn_opt != 'nowarn':
        print(colors.yellow("This step will fetch and reset the public repo to the latest version."))
        if not confirm("Proceed?"):
            abort("OK, fine. I understand. :(")
    execute(sdk_setup)
    reset_cmds = (
        "git checkout master",
        # Merge in the internal repo's develop branch.
        "git fetch public --prune",
        "git reset --hard public/master",
        "git clean -x -d -f",
    )
    with lcd(env.public_repo_path):
        for cmd in reset_cmds:
            local(cmd)
def run_chef(name):
    """
    Read configuration from the appropriate node file and bootstrap the node
    :param name:
    :return:
    """
    print(_yellow("--RUNNING CHEF--"))
    node_file = "./nodes/{name}_node.json".format(name=name)
    cook_cmd = "knife solo cook -i {key_file} {host} {node}".format(
        key_file=env.aws_ssh_key_path,
        host=env.host_string,
        node=node_file)
    with lcd('chef_files'):
        local(cook_cmd)
def tag(self):
    """Return the release tag: settings.tag when set, else derived from VCS.

    Falls back to `hg describe` or `git describe --tags` (or the HEAD hash)
    run in settings.root_path, normalised into a tag-safe string.
    """
    if settings.tag:
        return settings.tag
    with ctx.lcd(settings.root_path):
        with ctx.settings(ctx.hide('running')):
            if self.is_hg:
                t = ops.local('hg describe', capture=True)
            elif self.is_git:
                t = ops.local('git describe --tags || git rev-parse HEAD', capture=True)
            else:
                raise RuntimeError("Unknown revision control system. "
                                   "Cannot extract a tag.")
            # Sanitise: spaces -> '-', drop parens, '+' -> '-dev'.
            return str(t.replace(' ', '-').replace('(', '') \
                       .replace(')', '').replace('+', '-dev').strip())
def _s3cmd_sync(src_path, bucket):
    """Sync local directory with S3 bucket"""
    if not os.path.exists(env.s3cmd_cfg):
        abort("Could not find 's3cmd.cfg' repository at '%(s3cmd_cfg)s'." % env)
    with lcd(env.sites_path):
        # --rexclude skips dotfiles anywhere in the tree; uploads get a
        # public-read ACL and a 5-minute Cache-Control header.
        local('fablib/bin/s3cmd --config=%s sync' \
            ' --rexclude ".*/\.[^/]*$"' \
            ' --delete-removed --acl-public' \
            ' --add-header="Cache-Control:max-age=300"' \
            ' --no-preserve' \
            ' %s/ s3://%s/' \
            % (env.s3cmd_cfg, src_path, bucket))
def _execute_script_internal(self, working_dir, script):
    """Run each command of `script` in working_dir, stopping at first failure.

    Returns a (succeeded, last_result) tuple.
    """
    result = None
    all_ok = True
    logging.info('Executing a script in %s:', working_dir)
    with context_managers.lcd(working_dir):
        for command in script:
            logging.info(' %s', command)
            result = operations.local(command, capture=True)
            all_ok = all_ok and result.succeeded
            if not all_ok:
                logging.info('Build failed')
                break
    return (all_ok, result)
def styleguide():
    """ Install styleguide requirements """
    on_ci = settings.environment == 'ci'
    if on_ci:
        local('virtualenv env')
        local('{0} --version'.format(settings.bin['python']))
        local('{0} setup.py install'.format(settings.bin['python']))
        local('{0} install -r requirements.txt'.format(settings.bin['pip']))
    with lcd('styleguide'):
        if on_ci:
            local('git config --global url."https://".insteadOf git://')
        local('{0} install'.format(settings.bin['npm']))
        local('{0} install'.format(settings.bin['bower']))
def command(cmd, in_local=False, git_path=None):
    """Run `cmd` locally or remotely, optionally inside git_path."""
    print(cmd, '###')
    runner = local if in_local else run
    chdir = lcd if in_local else cd
    if git_path:
        with chdir(git_path):
            return runner(cmd)
    return runner(cmd)
def download(self, area_interesse): comando = self.COMANDO_DOWNLOAD_PADRAO.format(area_interesse.pk, area_interesse.min_lon, area_interesse.min_lat, area_interesse.max_lon, area_interesse.max_lat) with lcd(OSM_DOWNLOAD_PATH): try: local(comando) return os.path.join(OSM_DOWNLOAD_PATH, "{0}.osm".format(area_interesse.pk)) except Exception as e: print e return None
def update(keystone_path=settings.KEYSTONE_ROOT): """Update the Back-end and its dependencies.""" # returns 1 if everything went OK, 0 otherwise print 'Updating Keystone...' with lcd(keystone_path): lrun('git pull origin') lrun('sudo python tools/install_venv.py') print 'Syncing database...' database_create(keystone_path, True) print green('Keystone updated.') if not check(keystone_path): return 0 # flag for the main task else: return 1 # flag for the main task
def novnc():
    """ Grab noVNC. """
    if _exists("%(doc_root)s/noVNC" % env):
        return
    # Grab the tarball, pass it through filters. Heavy abuse of the fact that
    # shell=True in local().
    fetch_cmds = (
        # -L follows redirects.
        "curl https://github.com/kanaka/noVNC/tarball/v0.3 -L | tar xz",
        # The glob replaces a git revision.
        "mv kanaka-noVNC-*/ noVNC",
    )
    with lcd(env.doc_root):
        for cmd in fetch_cmds:
            local(cmd)
def run_cloc_in_repo(repo_name):
    """Run cloc over a chef repo checkout and return its parsed JSON stats."""
    try:
        with hide('running', 'stdout', 'stderr'):
            local('which cloc')
    except FabricException:
        puts(red('command line tool cloc not found. Please install cloc.'))
        return
    repo_dir = os.path.join(CHEF_REPOS_DIR, repo_name)
    # json tempfile file to store cloc output
    with tempfile.NamedTemporaryFile(suffix='.json') as tmpf:
        with lcd(repo_dir), hide('running', 'stdout', 'stderr'):
            local('cloc --exclude-dir=venv . --json > ' + tmpf.name)
        with open(tmpf.name) as jsonf:
            return json.load(jsonf)
def deploy(vals_file: str, kubeconfig: str, secrets_s3_bucket: str,
           install: bool = False):
    """
    Upgrade or initiate a Helm release for Orchestrator.

    Args:
        vals_file: Full path to the vals.yml file for the Helm deployment
        kubeconfig: Full path to the kubeconfig file for the target k8s cluster
        secrets_s3_bucket: Name of the s3 bucket where secrets are stored
        install: Set to True to do a fresh helm install. False by default.
    """
    print("You're initiating a Helm release upgrade. Have you updated the "
          f'image tags in {vals_file}?')
    print('Think about it for 3 seconds...')
    time.sleep(3)
    sure = prompt('Are you ready to continue?',
                  default='no', validate='^(yes|no)$')
    if sure != 'yes':
        exit()
    _copy_secrets(secrets_s3_bucket)
    os.environ['KUBECONFIG'] = kubeconfig
    # template secrets and kubectl apply
    env.release_success = False
    try:
        with lcd(HELM_ROOT):
            if not install:
                local(f'helm upgrade orc8r . --values={vals_file}')
            else:
                local(f'helm install --name orc8r --namespace magma . '
                      f'--values={vals_file}')
            env.release_success = True
    except Exception as e:
        print(e)
    finally:
        # Always remove the secrets copied down from S3.
        local(f"rm -rf {os.path.join(SECRETS_CHART_ROOT, '.secrets')}")
    if env.release_success:
        # Bug fix: the original concatenated the literals without a separator,
        # printing "...Successful!Use `kubectl...". The stray f-prefix on the
        # constant first literal is also gone.
        text = 'Upgrade Successful! ' \
               'Use `kubectl -n magma get pods -w` ' \
               'to monitor the health of the release.'
        print(text)
    else:
        print('Failed to upgrade release')
        exit(1)
def publish(type='patch'): local('bumpversion %s' % type) local('python setup.py sdist register upload') for plugin_name in [ f for f in os.listdir('plugins') if os.path.isdir('plugins/%s' % f) and f != 'tpl' ]: print 'Publishing %s' % plugin_name with lcd('plugins/%s' % plugin_name): local('python setup.py sdist register upload') local('pip install -e .') local('git push') local('git push --tags') local('pip install -e .')
def release():
    """Commits and pushes static files, if needed"""
    # Check for local changes that need to be pushed
    status = local('git status', capture=True)
    if re.search(r"ahead of .* by \d+ commit", status):
        local("git push")
    with lcd(STATIC_DIR):
        if not local('git status --porcelain', capture=True):
            puts(red("No changes found in static files"))
            return
        local("git add --all .")
        local("git clean -df")
        local('git commit -am"new content"')
        local('git push')
def deploy_app(name): print(_yellow("--RUNNING CHEF--")) node = "./nodes/deploy_node.json".format(name=name) with lcd('chef_files'): try: # skip updating the Berkshelf cookbooks to save time os.rename("chef_files/Berksfile", "chef_files/hold_Berksfile") local("knife solo cook -i {key_file} {host} {node}".format( key_file=env.key_filename, host=env.host_string, node=node)) restart() except Exception as e: print e finally: os.rename("chef_files/hold_Berksfile", "chef_files/Berksfile")
def command(cmd, in_local=False, git_path=None): cmd = cmd.encode('utf-8') print cmd, '###' if in_local: if git_path: with lcd(git_path): return local(cmd) else: return local(cmd) else: if git_path: with cd(git_path): return run(cmd) else: return run(cmd)
def _run_task(self, task_command):
    """Execute task_command locally and append its outcome to self.result.

    Runs with warn_only so a failing command is recorded instead of
    aborting; the updated result object is persisted at the end.
    """
    with fabric_settings(warn_only=True):
        with lcd(self.working_directory):
            run_result = local(task_command, capture=True)
    self.result.succeeded = run_result.succeeded
    # return_code accumulates a comma-separated history across tasks.
    self.result.return_code += "%s," % run_result.return_code
    # Build a human-readable transcript block for this task.
    log = 'Task: {0}\n'.format(task_command)
    log += '------------------------------------\n'
    log += run_result
    log += '------------------------------------\n'
    log += 'Exited with exit code: %s\n\n' % run_result.return_code
    self.result.result_log += log
    self.result.save()
def _find_matching_opensource_commit(
    magma_root: str,
    # Bug fix: the default URL carried a trailing space.
    oss_repo: str = 'https://github.com/facebookincubator/magma.git',
) -> str:
    """Return the short hash of the OSS commit matching the newest internal one.

    Grabs the subject of the latest commit touching magma_root in the current
    repository, clones the open source repo into /tmp, and greps its history
    for the same subject.
    """
    # Find corresponding hash in opensource repo by grabbing the message of the
    # latest commit to the magma root directory of the current repository then
    # searching for it in the open source repo
    commit_subj = local(f'git --no-pager log --oneline --pretty=format:"%s" '
                        f'-- {magma_root} | head -n 1', capture=True)
    local('rm -rf /tmp/ossmagma')
    local('mkdir -p /tmp/ossmagma')
    local(f'git clone {oss_repo} /tmp/ossmagma/magma')
    with lcd('/tmp/ossmagma/magma'):
        oss_hash = local(f'git --no-pager log --oneline --pretty=format:"%h" '
                         f'--grep=\'{commit_subj}\' | head -n 1', capture=True)
    return oss_hash
def run_initial_sync_test(self):
    """After the initial sync, expected files exist and bogus ones do not."""
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True):
        self.get_git_sync().run_initial_sync()
        with lcd('/vagrant/scratch'):
            self.assertTrue(local("ls one.txt").succeeded)
            self.assertTrue(local("ls not_real.txt").failed)
        execute(self.simple_change, self, hosts=[self.host])