def upload_project_sudo(local_dir=None, remote_dir=""):
    """Upload the current project to a remote host via a gzipped tarball.

    Copied from Fabric's ``project.upload_project`` and updated to use
    ``sudo`` for the remote put/extract/cleanup steps.

    :param local_dir: local directory to upload (defaults to os.getcwd()).
    :param remote_dir: remote directory to extract the project into.
    """
    local_dir = local_dir or os.getcwd()
    # Remove final '/' in local_dir so that basename() works
    local_dir = local_dir.rstrip(os.sep)
    local_path, local_name = os.path.split(local_dir)
    tar_file = "%s.tar.gz" % local_name
    target_tar = os.path.join(remote_dir, tar_file)
    tmp_folder = mkdtemp()
    try:
        tar_path = os.path.join(tmp_folder, tar_file)
        # -C makes archive paths relative to the parent of local_dir
        local("tar -czf %s -C %s %s" % (tar_path, local_path, local_name))
        put(tar_path, target_tar, use_sudo=True)
        with cd(remote_dir):
            try:
                sudo("tar -xzf %s" % tar_file)
            finally:
                # Always remove the remote tarball, even if extraction failed
                sudo("rm -f %s" % tar_file)
    finally:
        # Always clean up the local temporary directory
        local("rm -rf %s" % tmp_folder)
def add_coverage(_dir=GEN_PATH):
    """Copy the htmlcov coverage report into *_dir* when it exists."""
    report("adding coverage data")
    coverage_dir = os.path.join(SRC_ROOT, 'htmlcov')
    if not os.path.exists(coverage_dir):
        report("coverage directory does not exist: {0}".format(coverage_dir))
        return
    api.local("cp -rfv {0} {1}".format(coverage_dir, _dir))
def cpp_build(build_type='Release'):
    """Configure with CMake and compile the project in env.build_path.

    :param build_type: CMake build type (e.g. Release, Debug).
    """
    build_dir = env.build_path
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    with lcd(build_dir):
        local('cmake -DCMAKE_BUILD_TYPE=%s %s' % (build_type, env.src_path))
        local('make VERBOSE=1')
def run(self):
    # Demonstration task mixing a local command with remote ones.
    # NOTE(review): fabric's cd() only affects remote run()/sudo() calls,
    # so the local() echo below does NOT execute in /tmp — if that was the
    # intent, lcd() is needed. Confirm before changing.
    with cd("/tmp"):
        local('echo "This is done locally."')
        run('echo "This is done remotely."')
        # Long-running remote command to test responsiveness during the wait
        run("ls -lht | head && sleep 10")
        run('echo "Slept for 10 seconds... was Tornado responsive?"')
        run("touch tornado.txt")
def tag_commit(commit_id, tag):
    """Tag *commit_id* with *tag* and push the tag to origin.

    Existing remote tags with the same name are overwritten.
    """
    for cmd in ('git tag %s %s' % (tag, commit_id),
                'git push --tags origin %s' % tag):
        local(cmd)
def deploy():
    """Deploy the mob & stubs sites to the configured hosts.

    On test environments the operator may tag a new release first;
    otherwise an existing tag must be chosen to build from.
    """
    require('hosts')
    require('path')
    print(green("\n#####Beginning deployment to %s & %s on %s\n" % (env.mob_domain, env.stubs_domain, env.hosts[0])))
    env.mob_release_label = None
    env.stubs_release_label = None
    if env.is_test:
        create_tag = prompt('Tag this release? [y/N] ')
        if create_tag.lower() == 'y':
            print("\nShowing latest tags for reference\n")
            local('git tag | tail -5')
            env.refspec = prompt('Enter tag name [in format VX_X_X]? ')
            # Can't use .'s as separators as they cause import problems on the server
            local('git tag %s -am "Tagging version %s in fabfile"' % (env.refspec, env.refspec))
            local('git push --tags')
            env.mob_release_label = mob_package_name + '-' + env.refspec
            env.stubs_release_label = stubs_package_name + '-' + env.refspec
    if not env.mob_release_label:
        # An existing tag must be specified
        local('git tag | tail -5')
        env.refspec = prompt('Choose tag to build from: ')
        # grep exits non-zero (aborting the task) if the tag does not exist
        local('git tag | grep "%s"' % env.refspec)
        env.mob_release_label = mob_package_name + '-' + env.refspec
        env.stubs_release_label = stubs_package_name + '-' + env.refspec
    # import time
    # env.release_label = package_name + '-' + time.strftime('%Y%m%d%H%M%S')
    _upload_tar_from_git()
    _install_site()
    _symlink_current_release()
    restart_webserver()
    print(green("\n#####Deployment successful for %s & %s\n" % (env.mob_domain, env.stubs_domain)))
def encrypt(*args, **kwargs):
    """Encrypt a secret value for a given environment.

    Positional args: names of files to encrypt with gpg (armored output).
    Keyword args: name=value secrets; the encrypted values are printed as
    a YAML mapping on stdout.
    """
    require('environment')
    # Convert ASCII key to binary
    temp_key = '/tmp/tmp.key'
    with hide('running', 'stdout', 'stderr'):
        local('gpg --dearmor < {} > {}'.format(env.gpg_key, temp_key))
    # Encrypt each file (recipient is the <environment>_salt_key)
    for name in args:
        local(
            'gpg --no-default-keyring --keyring {} '
            '--trust-model always -aer {}_salt_key {}'.format(
                temp_key, env.environment, name))
    # Encrypt each value
    updates = {}
    for name, value in kwargs.items():
        updates[name] = '{}'.format(
            local(
                'echo -n "{}" | '
                'gpg --no-default-keyring --keyring {} '
                '--trust-model always -aer {}_salt_key'.format(
                    value, temp_key, env.environment),
                capture=True))
    # Remove the temporary binary keyring
    os.remove(temp_key)
    if updates:
        print(yaml.dump(updates, default_flow_style=False,
                        default_style='|', indent=2))
def create(os_release=None):
    """Create an environment for building packages.

    Builds a cowbuilder base image for the release's distribution and
    architecture under /var/cache/pbuilder/.

    :param os_release: OS release name; derived from the current branch
        when omitted.
    :raises Exception: if the base image already exists.
    """
    if os_release is None:
        os_release = get_os_release_from_current_branch()
    dist = dist_from_release(os_release)
    path = '/var/cache/pbuilder/base-{dist}-{os_release}-{arch}.cow'.format(
        arch=ARCH, dist=dist, os_release=os_release)
    if os.path.exists(path):
        raise Exception('PBuilder base image already exists at %s' % path)
    # Build the trusted keyring used both by apt and by debootstrap below
    build_trusted()
    keyring = expanduser("~/.trusted.gpg")
    mirror = ubuntu_mirrors[dist]
    other_mirrors = mirrors[os_release]
    components = "main universe"
    with shell_env(ARCH=ARCH, DIST=dist):
        local('git-pbuilder create --basepath {basepath}'
              ' --mirror {mirror}'
              ' --components "{components}"'
              ' --othermirror "{mirrors}"'
              ' --keyring {keyring}'
              ' --debootstrapopts'
              ' --keyring={keyring}'.format(
                  mirror=mirror, components=components,
                  mirrors="|".join(other_mirrors), keyring=keyring,
                  basepath=path))
def bdeploy():
    """Build the Sencha app for the current environment, then deploy."""
    require('hosts')
    require('path')
    mode = 'testing' if env.is_test else 'production'
    print(green("\n#####Beginning %s build\n" % mode))
    local('sencha app build %s' % mode)
    qdeploy()
def run_background_tasks():
    # Runs the background tasks just once.
    cmd = ("from website.tasks import run_background_tasks; "
           "run_background_tasks()")
    # NOTE(review): '\:' puts a literal backslash before the colon in the
    # exported PYTHONPATH; presumably a plain ':' was intended — confirm.
    local(('export PYTHONPATH={}\:$PYTHONPATH; '  # pylint: disable=anomalous-backslash-in-string
           'django-admin shell --settings=website.settings '
           '-c\"{}\"').format(PROJECT_ROOT, cmd))
def initialize_project(self):
    """Set up the local worker: create a virtualenv and install the
    requirements (with ccache on PATH and a shared pip download cache)."""
    logger.info('Intializing local worker environment')
    setup_cmds = [
        'virtualenv -p python2.7 env',
        'PATH=/usr/lib/ccache:/usr/lib64/ccache/bin:$PATH '
        'env/bin/pip install '
        '--download-cache /tmp/pip -r requirements.txt',
    ]
    with quiet():
        for cmd in setup_cmds:
            local(cmd)
def deploy():
    """Package local master, upload it, unpack a timestamped release,
    flip the current/previous symlinks, and restart the service."""
    env.release = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    run('mkdir -p {path}/releases {path}/packages'.format(**env))
    # Archive local master, ship it to the remote packages dir, clean up
    local('git archive --format=tar master | gzip > {release}.tar.gz'.format(**env))
    put('{release}.tar.gz'.format(**env), '{path}/packages/'.format(**env))
    local('rm -vf {release}.tar.gz'.format(**env))
    with cd(env.path):
        run('mkdir -p releases/{release}'.format(**env))
        with cd('releases/{release}'.format(**env)):
            run('tar xvf ../../packages/{release}.tar.gz'.format(**env))
            # Share the persistent database file across releases
            run('ln -sf {dbpath} grouphugs.db'.format(**env))
    with cd('{path}/releases'.format(**env)):
        with settings(warn_only=True):
            # warn_only: these fail harmlessly on the first deploy
            run('rm previous')
            run('mv current previous')
        run('ln -sf {release} current'.format(**env))
    put('settings.py', '{path}/releases/{release}/settings.py'.format(**env))
    restart()
def create_test_website():
    """Destroy the existing website and build a fresh test one.

    WARNING: destructive. Creates a test user and two test sessions — a
    basic session and a tuning session; the tuning session is preloaded
    with knob/metric data (5 workloads, 20 samples each).
    """
    reset_website()
    local("python manage.py loaddata test_website.json")
def install_webui_packages(testbed):
    """Install the browser and Xvfb packages needed for web-UI testing.

    The browser choice comes from testbed.ui_browser ('firefox' or
    'chrome'); commands differ per host OS family.
    """
    webui = getattr(testbed, 'ui_browser', False)
    cmds = ''
    if detect_ostype() in ['ubuntu']:
        cmds = "export DEBIAN_FRONTEND=noninteractive; "
        if webui == 'firefox':
            # Install distro firefox for deps, then replace it with the
            # pinned 31.0 tarball under /opt
            cmds += (
                "apt-get install -qy firefox xvfb; "
                "apt-get remove -y firefox; "
                "wget https://ftp.mozilla.org/pub/mozilla.org/firefox/releases/31.0/linux-x86_64/en-US/firefox-31.0.tar.bz2 -O /tmp/firefox.tar.bz2; "
                "cd /opt; tar xjf /tmp/firefox.tar.bz2; ln -sf /opt/firefox/firefox /usr/bin/firefox; "
            )
        elif webui == 'chrome':
            # NOTE(review): the apt source is written to
            # /etc/apt/sources.list.d/chrome — apt only reads *.list files,
            # so this entry may be ignored; confirm the intended filename.
            cmds += (
                "echo 'deb http://dl.google.com/linux/chrome/deb/ stable main' > /etc/apt/sources.list.d/chrome; "
                "wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -; "
                "apt-get -q -y update; apt-get -qy install unzip; "
                "wget -c http://chromedriver.storage.googleapis.com/2.10/chromedriver_linux64.zip; "
                "unzip chromedriver_linux64.zip; cp ./chromedriver /usr/bin/; chmod ugo+rx /usr/bin/chromedriver; "
                "apt-get -qy install libxpm4 libxrender1 libgtk2.0-0 libnss3 libgconf-2-4 google-chrome-stable; "
            )
    elif detect_ostype() in ['centos', 'fedora', 'redhat', 'centoslinux']:
        # NOTE(review): this branch cds into /opt/firefox before extracting
        # (cf. the ubuntu branch which cds to /opt), so the binary may land
        # at /opt/firefox/firefox/firefox while the symlink expects
        # /opt/firefox/firefox — confirm.
        cmds = (
            "yum install -y xorg-x11-server-Xvfb; "
            "wget http://ftp.mozilla.org/pub/mozilla.org/firefox/releases/33.0/linux-x86_64/en-US/firefox-33.0.tar.bz2 -O /tmp/firefox.tar.bz2; "
            "cd /opt/firefox; tar xjf /tmp/firefox.tar.bz2; "
            "ln -sf /opt/firefox/firefox /usr/bin/firefox;"
        )
    local(cmds, shell='/bin/bash')
def backupReleaseDir(releaseDir, archiveDir, version):
    """Archive *releaseDir* into *archiveDir* as <version>-<timestamp>.tgz."""
    if not os.path.exists(archiveDir):
        os.makedirs(archiveDir)
    # Stamp the backup with the time of the build
    stamp = datetime.datetime.now().strftime("%y%m%d-%H%M%S")
    local("tar -czf %s/%s-%s.tgz %s" % (archiveDir, version, stamp, releaseDir))
def update():
    """Refresh the Transonic checkout: install dependencies, apply hosted
    settings, build, and regenerate langpacks."""
    steps = [
        'npm install',
        'make install',
        'cp src/media/js/settings_local_hosted.js src/media/js/settings_local.js',
        'make build',
        'node_modules/.bin/commonplace langpacks',
    ]
    with lcd(TRANSONIC):
        for step in steps:
            local(step)
def all():
    """Clean the dist and uninstall every installed copy of cloud-metrics."""
    dir()
    remaining = int(local("pip freeze |fgrep cloud-metrics | wc -l",
                          capture=True))
    # pip may leave older copies behind; loop until none remain
    while remaining > 0:
        local('echo "y\n" | pip uninstall cloud-metrics')
        remaining = int(local("pip freeze |fgrep cloud-metrics| wc -l",
                              capture=True))
def delete_package(name):
    """Repeatedly pip-uninstall *name* until no installed copies remain."""
    banner("CLEAN PREVIOUS {0} INSTALLS".format(name))
    count_cmd = "pip freeze |fgrep {0} | wc -l".format(name)
    remaining = int(local(count_cmd, capture=True))
    while remaining > 0:
        local('echo "y" | pip uninstall {0}'.format(name))
        remaining = int(local(count_cmd, capture=True))
def deploy_live_server():
    """Rsync everything under ./src to the remote ~/dist/pcapi/ directory."""
    src_dir = '%s/src/' % local('pwd', capture=True).strip()
    rsync_tmpl = ('rsync --exclude "*.kate-swp" --exclude "*.pyc" '
                  '--exclude logs --exclude external --exclude data '
                  '--exclude resources -C -av %s/* %s@%s:~/dist/pcapi/')
    local(rsync_tmpl % (src_dir, env.user, env.host))
def deploy_staging():
    """Push the local master branch to the staging remote, then run the
    post-deploy sync/migrate/collectstatic steps."""
    local('git push staging master')
    after_deploy('staging')
def deploy_production():
    """Push the local master branch to the production remote, then run the
    post-deploy sync/migrate/collectstatic steps."""
    local('git push production master')
    after_deploy('production')
def style_check():
    """Runs Python static code checkers against the code.

    Although more for style reasons, these are quite helpful in identifying
    problems with the code. A file will be generated at ./.log/style.log
    for perusal. Due to how pylint works it must be invoked manually.

    Returns a truthy value when both checkers reported problems.
    """
    utils.fastprint("Checking Python code style ... ")
    with api.settings(api.hide('warnings'), warn_only=True):
        pep8 = api.local('pep8 .', True)
        pyflakes = api.local('pyflakes .', True)
    # Print them out to a file so we can peruse them later.
    # Create the log directory on first run, and close the file promptly.
    if not os.path.isdir('./.log'):
        os.makedirs('./.log')
    with open('./.log/style.log', 'w') as log:
        log.write("pep8:\n%s\n\npyflakes:\n%s" % (pep8, pyflakes))
    if pep8:
        print(colors.magenta("fail", True))
    elif pyflakes:
        print(colors.magenta("fail", True))
    else:
        print(colors.green(" ok ", True))
    if pep8 or pyflakes:
        print(colors.magenta("Please check ./.log/style.log.", True))
    print(colors.yellow("Please be sure to run pylint manually.", True))
    return (pep8 and pyflakes)
def api():
    """Generate the API documentation with sphinx-apidoc (when PACKAGE is set)."""
    if PACKAGE is None:
        return
    pip(requirements="docs/requirements.txt")
    local("sphinx-apidoc -f -e -T -o docs/source/api {0:s}".format(PACKAGE))
def serve():
    """Serve the website locally with hyde's built-in cherrypy server."""
    local("hyde -w -s {here}".format(here=DEVELOPMENT_DIR))
def compile():
    """Generate the static site from source using hyde."""
    local("hyde -g -s {here}".format(here=DEVELOPMENT_DIR))
def test(unit=1, integration=1, functional=1, selenium=0, all=0):
    """
    Central command for running tests.

    NOTE: integration and functional tests are included by default.
    Selenium tests are not.

    Call it like so:
    >> fab test

    This will run unit, integration (views tests), and functional
    (webtest) tests.

    To run selenium tests selenium tests only,
    >> fab test:selenium=1

    To run all tests,
    >> fab test:all=1
    """
    command = './manage.py test -v 2 --settings={{project_name}}.settings.test_settings'
    # all != 0 runs everything; otherwise exclude each de-selected suite
    if all == 0:
        if int(unit) == 0:
            command += " --exclude='unit_tests' "
        if int(integration) == 0:
            command += " --exclude='integration_tests' "
        if int(functional) == 0:
            command += " --exclude='functional_tests' "
        if int(selenium) == 1:
            # selenium=1 replaces the command entirely: run ONLY selenium tests
            command = './manage.py test -v 2 --settings={{project_name}}.settings.test_settings {{project_name}}/functional_tests/selenium_tests.py'
        else:
            command += " --exclude='selenium_tests' "
    local(command)
def vcs_upload():
    """
    Uploads the project with the selected VCS tool.
    """
    if env.deploy_tool == "git":
        remote_path = "ssh://%s@%s%s" % (env.user, env.host_string,
                                         env.repo_path)
        # First deploy: create a bare repository to push into
        if not exists(env.repo_path):
            run("mkdir -p %s" % env.repo_path)
            with cd(env.repo_path):
                run("git init --bare")
        local("git push -f %s master" % remote_path)
        with cd(env.repo_path):
            # Check the pushed master out into the project work tree
            run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
            run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
    elif env.deploy_tool == "hg":
        remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string,
                                          env.repo_path)
        with cd(env.repo_path):
            if not exists("%s/.hg" % env.repo_path):
                run("hg init")
                print(env.repo_path)
            with fab_settings(warn_only=True):
                push = local("hg push -f %s" % remote_path)
                # 255 is a genuine error; exit code 1 just means
                # "nothing to push" and is tolerated
                if push.return_code == 255:
                    abort()
            run("hg update")
def build(**options):
    """Build the HTML documentation with Sphinx.

    Installs the docs requirements first. *options* is accepted for
    interface compatibility and is unused.
    """
    pip(requirements="docs/requirements.txt")
    with lcd("docs"):
        local("make html")
def watchmedo():
    """Re-run the integration tests whenever a ``*.py`` file is saved."""
    local("watchmedo shell-command --recursive --ignore-directories "
          "--patterns='*.py' --wait "
          "--command='fab test:integration=1,selenium=0' .")
def build(treeish='head'):
    """Build a release.

    Archives the src/ subtree of *treeish* into /tmp/<version>.tar.gz,
    asking for confirmation when the tree has uncommitted or unpushed
    changes. Returns the path of the built archive.
    """
    # Human-readable version string derived from the nearest tag
    version = local("git describe {}".format(treeish), capture=True)
    with settings(hide('warnings'), warn_only=True):
        cmd = "git diff-index --quiet {} --".format(treeish)
        # succeeded is True when the working tree matches treeish exactly
        is_committed = local(cmd).succeeded
        cmd = "git branch -r --contains {}".format(version)
        # Non-empty output means some remote branch contains this commit
        is_pushed = local(cmd, capture=True)
    if not is_committed:
        prompt = "Uncommitted changes. Continue?"
        if not confirm(prompt, default=False):
            abort("Canceled.")
    if not is_pushed:
        prompt = "Commit not pushed. Continue?"
        if not confirm(question=prompt, default=False):
            abort("Canceled.")
    output = "/tmp/{}.tar.gz".format(version)
    prefix = "{}/".format(version)
    # Only the src/ subtree is archived, under a <version>/ prefix
    cmd = "git archive --prefix={prefix} --format=tar.gz --output={output} {version}:src"
    local(cmd.format(prefix=prefix, output=output, version=version))
    puts("\nBuilt: {} at: {}".format(version, output))
    return output
def pull():
    """Fetch updated po files from Transifex."""
    local('tx pull')
def run():
    """Run an already built instance of UpShot."""
    if os.path.exists(RUN_PATH):
        local(RUN_PATH)
        return
    _err('Run `fab build` before you can run UpShot.')
    sys.exit(1)
def git():
    """Repoint the git origin remote at the bitbucket cash repository."""
    for cmd in ("git remote rm origin",
                "git remote add origin "
                "https://[email protected]/korniichuk/cash.git"):
        local(cmd)
def test():
    """Register the package and upload the sdist to the PyPI test index."""
    for cmd in ("python setup.py register -r pypitest",
                "python setup.py sdist --format=zip,gztar upload -r pypitest"):
        local(cmd)
def live():
    """Register the package and upload the sdist to the live PyPI index."""
    for cmd in ("python setup.py register -r pypi",
                "python setup.py sdist --format=zip,gztar upload -r pypi"):
        local(cmd)
from utils import CONSTANTS
from fabric.api import local
import time

# Launch CONSTANTS.N messaging-service workers in the background,
# staggering starts by half a second; each worker's stderr goes to e<n>.txt.
for n in xrange(CONSTANTS.N):
    local('python messaging_service.py {} 2> e{}.txt &'.format(n, n))
    time.sleep(0.5)
def compile():
    """Compile gettext po files into binary mo files."""
    local('django-admin.py compilemessages')
def compressdb(self): print 'Compresses the dumped db using tar.' local('tar czf {tarfile}.tar.gz {sqlfile}'.format( tarfile=self.info['db_file'], sqlfile=self.info['db_file']))
def cleanup(self): print 'Cleans up the local dump file and tar file' local('rm {sqlfile} {tarfile}.tar.gz'.format( sqlfile=self.info['db_file'], tarfile=self.info['db_file']))
def push():
    """Upload the source file to Transifex."""
    local('tx push -s')
def package(version):
    """Create .packages/<package>-<version>.tar.gz from the working tree."""
    local("mkdir -p .packages")
    local("tar czvf .packages/{package}-{version}.tar.gz --exclude='.packages' ."
          .format(package=env.package, version=version))
def download_online_db(self): print 'download online procurement db, uncompresses it, then deletes the local compressed file.' get(self.info['db_zip_online'], self.info['db_zip_online']) local('tar xzf {tarfile}'.format(tarfile=self.info['db_zip_online'])) local('rm {tarfile}'.format(tarfile=self.info['db_zip_online']))
def commit():
    """Interactively stage hunks (`git add -p`) and commit them."""
    local("git add -p && git commit")
def get_testid_file_list(file_list_fname='', test_id='', file_ext='',
                         pipe_cmd='', search_dir='.', no_abort=False):
    """Build the list of experiment data files matching one or more test IDs.

    :param file_list_fname: optional file with file names, one per line
    :param test_id: semicolon-separated list of test IDs (overrules the file)
    :param file_ext: file-name suffix to match
    :param pipe_cmd: extra shell command to pipe the find output through
    :param search_dir: directory to search ('.' = consult cache / auto-detect)
    :param no_abort: if True, do not abort when no files are found
    :return: list of matching file names
    """
    file_list = []
    if pipe_cmd != '':
        pipe_cmd = ' | ' + pipe_cmd
    # if search dir is not specified try to find it in cache
    if search_dir == '.':
        search_dir = lookup_dir_cache(test_id)
        # if not in cache try to locate the directory based on the uname file
        if search_dir == '.':
            _files = _list(
                local(
                    'find -L %s -name "%s*uname.log*" -print | sed -e "s/^\.\///"%s' %
                    (search_dir, test_id, pipe_cmd),
                    capture=True))
            if len(_files) > 0:
                search_dir = os.path.dirname(_files[0])
                # remember the located directory for future lookups
                append_dir_cache(test_id, search_dir)
    if file_list_fname == '':
        # read from test_id list specified, this always overrules list in
        # file if also specified
        test_id_arr = test_id.split(';')
        if len(test_id_arr) == 0 or test_id_arr[0] == '':
            abort('Must specify test_id parameter')
        for test_id in test_id_arr:
            _files = _list(
                local(
                    'find -L %s -name "%s*%s" -print | sed -e "s/^\.\///"%s' %
                    (search_dir, test_id, file_ext, pipe_cmd),
                    capture=True))
            _files = filter_duplicates(_files)
            file_list += _files
    else:
        # read list of test ids from file
        try:
            lines = []
            with open(file_list_fname) as f:
                lines = f.readlines()
            for fname in lines:
                fname = fname.rstrip()
                _files = _list(
                    local(
                        'find -L %s -name "%s" -print | sed -e "s/^\.\///"' %
                        (search_dir, fname),
                        capture=True))
                _files = filter_duplicates(_files)
                file_list += _files
        except IOError:
            abort('Cannot open experiment list file %s' % file_list_fname)
    if not no_abort and len(file_list) == 0:
        abort('Cannot find any matching data files.\n'
              'Remove outdated teacup_dir_cache.txt if files were moved.')
    return file_list
def setup_network():
    """Create the bridged docker network used by the project's containers."""
    print(yellow('Launching docker network...'))
    network_cmd = ('docker network create --driver bridge {project_name}-network'
                   ''.format(project_name=project_name))
    with lcd('.'):
        local(network_cmd)
def push():
    """Push committed work to the default git remote."""
    local("git push")
def dem_dir(dir, ramp_color='ramp_color.txt', ramp_slope='ramp_slope.txt'):
    """Generate hillshaded DEMs for an entire directory.

    Arguments:
    dir: Path to the directory to be processed.
    ramp_color: Path to a text file of the ramp for topo coloring
    ramp_slope: Path to a text file of the ramp for slope shading
    """
    dir_esc = shellescapespace(dir)
    header('Cleaning up')
    delete_prompt = """
    I plan on deleting all of these files in this dir:
        *-no_edges.tif
        *bak.tif
        *-3785.tif
        *-hillshade.tif
        *-slope.tif
        *-slopeshade.tif
        color.tif
        hillshades.tif
        slopes.tif
    Proceed?
    """
    answer = check_true(prompt(delete_prompt))
    if answer is not True:
        abort('Taking that as a no. No files affected.')
    # Reset the directory: remove all intermediate outputs of previous runs
    # (NOTE(review): '*-color.tif' is deleted below but is not listed in the
    # prompt above — confirm the prompt text is meant to include it.)
    local('rm -rf %s' % os.path.join(dir_esc, '*-no_edges.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*bak.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*3785.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*-hillshade.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*-slope.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*-slopeshade.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, '*-color.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, 'color.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, 'hillshades.tif'))
    local('rm -rf %s' % os.path.join(dir_esc, 'slopes.tif'))
    filepath = os.path.join(dir, '*.tif')
    files = glob.glob(filepath)
    srs_3785_files = []
    hillshade_files = []
    slope_files = []
    color_files = []
    # Per-source-DEM pipeline: reproject, then derive color / hillshade /
    # slope rasters, collecting each output for the merge step below
    for file in files:
        print
        print
        header("Converting %s" % file)
        print "Converting DEM to Google Mercator"
        print
        srs_3785_file = srs_wgs84_to_google(shellquote(file))
        srs_3785_files.append(srs_3785_file)
        print "SRS 3785 file created %s" % srs_3785_file
        print
        print "Creating color relief GeoTIFF"
        print
        color_file = color(srs_3785_file, dir_esc + os.sep + ramp_color)
        color_files.append(color_file)
        print "*" * 50
        print "Color file created %s" % color_file
        print "*" * 50
        print
        print "Creating Hillshade GeoTIFF"
        print
        hillshade_file = hillshade(srs_3785_file)
        hillshade_files.append(hillshade_file)
        print "Hillshade file created %s" % hillshade_file
        print
        print "Creating Slope GeoTIFF"
        print
        slope_file = slope(srs_3785_file, dir_esc + os.sep + ramp_slope)
        slope_files.append(slope_file)
        print "Slope file created %s" % slope_file
    print
    header("Merging files")
    # Merge each per-file output set into a single directory-wide GeoTIFF
    local('gdal_merge.py -o ' + dir_esc + os.sep + 'srs_3785.tif ' +
          ' '.join(srs_3785_files))
    local('gdal_merge.py -o ' + dir_esc + os.sep + 'hillshades.tif ' +
          ' '.join(hillshade_files))
    local('gdal_merge.py -o ' + dir_esc + os.sep + 'slopes.tif ' +
          ' '.join(slope_files))
    local('gdal_merge.py -o ' + dir_esc + os.sep + 'color.tif ' +
          ' '.join(color_files))
        # (continuation of a test method whose definition starts before this
        # chunk) wait for the e2 session to establish, then register it
        self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=e2)
        self.clients[e2.name] = e2

    def test_03_check_neighbor_rib(self):
        # e2's local RIB should hold exactly one path, with AS 65001
        # stripped from its AS path
        rib = self.gobgp.get_local_rib(self.clients['e2'])
        self.assertTrue(len(rib) == 1)
        self.assertTrue(len(rib[0]['paths']) == 1)
        path = rib[0]['paths'][0]
        self.assertTrue(65001 not in path['aspath'])

    def test_04_withdraw_path(self):
        # Withdraw the route on g2 and verify the adjacency table counters
        self.clients['g2'].local('gobgp global rib del 10.0.0.0/24')
        time.sleep(1)
        info = self.gobgp.get_neighbor(
            self.clients['g2'])['state']['adj-table']
        self.assertTrue(info['advertised'] == 1)
        self.assertTrue('accepted' not in info)  # means info['accepted'] == 0
        self.assertTrue('received' not in info)  # means info['received'] == 0


if __name__ == '__main__':
    output = local("which docker 2>&1 > /dev/null ; echo $?", capture=True)
    # NOTE(review): 'is not 0' relies on CPython small-int caching and
    # should be '!= 0' — confirm and fix separately.
    if int(output) is not 0:
        print "docker not found"
        sys.exit(1)
    nose.main(argv=sys.argv, addplugins=[OptionParser()],
              defaultTest=sys.argv[0])
def build_paper_events_replication_scheduler():
    """Build event-scheduler.zip from job.py plus a vendored requests."""
    scheduler_dir = os.path.join(get_fabric_file_directory_path(),
                                 'infrastructure/event-scheduler')
    with lcd(scheduler_dir):
        local('rm -rf build')
        local('mkdir build')
        with lcd('build'):
            local('cp ../job.py .')
            local('pip install requests -t .')
            # strip bytecode and reset mtimes before zipping
            # (-X: omit extra file attributes)
            local('find . -type f -name "*.pyc" -exec rm {} +')
            local('find . -exec touch --date="1970-01-01" {} +')
            local('zip -X -r ../event-scheduler.zip *')
def build():
    """Build the project's docker image, tagged with the project name."""
    print(yellow('Building docker image...'))
    build_cmd = 'docker build --tag="{0}" .'.format(project_name)
    with lcd('.'):
        local(build_cmd)
def execute(self):
    """Run this task's shell command locally, resolving workflow:// refs.

    Creates (and arranges later removal of) a scratch working directory
    when none was set, rewrites each ``workflow://<task>`` reference in
    the command to that task's working directory, executes the command
    via fabric's local(), then decrements the reference counts of the
    referenced tasks and removes the scratch directory.

    Raises:
        Exception: if the executed command returns a failure status.
    """
    if self.working_dir is None:
        # Set a scratch directory as working directory
        self.working_dir = (self.workflow.get_scratch_dir_base()
                            + "/" + self.get_scratch_name())
        # Create scratch directory
        os.makedirs(self.working_dir)
        # Set to remove the scratch directory
        self.remove_scratch_dir = True
    else:
        # Set to NOT remove the scratch directory
        self.remove_scratch_dir = False
    self.workflow.logger.debug("%s: Scratch directory: %s",
                               self.name, self.working_dir)
    # Apply some command pre processing
    command = self.pre_process_command(self.command)
    # Get the arguments split by the schema
    args = command.split(Workflow.SCHEMA)
    for i in range(1, len(args)):
        # Split each argument in elements by the slash;
        # the task name is the first element
        elements = args[i].split("/")
        task_name = elements[0]
        # Extract the task
        task = self.workflow.find_task_by_name(task_name)
        if task is not None:
            # Substitute the reference by the actual working dir
            command = command.replace(Workflow.SCHEMA + task.name,
                                      task.working_dir)
    # Apply some command post processing
    command = self.post_process_command(command)
    # Execute the bash command
    self.result = local(command, capture=True)
    # Check if the execution failed
    if self.result.failed:
        # Fixed message typo ('a execption' -> 'an exception')
        raise Exception('Executable raised an exception')
    # Drop the references now that the command has run: for each
    # workflow:// reference in the ORIGINAL command, decrement the
    # referenced task's reference count
    args = self.command.split(Workflow.SCHEMA)
    for i in range(1, len(args)):
        elements = args[i].split("/")
        task_name = elements[0]
        task = self.workflow.find_task_by_name(task_name)
        if task is not None:
            # Remove the reference from the task
            task.decrement_reference_count()
    # Remove the scratch directory (honours self.remove_scratch_dir)
    self.remove_scratch()
def color(source, ramp_color):
    """Generate a color-relief GeoTIFF and return its path."""
    target = filename_flag(source, 'color')
    local('gdaldem color-relief %s %s %s' % (source, ramp_color, target))
    return target
def go_path_check():
    """Return True when the `go` binary is found on the PATH."""
    outbuf = local("echo `which go`", capture=True)
    return "go" in outbuf
def build_heartbeat():
    """Build heartbeat.zip from heartbeat.py plus a vendored pg8000."""
    heartbeat_dir = os.path.join(get_fabric_file_directory_path(),
                                 'infrastructure/heartbeat')
    with lcd(heartbeat_dir):
        local('rm -rf build')
        local('mkdir build')
        with lcd('build'):
            local('cp ../heartbeat.py .')
            local('pip install pg8000 -t .')
            # strip bytecode and reset mtimes before zipping
            # (-X: omit extra file attributes)
            local('find . -type f -name "*.pyc" -exec rm {} +')
            local('find . -exec touch --date="1970-01-01" {} +')
            local('zip -X -r ../heartbeat.zip *')
def change_exabgp_version():
    """Pull the latest exabgp master inside the exabgp container."""
    local("docker exec exabgp git -C /root/exabgp pull origin master",
          capture=True)
def uptime():
    """Show the local machine's uptime."""
    local('uptime')
def get_notification_from_exabgp_log():
    """Return the first BGP notification line found in the exabgp log."""
    log_path = CONFIG_DIRR + EXABGP_LOG_FILE
    return local("grep notification " + log_path + " | head -1",
                 capture=True)
def reload_config(): cmd = "docker exec gobgp /usr/bin/pkill gobgpd -SIGHUP" local(cmd, capture=True) print "complete append docker container."
def start_exabgp(conf_file):
    """Install the shared exabgp env file into the container, then start
    exabgp with *conf_file* in the background."""
    local("docker exec exabgp cp -f " + SHARE_VOLUME +
          "/exabgp_test_conf/exabgp.env /root/exabgp/etc/exabgp/exabgp.env",
          capture=True)
    conf_path = EXABGP_CONFDIR + "/" + conf_file
    local("docker exec exabgp /root/exabgp/sbin/exabgp " + conf_path +
          " > /dev/null 2>&1 &", capture=True)
def install_docker_and_tools(): print "start install packages of test environment." if test_user_check() is False: print "you are not root" return local( "apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys " "36A1D7869245C8950F966E92D8576A8BA88D21E9", capture=True) local( 'sh -c "echo deb https://get.docker.io/ubuntu docker main > /etc/apt/sources.list.d/docker.list"', capture=True) local("apt-get update", capture=True) local("apt-get install -y --force-yes lxc-docker-1.3.2", capture=True) local("ln -sf /usr/bin/docker.io /usr/local/bin/docker", capture=True) local("gpasswd -a `whoami` docker", capture=True) local("apt-get install -y --force-yes emacs23-nox", capture=True) local("apt-get install -y --force-yes wireshark", capture=True) local("apt-get install -y --force-yes iputils-arping", capture=True) local("apt-get install -y --force-yes bridge-utils", capture=True) local("apt-get install -y --force-yes tcpdump", capture=True) local("apt-get install -y --force-yes lv", capture=True) local( "wget https://raw.github.com/jpetazzo/pipework/master/pipework -O /usr/local/bin/pipework", capture=True) local("chmod 755 /usr/local/bin/pipework", capture=True) local("docker pull osrg/quagga", capture=True) local("docker pull osrg/gobgp", capture=True) local("docker pull osrg/exabgp", capture=True)