def backup(repo, bitbucket_username, github_username, github_api_token):
    """Mirror one BitBucket repository to GitHub.

    repo is a dict with at least 'name' and 'scm' keys.  Returns 0 on
    success, 1 on any failure (errors are reported on stdout).
    """
    print("Syncing %s from BitBucket to GitHub" % repo['name'])
    # The github API is now OAUTH only so the original api used to
    # create the repo is dead. For now creating repos manually while
    # I work on getting mirroring working with existing hand created repos.
    # github.create_repo(repo, github_username, github_api_token)
    bitbucket_repo = bitbucket_url.format(bitbucket_username, repo['name'])
    github_repo = github_url.format(github_username, repo['name'])
    local_repo = os.path.join(tmp_dir, repo['name'])
    if repo['scm'] == 'hg':
        if HgPullOrClone(bitbucket_repo, local_repo) != 0:
            print("Error getting repo")
            return 1
        # hg-git pushes the 'master' bookmark; force it onto tip so GitHub
        # shows the latest changes.
        sh('hg bookmark master -f -R {0}'.format(local_repo))
        github_repo = "git+ssh://" + github_repo[:-4]  # remove .git
        if sh('hg push {0} -R {1}'.format(github_repo, local_repo)) != 0:
            print("Error pushing changes")
            return 1
    else:  # assume it's git
        if GitPullOrClone(bitbucket_repo, local_repo) != 0:
            print("Error getting repo")
            return 1
        cmd = 'git --git-dir={1}/.git --work-tree={1} push ssh://{0}'.format(github_repo, local_repo)
        print(cmd)
        if sh(cmd) != 0:
            print("Error pushing changes")
            return 1
    return 0
def build_docs_command():
    """Build the Sphinx documentation and bundle it into a dist/ tarball."""
    sh('python setup.py build_sphinx')
    ver = version()
    package_name = PACKAGE_NAME
    # BSD tar -s rewrites the leading 'html/' path inside the archive.
    tar_cmd = (
        "tar -c -v -z -C build/sphinx/ -f dist/%(package_name)s-docs-%(ver)s.tar.gz -s "
        "'/^html/%(package_name)s-docs-%(ver)s/' html" % locals()
    )
    sh(tar_cmd)
def GitPullOrClone(remote_repo, local_repo):
    """Update local_repo from remote_repo, cloning first if needed.

    Returns the sh() exit status (0 on success).
    """
    if os.path.exists(local_repo):
        # --git-dir/--work-tree make the pull location-independent, so the
        # old os.chdir(local_repo) was redundant — and it leaked a working
        # directory change into the rest of the process.
        cmd = 'git --git-dir="{1}/.git" --work-tree="{1}" pull {0} master'.format(remote_repo, local_repo)
        print(cmd)
        return sh(cmd)
    cmd = 'git clone {0} {1}'.format(remote_repo, local_repo)
    return sh(cmd)
def babel_refresh_command():
    "Extract messages and update translation files."
    # get directory of all extension that also use translations
    import wtforms
    extensions = ' '.join([os.path.dirname(wtforms.__file__)])
    extract = ('pybabel extract -F babel.cfg -k lazy_gettext -k _l '
               '-o geobox/web/translations/messages.pot '
               'geobox geobox/web geobox/model geobox/lib ' + extensions)
    sh(extract)
    sh('pybabel update -i geobox/web/translations/messages.pot -d geobox/web/translations')
def babel_refresh_command():
    "Extract messages and update translation files."
    # get directory of all extension that also use translations
    import wtforms
    extension_dirs = [os.path.dirname(wtforms.__file__)]
    extensions = ' '.join(extension_dirs)
    sh('pybabel extract -F babel.cfg -k lazy_gettext -k _l '
       '-o gbi_server/translations/messages.pot '
       'gbi_server gbi_server/model gbi_server/lib ' + extensions)
    sh('pybabel update -i gbi_server/translations/messages.pot -d gbi_server/translations')
def start_command(image_name=image):
    """Start image_name, first stopping/removing any running containers."""
    # NOTE: this will start an image and remove other images running
    running_imgs = shell.backtick('docker ps')
    # 'docker ps' always prints a header line, so more than one line means
    # at least one container is running.  (The old code wrapped splitlines()
    # in a pointless list comprehension.)
    if len(running_imgs.splitlines()) > 1:
        stop_remove_images()
    # run docker image
    shell.sh(docker_run + ' ' + image_name)
def create_repo(reponame, username, password):
    """ Creates a public repository with the given credentials """
    if not repo_exists(reponame, username):
        print("Creating " + reponame + " in BitBucket")
        # Somehow BitBucket authentication with urllib2 is not working. So using this ugly approach.
        # SECURITY: the name and credentials are interpolated into a shell
        # command line — quote each one so shell metacharacters (e.g. in a
        # password) cannot inject commands.
        import pipes
        cmd = 'curl -d {data} -u {username}:{password} {base_url}/repositories/'
        cmd = cmd.format(data=pipes.quote('name=' + reponame),
                         username=pipes.quote(username),
                         password=pipes.quote(password),
                         base_url=base_url)
        sh(cmd)
def bump_version_command(version):
    """Rewrite version strings in VERSION_FILES in place, then re-run prepare."""
    short_version = ".".join(version.split(".")[:2])
    for filename, replace in VERSION_FILES:
        # '###' placeholders take the full version, '##' the short one.
        if "###" in replace:
            marker, target = "###", version
        else:
            marker, target = "##", short_version
        search_for = replace.replace(marker, "[^'\"]+")
        replace_with = replace.replace(marker, target)
        # escape double quotes for the perl s/// expression below
        search_for = search_for.replace('"', '\\"')
        replace_with = replace_with.replace('"', '\\"')
        sh("""perl -p -i -e "s/%(search_for)s/%(replace_with)s/" %(filename)s """ % locals())
    prepare_command()
def bump_version_command(version):
    """Update hard-coded version strings in VERSION_FILES, then run prepare_command."""
    short_version = '.'.join(version.split('.')[:2])
    for filename, replace in VERSION_FILES:
        # templates with '###' get the full version, '##' the major.minor one
        pattern, value = ('###', version) if '###' in replace else ('##', short_version)
        search_for = replace.replace(pattern, '[^\'"]+').replace('"', '\\"')
        replace_with = replace.replace(pattern, value).replace('"', '\\"')
        sh('''perl -p -i -e "s/%(search_for)s/%(replace_with)s/" %(filename)s ''' % locals())
    prepare_command()
def backup(repo):
    """Mirror one BitBucket hg repository to GitHub, then delete the local clone."""
    print("Syncing " + repo + " from BitBucket to GitHub")
    # NOTE(review): gh looks unused — presumably login() is kept for its
    # side effects; confirm before removing.
    gh = login(github_username, password=github_api_token)
    bitbucket_repo = bitbucket_url.format(bitbucket_username, repo)
    github_repo = github_url.format(github_username, repo)
    local_repo = os.path.join(tmp_dir, repo) + ".bitbucket2github"
    if os.path.exists(local_repo):
        sh("hg pull {0} -R {1}".format(bitbucket_repo, local_repo))
    else:
        sh("hg clone {0} {1}".format(bitbucket_repo, local_repo))
    sh("hg bookmark master -f -R {0}".format(local_repo))
    sh("hg push {0} -R {1}".format(github_repo, local_repo))
    sh("rm -rf {0}".format(local_repo))
def rsync():
    """ Sync everything from config['dirs'] to config['backup_dir'] """
    # Normalize loop-invariant values once, outside the loop.
    # backup dir: drop a single trailing slash if present
    backup_dir = config['rsync']['backup_dir']
    if backup_dir.endswith('/'):
        backup_dir = backup_dir[:-1]
    # user: ensure trailing '@' for the user@server rsync form
    user = config['rsync']['user']
    if not user.endswith('@'):
        user = user + '@'
    # ensure that backup_dirs exist (once — the old code re-checked every
    # destination dir for every source dir)
    for dirname in config['dirs']:
        outdir = path(backup_dir + dirname)
        if not outdir.exists():
            log.mark('Creating backup_dirs %s' % outdir)
            outdir.makedirs()
    for src in config['dirs']:
        # source dir: ensure trailing slash and leading ':' (remote rsync syntax)
        real_dir = src if src.endswith('/') else src + '/'
        if not real_dir.startswith(':'):
            real_dir = ':' + real_dir
        log.mark('Starting rsync.....')
        # BUGFIX: the destination previously used the raw
        # config value with [:-1], which chopped the last character off the
        # path whenever backup_dir had no trailing slash; use the normalized
        # backup_dir instead.
        sh('/usr/bin/rsync %s %s%s%s %s%s &>>%s' % (
            config['rsync']['args'], user, config['rsync']['server'], real_dir,
            backup_dir, real_dir[1:], config['rsync']['log_file']))
def backup(repo):
    """Mirror one GitHub repository into BitBucket via hg."""
    name = repo['name']
    print("Syncing %s from GitHub to BitBucket" % name)
    bitbucket.create_repo(repo, bitbucket_username, bitbucket_password)
    bitbucket_repo = bitbucket_url.format(bitbucket_username, name)
    github_repo = github_url.format(github_username, name)
    local_repo = os.path.join(tmp_dir, name)
    # NOTE(review): 'hg fetch' needs the fetch extension enabled — confirm.
    if os.path.exists(local_repo):
        sh('hg fetch {0} -R {1}'.format(github_repo, local_repo))
    else:
        sh('hg clone {0} {1}'.format(github_repo, local_repo))
    sh('hg bookmark master -f -R {0}'.format(local_repo))
    sh('hg push {0} -R {1}'.format(bitbucket_repo, local_repo))
def mysqldump():
    """Dump each configured database into a dated gzip file, then prune old dumps."""
    # All path normalization is loop-invariant — do it once.
    # backup dir: drop a single trailing slash if present
    backup_dir = config['rsync']['backup_dir']
    if backup_dir.endswith('/'):
        backup_dir = backup_dir[:-1]
    # mysql dir: ensure leading and trailing slash
    mysql_dir = config['mysql']['backup_dir']
    if not mysql_dir.startswith('/'):
        mysql_dir = '/' + mysql_dir
    if not mysql_dir.endswith('/'):
        mysql_dir = mysql_dir + '/'
    # dated subdirectory, e.g. '31-12-2015/'
    datetime_dir = time.strftime("%d-%m-%Y") + '/'
    mysql_root_dir = backup_dir + mysql_dir
    real_dir = mysql_root_dir + datetime_dir
    # ensure that the dated dump dir exists
    outdir = path(real_dir)
    if not outdir.exists():
        log.mark('Creating mysql dir %s' % outdir)
        outdir.makedirs()
    # dumping all databases
    for db in config['databases']:
        sh('/usr/bin/echo Dumping %s >> %s' % (db, config['rsync']['log_file']))
        sh('/usr/bin/mysqldump -h %s -u %s -p%s %s | gzip -9 > %s%s.sql.gz' % (
            config['mysql']['server'], config['mysql']['user'],
            config['mysql']['password'], db, real_dir, db))
        sh('/usr/bin/echo Done >> %s' % config['rsync']['log_file'])
    # cleaning old databases in mysql_backup_dir (the old code re-ran this
    # identical prune once per database; once after the loop is enough)
    sh('/usr/bin/find %s -type d -mtime +%s -exec /bin/rm -rf {} \; &>/dev/null' % (
        mysql_root_dir, config['find']['days']))
def clean_log():
    """Truncate the rsync log file and write a fresh 'Starting backup' header."""
    logfile = config['rsync']['log_file']
    sh('/bin/cat /dev/null > %s' % logfile)
    sh('/usr/bin/echo Starting backup at `date` > %s' % logfile)
def babel_init_lang_command(lang):
    "Initialize new language."
    template = 'pybabel init -i gbi_server/translations/messages.pot -d gbi_server/translations -l %s'
    sh(template % (lang,))
def babel_refresh():
    "Extract messages and update translation files."
    extract = ('pybabel extract -F ../app/babel.cfg -k lazy_gettext -k _l '
               '-o ../app/gbi_server/translations/messages.pot '
               '../app/gbi_server ../app/gbi_server/model ../app/gbi_server/lib')
    update = ('pybabel update -i ../app/gbi_server/translations/messages.pot '
              '-d ../app/gbi_server/translations')
    sh(extract)
    sh(update)
def babel_init_lang(lang):
    "Initialize new language."
    # BUGFIX: -d previously pointed at 'gbi_server/translations' while -i (and
    # the sibling babel_refresh/babel_compile commands) address the app via
    # '../app/'; make the output directory consistent.
    sh('pybabel init -i ../app/gbi_server/translations/messages.pot -d ../app/gbi_server/translations -l %s' % (lang,))
def map_command(mapping=mapping):
    """Execute the configured mapping command via the shell."""
    shell.sh(mapping)
def babel_init_lang_command(lang):
    "Initialize new language."
    cmd = "pybabel init -i geobox/web/translations/messages.pot -d geobox/web/translations -l %s" % (lang,)
    sh(cmd)
def register_command():
    """Register the package on PyPI with an empty build tag."""
    sh('python setup.py egg_info -b "" -D register')
def bulk_command(bulk_index=bulk_index):
    """Execute the configured bulk-index command via the shell."""
    shell.sh(bulk_index)
def build_wheel_command():
    """Build a wheel with a clean (empty) build tag."""
    sh('python setup.py egg_info -b "" -D bdist_wheel')
def upload_final_sdist_command():
    """Build and upload the final sdist via setup.py upload (empty build tag)."""
    sh('python setup.py egg_info -b "" -D sdist upload')
def upload_docs_command():
    """Rsync the built Sphinx HTML docs to the remote doc host, per version."""
    sh('rsync -a -v -P -z build/sphinx/html/ {0}/{1}'.format(REMOTE_DOC_LOCATION, version()))
def upload_final_wheel_command():
    """Check for a clean tree, build the wheel, and upload it with twine."""
    check_uncommited()
    build_wheel_command()
    sh('twine upload dist/MapProxy-{0}-py2.py3-none-any.whl'.format(version()))
def upload_final_sdist_command():
    """Check for a clean tree, build the sdist, and upload it with twine."""
    check_uncommited()
    build_sdist_command()
    sh('twine upload dist/MapProxy-{0}.tar.gz'.format(version()))
def babel_compile():
    "Compile translations."
    cmd = 'pybabel compile -d ../app/gbi_server/translations'
    sh(cmd)
def babel_compile_command():
    "Compile translations."
    cmd = 'pybabel compile -d gbi_server/translations'
    sh(cmd)
def babel_compile_command():
    "Compile translations."
    sh('pybabel compile -d geobox/web/translations')
def prepare_command(tag=""): sh('python setup.py egg_info -D -b "%s"' % tag)
def HgPullOrClone(remote_repo, local_repo):
    """Pull into an existing local clone or clone fresh; return the sh() status."""
    have_clone = os.path.exists(local_repo)
    template = 'hg pull {0} -R {1}' if have_clone else 'hg clone {0} {1}'
    return sh(template.format(remote_repo, local_repo))
def check_uncommited():
    """Exit the process if the git working tree has uncommited changes."""
    tree_is_clean = sh('git diff-index --quiet HEAD --') == 0
    if not tree_is_clean:
        print('ABORT: uncommited changes. please commit (and tag) release version number')
        sys.exit(1)
def babel_refresh_command():
    "Extract messages and update translation files."
    sh("pybabel extract -F babel.cfg -k lazy_gettext -k _l "
       "-o geobox/web/translations/messages.pot geobox/web geobox/model geobox/lib")
    sh("pybabel update -i geobox/web/translations/messages.pot -d geobox/web/translations")
def build_sdist_command():
    """Build a source distribution with a clean (empty) build tag."""
    sh('python setup.py egg_info -b "" -D sdist')
def babel_refresh_command():
    "Extract messages and update translation files."
    extract = 'pybabel extract -F babel.cfg -k lazy_gettext -k _l -o geobox/web/translations/messages.pot geobox/web geobox/model geobox/lib'
    update = 'pybabel update -i geobox/web/translations/messages.pot -d geobox/web/translations'
    sh(extract)
    sh(update)
def upload_sdist_command():
    """Build a dev sdist and scp it to the remote release location."""
    sh('python setup.py egg_info -b "" -D sdist')
    sh('scp dist/MapProxy-{0}.* {1}'.format(version(), REMOTE_REL_LOCATION))
def upload_final_wheel_command():
    """Build the final wheel and upload it via setup.py upload (empty build tag)."""
    sh('python setup.py egg_info -b "" -D bdist_wheel upload')
def babel_init_lang_command(lang):
    "Initialize new language."
    base = 'pybabel init -i geobox/web/translations/messages.pot -d geobox/web/translations -l %s'
    sh(base % (lang,))
def link_latest_command(ver=None):
    """Point the remote 'latest' docs symlink at *ver* (default: current version)."""
    if ver is None:
        ver = version()
    remote_host, remote_path = REMOTE_DOC_LOCATION.split(':')
    sh('ssh {0} "cd {1} && rm latest && ln -s {2} latest"'.format(remote_host, remote_path, ver))
def stop_remove_images():
    """Stop every container, then remove all containers (running or not)."""
    for action in ('stop', 'rm'):
        shell.sh('docker %s $(docker ps -a -q)' % action)