def asset(self, *args):
    """Register a new asset tag (short code -> descriptive name) in annex vars.json.

    An optional first positional argument pre-populates the asset dict; any
    fields still missing are collected interactively via prompt().  Returns
    True on success, False if a prompted value fails ``self.__tr`` validation.
    """
    # BUG FIX: *args always arrives as a tuple, so the original
    # `type(args) is list` test could never be true and a pre-parsed asset
    # was silently discarded.  Check whether an argument was supplied instead.
    new_asset = self.__parse_asset(args[0]) if len(args) > 0 else {}

    vars_path = os.path.join(self.config['IMAGE_HOME'], "annex", "vars.json")
    with open(vars_path, 'rb') as M:
        annex_vars = json.loads(M.read())

    if "ASSET_TAGS" not in annex_vars.keys():
        annex_vars['ASSET_TAGS'] = {}

    if "name" not in new_asset.keys():
        new_asset['name'] = prompt("New asset name: ")
    if not self.__tr(new_asset['name']):
        return False

    if "short_code" not in new_asset.keys():
        new_asset['short_code'] = prompt(
            "Short code for new mime type %s (i.e. \"my_json\"): " % new_asset['name'])
    if not self.__tr(new_asset['short_code']):
        return False

    annex_vars['ASSET_TAGS'].update({new_asset['short_code']: new_asset['name']})

    with open(vars_path, 'wb+') as M:
        M.write(json.dumps(annex_vars, indent=4))

    return True
def test_prompt_appends_space():
    """prompt() appends a single space when no default is given."""
    text = "This is my prompt"
    prompt(text)
    expected = text + ' '
    eq_(sys.stdout.getvalue(), expected)
def get_release_properties(): # get current version from maven current_version = get_current_version() # remove -SNAPSHOT for release version release_version = current_version.replace("-SNAPSHOT", "") # scm tag scm_tag = "uht-traktor-" + release_version # get next version based on current release version new_dev_version = release_version last_num = re.compile(r'(?:[^\d]*(\d+)[^\d]*)+') m = last_num.search(release_version) if m: next = str(int(m.group(1))+1) start, end = m.span(1) new_dev_version = release_version[:max(end-len(next), start)] + next + release_version[end:] new_dev_version = new_dev_version + "-SNAPSHOT" # prompt for the final values, give default values from above release_version = prompt("What is the release version: ", None, release_version) scm_tag = prompt("What is SCM release tag or label: ", None, scm_tag) new_dev_version = prompt("What is the new development version: ", None, new_dev_version) # new dev version must end with -SNAPSHOT as otherwise we will run into maven troubles later if not new_dev_version.endswith("-SNAPSHOT"): abort(red("Invalid new development version, must end with '-SNAPSHOT'.")) return [release_version, scm_tag, new_dev_version]
def _db_questions(type, port):
    """Interactively collect DB connection details and write DATABASE_URL to .env.

    ``type`` indexes into db_types (0 = mysql-connector, 1 = postgres);
    ``port`` is the default port used when the user leaves it blank.
    Returns the assembled database URL.
    """
    db_types = ['mysql-connector', 'postgres']
    output = {}
    # NOTE(review): the credential prompts below were mangled ("******") in
    # the original source; reconstructed from the surrounding pattern.
    output['username'] = prompt('Database Username: ')
    output['password'] = prompt('Database Password: ')
    output['host'] = prompt('The host (localhost): ')
    output['port'] = prompt('The post (%s): ' % port)
    output['db_name'] = prompt('Database Name: ')
    # fall back to defaults when the user just hits enter
    output['host'] = output['host'] if output['host'] else 'localhost'
    output['port'] = output['port'] if output['port'] else port
    db_url = '%s://%s:%s@%s:%s/%s' % (db_types[type], output['username'],
                                      output['password'], output['host'],
                                      output['port'], output['db_name'])
    local('echo "DATABASE_URL=%s" > .env' % db_url)
    return db_url
def enter_password():
    """Prompt twice for a password for `original_username`; re-prompt until both entries match."""
    password1 = prompt('Enter the password for %s:' % original_username)
    # NOTE(review): the second prompt and the mismatch message were mangled
    # ("******") in the original source; reconstructed from context.
    password2 = prompt('Re-enter the password:')
    if password1 != password2:
        print('The password was not the same')
        # BUG FIX: the original discarded the retry's result and returned the
        # first (mismatched) password; return the recursive result instead.
        return enter_password()
    return password1
def build_new_server():
    """ Build a brand new server from scratch. """
    require('hosts', provided_by=['dev', 'staging', 'production'])
    # double confirmation: the rebuild is destructive, so the user must type
    # YES (exactly) twice before anything happens
    ans = prompt('This will completely wipe out the server. Are you sure (YES/no)?')
    if ans != 'YES':
        print yellow('Glad you were just kidding.')
        return
    ans = prompt(yellow('%s' % env.hosts[0]) + ' will be wiped and rebuilt. Are you sure (YES/no)?')
    if ans != 'YES':
        print "Didn't think so."
        return
    # keep the SSH connection alive during the long provisioning run
    env.keepalive = 30
    # provision from the ground up: keys, users, base packages, data stores,
    # app code, web server, then restart everything
    install_root_key()
    add_deploy_user()
    common_install()
    setup_environment()
    setup_solr()
    setup_mongo()
    setup_postgres()
    setup_supervisor()
    build_api_app()
    build_web_app()
    setup_nginx()
    full_restart()
def run(self, **kwargs):
    """Delete all rows from the configured nova tables on the destination
    cloud, after two explicit interactive confirmations."""
    compute_resource = self.dst_cloud.resources[utils.COMPUTE_RESOURCE]
    mysql = compute_resource.mysql_connector

    def is_yes(reply):
        # the accepted affirmative spellings
        return reply in ['Y', 'y', 'yes']

    first = prompt("This will REMOVE ALL DATA from nova instances on "
                   "destination cloud! Are you sure? (Y/N)")
    confirmed = False
    if is_yes(first):
        second = prompt("No, seriously, THIS WILL REMOVE ABSOLUTELY "
                        "EVERYTHING from nova.instances DB table on "
                        "destination cloud. ARE YOU SURE? (Y/N)")
        confirmed = is_yes(second)

    if not confirmed:
        return

    LOG.warning("Following tables will be removed on destination: %s",
                self.tables_to_remove)
    for table in self.tables_to_remove:
        mysql.execute("delete from nova.{table}".format(table=table))
def install_postgis(postgres_ver=None, postgis_ver=None):
    """ Install PostGIS for PostgreSQL """
    assert postgres_ver in SUPPORT_POSTGRESQL_VERSIONS or postgres_ver is None
    assert postgis_ver in ('2.3', '2.4') or postgis_ver is None
    if postgres_ver and postgis_ver and postgis_ver not in SUPPORT_POSTGIS_VERSIONS[postgres_ver]:
        # BUG FIX: the original built an AssertionError without raising it,
        # so incompatible version pairs slipped through silently.
        raise AssertionError('Invalid postgis_ver {} for postgres_ver {}'.format(postgres_ver, postgis_ver))
    check_sudo()
    os_name, os_ver = check_os()
    if not confirm('Do you want to install GEOS, GDAL, PROJ.4 and PostGIS?'):
        return
    # keep asking until a supported PostgreSQL version is given
    allow_versions = ', '.join(SUPPORT_POSTGRESQL_VERSIONS)
    while postgres_ver not in SUPPORT_POSTGRESQL_VERSIONS:
        postgres_ver = prompt('Write PostgreSQL version you have ({}):'.format(allow_versions),
                              default=SUPPORT_POSTGRESQL_VERSIONS[-1])
    # ...and a PostGIS version supported by that PostgreSQL release
    allow_versions = ', '.join(SUPPORT_POSTGIS_VERSIONS[postgres_ver])
    while postgis_ver not in SUPPORT_POSTGIS_VERSIONS[postgres_ver]:
        postgis_ver = prompt('Write PostGIS version you need ({}):'.format(allow_versions),
                             default=SUPPORT_POSTGIS_VERSIONS[postgres_ver][-1])
    print_green('INFO: Install GEOS, GDAL, PROJ.4 and PostGIS {} for PostgreSQL {}...'.format(postgis_ver, postgres_ver))
    packages = ['libgeos-dev libgeos++-dev gdal-bin python-gdal libproj-dev']
    # distro-specific runtime libraries
    if os_name == 'Debian' and os_ver == '8':
        packages.extend(['libgeos-c1 libgeos-3.4.2 libgdal-dev libgdal1-dev libproj0'])
    if os_name == 'Debian' and os_ver == '9':
        packages.extend(['libgeos-c1v5 libgeos-3.5.1 libgdal-dev libproj12'])
    apt_install(' '.join(packages), noconfirm=True)
    apt_install('postgresql-{}-postgis-{}'.format(postgres_ver, postgis_ver), noconfirm=True)
    apt_install('libgeoip1 spatialite-bin', noconfirm=True)
    print_green('INFO: Install GEOS, GDAL, PROJ.4 and PostGIS {} for PostgreSQL {}... OK'.format(postgis_ver, postgres_ver))
def clear_databases():
    """ Clear out all databases.

    This is very destructive, only useful for testing!
    """
    require('hosts', provided_by=['dev', 'staging', 'production'])
    # double confirmation: the user must type YES (exactly) twice
    ans = prompt('This will completely wipe out the database. Are you sure (YES/no)?')
    if ans != 'YES':
        print yellow('Glad you were just kidding.')
        return
    ans = prompt(yellow('%s' % env.hosts[0]) + ' database will be wiped. Are you sure (YES/no)?')
    if ans != 'YES':
        print "Didn't think so."
        return
    # stop all app processes before dropping the databases
    run('supervisorctl stop all')
    print yellow('Recreate WEB database')
    run('dropdb ocl_web')
    run('createdb -O deploy ocl_web')
    # setup DB
    with prefix('source /opt/virtualenvs/ocl_web/bin/activate'):
        with prefix('export DJANGO_CONFIGURATION="Production"'):
            with prefix('export DJANGO_SECRET_KEY="blah"'):
                print yellow('creating WEB database...')
                run('/opt/deploy/ocl_web/ocl_web/manage.py syncdb --noinput --migrate')
    # the API database lives in mongo, not postgres
    print yellow('Recreate API database')
    run('echo -e "use ocl \n db.dropDatabase();" | mongo')
    create_api_database()
def _run_command(directory, command):
    """
    Runs a given command in the directory given.  If the command begins with
    a diff-like program, a prompt is given in order to allow for user
    inspection.
    """
    if command and not command.startswith(CFG_LINES_TO_IGNORE):
        # "sudo -u <user> <cmd>" -> run <cmd> as <user> inside the directory
        match = re.match("sudo -u ([a-z]+) (.*)", command)
        if match:
            with cd(directory):
                sudo(match.group(2), user=match.group(1))
        elif command.startswith('sudo'):
            # plain sudo: strip the "sudo " prefix and let fabric's sudo() wrap it
            sudo(command[5:], shell=False)
        elif command.startswith('cd'):
            # ignore cause cd doesn't work with run
            # but we should already be wrapped
            # with a "with cd()" context manager
            pass
        else:
            if command.startswith(('colordiff', 'diff')):
                # diff-like output: show it, then pause so the user can inspect
                with cd(directory):
                    with hide('warnings'):
                        run(command, warn_only=True)
                prompt("Press Enter to continue..")
            else:
                with cd(directory):
                    run(command)
def test_env():
    """Configure the test environment on EC2.

    Asks a series of questions before deploying to the cloud, and lets the
    user choose whether an Elastic IP address should be attached.
    """
    # decide whether to attach an Elastic IP, preferring a preset env value
    if 'use_elastic_ip' in env:
        use_elastic_ip = to_boolean(env.use_elastic_ip)
    else:
        use_elastic_ip = confirm('Do you want to assign an Elastic IP to this instance: ', False)

    public_ip = None
    if use_elastic_ip:
        public_ip = env.public_ip if 'public_ip' in env else prompt('What is the public IP address: ', 'public_ip')

    if 'instance_name' not in env:
        prompt('AWS Instance name: ', 'instance_name')

    # Create the instance in AWS
    host_names = create_instance([env.instance_name], use_elastic_ip, [public_ip])
    env.hosts = host_names
    if not env.host_string:
        env.host_string = env.hosts[0]
    env.user = USERNAME
    env.key_filename = AWS_KEY
    env.roledefs = {'gavo': host_names}
def configure_project():
    """Interactively set up the virtualenv, vassals dir, project dirs and database.

    The prompt texts are in Italian; an empty answer falls back to the
    module-level defaults (VENVS_DIRNAME / VASSALS).
    """
    # ask for the virtualenv directory; empty -> default location
    venv = prompt(
        'Specifica il percorso della directory per il virtualenv oppure lascia vuoto per installarlo dentro {}'.format(
            VENVS_DIRNAME))
    if not venv:
        venv = VENVS_DIRNAME
    # ask for the uwsgi vassals directory; empty -> default location
    vassals = prompt('Specifica il percorso della directory per i vassals oppure lascia vuoto per usare {}'.format(
        VASSALS))
    if not vassals:
        vassals = VASSALS
    # create the virtualenv and install requirements on first run only
    if not os.path.exists("{}/{}".format(venv, PROJECT_DIRNAME)):
        local("virtualenv {}/{}".format(venv, PROJECT_DIRNAME))
        local("{}/{}/bin/pip install -r {}/requirements.txt".format(venv, PROJECT_DIRNAME, BASE_DIR))
    # ensure the standard project directories exist
    if not os.path.exists('templates'):
        local('mkdir templates')
    if not os.path.exists('static'):
        local('mkdir static')
    if not os.path.exists('media'):
        local('mkdir media')
    # link the uwsgi vassal ini into the vassals directory
    if not os.path.exists('{}/{}.ini'.format(vassals, PROJECT_DIRNAME)):
        local('ln -s {}/uwsgiconf/locals/{}.ini {}/{}.ini'.format(BASE_DIR, PROJECT_DIRNAME,
                                                                  vassals, PROJECT_DIRNAME))
    # 1 = create a fresh db, 2 = pull it from the server, anything else = skip
    how_db = prompt('Digita 1 per creare il db, 2 per scaricarlo dal server oppure lascia vuoto per non fare nulla!')
    if how_db == "1":
        create_db()
    elif how_db == "2":
        db_from_server()
def selenium_test():
    """Trigger the Jenkins selenium job for the current environment via curl."""
    require('environment', provided_by=('staging', 'preview', 'demo', 'production', 'india'))
    # NOTE(review): both prompt() calls were mangled ("******") in the
    # original source; reconstructed as prompt(text, key, ...), which stores
    # the answers in env.jenkins_user / env.jenkins_password.
    prompt("Jenkins username:", "jenkins_user", default="selenium")
    prompt("Jenkins password:", "jenkins_password")
    url = env.selenium_url % {"token": "foobar", "environment": env.environment}
    local("curl --user %(user)s:%(pass)s '%(url)s'" %
          {'user': env.jenkins_user, 'pass': env.jenkins_password, 'url': url})
def set_context():
    """Interactively collect the campaign context values into env['context']."""
    context = {}
    context['heading'] = prompt("Heading:", 'heading')
    context['extra_info'] = prompt("Short description:", 'extra_info')
    context['more_info_url'] = prompt("More info link:", 'more_info_url')
    # the class answer is validated against the known campaign classes
    context['campaign_class'] = prompt(
        "Campaign class (one of {}):".format(", ".join(CAMPAIGN_CLASSES)),
        'campaign_class', validate=validate_classes)
    env['context'] = context
def _database_details(self):
    """Prompt for MySQL connection details and return them as a dict."""
    # NOTE(review): the user/password entries below were mangled ("******")
    # in the original source; reconstructed from the surrounding dict pattern.
    db_details = {
        "database_name": prompt("Database Name: "),
        "database_user": prompt("Database User: "),
        # escape the password so it can be embedded safely in config strings
        "database_pass": re.escape(getpass.getpass("Database Password: ")),
        "database_host": prompt("Database Host: ", default="localhost")
    }
    """ @todo
    db_test_conn = _test_mysql_connection(db_name=db_details["database_name"],
                                          username=db_details["database_user"],
                                          password=db_details["database_pass"],
                                          db_host=db_details["database_host"])
    if db_test_conn is None:
        print "MySQLdb module not installed, unable to test database connection, continuing."
        return db_details
    elif db_test_conn is False:
        print "Connection details failed."
        go_on = prompt("[c]ontinue [a]bort [r]etry:", default="r")
        if go_on == "r":
            return self._database_details()
        elif go_on == "c":
            return db_details
        else:
            import sys
            sys.exit()
    else:
    """
    return db_details
def sync_database():
    "Sync db, make cache tables, and run South migrations"
    # an empty table list means this is a brand-new database
    new_installation = len(get_db_tables()) == 0
    with lcd(PROJ_ROOT):
        local('{python} manage.py syncdb --noinput'.format(**env))
        create_cache_tables()
        local('{python} manage.py migrate reversion --noinput'.format(**env))
        local('{python} manage.py migrate --noinput'.format(**env))
        if new_installation:
            print('\nDatabase synced. Follow prompts to create an initial '
                  'super user and project.')
            # NOTE(review): this prompt/local pair was mangled ("******") in
            # the original source; reconstructed from the surrounding pattern.
            username = prompt('Username: ')
            local('{python} manage.py createsuperuser --username {username}'.format(
                username=username, **env))
            project_name = prompt('Project name (blank to skip): ')
            if project_name:
                project_slug = prompt('Project slug: ', validate=str)
                local('{python} manage.py createproject '
                      '"{name}" {slug} --users={username}'.format(
                          name=project_name, slug=project_slug,
                          username=username, **env))
def setup_project(project_name=project_name, project_username=project_username,
                  git_url=git_url, apache_server_name=apache_server_name,
                  apache_server_alias=apache_server_alias, django_settings=None,
                  branch=branch, requirements_path=requirements_path):
    """
    Creates a user for the project, checks out the code and does basic apache
    config.
    """
    # fall back to interactively-derived settings when none are supplied
    django_settings = django_settings or _get_django_settings()
    setup_project_user(project_username)
    setup_postgres()
    # show the generated deploy key so it can be added to the git host
    print(green("Here is the project user's public key:"))
    run('cat /home/%s/.ssh/id_rsa.pub' % project_username)
    print(green("This script will attempt a `git clone` next."))
    prompt(green("Press enter to continue."))
    setup_project_code(project_name, project_username, git_url, branch)
    setup_project_virtualenv(project_username)
    install_project_requirements(project_username, requirements_path)
    # run the django management commands as the project user
    with settings(user=project_username):
        with cd('/home/%s/%s' % (project_username, project_name)):
            run('/home/%s/%s/bin/python manage.py syncdb --settings=%s --noinput' %
                (project_username, env_path, django_settings))
            # Don't make it an error if the project isn't using south
            with settings(warn_only=True):
                run('/home/%s/%s/bin/python manage.py migrate --settings=%s' %
                    (project_username, env_path, django_settings))
            run('/home/%s/%s/bin/python manage.py collectstatic --noinput --settings=%s' %
                (project_username, env_path, django_settings))
    setup_project_apache(project_name, project_username, apache_server_name,
                         apache_server_alias, django_settings, branch=branch)
    update_project()
    print(green("""Project setup complete. You may need to patch the virtualenv to install things like mx. You may do so with the patch_virtualenv command."""))
def catch_result(self, result):
    """Inspect the output of a `git am` run, update the progress bar, and
    drive manual/automatic conflict resolution on failure."""
    for line in result.split('\n'):
        # each "Applying: ..." line is one patch successfully applied
        if re.match('Applying: ', line):
            tqdm.write(colors.green(line))
            self.pbar.update()
    if result.failed:
        if "git config --global user.email" in result:
            logger.error(
                "Need to configure git for this user\n"
            )
            raise GitHubException(result)
        try:
            # raised purely as control flow: jump to the except/finally
            # machinery that handles resolve-or-skip
            raise WiggleException
        except WiggleException:
            if self.auto_exit:
                sudo("git am --abort")
                logger.error('Aborting deploy and go back')
                raise GitHubException
            # wait for the operator to resolve the conflict by hand
            prompt("Manual resolve...")
        finally:
            if not self.auto_exit:
                # anything staged means the user resolved the conflict
                to_commit = sudo(
                    "git diff --cached --name-only --no-color", pty=False
                )
                if to_commit:
                    self.resolve()
                else:
                    self.skip()
def deploy():
    """Build the play app locally, ship a release to the server and apply puppet."""
    # collect deploy-time settings into fabric's env
    prompt("Pivotal Tracker API token: ", "pivotal_token")
    prompt("App port: ", "play_port", "9000", "\d+")
    local("play clean compile stage")
    with lcd("target/universal/stage"):
        local("rm -f conf/site.conf")
    # upload the staged build into a fresh release directory
    run("mkdir -p %s" % env.release_dir)
    with cd(env.release_dir):
        put("*", ".")
        run("echo %s > REVISION" % local("git rev-parse HEAD", capture=True))
    # repoint the `current` symlink at the new release
    with cd(env.basedir):
        run("rm -f current")
        run("ln -s %s current" % env.release_dir)
    with settings(warn_only=True):
        run("sudo stop %(app_name)s" % env)
    run("mkdir -p %(shared_dir)s" % env)
    put("manifests", env.shared_dir)
    # apply the puppet manifest with deploy facts exported as FACTER_* vars;
    # "test $? -le 2" accepts puppet's changed/no-change detailed exit codes
    with cd(env.shared_dir):
        run("""FACTER_app_name=%(app_name)s\
 FACTER_app_path=%(release_dir)s\
 FACTER_manifest_path=%(manifest_dir)s\
 FACTER_play_port=%(play_port)s\
 FACTER_pivotal_token=%(pivotal_token)s\
 sudo -E puppet\
 apply\
 --detailed-exitcodes\
 --modulepath %(puppet_module_dir)s\
 %(manifest_dir)s/bridge.pp;\
 test $? -le 2 """ % env)
    with settings(warn_only=True):
        run("sudo restart %(app_name)s" % env)
    # keep only the five most recent releases
    with cd(env.releases_dir):
        run("ls -1|head -n -5|xargs rm -rf")
def init():
    """Bootstrap a new project: README, app skeleton, optional static media, git repo."""
    # Touch README
    local('rm README.md')
    readme = prompt('Enter breif description for README: ').lower()
    local('echo "%s" > README.md' % (readme))
    # Set Up App
    app_name = prompt("Enter App Name: ").lower()
    local("mkdir -p %s/{api,resources,services,tasks}" % (app_name))
    local("cd %s && for DIR in $(find . -type d); do touch $DIR/__init__.py; done" % (app_name))
    # Set Up Static Media (only when the user answers exactly "Y")
    media = prompt("Setup Static Media (Y/N): ")
    if media == "Y":
        local("mkdir -p public/{css,js,img}")
        local("mkdir -p public/css/src && mkdir -p public/js/lib")
        local("cd public && for DIR in $(find . -type d); do touch $DIR/empty; done")
    # Setup Git
    local("rm -rf .git/ && git init && git add .")
    commit = prompt("Git Commit Message: ").lower()
    local("git commit -m %s" % (commit))
    origin = prompt("Add Origin: ")
    local("git remote add origin %s && git push -u origin master" % (origin))
def _get_svn_user_and_pass():
    """Ensure env.svnuser and env.svnpass are populated, prompting if needed."""
    if 'svnuser' not in env or len(env.svnuser) == 0:
        # prompt user for username.  NOTE(review): this call was mangled
        # ("******") in the original source; reconstructed as prompt(text, key),
        # which stores the answer in env.svnuser.
        prompt('Enter SVN username:', 'svnuser')
    if 'svnpass' not in env or len(env.svnpass) == 0:
        # prompt user for password (getpass keeps it off the terminal echo)
        env.svnpass = getpass.getpass('Enter SVN password:')
def dockerrm(images=None, containers=None):
    """ mass remove images and containers """
    # NOTE(review): the images/containers parameters are currently unused —
    # the shell pipelines below operate on fixed selections; confirm intent.
    prompt('Are you sure? Press Ctrl-C otherwise.')
    # remove every exited container
    xrun('docker ps --all | grep Exited | cut -c1-19 | xargs -L1 docker rm')
    # remove every image whose age column reads "6 months"
    xrun('docker images | grep "6 months" | cut -c 35-65 | xargs -L1 docker rmi')
def set_context():
    """Collect campaign settings from the operator and stash them in env['context']."""
    heading = prompt("Heading for campaign:", 'heading')
    extra = prompt("Extra information for campaign:", 'extra_info')
    more = prompt("Link for more information:", 'more_info')
    # class answer is validated against the known campaign classes
    klass = prompt("Campaign class:", 'campaign_class', validate=validate_classes)
    env['context'] = {
        'heading': heading,
        'extra_info': extra,
        'more_info': more,
        'campaign_class': klass,
    }
def configure_jenkins():
    """Configure Jenkins: PAM auth user, config symlinks, GIT plugin and w3af job."""
    with settings(warn_only=True):
        # already configured?  PAMSecurityRealm in config.xml is the marker
        if files.contains('/var/lib/jenkins/config.xml', 'hudson.security.PAMSecurityRealm'):
            print(green('Jenkins is already configured'))
            return
    # I want to have user authentication based on the unix pam
    sudo('usermod -a -G shadow jenkins')
    # And I want to be able to login!
    # NOTE(review): this prompt pair was mangled ("******") in the original
    # source; reconstructed — verify the prompt texts before relying on them.
    username = prompt("Jenkins username: ")
    password = prompt("%s username password: " % username)
    sudo('useradd %s -p `openssl passwd -1 %s`' % (username, password))
    deploy_dir = env.conf['deploy_dir']
    links_to('/var/lib/jenkins/config.xml',
             '%sjenkins/config/jenkins/global.config.xml' % deploy_dir)
    # Configure the GIT plugin
    links_to('/var/lib/jenkins/hudson.plugins.git.GitSCM.xml',
             '%sjenkins/config/jenkins/hudson.plugins.git.GitSCM.xml' % deploy_dir)
    # Configure the w3af build job
    sudo('mkdir -p /var/lib/jenkins/jobs/w3af/')
    sudo('mkdir -p /var/lib/jenkins/jobs/w3af/builds/')
    links_to('/var/lib/jenkins/jobs/w3af/config.xml',
             '%sjenkins/config/jenkins/w3af.job.config.xml' % deploy_dir)
    print(green('Finished Jenkins configuration.'))
def deploy():
    """Provision a local dev VM end-to-end: proxies, packages, Oracle, repos, configs."""
    # Grab some data from user to build files
    conf['email'] = prompt("Enter your JPMChase email address:")
    conf['first_name'] = prompt("Enter your first name: ").lower()
    conf['last_name'] = prompt("Enter your last name: ").lower()
    conf['sid'] = prompt("Enter your JPMChase SID:")
    # getpass keeps the password off the terminal echo
    conf['db_password'] = getpass("Please enter a new password for your local test database: ")
    # Build some configuration
    conf['db_name'] = '%s_local' % conf['first_name']
    # Let er rip!
    apt_proxy()
    bash_proxy()
    environment_proxy()
    # reboot so proxy/environment changes take effect before installs
    reboot(wait=100)
    copy_packages()
    install_guest_additions()
    install_dev_packages()
    install_chrome()
    install_ruby()
    bash_oracle()
    install_oracle()
    upload_db_scripts()
    execute(create_db_users)
    install_sqldeveloper()
    set_git_config()
    clone_repos()
    copy_configs()
    remove_packages()
def update(self):
    """Optionally update the pbi client, the infrastructure repo and ansible roles.

    Each step is gated by its own Y/n prompt; updating the client itself
    exits the process afterwards.
    """
    if prompt('Update pbi client? Y/n', default='n').lower() == 'y':
        # sys.real_prefix is only set inside an activated virtualenv, where a
        # global self-update would not work — refuse and bail out
        if hasattr(sys, 'real_prefix'):
            print((red(':' * 72)))
            print(red('You can not update the client while having an activated virtualenv. Run'))
            print(red('$ deactivate'))
            print(red('first.'))
            print((red(':' * 72)))
            sys.exit()
        print(cyan('You likely will be promted to enter your root password.'))
        print(red('After updating the pbi cli-client the proccess will exit.'))
        # short pause so the user can read the warnings
        time.sleep(3)
        command = 'sudo pip install -I -e "git+https://github.com/palmbeach-interactive/pbi-client.git#egg=pbi-client"'
        local(command)
        sys.exit(0)
    if prompt('Update infrastructure repository? Y/n', default='y').lower() == 'y':
        print(cyan('Updating infratsructure repository'))
        with lcd(self.infrastructure_dir):
            command = 'git pull origin master'
            local(command)
    if prompt('Install ansible requirements? Y/n', default='y').lower() == 'y':
        print(cyan('installing ansible requirements'))
        with lcd(self.playbook_dir):
            command = 'ansible-galaxy install -f -r requirements.yml'
            local(command)
def create():
    """Creates a new presentation subdirectory"""
    short_name = prompt('Presentation Short Name: ')
    author = prompt('Author Short Name: ')
    # directory name: <presentation>-<author>, spaces replaced by underscores
    dirname = short_name.replace(' ', '_') + '-' + author.replace(' ', '_')
    if not os.path.exists(dirname):
        os.makedirs(dirname)
def set_context():
    """Gather campaign configuration interactively and store it in env['context']."""
    class_prompt = "Campaign class (one of {}):".format(", ".join(CAMPAIGN_CLASSES))
    # dict(...) evaluates its keyword values left-to-right, so the prompts
    # appear in the same order as before
    env['context'] = dict(
        heading=prompt("Heading for campaign:", 'heading'),
        extra_info=prompt("Extra information for campaign:", 'extra_info'),
        more_info_url=prompt("URL for more information:", 'more_info_url'),
        campaign_class=prompt(class_prompt, 'campaign_class', validate=validate_classes),
    )
def setup_vhost_osx_xampp(): ''' Set up a new Virtual Host on OS X with XAMPP ''' # get location and domain name location = prompt("What is the folder location?") domain = prompt("What is the new virtual host domain?") # get config apache_conf = ''' <VirtualHost *:80> DocumentRoot "%s" ServerName %s ErrorLog "/Applications/XAMPP/xamppfiles/logs/%s" </VirtualHost> ''' % (location, domain, domain) hostname_conf = '127.0.0.1 %s' % (domain) # append config to apache httpd.conf _local_append('/Applications/XAMPP/xamppfiles/etc/extra/httpd-vhosts.conf', apache_conf) # append config to /etc/hosts _local_append('/etc/hosts', hostname_conf) # restart apache with lcd('/Applications/XAMPP/xamppfiles'): local('sudo ./xampp reloadapache')
def install_nginx():
    """ Install Nginx web-server """
    check_sudo()
    check_os()
    if not confirm('Do you want to install nginx?'):
        return
    print_green('INFO: Install nginx...')
    # register the official nginx apt repository and its signing keys
    set_apt_repositories(NGINX_REPOSITORIES, NGINX_REPOS_INSTALL_KEYS_COMMANDS, subconf_name='nginx')
    apt_update()
    apt_install('nginx', noconfirm=True)
    # interactive tuning values for nginx.conf
    user = prompt('Set user to nginx', default='www-data', validate='[\w\-]+')
    workers = prompt('Set worker_processes', default='1', validate='\d+')
    cmbs = prompt('Set client_max_body_size (MB)', default='32', validate='\d+')
    gzl = prompt('Set gzip_comp_level (set 0 to disable gzip)', default='1', validate='\d+')
    cfn = '/etc/nginx/nginx.conf'
    # rewrite the stock config in place with the chosen values
    sed(cfn, r'user\s+nginx;', r'user {};'.format(user), use_sudo=True)
    sed(cfn, r'worker_processes\s+[0-9]+;', r'worker_processes {};'.format(workers), use_sudo=True, backup='')
    # .replace() is used instead of .format() because the replacement text
    # contains literal braces
    sed(cfn, r'http \{', (r'http \{\n\n'
                          r'    server_names_hash_bucket_size 64;\n'
                          r'    client_max_body_size {cmbs}m;\n\n').replace('{cmbs}', cmbs),
        use_sudo=True, backup='')
    if gzl != '0':
        # uncomment/expand the gzip section with the chosen compression level
        sed(cfn, r'\s+#\s*gzip on;',
            (r'    gzip on;\n'
             r'    gzip_proxied any;\n'
             r'    gzip_comp_level {gzl};\n'
             r'    gzip_min_length 1000;\n'
             r'    gzip_proxied expired no-cache no-store private auth;\n'
             r'    gzip_types text/plain text/javascript text/xml text/css application/x-javascript '
             r'application/javascript application/xml application/json image/svg+xml;\n'
             r'    gzip_disable "msie6";\n'
             r'    gzip_vary on;\n').format(gzl=gzl),
            use_sudo=True, backup='')
    print_green('INFO: Install nginx... OK')
def task(self, *args):
    """Interactively register a new annex task: create its group directory,
    wire it into vars.json (mime-type / init / pool lists) and scaffold the
    task file from the template.

    Returns the build routine on success, False if any prompted value fails
    ``self.__tr`` validation.
    """
    # BUG FIX: *args always arrives as a tuple, so the original
    # `type(args) is list` test was never true and a pre-parsed task dict
    # was silently discarded.  Check whether an argument was supplied instead.
    new_task = self.__parse_asset(args[0]) if len(args) > 0 else {}
    new_task['root'] = os.path.join(self.config['IMAGE_HOME'], "annex", "Tasks")

    if "name" not in new_task.keys():
        new_task['name'] = prompt("New task name: ")
    if not self.__tr(new_task['name']):
        return False

    if "dir" not in new_task.keys():
        print("Which group should this task belong to? ")
        # the first os.walk() level under Tasks/ lists the existing groups
        for _, d, _ in os.walk(new_task['root']):
            if len(d) > 0:
                print("Choose one from these groups:")
                print(", ".join(d))
                print("or create a new one here.")
            else:
                print("No groups yet! Create one here.")
            break
        new_task['dir'] = prompt("Task group: ")
    if not self.__tr(new_task['dir']):
        return False
    new_task['dir'] = new_task['dir'].capitalize()

    vars_path = os.path.join(self.config['IMAGE_HOME'], "annex", "vars.json")
    with open(vars_path, 'rb') as M:
        annex_vars = json.loads(M.read())

    # decide when this task runs: per mime-type, at project init, or manually
    if "apply" not in new_task.keys():
        new_task['apply'] = False
        if prompt("Apply mime-type to this task? Y|n: ") not in ["n", "N"]:
            new_task['apply'] = "mime_type"
        else:
            if prompt("Run task at project start? Y|n: ") not in ["n", "N"]:
                new_task['apply'] = "init"

    if new_task['apply'] == "mime_type":
        for m in ["MIME_TYPES", "MIME_TYPE_MAP", "MIME_TYPE_TASKS"]:
            if m not in annex_vars.keys():
                annex_vars[m] = {}
        if "mime_type" not in new_task.keys():
            if len(annex_vars['MIME_TYPES'].keys()) > 0:
                print("Choose from one of these mime types")
                print(", ".join(annex_vars['MIME_TYPES'].keys()))
                print("or create a new one here.")
            else:
                print("No mime types yes! Create on here.")
            new_task['mime_type'] = prompt("Mime type: ")
        if not self.__tr(new_task['mime_type']):
            return False
        if new_task['mime_type'] not in annex_vars['MIME_TYPES'].keys():
            if "short_code" not in new_task.keys():
                # NOTE(review): this prompt string was split mid-literal in
                # the mangled source; rejoined here.
                new_task['short_code'] = prompt(
                    "Short code for new mime type %s (i.e. \"my_json\"): "
                    % new_task['mime_type'])
            if not self.__tr(new_task['short_code']):
                return False
            annex_vars['MIME_TYPES'].update(
                {new_task['mime_type']: new_task['short_code']})
            annex_vars['MIME_TYPE_MAP'].update(
                {new_task['short_code']: new_task['mime_type']})
        if new_task['mime_type'] not in annex_vars['MIME_TYPE_TASKS'].keys():
            annex_vars['MIME_TYPE_TASKS'][new_task['mime_type']] = []
        annex_vars['MIME_TYPE_TASKS'][new_task['mime_type']].append(
            "%(dir)s.%(name)s.%(name)s" % new_task)
    elif new_task['apply'] == "init":
        if "INITIAL_TASKS" not in annex_vars.keys():
            annex_vars['INITIAL_TASKS'] = []
        annex_vars['INITIAL_TASKS'].append("%(dir)s.%(name)s.%(name)s" % new_task)

    # every task, however applied, joins the global pool
    if "TASK_POOL" not in annex_vars.keys():
        annex_vars['TASK_POOL'] = []
    annex_vars['TASK_POOL'].append("%(dir)s.%(name)s.%(name)s" % new_task)

    new_task['dir'] = os.path.join(new_task['root'], new_task['dir'])
    new_task['path'] = os.path.join(new_task['dir'], "%s.py" % new_task['name'])

    with open(vars_path, 'wb+') as M:
        M.write(json.dumps(annex_vars, indent=4))

    # shell routine that creates the group package and the task stub
    routine = [
        "mkdir -p %(dir)s",
        "if [ ! -f %(dir)s/__init__.py ]; then touch %(dir)s/__init__.py; fi",
        "sed 's/NAME_OF_TASK/%(name)s/g' $UNVEILLANCE_BUILD_HOME/tmpl/annex.task.py > %(path)s"
    ]
    return build_routine([r % new_task for r in routine], dst=self.config['IMAGE_HOME'])
def prompt_user(self, conf):
    """Ask for this setting's value and store it under ``self.key`` in *conf*.

    The existing conf value (or ``self.default``) is offered as the default;
    answers are checked with ``self.validate``.
    """
    current = conf.get(self.key, self.default)
    conf[self.key] = prompt(self.prompt, default=current, validate=self.validate)
def get_user_input(obj, parent=None):
    """Interactively drill into *obj* and return a selection pair.

    For a list, the user picks an element (recursing into it); a string is
    terminal and returned as (obj, obj); any other object is reflected on and
    the user picks either a method -> (obj, method) or a field -> recurse.
    'q' quits, 'a' resets the name filter.
    """
    from fabric.operations import prompt
    import sys
    import inspect
    from prettytable import PrettyTable
    if type(obj) is list:
        if len(obj) == 0:
            # nothing to choose from: bounce back up to the parent object
            print 'The list is empty, back to {}'.format(parent)
            return get_user_input(obj=parent)
        fields_dic = {str(x): x for x in obj}
        all_names = sorted(fields_dic.keys())
        methods_dic = {}
    elif type(obj) in [str, unicode, basestring]:
        # plain strings are terminal: return them directly
        return obj, obj
    else:
        # reflect on the object: split members into methods and fields
        methods = [
            x for x in inspect.getmembers(obj, predicate=lambda x: inspect.
                                          isfunction(x) or inspect.ismethod(x))
            if not x[0].startswith('__')
        ]
        fields = [
            x for x in inspect.getmembers(obj, predicate=lambda x: not inspect.isfunction(x) and not inspect.ismethod(x))
            if not x[0].startswith('__')
        ]
        # show the user a table of what is available on this object
        tbl = PrettyTable(['', str(obj), str(obj.__class__)])
        map(lambda x: tbl.add_row(['method', x[0], '']), methods)
        tbl.add_row(['', '', ''])
        map(lambda x: tbl.add_row(['field', x[0], str(x[1])]), fields)
        print tbl
        methods_dic = {x[0]: x[1] for x in methods}
        fields_dic = {x[0]: x[1] for x in fields}
        all_names = [x[0] for x in methods + fields]

    def chunks(lst, n):
        # join names in groups of n per display line
        for i in range(0, len(lst), n):
            yield ' * '.join(lst[i:i + n])

    sub_names = all_names
    while True:
        sub_list_str = '\n'.join(chunks(lst=sub_names, n=14))
        print '\n'
        choice = prompt(text='{} * a * q: '.format(sub_list_str))
        if choice == 'q':
            sys.exit(2)
        if choice == 'a':
            # 'a' resets the filter to show all names again
            sub_names = all_names
            continue
        # substring match against the currently displayed names
        sub_list = filter(lambda x: choice in x, sub_names)
        if len(sub_list) == 1:
            choice = sub_list[0]
        if choice in sub_list:
            print 'Using:', choice, '\n'
            if choice in methods_dic:
                return obj, methods_dic[choice]
            if choice in fields_dic:
                return get_user_input(obj=fields_dic[choice], parent=obj)
        elif len(sub_list) == 0:
            continue  # wrong input ask again with the same list of names
        else:
            sub_names = sub_list
            continue  # ask again with restricted list of names
def validate(self, *args):
    """Scan user-supplied Models/Modules/Tasks python files for ASSET_TAGS
    references and ensure each referenced short code has an entry in annex
    vars.json, prompting for missing descriptions.  Returns True when done."""
    print args

    def is_applicable(filename):
        # only plain python scripts are scanned; skip package markers,
        # bytecode, and anything `file` does not identify as a python script
        if filename == "__init__.py":
            return False
        if re.match(r'.*pyc$', filename):
            return False
        with settings(hide('everything'), warn_only=True):
            if re.match(re.compile("%s:.*[pP]ython\sscript.*" % filename),
                        local("file %s" % filename, capture=True)):
                return True
        return False

    # go through models, modules, tasks
    # pick out asset tags: make sure they exist in vars
    user_files = []
    for d in ["Models", "Modules", "Tasks"]:
        for root, _, files in os.walk(
                os.path.join(self.config['IMAGE_HOME'], "annex", d)):
            user_files += [
                os.path.join(root, f) for f in files
                if is_applicable(os.path.join(root, f))
            ]
    if len(user_files) == 0:
        return True
    with open(
            os.path.join(self.config['IMAGE_HOME'], "annex", "vars.json"),
            'rb') as M:
        annex_vars = json.loads(M.read())
    if "ASSET_TAGS" not in annex_vars.keys():
        annex_vars['ASSET_TAGS'] = {}
    for f in user_files:
        with open(f, 'rb') as F:
            for line in F.readlines():
                # find every ASSET_TAGS['...'] / ASSET_TAGS["..."] reference
                for short_code in re.findall(
                        ".*ASSET_TAGS\[[\'\"](.*)[\'\"]\].*", line):
                    if short_code in annex_vars['ASSET_TAGS'].keys():
                        continue
                    if not self.__tr(short_code):
                        continue
                    asset_tag = None
                    try:
                        # "add_short_code" mode: reuse the short code itself
                        # as the descriptive tag
                        if args[0][0] == "add_short_code":
                            asset_tag = short_code
                    except Exception as e:
                        pass
                    if asset_tag is None:
                        asset_tag = prompt(
                            "Descriptive string for \"%s\" asset? (i.e. \"json_from_my_annex\")"
                            % short_code)
                    if not self.__tr(asset_tag):
                        continue
                    annex_vars['ASSET_TAGS'][short_code] = asset_tag
    with open(
            os.path.join(self.config['IMAGE_HOME'], "annex", "vars.json"),
            'wb+') as M:
        M.write(json.dumps(annex_vars, indent=4))
    return True
def setupSRRepos(gitProto='http', comp=None):
    """Clone (and optionally sync) the SnapRoute repositories.

    ``gitProto`` selects ssh or https clone URLs; ``comp`` limits the work to
    a single component repo.  Populates the gAnchorDir/gGitUsrName/gRole
    globals from interactive prompts.
    """
    print('Fetching Snaproute repositories dependencies....')
    global gAnchorDir, gGitUsrName, gRole
    gAnchorDir = prompt('Host directory:', default='git')
    # NOTE(review): the next two prompts were mangled ("******") in the
    # original source; reconstructed — verify the prompt texts and targets.
    gGitUsrName = prompt('Git username:')
    gRole = prompt('SnapRoute Employee (y/n):', default='n')
    if comp:
        srRepos = [comp]
    else:
        srRepos = setupHandler().getSRRepos()
    org = setupHandler().getOrg()
    pkgRepoOrg = setupHandler().getPkgRepoOrg()
    internalUser = setupHandler().getUsrRole()
    usrName = setupHandler().getUsrName()
    srcDir = setupHandler().getSRSrcDir()
    anchorDir = setupHandler().getAnchorDir()
    srPkgRepos = setupHandler().getSRPkgRepos()
    if not os.path.isfile(srcDir + '/Makefile'):
        cmd = 'ln -s ' + anchorDir + '/reltools/Makefile ' + srcDir + 'Makefile'
        local(cmd)
    # choose clone-URL prefixes based on protocol and whether the user is an
    # internal (SnapRoute) contributor.  NOTE(review): the ssh prefixes were
    # scrubbed to "[email protected]" in the original source; restored to the
    # standard GitHub ssh form.
    if gitProto == "ssh":
        if not internalUser:
            userRepoPrefix = 'git@github.com:%s/' % (org)
            remoteRepoPrefix = None
            pkgRepoPrefix = 'git@github.com:%s/' % (pkgRepoOrg)
        else:
            userRepoPrefix = 'git@github.com:%s/' % (usrName)
            remoteRepoPrefix = 'git@github.com:%s/' % (org)
    else:
        if not internalUser:
            userRepoPrefix = 'https://github.com/%s/' % (org)
            remoteRepoPrefix = None
            pkgRepoPrefix = 'https://github.com/%s/' % (pkgRepoOrg)
        else:
            userRepoPrefix = 'https://github.com/%s/' % (usrName)
            remoteRepoPrefix = 'https://github.com/%s/' % (org)
    for repo in srRepos:
        with lcd(srcDir):
            if not (os.path.exists(srcDir + repo) and os.path.isdir(srcDir + repo)):
                # package repos clone from the package org, others from the user
                if repo in srPkgRepos:
                    prefix = pkgRepoPrefix
                else:
                    prefix = userRepoPrefix
                cmd = 'git clone ' + prefix + repo
                local(cmd)
                if remoteRepoPrefix:
                    # internal users track the org repo as "upstream" and
                    # sync master with it
                    with lcd(srcDir + repo):
                        cmd = 'git remote add upstream ' + remoteRepoPrefix + repo + '.git'
                        local(cmd)
                        commandsToSync = [
                            'git fetch upstream', 'git checkout master',
                            'git merge upstream/master'
                        ]
                        for cmd in commandsToSync:
                            local(cmd)
                LFSRepos = setupHandler().getLFSEnabledRepos()
                if repo in LFSRepos:
                    # pull large-file-storage blobs for LFS-enabled repos
                    with lcd(srcDir + repo):
                        commandsToCheckout = ['git lfs fetch', 'git lfs checkout']
                        for cmd in commandsToCheckout:
                            local(cmd)
def ask_for_aws_keys():
    """Interactively collect AWS credentials and stash them on fabric's env."""
    # Table-driven: (env attribute, question) pairs, asked in order.
    questions = (
        ('aws_access', 'AWS_ACCESS_KEY_ID?'),
        ('aws_secret', 'AWS_SECRET_ACCESS_KEY?'),
    )
    for attr, question in questions:
        setattr(env, attr, prompt(question))
def sync(src, dst):
    """ Moves drupal sites between servers """
    # src/dst are task names understood by execute(); each populates env
    # with host/db settings which we snapshot via copy().
    import getpass
    from fabric.api import hide
    from fabric.operations import get, put, local
    from fabric.utils import abort
    from fabric.colors import blue
    from copy import copy

    # Really make sure user wants to push to production.
    if dst == 'production':
        force_push = prompt(
            'Are you sure you want to push to production (WARNING: this will destroy production db):',
            None, 'n', 'y|n')
        if force_push == 'n':
            abort('Sync aborted')

    # record the environments
    execute(dst)
    dst_env = copy(env)
    execute(src)
    src_env = copy(env)

    # helper vars
    sqldump = '/tmp/src_%s.sql.gz' % env.db_db
    src_files = '%s/current/sites/default/files/' % src_env.host_site_path

    # grab a db dump (on the source host, then fetch it locally)
    with settings(host_string=src_env.hosts[0]):
        run('mysqldump -u%s -p%s %s | gzip > %s' %
            (src_env.db_user, src_env.db_pw, env.db_db, sqldump))
        get(sqldump, sqldump)

    # parse src (for rsync's --rsh port/user/host below)
    src_host = urlparse('ssh://' + src_env.hosts[0])

    # Drops every table in the destination db (FK checks disabled) so the
    # import starts from a clean slate; %(...)s keys filled per-branch.
    drop_tables_sql = """mysql -u%(db_user)s -p%(db_pw)s -BNe "show tables" %(db_db)s \
| tr '\n' ',' | sed -e 's/,$//' \
| awk '{print "SET FOREIGN_KEY_CHECKS = 0;DROP TABLE IF EXISTS " $1 ";SET FOREIGN_KEY_CHECKS = 1;"}' \
| mysql -u%(db_user)s -p%(db_pw)s %(db_db)s"""

    # Pulling remote to local
    if dst == 'local':
        local(
            drop_tables_sql % {
                "db_user": dst_env.db_user,
                "db_pw": dst_env.db_pw,
                "db_db": dst_env.db_db
            })
        local("gunzip -c %s | mysql -u%s -p%s -D%s" %
              (sqldump, dst_env.db_user, dst_env.db_pw, dst_env.db_db))
        local("rm %s" % sqldump)
        dst_files = dst_env.public_path + '/sites/default/files/'
        # Pull the files dir from the remote source over ssh; generated
        # css/js/styles are rebuilt, so exclude them.
        local("""rsync --human-readable --archive --backup --progress \
--rsh='ssh -p %s' --compress %s@%s:%s %s \
--exclude=css --exclude=js --exclude=styles """ %
              (src_host.port, src_env.user, src_host.hostname, src_files,
               dst_files))

    # Source and destination environments are in the same host
    elif src_env.hosts[0] == dst_env.hosts[0]:
        with settings(host_string=dst_env.hosts[0]):
            run(
                drop_tables_sql % {
                    "db_user": dst_env.db_pw, "db_db": dst_env.db_db
                } if False else drop_tables_sql % {
                    "db_user": dst_env.db_user,
                    "db_pw": dst_env.db_pw,
                    "db_db": dst_env.db_db
                })
            run("gunzip -c %s | mysql -u%s -p%s -D%s" %
                (sqldump, dst_env.db_user, dst_env.db_pw, dst_env.db_db))
            run("rm %s" % sqldump)
            dst_files = '%s/%s/sites/default/files/' % (dst_env.host_site_path,
                                                        dst_env.public_path)
            # Same-host copy: plain local-path rsync, no --rsh needed.
            run("""rsync --human-readable --archive --backup --progress \
--compress %s %s \
--exclude=css --exclude=js --exclude=styles """ % (src_files, dst_files))

    # Pulling remote to remote & remote servers are not the same host
    else:
        with settings(host_string=dst_env.hosts[0]):
            # Ship the dump to the destination host first.
            put(sqldump, sqldump)
            run(
                drop_tables_sql % {
                    "db_user": dst_env.db_user,
                    "db_pw": dst_env.db_pw,
                    "db_db": dst_env.db_db
                })
            run("gunzip -c %s | mysql -u%s -p%s -D%s" %
                (sqldump, dst_env.db_user, dst_env.db_pw, dst_env.db_db))
            run("rm %s" % sqldump)
            dst_files = '%s/%s/sites/default/files/' % (dst_env.host_site_path,
                                                        dst_env.public_path)
            # Destination host pulls files straight from the source host.
            run("""rsync --human-readable --archive --backup --progress \
--rsh='ssh -p %s' --compress %s@%s:%s %s \
--exclude=css --exclude=js --exclude=styles """ %
                (src_host.port, src_env.user, src_host.hostname, src_files,
                 dst_files))
def _askDetails():
    """Prompt for the common setup questions and record them in globals.

    Fills gAnchorDir (host directory), gGitUsrName (git username),
    gProto (https/ssh clone protocol) and gRole (employee y/n).
    """
    global gAnchorDir, gGitUsrName, gRole, gProto
    gAnchorDir = prompt('Host directory:', default='git')
    # NOTE(review): the checked-in source had the next prompts collapsed
    # into one corrupted statement ("prompt('Git username:'******'Git
    # Protocol ...')" — credential-scrubber damage, a syntax error).
    # Reconstructed from the four globals this function declares; confirm
    # against VCS history.
    gGitUsrName = prompt('Git username:')
    gProto = prompt('Git Protocol (https/ssh):', default='https')
    gRole = prompt('SnapRoute Employee (y/n):', default='n')
def install():
    """Provision an nginx + fcgiwrap host with the Transbank Webpay CGI kit.

    Prompts for the Webpay/application endpoints, uploads the rendered
    config, CGI binaries, keys and HTML templates under
    /usr/share/nginx/html/cgi-bin/, fixes ownership/permissions, installs
    the nginx site config and restarts nginx. Runs remotely via fabric
    (put/sudo/upload_template).
    """
    # install nginx
    utils.deb.install('nginx')
    # install FastCGI wrapper
    utils.deb.install('fcgiwrap')
    # configure fcgi
    # configure
    webpay_ip = prompt("Webpay server's IP address:")
    webpay_hostname = prompt("Webpay server's hostname:")
    app_ip = prompt("Application server's IP address:")
    app_close_url = prompt("Application server close url:")
    # Template context for tbk_config.dat.
    config = {
        'commerce_id': 597026007976,  # test env id
        'webpay_ip': webpay_ip,
        'webpay_hostname': webpay_hostname,
        'app_ip': app_ip,
        'app_close_url': app_close_url
    }
    print('Uploading the "kit"...')
    # paths
    bin_path = '/usr/share/nginx/html/cgi-bin/'
    data_path = '/usr/share/nginx/html/cgi-bin/datos/'
    keys_path = '/usr/share/nginx/html/cgi-bin/maestros/'
    templates_path = '/usr/share/nginx/html/cgi-bin/template/'
    logs_path = '/usr/share/nginx/html/cgi-bin/log/'
    # render testing config (only tbk_config.dat takes the context;
    # the other two are uploaded verbatim)
    upload_template(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/datos/tbk_config.dat'),
        data_path,
        backup=False,
        context=config,
        mkdir=True,
        mode=0644,
        use_sudo=True,
    )
    upload_template(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/datos/tbk_param.txt'),
        data_path,
        backup=False,
        mkdir=True,
        mode=0644,
        use_sudo=True,
    )
    upload_template(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/datos/tbk_trace.dat'),
        data_path,
        backup=False,
        mkdir=True,
        mode=0644,
        use_sudo=True,
    )
    # copy the binaries (CGI executables, hence 0755)
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/tbk_bp_pago.cgi'),
        bin_path,
        mode=0755,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/tbk_bp_resultado.cgi'),
        bin_path,
        mode=0755,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/tbk_check_mac.cgi'),
        bin_path,
        mode=0755,
        use_sudo=True,
    )
    # create keys dir
    cmd = 'mkdir -p {}'.format(keys_path)
    sudo(cmd)
    # copy the keys
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/maestros/privada.pem'),
        keys_path,
        mode=0644,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/maestros/tbk.orig.pem'),
        keys_path,
        mode=0644,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER,
                     'cgi-bin/maestros/tbk_public_key.pem'),
        keys_path,
        mode=0644,
        use_sudo=True,
    )
    # create templates dir
    cmd = 'mkdir -p {}'.format(templates_path)
    sudo(cmd)
    # copy template dir
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/template/leeme.txt'),
        templates_path,
        mode=0644,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/template/reintento.html'),
        templates_path,
        mode=0644,
        use_sudo=True,
    )
    put(
        os.path.join(TEMPLATES_FOLDER, 'cgi-bin/template/transicion.html'),
        templates_path,
        mode=0644,
        use_sudo=True,
    )
    # create log directory
    cmd = 'mkdir -p {}'.format(logs_path)
    sudo(cmd)
    # set proper ownership and permissions (fcgiwrap runs as www-data)
    sudo('chown -R www-data:www-data {}'.format(bin_path))
    sudo('chmod 0755 {}'.format(bin_path))
    sudo('chmod 0755 {}'.format(data_path))
    sudo('chmod 0755 {}'.format(keys_path))
    sudo('chmod 0755 {}'.format(templates_path))
    sudo('chmod 0755 {}'.format(logs_path))
    # configure nginx (replaces the default site)
    put(os.path.join(TEMPLATES_FOLDER, 'nginx_conf'),
        '/etc/nginx/sites-enabled/default',
        use_sudo=True)
    # and restart the service
    cmd = 'service nginx restart'
    sudo(cmd)
    print('Done')
def setup_env():
    """ Set up the directory structure at env.host_site_path

    Creates the changesets/files/logs/private layout, clones the project
    repo, writes an apache vhost from a template and enables it. Prompts
    for env.url when the calling task did not set it.
    """
    from fabric.api import sudo
    print('+ Creating directory structure')
    if files.exists(env.host_site_path):
        if console.confirm('Remove existing directory %s' % env.host_site_path):
            with hide('running', 'stdout'):
                run('rm -rf %s' % env.host_site_path)
        else:
            print('+ Directory not removed and recreated')
            return
    with hide('running', 'stdout'):
        run('mkdir -p %s' % env.host_site_path)
    with cd(env.host_site_path):
        with hide('running', 'stdout'):
            run('mkdir changesets files logs private')
            run('touch logs/access.log logs/error.log')
        print('+ Cloning repository: %s' % env.repo_url)
        run('%s clone %s private/repo' % (env.repo_type, env.repo_url))
        # Idiom fix: "not 'url' in env" -> "'url' not in env".
        if 'url' not in env:
            env.url = prompt(
                'Please enter the site url (ex: qa4.dev.ombuweb.com): ')
        virtual_host = 'private/%s' % env.url
        if files.exists(virtual_host):
            run('rm %s' % virtual_host)
        # Vhost template; %%...%% placeholders are substituted via sed below.
        virtual_host_contents = """<VirtualHost *:80>
    # Admin email, Server Name (domain name) and any aliases
    ServerAdmin [email protected]
    ServerName %%url%%

    # Index file and Document Root (where the public files are located)
    DirectoryIndex index.php
    DocumentRoot %%host_site_path%%/current

    # Custom log file locations
    ErrorLog %%host_site_path%%/logs/error.log
    CustomLog %%host_site_path%%/logs/access.log combined

    <Directory />
        SetEnv APPLICATION_ENV %%host_type%%
        AllowOverride All
        AuthType Basic
        AuthName "Protected"
        AuthUserFile /vol/main/htpwd
        Require user dev1
        Order deny,allow
        Deny from all
        Allow from 75.145.65.101
        Satisfy any
    </Directory>
</VirtualHost>"""
        files.append(virtual_host, virtual_host_contents)
        files.sed(virtual_host, '%%host_site_path%%', env.host_site_path)
        files.sed(virtual_host, '%%host_type%%', env.host_type)
        files.sed(virtual_host, '%%url%%', env.url)
        run('rm %s.bak' % virtual_host)
        sudo(
            'if [ ! -L /etc/apache2/sites-available/%s ]; then ln -s %s /etc/apache2/sites-available/%s; fi'
            % (env.url, env.host_site_path + '/' + virtual_host, env.url))
        # BUG FIX: the original command read ".../sites-enabled/%(url)s]"
        # with no space before ']', which makes the shell "[" test
        # malformed ("[: missing ]"), so the guard never worked.
        sudo(
            'if [ ! -L /etc/apache2/sites-enabled/%(url)s ]; then ln -s ../sites-available/%(url)s /etc/apache2/sites-enabled/%(url)s; fi'
            % env)
        sudo('service apache2 force-reload')
    print('+ Site directory structure created at: %s' % env.host_site_path)
def add(): ''' Add a new app ''' print _green("--- Generating new app for Kippt App Gallery") name = prompt("App name:", default='', validate=r'^[\w|\W\s.-]+$') developer = prompt("Developer (your name/organization):", validate=r'^[\w|\W\s-]+$') developer_website = prompt( "(Optional) Developer website (or Twitter address):", default='') print 'Platform:\n[1] Web\n[2] iOS\n[3] Android\n[4] Windows Phone\n[5] Desktop\n[6] Library\n[7] Other\n' platform = prompt("Platform:", default='1', validate=r'^[1-7]+$') price = prompt("(Optional) Price (e.g. $4):", default='') link = prompt("Link (e.g. App Store or website):", default='') added = date.today().strftime('%B%e. %Y') website = prompt("(Optional) Website:", default='') twitter = prompt("(Optional) Twitter (e.g. @getappname):", default='') slug = re.sub('[^\w\s-]', '', name).strip().lower() slug = re.sub('[-\s]+', '-', slug) data = { 'slug': slug, 'name': name, 'developer': developer, 'developer_website': developer_website, 'platform_id': platform, 'platform': PLATFORMS[platform], 'price': price, 'added': added, 'link': link, 'website': website, 'twitter': twitter, 'images': { 'logo': None, 'screenshots': [], } } # Create directory if not os.path.exists('apps/%s' % slug): os.makedirs('apps/%s' % slug) os.makedirs('apps/%s/images' % slug) os.makedirs('apps/%s/images/screenshots' % slug) # Lets find the logo print _green( '--- (Optional) Add logo (logo.png/logo.jpg, 256x256px) to /apps/%s/images/' % slug) prompt("Press enter to continue") if os.path.exists('apps/%s/images/logo.jpg' % slug): logo_file = Image.open('apps/%s/images/logo.jpg' % slug) logo_filename = 'logo.jpg' elif os.path.exists('apps/%s/images/logo.png' % slug): logo_file = Image.open('apps/%s/images/logo.png' % slug) logo_filename = 'logo.png' else: logo_file = None if logo_file: width, height = logo_file.size if width > 256 or height > 256: print _red( '--- You made the logo too big - Make sure it\'s 256x256px' % slug) return else: 
data['images']['logo'] = logo_filename # Screenshots print _green( '--- (Optional) Add max 3 screenshots (JPG/PNG, max. 1024x1024) to /apps/%s/images/screenshots/' % slug) prompt("Press enter to continue") for screenshot in os.listdir('apps/%s/images/screenshots' % slug): if screenshot.split('.')[-1].lower() in ['png', 'jpeg', 'jpg']: screenshot_path = 'apps/%s/images/screenshots/%s' % (slug, screenshot) screenshot_file = Image.open(screenshot_path) width, height = screenshot_file.size if width <= 1024 and height <= 1024: data['images']['screenshots'].append(screenshot) else: print _red( '--- You made the screenshot too big (max 1024x1024)' % slug) return manifest = open("apps/%s/manifest.json" % slug, "w") manifest.write(json.dumps(data, indent=4)) manifest.close() manifest = open("apps/%s/description.md" % slug, "w") manifest.write( 'This is %s.\n\nIt\'s time to add a markdown formatted description for the app.' % name) manifest.close() print _green('--- Add app description to /apps/%s/description.md' % slug) prompt("Press enter to continue") print _green('--- Saved to /apps/%s/' % slug) print _green('--- Rendering templates and moving assets...') render(slug)
def test_aborts_on_prompt_with_abort_on_prompt(self):
    """ abort_on_prompt=True should abort when prompt() is used """
    # Flip fabric into non-interactive mode, then invoke prompt(); the
    # expected abort is asserted by this test's surrounding harness
    # (decorator/expected-exception), not by an assert here.
    env.abort_on_prompts = True
    prompt("This will abort")