def add_reverbrain_repos():
    """Register the Reverbrain apt repositories and refresh the package index.

    Appends the trusty repository lines to a dedicated sources.list.d file,
    imports the signing key via apt-key, then runs ``apt-get update``.
    """
    # there is no docker plugin package in trusty repos, hmm...
    # ok, let's try trusty. hope that docker plugin is integrated into libcocaine-core2
    repo_lines = """\
deb http://repo.reverbrain.com/trusty/ current/amd64/
deb http://repo.reverbrain.com/trusty/ current/all/
"""
    with open("/etc/apt/sources.list.d/reverbrain.list", "a") as sources:
        sources.write(repo_lines)
    # Pipe the downloaded GPG key straight into `apt-key add -`.
    sh.apt_key(sh.curl("http://repo.reverbrain.com/REVERBRAIN.GPG"), "add", "-")
    sh.apt_get("-y", "update")
def get(self):
    """Install rdesktop and attempt an RDP connection, then render the page.

    The shell commands are best-effort: any failure is swallowed and the
    page always reports success — the real outcome is checked through the
    monitoring tool, as the rendered message says.
    """
    mensagem = "Comando executado com sucesso. Verifique a ferramenta de monitoração."
    status = "success"
    try:
        # Install the RDP client, then open a session against the target host.
        sh.apt_get("install", "rdesktop", y=True)
        sh.rdesktop("172.20.10.13", u="Administrador", p="Troca123", a="16", k="none")
    except Exception:
        # Deliberately ignored; see docstring.
        pass
    self.render(
        'lateral.html',
        mensagem=mensagem,
        status=status,
        page="fase-lateral",
    )
def worker(packages):
    '''Executes sudo apt-get purge package_names'''
    # Summarize what is about to happen before touching the system.
    logger.info('A total number of %s packages will be purged.' % len(packages))
    #logger.debug('These are:\n %s \n\n' % ' '.join(packages))
    # The package list is printed directly (not through the logger) so the
    # text is emitted without log formatting around it.
    logger.debug('These are:')
    print(' '.join(packages))
    logger.info('The rest will be kept because are needed as dependencies '
                'for packages that you want to keep.')
    with sh.sudo:
        # NOTE: the user executing the script must have the NOPASSWD option
        # set for whatever command that user is running, otherwise sudo will
        # hang. Workaround: run the script that contains the sh.sudo command
        # with sudo and enter the password there.
        try:
            if arguments['--test']:
                logger.info("Testing purging...")
                # --simulate: dry run; merge stderr into stdout for the log.
                sh.apt_get("--simulate", "--ignore-missing", "purge",
                           packages, _err_to_out=True,
                           _out=outputter)  # utils.Tee(file_handler.stream))
            else:
                logger.info("Purging...")
                # --ignore-missing: if package names have changed since they
                # were installed, apt-get would otherwise raise
                # "Can't locate package ...".
                # _out=outputter lets the underlying command's output remain
                # visible (alternatively _out=sys.stdout, or
                # _out=utils.Tee(file_handler.stream) to tee to console+file).
                sh.apt_get("--assume-yes", "--ignore-missing", "purge",
                           packages, _out=outputter)
        except sh.ErrorReturnCode as e:
            logger.critical(e)
def execute(self):
    """Ensure ``self.package`` is installed, installing it via apt-get if absent.

    Returns:
        True when the package is present (or was just installed),
        False when the installation attempt failed.
    """
    try:
        # `dpkg -l <pkg>` exits with status 1 when the package is unknown.
        sh.dpkg("-l", self.package)
        return True
    except sh.ErrorReturnCode_1:
        pass
    try:
        with Authentication():
            sh.apt_get("install", "-y", self.package)
    except sh.ErrorReturnCode as err:
        # Indent the captured stderr so it reads as one block in the log.
        indented = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error(
            "Error with `apt-get install %s`: %s", self.package, indented
        )
        return False
    return True
def main():
    """Provision pip3, flask/rsa and supervisor, optionally deploying Adence.

    Installs any missing tooling through apt-get, then offers to copy the
    Adence tree to /root and register it as a supervisor-managed program.
    """
    # --- pip3 -------------------------------------------------------------
    if not sh.which('pip3'):
        print('installing pip3')
        sh.apt_get("install", "python3-pip", "-y")
        if not sh.which('pip3'):
            print('pip3 install failed.')
            return
    print('pip3 installed')

    # --- python packages --------------------------------------------------
    print('installing python package')
    print('flask.')
    sh.pip3("install", "flask")
    print('rsa.')
    sh.pip3("install", "rsa")

    # --- supervisor -------------------------------------------------------
    if not sh.which('supervisorctl'):
        print('installing supervisor')
        sh.apt_get("install", "supervisor", "-y")
        if not sh.which('supervisorctl'):
            print('supervisor install failed')
            return
    print('supervisor installed')

    # --- optional deployment ----------------------------------------------
    ans = input('Do you want to copy files to /root? [y/N]')
    if ans in ('y', 'Y'):
        print('copying files to /root/Adence')
        sh.cp('../Adence', '-R', '/root')
        print('config supervisor')
        supervisor_conf = r'''[program:Adence]
command=python3 /root/Adence/main.py
autostart=true
autorestart=true
stderr_logfile=/var/log/Adence.err.log
stdout_logfile=/var/log/Adence.out.log
'''
        with open('/etc/supervisor/conf.d/Adence.conf', 'w') as conf_file:
            conf_file.write(supervisor_conf)
        sh.service('supervisor', 'restart')
        print('done. you can visit http://localhost:9000 now.')
    else:
        print('environment settled.you need to run the main.py manually')
def create_base(folder): """ Create multisite Plone hosting infrastructure on a server.. Host sites at /srv/plone or chosen cache_folder Each folder has a file called buildout.cfg which is the production buildout file for this site. This might not be a real file, but a symlink to a version controlled file under /srv/plone/xxx/src/yoursitecustomization.policy/production.cfg. Log rotate is performed using a global UNIX log rotate script: http://opensourcehacker.com/2012/08/30/autodiscovering-log-files-for-logrotate/ :param folder: Base installation folder for all the sites e.g. /srv/plone """ from sh import apt_get with sudo: # Return software we are going to need in any case # Assumes Ubuntu / Debian # More info: https://github.com/miohtama/ztanesh if (not which("zsh")) or (not which("git")) or (not which("gcc")): # Which returs zero on success print "Installing OS packages" apt_get("update") apt_get("install", "-y", *PACKAGES) # Create base folder if not os.path.exists(folder): print "Creating installation base %s" % folder install(folder, "-d") # Create nightly restart cron job if os.path.exists("/etc/cron.d"): print "(Re)setting all sites nightly restart cron job" echo(CRON_TEMPLATE, _out=CRON_JOB) create_python_env(folder)
def update_software():
    """Bring the CloudCompose stack's tooling up to date.

    Refreshes the apt package index, installs docker non-interactively,
    and upgrades docker-compose through pip.
    """
    logger('Updating CloudCompose stack...')
    apt = sh.apt_get
    apt('update')
    apt('-y', 'install', 'docker')
    # -U upgrades docker-compose if an older version is already present.
    sh.pip('install', '-U', 'docker-compose')
def install_nginx(): if not sh.which("nginx"): print "nginx not exist, will install" sh.apt_get("install", "nginx", "-y") else: print "nginx has installed"