def deploy_files(self, cr, uid, vals, context=None):
    """Deploy the application version files for a service.

    Uploads and extracts the version archive on the host server (only if
    not already present), then copies or symlinks the files inside the
    container, and finally saves/updates every base running on the service.

    :param vals: dict of precomputed values (paths, hostnames, flags).
    """
    # Bug fix: the old `context={}` default is a single shared dict that the
    # update below mutates, leaking 'saas-*' keys between unrelated calls.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    base_obj = self.pool.get('saas.base')
    self.purge_files(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    # Upload/extract the archive on the host only when it is missing.
    if not execute.exist(sftp, vals['app_version_full_hostpath']):
        execute.execute(ssh, ['mkdir', '-p', vals['app_version_full_hostpath']], context)
        sftp.put(vals['app_version_full_archivepath_targz'], vals['app_version_full_hostpath'] + '.tar.gz')
        execute.execute(ssh, ['tar', '-xf', vals['app_version_full_hostpath'] + '.tar.gz', '-C', vals['app_version_full_hostpath']], context)
        execute.execute(ssh, ['rm', vals['app_full_hostpath'] + '/' + vals['app_version_name'] + '.tar.gz'], context)
    ssh.close()
    sftp.close()
    ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
    if 'files_from_service' in context:
        # Copy files over from another service when explicitly requested.
        execute.execute(ssh, ['cp', '-R', vals['apptype_localpath_services'] + '/' + context['files_from_service'] + '/files', vals['service_full_localpath_files']], context)
    elif vals['service_custom_version'] or not vals['apptype_symlink']:
        execute.execute(ssh, ['cp', '-R', vals['app_version_full_localpath'], vals['service_full_localpath_files']], context)
    else:
        # Default: symlink to the shared version directory instead of copying.
        execute.execute(ssh, ['ln', '-s', vals['app_version_full_localpath'], vals['service_full_localpath_files']], context)
    service = self.browse(cr, uid, vals['service_id'], context=context)
    for base in service.base_ids:
        base_obj.save(cr, uid, [base.id], context=context)
        base_vals = base_obj.get_vals(cr, uid, base.id, context=context)
        base_obj.update_base(cr, uid, base_vals, context=context)
    ssh.close()
    sftp.close()
def purge(self, cr, uid, vals, context=None):
    """Remove a service: its files, its local path and its database user.

    For postgres the user is dropped and the matching ~/.pgpass entry is
    removed; for mysql the user is dropped via the root account.
    """
    # Bug fix: replace the shared mutable default `context={}` (it is
    # mutated by the update below and leaks between calls).
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge_files(cr, uid, vals, context=context)
    self.purge_pre_service(cr, uid, vals, context)
    ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
    execute.execute(ssh, ['rm', '-rf', vals['service_full_localpath']], context)
    ssh.close()
    sftp.close()
    if vals['database_type'] != 'mysql':
        ssh, sftp = execute.connect(vals['database_fullname'], username='******', context=context)
        execute.execute(ssh, ['psql', '-c', '"DROP USER ' + vals['service_db_user'] + ';"'], context)
        ssh.close()
        sftp.close()
        # Also remove the credentials line from the container's .pgpass.
        ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['sed', '-i', '"/:*:' + vals['service_db_user'] + ':/d" ~/.pgpass'], context)
        ssh.close()
        sftp.close()
    else:
        ssh, sftp = execute.connect(vals['database_fullname'], context=context)
        execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"drop user " + vals['service_db_user'] + ";\""], context)
        ssh.close()
        sftp.close()
    return
def purge_db(self, cr, uid, vals, context=None):
    """Drop every database of the base, on postgres or mysql.

    On postgres, connections are forcibly terminated (datallowconn +
    pg_terminate_backend) before dropdb so the drop cannot be blocked.
    """
    # Bug fix: calling this method without an explicit context crashed
    # with `AttributeError: 'NoneType' object has no attribute 'update'`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    for key, database in vals['base_databases'].iteritems():
        if vals['database_type'] != 'mysql':
            ssh, sftp = execute.connect(vals['database_fullname'], username='******', context=context)
            execute.execute(ssh, [
                'psql', '-c',
                '"update pg_database set datallowconn = \'false\' where datname = \'' + database + '\'; SELECT pg_terminate_backend(procpid) FROM pg_stat_activity WHERE datname = \'' + database + '\';"'
            ], context)
            execute.execute(ssh, ['dropdb', database], context)
            ssh.close()
            sftp.close()
        else:
            ssh, sftp = execute.connect(vals['database_fullname'], context=context)
            execute.execute(ssh, [
                "mysql -u root -p'" + vals['database_root_password'] + "' -se \"drop database " + database + ";\""
            ], context)
            ssh.close()
            sftp.close()
    return
def purge_key(self, cr, uid, vals, context=None):
    """Remove the ssh config entry and key pair for a container, locally
    and on the host server (/opt/keys/<container>/authorized_keys)."""
    # Bug fix: `context={}` default was a shared mutable dict.
    if context is None:
        context = {}
    ssh, sftp = execute.connect('localhost', 22, 'saas-conductor', context)
    # Delete the whole "Host <name> ... #END <name>" section from ~/.ssh/config.
    execute.execute(ssh, ['sed', '-i', "'/Host " + vals['container_fullname'] + "/,/END " + vals['container_fullname'] + "/d'", vals['config_home_directory'] + '/.ssh/config'], context)
    ssh.close()
    sftp.close()
    execute.execute_local(['rm', '-rf', vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname']], context)
    execute.execute_local(['rm', '-rf', vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub'], context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['rm', '-rf', '/opt/keys/' + vals['container_fullname'] + '/authorized_keys'], context)
    ssh.close()
    sftp.close()
def restore_action(self, cr, uid, vals, context=None):
    """Restore a save onto its container.

    Rebuilds the save into /tmp on the backup container (from the simple
    copy or from bup), ships the archive to the target container via scp,
    then unpacks it either over the container volumes or into
    /base-backup/<repo> for a base restoration.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    # Bug fix: `directory` was only assigned in commented-out code, so the
    # first use below raised NameError. The assignment is now live.
    directory = '/tmp/restore-' + vals['saverepo_name']
    ssh, sftp = execute.connect(vals['backup_fullname'], username='******', context=context)
    # Ship the conductor's ssh config and container key pair so the backup
    # container can scp directly to the target container.
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/config', '/home/backup/.ssh/config', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/home/backup/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '/home/backup/.ssh/keys/' + vals['container_fullname'], context)
    execute.execute(ssh, ['chmod', '-R', '700', '/home/backup/.ssh'], context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    execute.execute(ssh, ['mkdir', '-p', directory], context)
    if vals['config_restore_method'] == 'simple':
        execute.execute(ssh, ['cp', '-R', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name'] + '/*', directory], context)
    if vals['config_restore_method'] == 'bup':
        execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup restore -C ' + directory + ' ' + vals['saverepo_name'] + '/' + vals['save_now_bup']], context)
        # bup restores into a dated subdirectory; flatten it.
        execute.execute(ssh, ['mv', directory + '/' + vals['save_now_bup'] + '/*', directory], context)
        execute.execute(ssh, ['rm -rf', directory + '/' + vals['save_now_bup']], context)
    execute.execute(ssh, ['tar', 'cf', directory + '.tar.gz', '-C', directory, '.'], context)
    execute.execute(ssh, ['scp', '-o StrictHostKeychecking=no', directory + '.tar.gz', vals['container_fullname'] + ':' + directory + '.tar.gz'], context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    # Never leave private keys lying around on the backup container.
    execute.execute(ssh, ['rm', '/home/backup/.ssh/keys/*'], context)
    ssh.close()
    sftp.close()
    ssh, sftp = execute.connect(vals['container_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', directory], context)
    execute.execute(ssh, ['mkdir', directory], context)
    execute.execute(ssh, ['tar', '-xf', directory + '.tar.gz', '-C', directory], context)
    if vals['saverepo_type'] == 'container':
        # Wipe the current volume contents before restoring over them.
        for volume in vals['save_container_volumes'].split(','):
            execute.execute(ssh, ['rm', '-rf', volume + '/*'], context)
    else:
        execute.execute(ssh, ['rm', '-rf', '/base-backup/' + vals['saverepo_name']], context)
    execute.execute(ssh, ['rm', '-rf', directory + '/backup-date'], context)
    if vals['saverepo_type'] == 'container':
        execute.execute(ssh, ['cp', '-R', directory + '/*', '/'], context)
    else:
        execute.execute(ssh, ['cp', '-R', directory, '/base-backup/' + vals['saverepo_name']], context)
        execute.execute(ssh, ['chmod', '-R', '777', '/base-backup/' + vals['saverepo_name']], context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    ssh.close()
    sftp.close()
def deploy_key(self, cr, uid, vals, context=None):
    """Generate and install the ssh key pair for a container.

    Creates a fresh RSA key, writes the matching Host section into the
    conductor's ~/.ssh/config, pushes the public key to the host server's
    /opt/keys/<container>/authorized_keys, and, for backup containers,
    replicates the key/config on every shinken container.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge_key(cr, uid, vals, context=context)
    execute.execute_local(['ssh-keygen', '-t', 'rsa', '-C', '*****@*****.**', '-f', vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '-N', ''], context)
    # Append a "Host <name> ... #END <name>" section (purge_key removes it
    # by matching these exact markers).
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', 'Host ' + vals['container_fullname'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n  HostName ' + vals['server_domain'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n  Port ' + str(vals['container_ssh_port']), context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n  User root', context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n  IdentityFile ~/.ssh/keys/' + vals['container_fullname'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n#END ' + vals['container_fullname'] + '\n', context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['mkdir', '/opt/keys/' + vals['container_fullname']], context)
    sftp.put(vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/opt/keys/' + vals['container_fullname'] + '/authorized_keys')
    ssh.close()
    sftp.close()
    if vals['apptype_name'] == 'backup':
        shinken_ids = self.search(cr, uid, [('application_id.type_id.name', '=', 'shinken')], context=context)
        if not shinken_ids:
            execute.log('The shinken isnt configured in conf, skipping deploying backup keys in shinken', context)
            return
        for shinken in self.browse(cr, uid, shinken_ids, context=context):
            shinken_vals = self.get_vals(cr, uid, shinken.id, context=context)
            ssh, sftp = execute.connect(shinken_vals['container_fullname'], username='******', context=context)
            execute.execute(ssh, ['rm', '-rf', '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '*'], context)
            execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
            execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '/home/shinken/.ssh/keys/' + vals['container_fullname'], context)
            execute.execute(ssh, ['chmod', '-R', '700', '/home/shinken/.ssh'], context)
            execute.execute(ssh, ['sed', '-i', "'/Host " + vals['container_fullname'] + "/,/END " + vals['container_fullname'] + "/d'", '/home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "Host ' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "  Hostname ' + vals['server_domain'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "  Port ' + str(vals['container_ssh_port']) + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "  User backup" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "  IdentityFile ~/.ssh/keys/' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "#END ' + vals['container_fullname'] + '" >> ~/.ssh/config'], context)
            # Bug fix: the per-shinken connections were never closed (leak).
            ssh.close()
            sftp.close()
def deploy(self, cr, uid, vals, context=None):
    """Deploy a base: create its databases, then build or restore it.

    Skips everything when 'base_restoration' is flagged in the context
    (the restore workflow drives the deployment itself).
    """
    # Bug fix: calling without an explicit context crashed on
    # `None.update(...)` because the default was `context=None`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge(cr, uid, vals, context=context)
    if 'base_restoration' in context:
        return
    res = self.deploy_create_database(cr, uid, vals, context)
    if not res:
        # The apptype did not create the databases itself; do it generically.
        for key, database in vals['base_databases'].iteritems():
            if vals['database_type'] != 'mysql':
                ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
                execute.execute(ssh, ['createdb', '-h', vals['database_server'], '-U', vals['service_db_user'], database], context)
                ssh.close()
                sftp.close()
            else:
                ssh, sftp = execute.connect(vals['database_fullname'], context=context)
                execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"create database " + database + ";\""], context)
                execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"grant all on " + database + ".* to '" + vals['service_db_user'] + "';\""], context)
                ssh.close()
                sftp.close()
    execute.log('Database created', context)
    if vals['base_build'] == 'build':
        self.deploy_build(cr, uid, vals, context)
    elif vals['base_build'] == 'restore':
        # Load the application's reference dump shipped with the version.
        if vals['database_type'] != 'mysql':
            ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, ['pg_restore', '-h', vals['bdd_server_domain'], '-U', vals['service_db_user'], '--no-owner', '-Fc', '-d', vals['base_unique_name_'], vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'], context)
            ssh.close()
            sftp.close()
        else:
            ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, ['mysql', '-h', vals['bdd_server_domain'], '-u', vals['service_db_user'], '-p' + vals['bdd_server_mysql_passwd'], vals['base_unique_name_'], '<', vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'], context)
            ssh.close()
            sftp.close()
        self.deploy_post_restore(cr, uid, vals, context)
    if vals['base_build'] != 'none':
        if vals['base_poweruser_name'] and vals['base_poweruser_email'] and vals['apptype_admin_name'] != vals['base_poweruser_name']:
            self.deploy_create_poweruser(cr, uid, vals, context)
        if vals['base_test']:
            self.deploy_test(cr, uid, vals, context)
    self.deploy_post(cr, uid, vals, context)
    # For shinken
    self.save(cr, uid, [vals['base_id']], context=context)
def purge_db(self, cr, uid, vals, context=None):
    """Drop every database of the base (postgres or mysql variant).

    NOTE(review): this duplicates the other `purge_db` in this file —
    consider consolidating into a single helper.
    """
    # Bug fix: calling without an explicit context crashed with
    # `AttributeError` on `None.update(...)`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    for key, database in vals['base_databases'].iteritems():
        if vals['database_type'] != 'mysql':
            ssh, sftp = execute.connect(vals['database_fullname'], username='******', context=context)
            # Block new connections and kill existing ones before dropdb.
            execute.execute(ssh, ['psql', '-c', '"update pg_database set datallowconn = \'false\' where datname = \'' + database + '\'; SELECT pg_terminate_backend(procpid) FROM pg_stat_activity WHERE datname = \'' + database + '\';"'], context)
            execute.execute(ssh, ['dropdb', database], context)
            ssh.close()
            sftp.close()
        else:
            ssh, sftp = execute.connect(vals['database_fullname'], context=context)
            execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"drop database " + database + ";\""], context)
            ssh.close()
            sftp.close()
    return
def save_fsck(self, cr, uid, ids, context=None):
    """Check and repair the bup repository on the backup container,
    garbage-collect it, then record the fsck end time in the settings."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    vals = self.get_vals(cr, uid, context=context)
    ssh, sftp = execute.connect(vals['backup_fullname'], username='******', context=context)
    execute.execute(
        ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup', 'fsck', '-r'],
        context)
    #http://stackoverflow.com/questions/1904860/how-to-remove-unreferenced-blobs-from-my-git-repo
    #https://github.com/zoranzaric/bup/tree/tmp/gc/Documentation
    #https://groups.google.com/forum/#!topic/bup-list/uvPifF_tUVs
    execute.execute(ssh, ['git', 'gc', '--prune=now'], context, path='/opt/backup/bup')
    execute.execute(
        ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup', 'fsck', '-g'],
        context)
    ssh.close()
    sftp.close()
    now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    config = self.pool.get('ir.model.data').get_object(
        cr, uid, 'saas', 'saas_settings')
    self.write(cr, uid, [config.id], {'end_fsck': now}, context=context)
def purge(self, cr, uid, vals, context=None):
    """Delete the 'simple' copy of a save from the backup container."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['backup_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name']], context)
    ssh.close()
    sftp.close()
    return
def deploy_post(self, cr, uid, vals, context):
    """Post-deploy hook for postgres containers: allow the configured
    network in pg_hba.conf and set listen_addresses."""
    super(saas_container, self).deploy_post(cr, uid, vals, context)
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    if vals['apptype_name'] == 'docker':
        ssh, sftp = execute.connect(vals['container_fullname'], context=context)
        execute.execute(ssh, ['echo "host all all ' + vals['container_options']['network']['value'] + ' md5" >> /etc/postgresql/' + vals['app_current_version'] + '/main/pg_hba.conf'], context)
        execute.execute(ssh, ['echo "listen_addresses=\'' + vals['container_options']['listen']['value'] + '\'" >> /etc/postgresql/' + vals['app_current_version'] + '/main/postgresql.conf'], context)
        # Bug fix: the connections were never closed (resource leak).
        ssh.close()
        sftp.close()
def start(self, cr, uid, vals, context=None):
    """(Re)start the docker container on its host server.

    Stops it first so start is idempotent, then waits 3s for services
    inside the container to come up.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.stop(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['docker', 'start', vals['container_name']], context)
    ssh.close()
    sftp.close()
    time.sleep(3)
def purge(self, cr, uid, vals, context=None):
    """Remove a domain's bind zone: drop its section from named.conf,
    delete the zone file and reload bind9."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['dns_fullname'], username='******', context=context)
    # Delete the 'zone "<domain>" ... //END <domain>' section.
    execute.execute(ssh, ['sed', '-i', "'/zone\s\"" + vals['domain_name'] + "\"/,/END\s" + vals['domain_name'] + "/d'", '/etc/bind/named.conf'], context)
    execute.execute(ssh, ['rm', vals['domain_configfile']], context)
    execute.execute(ssh, ['/etc/init.d/bind9', 'reload'], context)
    ssh.close()
    sftp.close()
def purge(self, cr, uid, vals, context=None):
    """Remove a server's shinken config file and reload shinken,
    if a shinken server is configured."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    if 'shinken_server_domain' in vals:
        ssh, sftp = execute.connect(vals['shinken_fullname'], context=context)
        execute.execute(ssh, ['rm', vals['server_shinken_configfile']], context)
        execute.execute(ssh, ['/etc/init.d/shinken', 'reload'], context)
        ssh.close()
        sftp.close()
def stop(self, cr, uid, vals, context=None):
    """Stop the docker container on its host server."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['docker', 'stop', vals['container_name']], context)
    ssh.close()
    sftp.close()
def purge_files(self, cr, uid, vals, context=None):
    """Delete the service's files inside the container, then garbage
    collect the version files on the host if no service uses them."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
    execute.execute(ssh, ['rm', '-rf', vals['service_full_localpath_files']], context)
    ssh.close()
    sftp.close()
    self.check_files(cr, uid, vals, context=context)
def purge(self, cr, uid, vals, context=None):
    """Remove the ssh config section and key for a server from the
    conductor's home directory."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    #TODO we need to launch a direct command, without ssh
    ssh, sftp = execute.connect('localhost', 22, 'saas-conductor', context)
    execute.execute(ssh, ['sed', '-i', "'/Host " + vals['server_domain'] + "/,/END " + vals['server_domain'] + "/d'", vals['config_home_directory'] + '/.ssh/config'], context)
    ssh.close()
    sftp.close()
    execute.execute_local(['rm', '-rf', vals['config_home_directory'] + '/.ssh/keys/' + vals['server_domain']], context)
def check_files(self, cr, uid, vals, context=None):
    """Delete the application version files from the host server when no
    other service on the same server still uses that version."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    service_ids = self.search(cr, uid, [('application_version_id', '=', vals['app_version_id']), ('container_id.server_id', '=', vals['server_id'])], context=context)
    # Ignore the service currently being purged.
    if vals['service_id'] in service_ids:
        service_ids.remove(vals['service_id'])
    if not service_ids:
        ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
        execute.execute(ssh, ['rm', '-rf', vals['app_version_full_hostpath']], context)
        ssh.close()
        sftp.close()
def deploy(self, cr, uid, vals, context=None):
    """Install and activate the shinken config file for a server,
    if a shinken server is configured."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    # Bug fix: the log line used vals['shinken_server_domain'] before the
    # key-presence check below, raising KeyError when the key was absent.
    _logger.info('test %s', vals.get('shinken_server_domain'))
    if 'shinken_server_domain' in vals:
        ssh, sftp = execute.connect(vals['shinken_fullname'], context=context)
        sftp.put(vals['config_conductor_path'] + '/saas/saas_shinken/res/server-shinken.config', vals['server_shinken_configfile'])
        execute.execute(ssh, ['sed', '-i', '"s/NAME/' + vals['server_domain'] + '/g"', vals['server_shinken_configfile']], context)
        execute.execute(ssh, ['/etc/init.d/shinken', 'reload'], context)
        ssh.close()
        sftp.close()
def deploy_post(self, cr, uid, vals, context):
    """Post-deploy hook: install the user-supplied public key as
    authorized_keys2 inside a docker container, when provided."""
    super(saas_container, self).deploy_post(cr, uid, vals, context)
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    if vals['apptype_name'] == 'docker':
        if 'public_key' in vals['container_options']:
            ssh, sftp = execute.connect(vals['container_fullname'], context=context)
            execute.execute(ssh, [
                'echo "' + vals['container_options']['public_key']['value'] + '" > /root/.ssh/authorized_keys2'
            ], context)
            # Bug fix: the connections were never closed (resource leak).
            ssh.close()
            sftp.close()
def deploy(self, cr, uid, vals, context=None):
    """Deploy a service: create its database user (postgres or mysql),
    prepare its local path, deploy its files and restart the container."""
    # Bug fix: calling without an explicit context crashed on
    # `None.update(...)` because the default was `context=None`.
    if context is None:
        context = {}
    container_obj = self.pool.get('saas.container')
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge(cr, uid, vals, context=context)
    execute.log('Creating database user', context=context)
    #SI postgres, create user
    if vals['database_type'] != 'mysql':
        ssh, sftp = execute.connect(vals['database_fullname'], username='******', context=context)
        execute.execute(ssh, ['psql', '-c', '"CREATE USER ' + vals['service_db_user'] + ' WITH PASSWORD \'' + vals['service_db_password'] + '\' CREATEDB;"'], context)
        ssh.close()
        sftp.close()
        # Register the credentials in the container's ~/.pgpass (replace any
        # stale entry first).
        ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['sed', '-i', '"/:*:' + vals['service_db_user'] + ':/d" ~/.pgpass'], context)
        execute.execute(ssh, ['echo "' + vals['database_server'] + ':5432:*:' + vals['service_db_user'] + ':' + vals['service_db_password'] + '" >> ~/.pgpass'], context)
        execute.execute(ssh, ['chmod', '700', '~/.pgpass'], context)
        ssh.close()
        sftp.close()
    else:
        ssh, sftp = execute.connect(vals['database_fullname'], context=context)
        execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"create user '" + vals['service_db_user'] + "' identified by '" + vals['service_db_password'] + "';\""], context)
        ssh.close()
        sftp.close()
    execute.log('Database user created', context)
    ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
    execute.execute(ssh, ['mkdir', '-p', vals['service_full_localpath']], context)
    ssh.close()
    sftp.close()
    self.deploy_files(cr, uid, vals, context=context)
    self.deploy_post_service(cr, uid, vals, context)
    container_obj.start(cr, uid, vals, context=context)
def purge(self, cr, uid, vals, context=None):
    """Remove a domain's bind zone (named.conf section + zone file) and
    reload bind9.

    NOTE(review): duplicates the other dns purge in this file — consider
    consolidating.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['dns_fullname'], username='******', context=context)
    execute.execute(ssh, [
        'sed', '-i',
        "'/zone\s\"" + vals['domain_name'] + "\"/,/END\s" + vals['domain_name'] + "/d'",
        '/etc/bind/named.conf'
    ], context)
    execute.execute(ssh, ['rm', vals['domain_configfile']], context)
    execute.execute(ssh, ['/etc/init.d/bind9', 'reload'], context)
    ssh.close()
    sftp.close()
def save_upload(self, cr, uid, ids, context=None):
    """Archive /opt/backup and upload it to the configured FTP server.

    SECURITY NOTE(review): the FTP password is passed on the ncftp command
    line, which exposes it in the process list of the backup container.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    vals = self.get_vals(cr, uid, context=context)
    ssh, sftp = execute.connect(vals['backup_fullname'], context=context)
    execute.execute(ssh, ['tar', 'czf', '/opt/backup.tar.gz', '-C', '/opt/backup', '.'], context)
    # Commands piped to ncftp's stdin: wipe the remote dir, upload, quit.
    stdin = [
        'rm -rf /*\n',
        'put /opt/backup.tar.gz\n',
        'exit\n'
    ]
    execute.execute(ssh, ['ncftp', '-u', vals['config_ftpuser'], '-p' + vals['config_ftppass'], vals['config_ftpserver']], context, stdin_arg=stdin)
    execute.execute(ssh, ['rm', '/opt/backup.tar.gz'], context)
    ssh.close()
    sftp.close()
def purge(self, cr, uid, vals, context=None):
    """Destroy a container: purge its keys, stop and remove the docker
    container, and delete its key directory on the host."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge_key(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['sudo', 'docker', 'stop', vals['container_name']], context)
    execute.execute(ssh, ['sudo', 'docker', 'rm', vals['container_name']], context)
    execute.execute(ssh, ['rm', '-rf', '/opt/keys/' + vals['container_fullname']], context)
    ssh.close()
    sftp.close()
    return
def save_fsck(self, cr, uid, ids, context=None):
    """Check/repair and garbage-collect the bup repository, then record
    the fsck end time in the saas settings.

    NOTE(review): duplicates the other `save_fsck` in this file.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    vals = self.get_vals(cr, uid, context=context)
    ssh, sftp = execute.connect(vals['backup_fullname'], username='******', context=context)
    execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup', 'fsck', '-r'], context)
    #http://stackoverflow.com/questions/1904860/how-to-remove-unreferenced-blobs-from-my-git-repo
    #https://github.com/zoranzaric/bup/tree/tmp/gc/Documentation
    #https://groups.google.com/forum/#!topic/bup-list/uvPifF_tUVs
    execute.execute(ssh, ['git', 'gc', '--prune=now'], context, path='/opt/backup/bup')
    execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup', 'fsck', '-g'], context)
    ssh.close()
    sftp.close()
    now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    config = self.pool.get('ir.model.data').get_object(cr, uid, 'saas', 'saas_settings')
    self.write(cr, uid, [config.id], {'end_fsck': now}, context=context)
def deploy(self, cr, uid, vals, context=None):
    """Create the bind zone for a domain: install the zone file from the
    template and append the zone section to named.conf, then reload."""
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['dns_fullname'], username='******', context=context)
    sftp.put(vals['config_conductor_path'] + '/saas/saas/res/bind.config', vals['domain_configfile'])
    # Substitute the placeholders in the template zone file.
    execute.execute(ssh, ['sed', '-i', '"s/DOMAIN/' + vals['domain_name'] + '/g"', vals['domain_configfile']], context)
    execute.execute(ssh, ['sed', '-i', '"s/IP/' + vals['dns_server_ip'] + '/g"', vals['domain_configfile']], context)
    # The '//END <domain>' marker lets purge() remove this whole section.
    execute.execute(ssh, ["echo 'zone \"" + vals['domain_name'] + "\" {' >> /etc/bind/named.conf"], context)
    execute.execute(ssh, ['echo "type master;" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ['echo "allow-transfer {213.186.33.199;};" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ["echo 'file \"/etc/bind/db." + vals['domain_name'] + "\";' >> /etc/bind/named.conf"], context)
    execute.execute(ssh, ['echo "notify yes;" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ['echo "};" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ['echo "//END ' + vals['domain_name'] + '" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ['/etc/init.d/bind9', 'reload'], context)
    ssh.close()
    sftp.close()
def save_upload(self, cr, uid, ids, context=None):
    """Archive /opt/backup and upload it to the configured FTP server.

    NOTE(review): duplicates the other `save_upload` in this file.
    SECURITY NOTE(review): the FTP password appears on the ncftp command
    line and is visible in the remote process list.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    vals = self.get_vals(cr, uid, context=context)
    ssh, sftp = execute.connect(vals['backup_fullname'], context=context)
    execute.execute(
        ssh, ['tar', 'czf', '/opt/backup.tar.gz', '-C', '/opt/backup', '.'],
        context)
    # Commands piped to ncftp's stdin: wipe the remote dir, upload, quit.
    stdin = ['rm -rf /*\n', 'put /opt/backup.tar.gz\n', 'exit\n']
    execute.execute(ssh, [
        'ncftp', '-u', vals['config_ftpuser'],
        '-p' + vals['config_ftppass'], vals['config_ftpserver']
    ], context, stdin_arg=stdin)
    execute.execute(ssh, ['rm', '/opt/backup.tar.gz'], context)
    ssh.close()
    sftp.close()
def purge(self, cr, uid, vals, context=None):
    """Destroy a container (stop + rm the docker container, delete keys).

    NOTE(review): duplicates the other container purge in this file.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge_key(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['sudo', 'docker', 'stop', vals['container_name']], context)
    execute.execute(ssh, ['sudo', 'docker', 'rm', vals['container_name']], context)
    execute.execute(
        ssh, ['rm', '-rf', '/opt/keys/' + vals['container_fullname']],
        context)
    ssh.close()
    sftp.close()
    return
def purge_key(self, cr, uid, vals, context=None):
    """Remove a container's ssh config entry (via the sed.sh helper) and
    key pair locally, and its authorized_keys on the host server."""
    # Bug fix: `context={}` default was a shared mutable dict.
    if context is None:
        context = {}
    execute.execute_local([
        vals['config_conductor_path'] + '/saas/saas/shell/sed.sh',
        vals['container_fullname'],
        vals['config_home_directory'] + '/.ssh/config'
    ], context)
    execute.execute_local([
        'rm', '-rf',
        vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname']
    ], context)
    execute.execute_local([
        'rm', '-rf',
        vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub'
    ], context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, [
        'rm', '-rf',
        '/opt/keys/' + vals['container_fullname'] + '/authorized_keys'
    ], context)
    ssh.close()
    sftp.close()
def deploy(self, cr, uid, vals, context=None):
    """Create the bind zone for a domain (template zone file + named.conf
    section) and reload bind9.

    NOTE(review): duplicates the other bind deploy in this file.
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    ssh, sftp = execute.connect(vals['dns_fullname'], username='******', context=context)
    sftp.put(vals['config_conductor_path'] + '/saas/saas/res/bind.config', vals['domain_configfile'])
    execute.execute(ssh, [
        'sed', '-i', '"s/DOMAIN/' + vals['domain_name'] + '/g"',
        vals['domain_configfile']
    ], context)
    execute.execute(ssh, [
        'sed', '-i', '"s/IP/' + vals['dns_server_ip'] + '/g"',
        vals['domain_configfile']
    ], context)
    # The '//END <domain>' marker lets purge() remove this whole section.
    execute.execute(ssh, [
        "echo 'zone \"" + vals['domain_name'] + "\" {' >> /etc/bind/named.conf"
    ], context)
    execute.execute(ssh, ['echo "type master;" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, [
        'echo "allow-transfer {213.186.33.199;};" >> /etc/bind/named.conf'
    ], context)
    execute.execute(ssh, [
        "echo 'file \"/etc/bind/db." + vals['domain_name'] + "\";' >> /etc/bind/named.conf"
    ], context)
    execute.execute(ssh, ['echo "notify yes;" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, ['echo "};" >> /etc/bind/named.conf'], context)
    execute.execute(ssh, [
        'echo "//END ' + vals['domain_name'] + '" >> /etc/bind/named.conf'
    ], context)
    execute.execute(ssh, ['/etc/init.d/bind9', 'reload'], context)
    ssh.close()
    sftp.close()
def deploy(self, cr, uid, vals, context=None):
    """Perform a save of a container or base.

    For a base, first dump its databases into /base-backup/<repo> inside
    the container. Then archive the data in /tmp on the container, pull
    the archive onto the backup container and store it with each
    configured backup method ('simple' copy and/or bup).
    """
    # Bug fix: replace the shared mutable default `context={}`.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    execute.log('Saving ' + vals['save_name'], context)
    execute.log('Comment: ' + vals['save_comment'], context)
    if vals['saverepo_type'] == 'base':
        base_vals = self.pool.get('saas.base').get_vals(cr, uid, vals['save_base_id'], context=context)
        ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['mkdir', '-p', '/base-backup/' + vals['saverepo_name']], context)
        for key, database in base_vals['base_databases'].iteritems():
            if vals['database_type'] != 'mysql':
                execute.execute(ssh, ['pg_dump', '-O', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], database, '>', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
            else:
                execute.execute(ssh, ['mysqldump', '-h', base_vals['database_server'], '-u', base_vals['service_db_user'], '-p' + base_vals['service_db_password'], database, '>', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
        self.deploy_base(cr, uid, base_vals, context=context)
        execute.execute(ssh, ['chmod', '-R', '777', '/base-backup/' + vals['saverepo_name']], context)
        ssh.close()
        sftp.close()
    directory = '/tmp/' + vals['saverepo_name']
    ssh, sftp = execute.connect(vals['container_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    execute.execute(ssh, ['mkdir', directory], context)
    if vals['saverepo_type'] == 'container':
        # Copy each volume, preserving its full path under the staging dir.
        for volume in vals['save_container_volumes'].split(','):
            execute.execute(ssh, ['cp', '-R', '--parents', volume, directory], context)
    else:
        execute.execute(ssh, ['cp', '-R', '/base-backup/' + vals['saverepo_name'] + '/*', directory], context)
    execute.execute(ssh, ['echo "' + vals['now_date'] + '" > ' + directory + '/backup-date'], context)
    execute.execute(ssh, ['tar', 'cf', directory + '.tar.gz', '-C', directory, '.'], context)
    execute.execute(ssh, ['chmod', '-R', '777', directory + '*'], context)
    ssh.close()
    sftp.close()
    ssh, sftp = execute.connect(vals['backup_fullname'], username='******', context=context)
    if vals['saverepo_type'] == 'container':
        name = vals['container_fullname']
    else:
        name = vals['base_unique_name_']
    # Record which repo holds the latest save for this container/base.
    execute.execute(ssh, ['rm', '-rf', '/opt/backup/list/' + name], context)
    execute.execute(ssh, ['mkdir', '-p', '/opt/backup/list/' + name], context)
    execute.execute(ssh, ['echo "' + vals['saverepo_name'] + '" > /opt/backup/list/' + name + '/repo'], context)
    # Ship ssh config and key pair so the backup container can scp from
    # the target container.
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/config', '/home/backup/.ssh/config', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/home/backup/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '/home/backup/.ssh/keys/' + vals['container_fullname'], context)
    execute.execute(ssh, ['chmod', '-R', '700', '/home/backup/.ssh'], context)
    execute.execute(ssh, ['rm', '-rf', directory], context)
    execute.execute(ssh, ['mkdir', directory], context)
    execute.execute(ssh, ['scp', '-o StrictHostKeychecking=no', vals['container_fullname'] + ':' + directory + '.tar.gz', '/tmp/'], context)
    execute.execute(ssh, ['tar', '-xf', directory + '.tar.gz', '-C', directory], context)
    for backup in vals['config_backups']:
        if backup == 'simple':
            execute.execute(ssh, ['mkdir', '-p', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name']], context)
            execute.execute(ssh, ['cp', '-R', directory + '/*', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name']], context)
            execute.execute(ssh, ['rm', '/opt/backup/simple/' + vals['saverepo_name'] + '/latest'], context)
            execute.execute(ssh, ['ln', '-s', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name'], '/opt/backup/simple/' + vals['saverepo_name'] + '/latest'], context)
        if backup == 'bup':
            execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup index ' + directory], context)
            execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup save -n ' + vals['saverepo_name'] + ' -d ' + str(int(vals['save_now_epoch'])) + ' --strip ' + directory], context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    # Never leave private keys lying around on the backup container.
    execute.execute(ssh, ['rm', '/home/backup/.ssh/keys/*'], context)
    ssh.close()
    sftp.close()
    ssh, sftp = execute.connect(vals['container_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    ssh.close()
    sftp.close()
    if vals['saverepo_type'] == 'base':
        # Clean up the temporary database dumps inside the container.
        base_vals = self.pool.get('saas.base').get_vals(cr, uid, vals['save_base_id'], context=context)
        ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['rm', '-rf', '/base-backup/' + vals['saverepo_name']], context)
        ssh.close()
        sftp.close()
    return
def restore(self, cr, uid, ids, context=None):
    """Restore the given saas.save records.

    For a save of repository type ``container`` the target container is
    located (or recreated from the snapshot metadata), optionally upgraded
    to the saved image version, and its volumes restored.  Otherwise the
    save is a ``base`` save: the owning service and base are located (or
    recreated), the databases are recreated and reloaded from the dump
    files found under ``/base-backup/<saverepo_name>``.

    :param ids: list of saas.save ids to restore
    :return: id of the restored container (container save) or of the
             restored base (base save); False when ``ids`` is empty
    :raises osv.except_osv: when the application, image, image versions,
            server or domain referenced by the save no longer exist
    """
    # Guard instead of a shared mutable default argument (``context={}``).
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    container_obj = self.pool.get('saas.container')
    base_obj = self.pool.get('saas.base')
    server_obj = self.pool.get('saas.server')
    domain_obj = self.pool.get('saas.domain')
    application_obj = self.pool.get('saas.application')
    application_version_obj = self.pool.get('saas.application.version')
    image_obj = self.pool.get('saas.image')
    image_version_obj = self.pool.get('saas.image.version')
    service_obj = self.pool.get('saas.service')
    # FIX: initialize so the final ``return res`` cannot raise NameError
    # when ``ids`` is empty.
    res = False
    for save in self.browse(cr, uid, ids, context=context):
        context = self.create_log(cr, uid, save.id, 'restore', context)
        vals = self.get_vals(cr, uid, save.id, context=context)
        # The application and image recorded in the save must still exist.
        app_ids = application_obj.search(cr, uid, [('code', '=', save.container_app)], context=context)
        if not app_ids:
            raise osv.except_osv(_('Error!'), _("Couldn't find application " + save.container_app + ", aborting restoration."))
        img_ids = image_obj.search(cr, uid, [('name', '=', save.container_img)], context=context)
        if not img_ids:
            raise osv.except_osv(_('Error!'), _("Couldn't find image " + save.container_img + ", aborting restoration."))
        img_version_ids = image_version_obj.search(cr, uid, [('name', '=', save.container_img_version)], context=context)
        # upgrade = True
        if not img_version_ids:
            # The exact saved image version is gone: fall back on the
            # image's latest known version.
            execute.log("Warning, couldn't find the image version, using latest", context)
            # We do not want to force the upgrade if we had to use latest
            # upgrade = False
            versions = image_obj.browse(cr, uid, img_ids[0], context=context).version_ids
            if not versions:
                raise osv.except_osv(_('Error!'), _("Couldn't find versions for image " + save.container_img + ", aborting restoration."))
            img_version_ids = [versions[0].id]
        if save.container_restore_to_name or not save.container_id:
            # Restoring to an explicit target (or the original container
            # record is gone): find it by name/server, or rebuild it from
            # the ports/volumes/options/links snapshots stored on the save.
            container_ids = container_obj.search(cr, uid, [('name', '=', vals['save_container_restore_to_name']), ('server_id.name', '=', vals['save_container_restore_to_server'])], context=context)
            if not container_ids:
                execute.log("Can't find any corresponding container, creating a new one", context)
                server_ids = server_obj.search(cr, uid, [('name', '=', vals['save_container_restore_to_server'])], context=context)
                if not server_ids:
                    raise osv.except_osv(_('Error!'), _("Couldn't find server " + vals['save_container_restore_to_server'] + ", aborting restoration."))
                ports = []
                for port, port_vals in ast.literal_eval(save.container_ports).iteritems():
                    del port_vals['id']
                    # hostport must be re-allocated on the new server.
                    del port_vals['hostport']
                    ports.append((0, 0, port_vals))
                volumes = []
                for volume, volume_vals in ast.literal_eval(save.container_volumes).iteritems():
                    del volume_vals['id']
                    volumes.append((0, 0, volume_vals))
                options = []
                for option, option_vals in ast.literal_eval(save.container_options).iteritems():
                    del option_vals['id']
                    options.append((0, 0, option_vals))
                links = []
                for link, link_vals in ast.literal_eval(save.container_links).iteritems():
                    if not link_vals['name']:
                        # Resolve the linked application by code; skip the
                        # link when the application no longer exists.
                        link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                        if link_app_ids:
                            link_vals['name'] = link_app_ids[0]
                        else:
                            continue
                    del link_vals['name_name']
                    links.append((0, 0, link_vals))
                container_vals = {
                    'name': vals['save_container_restore_to_name'],
                    'server_id': server_ids[0],
                    'application_id': app_ids[0],
                    'image_id': img_ids[0],
                    'image_version_id': img_version_ids[0],
                    'port_ids': ports,
                    'volume_ids': volumes,
                    'option_ids': options,
                    'link_ids': links
                }
                container_id = container_obj.create(cr, uid, container_vals, context=context)
            else:
                execute.log("A corresponding container was found", context)
                container_id = container_ids[0]
        else:
            execute.log("A container_id was linked in the save", context)
            container_id = save.container_id.id
        if vals['saverepo_type'] == 'container':
            vals = self.get_vals(cr, uid, save.id, context=context)
            vals_container = container_obj.get_vals(cr, uid, container_id, context=context)
            if vals_container['image_version_id'] != img_version_ids[0]:
                # if upgrade:
                container_obj.write(cr, uid, [container_id], {'image_version_id': img_version_ids[0]}, context=context)
            # NOTE(review): raises KeyError if 'forcesave' is absent —
            # presumably always set by the calling flow; confirm.
            del context['forcesave']
            context['nosave'] = True
            # Take a safety save of the current state before overwriting it.
            context['save_comment'] = 'Before restore ' + save.name
            container_obj.save(cr, uid, [container_id], context=context)
            # vals = self.get_vals(cr, uid, save.id, context=context)
            # vals_container = container_obj.get_vals(cr, uid, container_id, context=context)
            context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
            ssh, sftp = execute.connect(vals_container['container_fullname'], context=context)
            # Stop everything except sshd while files are being restored.
            execute.execute(ssh, ['supervisorctl', 'stop', 'all'], context)
            execute.execute(ssh, ['supervisorctl', 'start', 'sshd'], context)
            self.restore_action(cr, uid, vals, context=context)
            # ssh, sftp = execute.connect(vals['saverepo_container_server'], 22, 'root', context)
            # execute.execute(ssh, ['docker', 'run', '-t', '--rm', '--volumes-from', vals['saverepo_container_name'], '-v', '/opt/keys/bup:/root/.ssh', 'img_bup:latest', '/opt/restore', 'container', vals['saverepo_name'], vals['save_now_bup'], vals['save_container_volumes']], context)
            # ssh.close()
            # sftp.close()
            # Restore ownership on the restored volumes.
            for key, volume in vals_container['container_volumes'].iteritems():
                if volume['user']:
                    execute.execute(ssh, ['chown', '-R', volume['user'] + ':' + volume['user'], volume['name']], context)
            # execute.execute(ssh, ['supervisorctl', 'start', 'all'], context)
            ssh.close()
            sftp.close()
            container_obj.start(cr, uid, vals_container, context=context)
            container_obj.deploy_links(cr, uid, [container_id], context=context)
            self.end_log(cr, uid, save.id, context=context)
            res = container_id
        else:
            # upgrade = False
            app_version_ids = application_version_obj.search(cr, uid, [('name', '=', save.base_app_version), ('application_id', '=', app_ids[0])], context=context)
            if not app_version_ids:
                execute.log("Warning, couldn't find the application version, using latest", context)
                # We do not want to force the upgrade if we had to use latest
                # upgrade = False
                # FIX: was ``app_version_ids[0]`` — that list is empty in
                # this branch (IndexError).  Browse the application instead,
                # mirroring the image fallback above.
                versions = application_obj.browse(cr, uid, app_ids[0], context=context).version_ids
                if not versions:
                    raise osv.except_osv(_('Error!'), _("Couldn't find versions for application " + save.container_app + ", aborting restoration."))
                app_version_ids = [versions[0].id]
            if not save.service_id or save.service_id.container_id.id != container_id:
                # Locate the service on the target container, or recreate it
                # from the options/links snapshots stored on the save.
                service_ids = service_obj.search(cr, uid, [('name', '=', save.service_name), ('container_id.id', '=', container_id)], context=context)
                if not service_ids:
                    execute.log("Can't find any corresponding service, creating a new one", context)
                    options = []
                    for option, option_vals in ast.literal_eval(save.service_options).iteritems():
                        del option_vals['id']
                        options.append((0, 0, option_vals))
                    links = []
                    for link, link_vals in ast.literal_eval(save.service_links).iteritems():
                        if not link_vals['name']:
                            link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                            if link_app_ids:
                                link_vals['name'] = link_app_ids[0]
                            else:
                                continue
                        del link_vals['name_name']
                        links.append((0, 0, link_vals))
                    service_vals = {
                        'name': save.service_name,
                        'container_id': container_id,
                        'database_container_id': save.service_database_id.id,
                        'application_version_id': app_version_ids[0],
                        # 'option_ids': options,
                        'link_ids': links
                    }
                    service_id = service_obj.create(cr, uid, service_vals, context=context)
                else:
                    execute.log("A corresponding service was found", context)
                    service_id = service_ids[0]
            else:
                execute.log("A service_id was linked in the save", context)
                service_id = save.service_id.id
            if save.base_restore_to_name or not save.base_id:
                base_ids = base_obj.search(cr, uid, [('name', '=', vals['save_base_restore_to_name']), ('domain_id.name', '=', vals['save_base_restore_to_domain'])], context=context)
                if not base_ids:
                    execute.log("Can't find any corresponding base, creating a new one", context)
                    domain_ids = domain_obj.search(cr, uid, [('name', '=', vals['save_base_restore_to_domain'])], context=context)
                    if not domain_ids:
                        raise osv.except_osv(_('Error!'), _("Couldn't find domain " + vals['save_base_restore_to_domain'] + ", aborting restoration."))
                    options = []
                    for option, option_vals in ast.literal_eval(save.base_options).iteritems():
                        del option_vals['id']
                        options.append((0, 0, option_vals))
                    links = []
                    for link, link_vals in ast.literal_eval(save.base_links).iteritems():
                        if not link_vals['name']:
                            link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                            if link_app_ids:
                                link_vals['name'] = link_app_ids[0]
                            else:
                                continue
                        del link_vals['name_name']
                        links.append((0, 0, link_vals))
                    base_vals = {
                        'name': vals['save_base_restore_to_name'],
                        'service_id': service_id,
                        'application_id': app_ids[0],
                        'domain_id': domain_ids[0],
                        'title': save.base_title,
                        'proxy_id': save.base_proxy_id.id,
                        'mail_id': save.base_mail_id.id,
                        'admin_passwd': save.base_admin_passwd,
                        'poweruser_name': save.base_poweruser_name,
                        'poweruser_passwd': save.base_poweruser_password,
                        'poweruser_email': save.base_poweruser_email,
                        'build': save.base_build,
                        'test': save.base_test,
                        'lang': save.base_lang,
                        'nosave': save.base_nosave,
                        # 'option_ids': options,
                        'link_ids': links,
                    }
                    # Tell base.deploy to skip database creation: the dump
                    # will be restored below.
                    context['base_restoration'] = True
                    base_id = base_obj.create(cr, uid, base_vals, context=context)
                else:
                    execute.log("A corresponding base was found", context)
                    base_id = base_ids[0]
            else:
                execute.log("A base_id was linked in the save", context)
                base_id = save.base_id.id
            vals = self.get_vals(cr, uid, save.id, context=context)
            base_vals = base_obj.get_vals(cr, uid, base_id, context=context)
            if base_vals['app_version_id'] != app_version_ids[0]:
                # if upgrade:
                base_obj.write(cr, uid, [base_id], {'application_version_id': app_version_ids[0]}, context=context)
            # Safety save of the current base before overwriting it.
            context['save_comment'] = 'Before restore ' + save.name
            base_obj.save(cr, uid, [base_id], context=context)
            self.restore_action(cr, uid, vals, context=context)
            base_obj.purge_db(cr, uid, base_vals, context=context)
            ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
            for key, database in base_vals['base_databases'].iteritems():
                if vals['database_type'] != 'mysql':
                    # NOTE(review): the postgres branch uses
                    # base_unique_name_ rather than the loop's ``database``
                    # — presumably single-database; confirm.
                    execute.execute(ssh, ['createdb', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], base_vals['base_unique_name_']], context)
                    execute.execute(ssh, ['cat', '/base-backup/' + vals['saverepo_name'] + '/' + vals['save_base_dumpfile'], '|', 'psql', '-q', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], base_vals['base_unique_name_']], context)
                else:
                    ssh_mysql, sftp_mysql = execute.connect(base_vals['database_fullname'], context=context)
                    execute.execute(ssh_mysql, ["mysql -u root -p'" + base_vals['database_root_password'] + "' -se \"create database " + database + ";\""], context)
                    execute.execute(ssh_mysql, ["mysql -u root -p'" + base_vals['database_root_password'] + "' -se \"grant all on " + database + ".* to '" + base_vals['service_db_user'] + "';\""], context)
                    ssh_mysql.close()
                    sftp_mysql.close()
                    execute.execute(ssh, ['mysql', '-h', base_vals['database_server'], '-u', base_vals['service_db_user'], '-p' + base_vals['service_db_password'], database, '<', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
            self.restore_base(cr, uid, base_vals, context=context)
            base_obj.deploy_links(cr, uid, [base_id], context=context)
            execute.execute(ssh, ['rm', '-rf', '/base-backup/' + vals['saverepo_name']], context)
            ssh.close()
            sftp.close()
            self.end_log(cr, uid, save.id, context=context)
            res = base_id
        # One-shot redirection fields: clear them once the restore is done.
        self.write(cr, uid, [save.id], {'container_restore_to_name': False, 'container_restore_to_server_id': False, 'base_restore_to_name': False, 'base_restore_to_domain_id': False}, context=context)
    return res
def deploy(self, cr, uid, vals, context={}):
    """Deploy the container on its server with ``docker run``.

    Builds the docker command from the container's ports, volumes, links
    and privileged flag, allocates free host ports in [STARTPORT, ENDPORT),
    deploys the SSH key, runs the container, then configures ssmtp when a
    postfix link exists and finally triggers a save (used by shinken).

    :param vals: flattened container values from get_vals()
    """
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    # Remove any previous deployment of this container first.
    self.purge(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    cmd = ['sudo', 'docker', 'run', '-d']
    nextport = STARTPORT
    for key, port in vals['container_ports'].iteritems():
        if not port['hostport']:
            # Scan the port range for a host port that is neither recorded
            # on another container of this server nor currently bound
            # (checked via netstat on the server).
            while not port['hostport'] and nextport != ENDPORT:
                port_ids = self.pool.get('saas.container.port').search(cr, uid, [('hostport', '=', nextport), ('container_id.server_id', '=', vals['server_id'])], context=context)
                if not port_ids and not execute.execute(ssh, ['netstat', '-an', '|', 'grep', str(nextport)], context):
                    self.pool.get('saas.container.port').write(cr, uid, [port['id']], {'hostport': nextport}, context=context)
                    port['hostport'] = nextport
                    if port['name'] == 'ssh':
                        # deploy_key below needs the allocated SSH port.
                        vals['container_ssh_port'] = nextport
                nextport += 1
        _logger.info('nextport %s', nextport)
        _logger.info('server_id %s, hostport %s, localport %s', vals['server_ip'], port['hostport'], port['localport'])
        udp = ''
        if port['udp']:
            udp = '/udp'
        # NOTE(review): if no free port was found, port['hostport'] is
        # still falsy here and the -p mapping will be bogus — confirm the
        # range is always large enough.
        cmd.extend(['-p', vals['server_ip'] + ':' + str(port['hostport']) + ':' + port['localport'] + udp])
    for key, volume in vals['container_volumes'].iteritems():
        if volume['hostpath']:
            arg = volume['hostpath'] + ':' + volume['name']
            if volume['readonly']:
                arg += ':ro'
            cmd.extend(['-v', arg])
    for key, link in vals['container_links'].iteritems():
        # Docker --link only works between containers on the same server.
        if link['make_link'] and link['target']['link_server_id'] == vals['server_id']:
            cmd.extend(['--link', link['target']['link_name'] + ':' + link['code']])
    if vals['container_privileged']:
        cmd.extend(['--privileged'])
    cmd.extend(['-v', '/opt/keys/' + vals['container_fullname'] + ':/opt/keys', '--name', vals['container_name'], vals['image_version_fullname']])
    #Deploy key now, otherwise the container will be angry to not find the key. We can't before because vals['container_ssh_port'] may not be set
    self.deploy_key(cr, uid, vals, context=context)
    #Run container
    execute.execute(ssh, cmd, context)
    # Give the container a moment to come up before post-deploy steps.
    time.sleep(3)
    self.deploy_post(cr, uid, vals, context)
    self.start(cr, uid, vals, context=context)
    ssh.close()
    sftp.close()
    # If linked to postfix, point ssmtp inside the container at it.
    for key, links in vals['container_links'].iteritems():
        if links['name'] == 'postfix':
            ssh, sftp = execute.connect(vals['container_fullname'], context=context)
            execute.execute(ssh, ['echo "root=' + vals['config_email_sysadmin'] + '" > /etc/ssmtp/ssmtp.conf'], context)
            execute.execute(ssh, ['echo "mailhub=postfix:25" >> /etc/ssmtp/ssmtp.conf'], context)
            execute.execute(ssh, ['echo "rewriteDomain=' + vals['container_fullname'] + '" >> /etc/ssmtp/ssmtp.conf'], context)
            execute.execute(ssh, ['echo "hostname=' + vals['container_fullname'] + '" >> /etc/ssmtp/ssmtp.conf'], context)
            execute.execute(ssh, ['echo "FromLineOverride=YES" >> /etc/ssmtp/ssmtp.conf'], context)
            ssh.close()
            sftp.close()
    #For shinken
    self.save(cr, uid, [vals['container_id']], context=context)
    return
def deploy_key(self, cr, uid, vals, context={}):
    """Generate and install the SSH keypair used to reach this container.

    Creates a fresh RSA keypair under the local ~/.ssh/keys directory,
    appends a Host entry for the container to the local ~/.ssh/config,
    uploads the public key to the server as the container's
    authorized_keys, and — for backup containers — also distributes the
    keypair and a Host entry to every shinken container.

    :param vals: flattened container values from get_vals()
    """
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    # restart_required = False
    # try:
    # ssh_container, sftp_container = execute.connect(vals['container_fullname'], context=context)
    # except:
    # restart_required = True
    # pass
    # Drop any previous key/config for this container before regenerating.
    self.purge_key(cr, uid, vals, context=context)
    execute.execute_local([
        'ssh-keygen', '-t', 'rsa', '-C', '*****@*****.**', '-f',
        vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '-N', ''
    ], context)
    # Append a Host block so "ssh <container_fullname>" resolves to the
    # server's SSH port with the generated identity.
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        'Host ' + vals['container_fullname'], context)
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        '\n HostName ' + vals['server_domain'], context)
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        '\n Port ' + str(vals['container_ssh_port']), context)
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        '\n User root', context)
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        '\n IdentityFile ~/.ssh/keys/' + vals['container_fullname'], context)
    # The #END marker lets purge_key sed-delete the whole block later.
    execute.execute_write_file(
        vals['config_home_directory'] + '/.ssh/config',
        '\n#END ' + vals['container_fullname'] + '\n', context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['mkdir', '/opt/keys/' + vals['container_fullname']], context)
    # The /opt/keys dir is bind-mounted into the container at run time.
    sftp.put(
        vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub',
        '/opt/keys/' + vals['container_fullname'] + '/authorized_keys')
    ssh.close()
    sftp.close()
    # _logger.info('restart required %s', restart_required)
    # if not restart_required:
    # execute.execute(ssh_container, ['supervisorctl', 'restart', 'sshd'], context)
    # ssh_container.close()
    # sftp_container.close()
    # else:
    # self.start(cr, uid, vals, context=context)
    if vals['apptype_name'] == 'backup':
        # Backup containers must be reachable from every shinken container,
        # so push the key and a Host entry to each of them.
        shinken_ids = self.search(
            cr, uid, [('application_id.type_id.name', '=', 'shinken')], context=context)
        if not shinken_ids:
            execute.log(
                'The shinken isnt configured in conf, skipping deploying backup keys in shinken',
                context)
            return
        for shinken in self.browse(cr, uid, shinken_ids, context=context):
            shinken_vals = self.get_vals(cr, uid, shinken.id, context=context)
            ssh, sftp = execute.connect(shinken_vals['container_fullname'], username='******', context=context)
            execute.execute(ssh, [
                'rm', '-rf',
                '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '*'
            ], context)
            execute.send(
                sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub',
                '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
            execute.send(
                sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'],
                '/home/shinken/.ssh/keys/' + vals['container_fullname'], context)
            execute.execute(ssh, ['chmod', '-R', '700', '/home/shinken/.ssh'], context)
            # Remove any stale Host block for this container, then rewrite it.
            execute.execute(ssh, [
                'sed', '-i', "'/Host " + vals['container_fullname'] + "/,/END " +
                vals['container_fullname'] + "/d'", '/home/shinken/.ssh/config'
            ], context)
            execute.execute(ssh, [
                'echo "Host ' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'
            ], context)
            execute.execute(ssh, [
                'echo " Hostname ' + vals['server_domain'] + '" >> /home/shinken/.ssh/config'
            ], context)
            execute.execute(ssh, [
                'echo " Port ' + str(vals['container_ssh_port']) + '" >> /home/shinken/.ssh/config'
            ], context)
            execute.execute(
                ssh, ['echo " User backup" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, [
                'echo " IdentityFile ~/.ssh/keys/' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'
            ], context)
            # NOTE(review): this last line targets ~/.ssh/config while the
            # others target /home/shinken/.ssh/config — same file only if
            # the login user's home is /home/shinken; confirm.  Also the
            # ssh/sftp opened in this loop are never closed — possible leak.
            execute.execute(ssh, [
                'echo "#END ' + vals['container_fullname'] + '" >> ~/.ssh/config'
            ], context)
def deploy(self, cr, uid, vals, context={}):
    """Deploy the container on its server with ``docker run``.

    Same flow as the other container deploy, but host ports are allocated
    from the per-server range [server_start_port, server_end_port) instead
    of module-level constants.

    :param vals: flattened container values from get_vals()
    """
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    # Remove any previous deployment of this container first.
    self.purge(cr, uid, vals, context=context)
    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    cmd = ['sudo', 'docker', 'run', '-d']
    nextport = vals['server_start_port']
    for key, port in vals['container_ports'].iteritems():
        if not port['hostport']:
            # Scan the server's port range for a host port that is neither
            # recorded on another container of this server nor currently
            # bound (checked via netstat on the server).
            while not port[
                    'hostport'] and nextport != vals['server_end_port']:
                port_ids = self.pool.get('saas.container.port').search(
                    cr, uid, [('hostport', '=', nextport),
                              ('container_id.server_id', '=', vals['server_id'])],
                    context=context)
                if not port_ids and not execute.execute(
                        ssh, ['netstat', '-an', '|', 'grep', str(nextport)],
                        context):
                    self.pool.get('saas.container.port').write(
                        cr, uid, [port['id']], {'hostport': nextport},
                        context=context)
                    port['hostport'] = nextport
                    if port['name'] == 'ssh':
                        # deploy_key below needs the allocated SSH port.
                        vals['container_ssh_port'] = nextport
                nextport += 1
        _logger.info('nextport %s', nextport)
        _logger.info('server_id %s, hostport %s, localport %s',
                     vals['server_ip'], port['hostport'], port['localport'])
        udp = ''
        if port['udp']:
            udp = '/udp'
        # NOTE(review): if no free port was found, port['hostport'] is
        # still falsy here and the -p mapping will be bogus — confirm the
        # range is always large enough.
        cmd.extend([
            '-p', vals['server_ip'] + ':' + str(port['hostport']) + ':' +
            port['localport'] + udp
        ])
    for key, volume in vals['container_volumes'].iteritems():
        if volume['hostpath']:
            arg = volume['hostpath'] + ':' + volume['name']
            if volume['readonly']:
                arg += ':ro'
            cmd.extend(['-v', arg])
    for key, link in vals['container_links'].iteritems():
        # Docker --link only works between containers on the same server.
        if link['make_link'] and link['target']['link_server_id'] == vals[
                'server_id']:
            cmd.extend([
                '--link', link['target']['link_name'] + ':' + link['code']
            ])
    if vals['container_privileged']:
        cmd.extend(['--privileged'])
    cmd.extend([
        '-v', '/opt/keys/' + vals['container_fullname'] + ':/opt/keys',
        '--name', vals['container_name'], vals['image_version_fullname']
    ])
    #Deploy key now, otherwise the container will be angry to not find the key. We can't before because vals['container_ssh_port'] may not be set
    self.deploy_key(cr, uid, vals, context=context)
    #Run container
    execute.execute(ssh, cmd, context)
    # Give the container a moment to come up before post-deploy steps.
    time.sleep(3)
    self.deploy_post(cr, uid, vals, context)
    self.start(cr, uid, vals, context=context)
    ssh.close()
    sftp.close()
    # If linked to postfix, point ssmtp inside the container at it.
    for key, links in vals['container_links'].iteritems():
        if links['name'] == 'postfix':
            ssh, sftp = execute.connect(vals['container_fullname'], context=context)
            execute.execute(ssh, [
                'echo "root=' + vals['config_email_sysadmin'] +
                '" > /etc/ssmtp/ssmtp.conf'
            ], context)
            execute.execute(
                ssh, ['echo "mailhub=postfix:25" >> /etc/ssmtp/ssmtp.conf'],
                context)
            execute.execute(ssh, [
                'echo "rewriteDomain=' + vals['container_fullname'] +
                '" >> /etc/ssmtp/ssmtp.conf'
            ], context)
            execute.execute(ssh, [
                'echo "hostname=' + vals['container_fullname'] +
                '" >> /etc/ssmtp/ssmtp.conf'
            ], context)
            execute.execute(
                ssh, ['echo "FromLineOverride=YES" >> /etc/ssmtp/ssmtp.conf'],
                context)
            ssh.close()
            sftp.close()
    #For shinken
    self.save(cr, uid, [vals['container_id']], context=context)
    return
def deploy(self, cr, uid, vals, context=None):
    """Deploy (build) a base: create its databases and populate them.

    Skips everything when a restore is in progress ('base_restoration' in
    context, set by saas.save.restore, which loads the dump itself).
    Otherwise creates the database(s) (postgres via createdb, mysql via the
    root account), then either builds the base from scratch or restores the
    application's reference build.sql, creates the power user, runs tests
    and post-deploy hooks, and finally saves the base (for shinken).

    :param vals: flattened base values from get_vals()
    """
    # FIX: the default was None but context.update() was called
    # unconditionally, so every call omitting ``context`` crashed with
    # AttributeError.  Normalize to a dict first.
    if context is None:
        context = {}
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge(cr, uid, vals, context=context)
    if 'base_restoration' in context:
        # The restore flow recreates and loads the databases itself.
        return
    # Application-type specific database creation; falls back to the
    # generic path below when it returns a falsy result.
    res = self.deploy_create_database(cr, uid, vals, context)
    if not res:
        for key, database in vals['base_databases'].iteritems():
            if vals['database_type'] != 'mysql':
                ssh, sftp = execute.connect(
                    vals['container_fullname'],
                    username=vals['apptype_system_user'], context=context)
                execute.execute(ssh, [
                    'createdb', '-h', vals['database_server'], '-U',
                    vals['service_db_user'], database
                ], context)
                ssh.close()
                sftp.close()
            else:
                ssh, sftp = execute.connect(vals['database_fullname'], context=context)
                execute.execute(ssh, [
                    "mysql -u root -p'" + vals['database_root_password'] +
                    "' -se \"create database " + database + ";\""
                ], context)
                execute.execute(ssh, [
                    "mysql -u root -p'" + vals['database_root_password'] +
                    "' -se \"grant all on " + database + ".* to '" +
                    vals['service_db_user'] + "';\""
                ], context)
                ssh.close()
                sftp.close()
    execute.log('Database created', context)
    if vals['base_build'] == 'build':
        self.deploy_build(cr, uid, vals, context)
    elif vals['base_build'] == 'restore':
        # Load the application's reference dump shipped with the version.
        if vals['database_type'] != 'mysql':
            ssh, sftp = execute.connect(
                vals['container_fullname'],
                username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, [
                'pg_restore', '-h', vals['bdd_server_domain'], '-U',
                vals['service_db_user'], '--no-owner', '-Fc', '-d',
                vals['base_unique_name_'],
                vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'
            ], context)
            ssh.close()
            sftp.close()
        else:
            ssh, sftp = execute.connect(
                vals['container_fullname'],
                username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, [
                'mysql', '-h', vals['bdd_server_domain'], '-u',
                vals['service_db_user'],
                '-p' + vals['bdd_server_mysql_passwd'],
                vals['base_unique_name_'], '<',
                vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'
            ], context)
            ssh.close()
            sftp.close()
        self.deploy_post_restore(cr, uid, vals, context)
    if vals['base_build'] != 'none':
        # Only create the power user when it differs from the admin user.
        if vals['base_poweruser_name'] and vals[
                'base_poweruser_email'] and vals[
                    'apptype_admin_name'] != vals['base_poweruser_name']:
            self.deploy_create_poweruser(cr, uid, vals, context)
        if vals['base_test']:
            self.deploy_test(cr, uid, vals, context)
        self.deploy_post(cr, uid, vals, context)
    #For shinken
    self.save(cr, uid, [vals['base_id']], context=context)