def deploy(self, cr, uid, vals, context=None):
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge(cr, uid, vals, context=context)

    if 'base_restoration' in context:
        return

    res = self.deploy_create_database(cr, uid, vals, context)
    if not res:
        for key, database in vals['base_databases'].iteritems():
            if vals['database_type'] != 'mysql':
                ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
                execute.execute(ssh, ['createdb', '-h', vals['database_server'], '-U', vals['service_db_user'], database], context)
                ssh.close()
                sftp.close()
            else:
                ssh, sftp = execute.connect(vals['database_fullname'], context=context)
                execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"create database " + database + ";\""], context)
                execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"grant all on " + database + ".* to '" + vals['service_db_user'] + "';\""], context)
                ssh.close()
                sftp.close()
    execute.log('Database created', context)

    if vals['base_build'] == 'build':
        self.deploy_build(cr, uid, vals, context)

    elif vals['base_build'] == 'restore':
        if vals['database_type'] != 'mysql':
            ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, ['pg_restore', '-h', vals['bdd_server_domain'], '-U', vals['service_db_user'], '--no-owner', '-Fc', '-d', vals['base_unique_name_'], vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'], context)
            ssh.close()
            sftp.close()
        else:
            ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
            execute.execute(ssh, ['mysql', '-h', vals['bdd_server_domain'], '-u', vals['service_db_user'], '-p' + vals['bdd_server_mysql_passwd'], vals['base_unique_name_'], '<', vals['app_version_full_localpath'] + '/' + vals['app_bdd'] + '/build.sql'], context)
            ssh.close()
            sftp.close()
        self.deploy_post_restore(cr, uid, vals, context)

    if vals['base_build'] != 'none':
        if vals['base_poweruser_name'] and vals['base_poweruser_email'] and vals['apptype_admin_name'] != vals['base_poweruser_name']:
            self.deploy_create_poweruser(cr, uid, vals, context)
        if vals['base_test']:
            self.deploy_test(cr, uid, vals, context)

    self.deploy_post(cr, uid, vals, context)

    # For shinken
    self.save(cr, uid, [vals['base_id']], context=context)
def deploy_key(self, cr, uid, vals, context={}):
    """Generate an SSH key for the container, register it on the host server
    and, for backup containers, push it to the shinken containers."""
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})

    # restart_required = False
    # try:
    #     ssh_container, sftp_container = execute.connect(vals['container_fullname'], context=context)
    # except:
    #     restart_required = True
    #     pass

    self.purge_key(cr, uid, vals, context=context)
    execute.execute_local(['ssh-keygen', '-t', 'rsa', '-C', '*****@*****.**', '-f', vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '-N', ''], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', 'Host ' + vals['container_fullname'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n HostName ' + vals['server_domain'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n Port ' + str(vals['container_ssh_port']), context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n User root', context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n IdentityFile ~/.ssh/keys/' + vals['container_fullname'], context)
    execute.execute_write_file(vals['config_home_directory'] + '/.ssh/config', '\n#END ' + vals['container_fullname'] + '\n', context)

    ssh, sftp = execute.connect(vals['server_domain'], vals['server_ssh_port'], 'root', context)
    execute.execute(ssh, ['mkdir', '/opt/keys/' + vals['container_fullname']], context)
    sftp.put(vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/opt/keys/' + vals['container_fullname'] + '/authorized_keys')
    ssh.close()
    sftp.close()

    # _logger.info('restart required %s', restart_required)
    # if not restart_required:
    #     execute.execute(ssh_container, ['supervisorctl', 'restart', 'sshd'], context)
    #     ssh_container.close()
    #     sftp_container.close()
    # else:
    #     self.start(cr, uid, vals, context=context)

    if vals['apptype_name'] == 'backup':
        shinken_ids = self.search(cr, uid, [('application_id.type_id.name', '=', 'shinken')], context=context)
        if not shinken_ids:
            execute.log("Shinken isn't configured in conf, skipping deploying backup keys in shinken", context)
            return
        for shinken in self.browse(cr, uid, shinken_ids, context=context):
            shinken_vals = self.get_vals(cr, uid, shinken.id, context=context)
            ssh, sftp = execute.connect(shinken_vals['container_fullname'], username='******', context=context)
            execute.execute(ssh, ['rm', '-rf', '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '*'], context)
            execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/home/shinken/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
            execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '/home/shinken/.ssh/keys/' + vals['container_fullname'], context)
            execute.execute(ssh, ['chmod', '-R', '700', '/home/shinken/.ssh'], context)
            execute.execute(ssh, ['sed', '-i', "'/Host " + vals['container_fullname'] + "/,/END " + vals['container_fullname'] + "/d'", '/home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "Host ' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo " Hostname ' + vals['server_domain'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo " Port ' + str(vals['container_ssh_port']) + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo " User backup" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo " IdentityFile ~/.ssh/keys/' + vals['container_fullname'] + '" >> /home/shinken/.ssh/config'], context)
            execute.execute(ssh, ['echo "#END ' + vals['container_fullname'] + '" >> ~/.ssh/config'], context)
def save(self, cr, uid, ids, context={}):
    """Create a saas.save.save record for each container and schedule the next save."""
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    save_obj = self.pool.get('saas.save.save')
    res = {}
    now = datetime.now()
    for container in self.browse(cr, uid, ids, context=context):
        if 'nosave' in context or (container.nosave and 'forcesave' not in context):
            execute.log("This container shouldn't be saved or the backup isn't configured in conf, skipping save container", context)
            continue
        context = self.create_log(cr, uid, container.id, 'save', context)
        vals = self.get_vals(cr, uid, container.id, context=context)
        if 'backup_server_domain' not in vals:
            execute.log("The backup isn't configured in conf, skipping save container", context)
            return
        links = {}
        for app_code, link in vals['container_links'].iteritems():
            links[app_code] = {
                'name': link['app_id'],
                'name_name': link['name'],
                'target': link['target'] and link['target']['link_id'] or False
            }
        save_vals = {
            'name': vals['now_bup'] + '_' + vals['container_fullname'],
            'repo_id': vals['saverepo_id'],
            'date_expiration': (now + timedelta(days=container.save_expiration or container.application_id.container_save_expiration)).strftime("%Y-%m-%d"),
            'comment': 'save_comment' in context and context['save_comment'] or container.save_comment or 'Manual',
            'now_bup': vals['now_bup'],
            'container_id': vals['container_id'],
            'container_volumes_comma': vals['container_volumes_save'],
            'container_app': vals['app_code'],
            'container_img': vals['image_name'],
            'container_img_version': vals['image_version_name'],
            'container_ports': str(vals['container_ports']),
            'container_volumes': str(vals['container_volumes']),
            'container_options': str(vals['container_options']),
            'container_links': str(links),
        }
        res[container.id] = save_obj.create(cr, uid, save_vals, context=context)
        next = (datetime.now() + timedelta(minutes=container.time_between_save or container.application_id.container_time_between_save)).strftime("%Y-%m-%d %H:%M:%S")
        self.write(cr, uid, [container.id], {'save_comment': False, 'date_next_save': next}, context=context)
        self.end_log(cr, uid, container.id, context=context)
    return res
def deploy(self, cr, uid, vals, context=None):
    """Deploy the service: create its database user, prepare its directory and files, then start the container."""
    container_obj = self.pool.get('saas.container')
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    self.purge(cr, uid, vals, context=context)

    execute.log('Creating database user', context=context)
    # If postgres, create the user
    if vals['database_type'] != 'mysql':
        ssh, sftp = execute.connect(vals['database_fullname'], username='******', context=context)
        execute.execute(ssh, ['psql', '-c', '"CREATE USER ' + vals['service_db_user'] + ' WITH PASSWORD \'' + vals['service_db_password'] + '\' CREATEDB;"'], context)
        ssh.close()
        sftp.close()

        ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['sed', '-i', '"/:*:' + vals['service_db_user'] + ':/d" ~/.pgpass'], context)
        execute.execute(ssh, ['echo "' + vals['database_server'] + ':5432:*:' + vals['service_db_user'] + ':' + vals['service_db_password'] + '" >> ~/.pgpass'], context)
        execute.execute(ssh, ['chmod', '700', '~/.pgpass'], context)
        ssh.close()
        sftp.close()
    else:
        ssh, sftp = execute.connect(vals['database_fullname'], context=context)
        execute.execute(ssh, ["mysql -u root -p'" + vals['database_root_password'] + "' -se \"create user '" + vals['service_db_user'] + "' identified by '" + vals['service_db_password'] + "';\""], context)
        ssh.close()
        sftp.close()
    execute.log('Database user created', context)

    ssh, sftp = execute.connect(vals['container_fullname'], username=vals['apptype_system_user'], context=context)
    execute.execute(ssh, ['mkdir', '-p', vals['service_full_localpath']], context)
    ssh.close()
    sftp.close()

    self.deploy_files(cr, uid, vals, context=context)
    self.deploy_post_service(cr, uid, vals, context)

    container_obj.start(cr, uid, vals, context=context)
def purge(self, cr, uid, vals, context={}):
    """Purge the link if it targets this base."""
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    if 'link_target_container_id' not in vals:
        execute.log("The target isn't configured in the link, skipping purge link", context)
        return
    if vals['link_target_app_code'] not in vals['base_links']:
        execute.log("The target isn't in the application link for base, skipping purge link", context)
        return
    if not vals['base_links'][vals['link_target_app_code']]['base']:
        execute.log("This application isn't for base, skipping purge link", context)
        return
    self.purge_link(cr, uid, vals, context=context)
def purge(self, cr, uid, vals, context={}):
    """Purge the link if it targets this service."""
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    if 'link_target_container_id' not in vals:
        execute.log("The target isn't configured in the link, skipping purge link", context)
        return
    if vals['link_target_app_code'] not in vals['service_links']:
        execute.log("The target isn't in the application link for service, skipping purge link", context)
        return
    if not vals['service_links'][vals['link_target_app_code']]['service']:
        execute.log("This application isn't for service, skipping purge link", context)
        return
    self.purge_link(cr, uid, vals, context=context)
def deploy(self, cr, uid, vals, context={}):
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    execute.log('Saving ' + vals['save_name'], context)
    execute.log('Comment: ' + vals['save_comment'], context)

    if vals['saverepo_type'] == 'base':
        base_vals = self.pool.get('saas.base').get_vals(cr, uid, vals['save_base_id'], context=context)
        ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['mkdir', '-p', '/base-backup/' + vals['saverepo_name']], context)
        for key, database in base_vals['base_databases'].iteritems():
            if vals['database_type'] != 'mysql':
                execute.execute(ssh, ['pg_dump', '-O', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], database, '>', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
            else:
                execute.execute(ssh, ['mysqldump', '-h', base_vals['database_server'], '-u', base_vals['service_db_user'], '-p' + base_vals['service_db_password'], database, '>', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
        self.deploy_base(cr, uid, base_vals, context=context)
        execute.execute(ssh, ['chmod', '-R', '777', '/base-backup/' + vals['saverepo_name']], context)
        ssh.close()
        sftp.close()

    # ssh, sftp = execute.connect(vals['save_container_restore_to_server'], 22, 'root', context)
    # execute.execute(ssh, ['docker', 'run', '-t', '--rm', '--volumes-from', vals['save_container_restore_to_name'], '-v', '/opt/keys/bup:/root/.ssh', 'img_bup:latest', '/opt/save', vals['saverepo_type'], vals['saverepo_name'], str(int(vals['save_now_epoch'])), vals['save_container_volumes'] or ''], context)
    # ssh.close()
    # sftp.close()

    directory = '/tmp/' + vals['saverepo_name']
    ssh, sftp = execute.connect(vals['container_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    execute.execute(ssh, ['mkdir', directory], context)
    if vals['saverepo_type'] == 'container':
        for volume in vals['save_container_volumes'].split(','):
            execute.execute(ssh, ['cp', '-R', '--parents', volume, directory], context)
    else:
        execute.execute(ssh, ['cp', '-R', '/base-backup/' + vals['saverepo_name'] + '/*', directory], context)
    execute.execute(ssh, ['echo "' + vals['now_date'] + '" > ' + directory + '/backup-date'], context)
    execute.execute(ssh, ['tar', 'cf', directory + '.tar.gz', '-C', directory, '.'], context)
    execute.execute(ssh, ['chmod', '-R', '777', directory + '*'], context)
    ssh.close()
    sftp.close()

    ssh, sftp = execute.connect(vals['backup_fullname'], username='******', context=context)
    if vals['saverepo_type'] == 'container':
        name = vals['container_fullname']
    else:
        name = vals['base_unique_name_']
    execute.execute(ssh, ['rm', '-rf', '/opt/backup/list/' + name], context)
    execute.execute(ssh, ['mkdir', '-p', '/opt/backup/list/' + name], context)
    execute.execute(ssh, ['echo "' + vals['saverepo_name'] + '" > /opt/backup/list/' + name + '/repo'], context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/config', '/home/backup/.ssh/config', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'] + '.pub', '/home/backup/.ssh/keys/' + vals['container_fullname'] + '.pub', context)
    execute.send(sftp, vals['config_home_directory'] + '/.ssh/keys/' + vals['container_fullname'], '/home/backup/.ssh/keys/' + vals['container_fullname'], context)
    execute.execute(ssh, ['chmod', '-R', '700', '/home/backup/.ssh'], context)
    execute.execute(ssh, ['rm', '-rf', directory], context)
    execute.execute(ssh, ['mkdir', directory], context)
    execute.execute(ssh, ['scp', '-o StrictHostKeychecking=no', vals['container_fullname'] + ':' + directory + '.tar.gz', '/tmp/'], context)
    execute.execute(ssh, ['tar', '-xf', directory + '.tar.gz', '-C', directory], context)
    for backup in vals['config_backups']:
        if backup == 'simple':
            execute.execute(ssh, ['mkdir', '-p', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name']], context)
            execute.execute(ssh, ['cp', '-R', directory + '/*', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name']], context)
            execute.execute(ssh, ['rm', '/opt/backup/simple/' + vals['saverepo_name'] + '/latest'], context)
            execute.execute(ssh, ['ln', '-s', '/opt/backup/simple/' + vals['saverepo_name'] + '/' + vals['save_name'], '/opt/backup/simple/' + vals['saverepo_name'] + '/latest'], context)
        if backup == 'bup':
            execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup index ' + directory], context)
            execute.execute(ssh, ['export BUP_DIR=/opt/backup/bup;', 'bup save -n ' + vals['saverepo_name'] + ' -d ' + str(int(vals['save_now_epoch'])) + ' --strip ' + directory], context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    execute.execute(ssh, ['rm', '/home/backup/.ssh/keys/*'], context)
    ssh.close()
    sftp.close()

    ssh, sftp = execute.connect(vals['container_fullname'], context=context)
    execute.execute(ssh, ['rm', '-rf', directory + '*'], context)
    ssh.close()
    sftp.close()

    if vals['saverepo_type'] == 'base':
        base_vals = self.pool.get('saas.base').get_vals(cr, uid, vals['save_base_id'], context=context)
        ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
        execute.execute(ssh, ['rm', '-rf', '/base-backup/' + vals['saverepo_name']], context)
        ssh.close()
        sftp.close()
    return
def restore(self, cr, uid, ids, context={}):
    """Restore a save, recreating the target container, service and base if needed."""
    context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
    container_obj = self.pool.get('saas.container')
    base_obj = self.pool.get('saas.base')
    server_obj = self.pool.get('saas.server')
    domain_obj = self.pool.get('saas.domain')
    application_obj = self.pool.get('saas.application')
    application_version_obj = self.pool.get('saas.application.version')
    image_obj = self.pool.get('saas.image')
    image_version_obj = self.pool.get('saas.image.version')
    service_obj = self.pool.get('saas.service')
    for save in self.browse(cr, uid, ids, context=context):
        context = self.create_log(cr, uid, save.id, 'restore', context)
        vals = self.get_vals(cr, uid, save.id, context=context)

        app_ids = application_obj.search(cr, uid, [('code', '=', save.container_app)], context=context)
        if not app_ids:
            raise osv.except_osv(_('Error!'), _("Couldn't find application " + save.container_app + ", aborting restoration."))
        img_ids = image_obj.search(cr, uid, [('name', '=', save.container_img)], context=context)
        if not img_ids:
            raise osv.except_osv(_('Error!'), _("Couldn't find image " + save.container_img + ", aborting restoration."))
        img_version_ids = image_version_obj.search(cr, uid, [('name', '=', save.container_img_version)], context=context)
        # upgrade = True
        if not img_version_ids:
            execute.log("Warning, couldn't find the image version, using latest", context)
            # We do not want to force the upgrade if we had to use latest
            # upgrade = False
            versions = image_obj.browse(cr, uid, img_ids[0], context=context).version_ids
            if not versions:
                raise osv.except_osv(_('Error!'), _("Couldn't find versions for image " + save.container_img + ", aborting restoration."))
            img_version_ids = [versions[0].id]

        if save.container_restore_to_name or not save.container_id:
            container_ids = container_obj.search(cr, uid, [('name', '=', vals['save_container_restore_to_name']), ('server_id.name', '=', vals['save_container_restore_to_server'])], context=context)
            if not container_ids:
                execute.log("Can't find any corresponding container, creating a new one", context)
                server_ids = server_obj.search(cr, uid, [('name', '=', vals['save_container_restore_to_server'])], context=context)
                if not server_ids:
                    raise osv.except_osv(_('Error!'), _("Couldn't find server " + vals['save_container_restore_to_server'] + ", aborting restoration."))
                ports = []
                for port, port_vals in ast.literal_eval(save.container_ports).iteritems():
                    del port_vals['id']
                    del port_vals['hostport']
                    ports.append((0, 0, port_vals))
                volumes = []
                for volume, volume_vals in ast.literal_eval(save.container_volumes).iteritems():
                    del volume_vals['id']
                    volumes.append((0, 0, volume_vals))
                options = []
                for option, option_vals in ast.literal_eval(save.container_options).iteritems():
                    del option_vals['id']
                    options.append((0, 0, option_vals))
                links = []
                for link, link_vals in ast.literal_eval(save.container_links).iteritems():
                    if not link_vals['name']:
                        link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                        if link_app_ids:
                            link_vals['name'] = link_app_ids[0]
                        else:
                            continue
                    del link_vals['name_name']
                    links.append((0, 0, link_vals))
                container_vals = {
                    'name': vals['save_container_restore_to_name'],
                    'server_id': server_ids[0],
                    'application_id': app_ids[0],
                    'image_id': img_ids[0],
                    'image_version_id': img_version_ids[0],
                    'port_ids': ports,
                    'volume_ids': volumes,
                    'option_ids': options,
                    'link_ids': links
                }
                container_id = container_obj.create(cr, uid, container_vals, context=context)
            else:
                execute.log("A corresponding container was found", context)
                container_id = container_ids[0]
        else:
            execute.log("A container_id was linked in the save", context)
            container_id = save.container_id.id

        if vals['saverepo_type'] == 'container':
            vals = self.get_vals(cr, uid, save.id, context=context)
            vals_container = container_obj.get_vals(cr, uid, container_id, context=context)
            if vals_container['image_version_id'] != img_version_ids[0]:
                # if upgrade:
                container_obj.write(cr, uid, [container_id], {'image_version_id': img_version_ids[0]}, context=context)
            # 'forcesave' may not be set when restore is called directly
            if 'forcesave' in context:
                del context['forcesave']
            context['nosave'] = True
            context['save_comment'] = 'Before restore ' + save.name
            container_obj.save(cr, uid, [container_id], context=context)
            # vals = self.get_vals(cr, uid, save.id, context=context)
            # vals_container = container_obj.get_vals(cr, uid, container_id, context=context)
            context.update({'saas-self': self, 'saas-cr': cr, 'saas-uid': uid})
            ssh, sftp = execute.connect(vals_container['container_fullname'], context=context)
            execute.execute(ssh, ['supervisorctl', 'stop', 'all'], context)
            execute.execute(ssh, ['supervisorctl', 'start', 'sshd'], context)
            self.restore_action(cr, uid, vals, context=context)
            # ssh, sftp = execute.connect(vals['saverepo_container_server'], 22, 'root', context)
            # execute.execute(ssh, ['docker', 'run', '-t', '--rm', '--volumes-from', vals['saverepo_container_name'], '-v', '/opt/keys/bup:/root/.ssh', 'img_bup:latest', '/opt/restore', 'container', vals['saverepo_name'], vals['save_now_bup'], vals['save_container_volumes']], context)
            # ssh.close()
            # sftp.close()
            for key, volume in vals_container['container_volumes'].iteritems():
                if volume['user']:
                    execute.execute(ssh, ['chown', '-R', volume['user'] + ':' + volume['user'], volume['name']], context)
            # execute.execute(ssh, ['supervisorctl', 'start', 'all'], context)
            ssh.close()
            sftp.close()
            container_obj.start(cr, uid, vals_container, context=context)
            container_obj.deploy_links(cr, uid, [container_id], context=context)
            self.end_log(cr, uid, save.id, context=context)
            res = container_id
        else:
            # upgrade = False
            app_version_ids = application_version_obj.search(cr, uid, [('name', '=', save.base_app_version), ('application_id', '=', app_ids[0])], context=context)
            if not app_version_ids:
                execute.log("Warning, couldn't find the application version, using latest", context)
                # We do not want to force the upgrade if we had to use latest
                # upgrade = False
                # Fall back on the application's latest version
                versions = application_obj.browse(cr, uid, app_ids[0], context=context).version_ids
                if not versions:
                    raise osv.except_osv(_('Error!'), _("Couldn't find versions for application " + save.container_app + ", aborting restoration."))
                app_version_ids = [versions[0].id]

            if not save.service_id or save.service_id.container_id.id != container_id:
                service_ids = service_obj.search(cr, uid, [('name', '=', save.service_name), ('container_id.id', '=', container_id)], context=context)
                if not service_ids:
                    execute.log("Can't find any corresponding service, creating a new one", context)
                    options = []
                    for option, option_vals in ast.literal_eval(save.service_options).iteritems():
                        del option_vals['id']
                        options.append((0, 0, option_vals))
                    links = []
                    for link, link_vals in ast.literal_eval(save.service_links).iteritems():
                        if not link_vals['name']:
                            link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                            if link_app_ids:
                                link_vals['name'] = link_app_ids[0]
                            else:
                                continue
                        del link_vals['name_name']
                        links.append((0, 0, link_vals))
                    service_vals = {
                        'name': save.service_name,
                        'container_id': container_id,
                        'database_container_id': save.service_database_id.id,
                        'application_version_id': app_version_ids[0],
                        # 'option_ids': options,
                        'link_ids': links
                    }
                    service_id = service_obj.create(cr, uid, service_vals, context=context)
                else:
                    execute.log("A corresponding service was found", context)
                    service_id = service_ids[0]
            else:
                execute.log("A service_id was linked in the save", context)
                service_id = save.service_id.id

            if save.base_restore_to_name or not save.base_id:
                base_ids = base_obj.search(cr, uid, [('name', '=', vals['save_base_restore_to_name']), ('domain_id.name', '=', vals['save_base_restore_to_domain'])], context=context)
                if not base_ids:
                    execute.log("Can't find any corresponding base, creating a new one", context)
                    domain_ids = domain_obj.search(cr, uid, [('name', '=', vals['save_base_restore_to_domain'])], context=context)
                    if not domain_ids:
                        raise osv.except_osv(_('Error!'), _("Couldn't find domain " + vals['save_base_restore_to_domain'] + ", aborting restoration."))
                    options = []
                    for option, option_vals in ast.literal_eval(save.base_options).iteritems():
                        del option_vals['id']
                        options.append((0, 0, option_vals))
                    links = []
                    for link, link_vals in ast.literal_eval(save.base_links).iteritems():
                        if not link_vals['name']:
                            link_app_ids = self.pool.get('saas.application').search(cr, uid, [('code', '=', link_vals['name_name'])], context=context)
                            if link_app_ids:
                                link_vals['name'] = link_app_ids[0]
                            else:
                                continue
                        del link_vals['name_name']
                        links.append((0, 0, link_vals))
                    base_vals = {
                        'name': vals['save_base_restore_to_name'],
                        'service_id': service_id,
                        'application_id': app_ids[0],
                        'domain_id': domain_ids[0],
                        'title': save.base_title,
                        'proxy_id': save.base_proxy_id.id,
                        'mail_id': save.base_mail_id.id,
                        'admin_passwd': save.base_admin_passwd,
                        'poweruser_name': save.base_poweruser_name,
                        'poweruser_passwd': save.base_poweruser_password,
                        'poweruser_email': save.base_poweruser_email,
                        'build': save.base_build,
                        'test': save.base_test,
                        'lang': save.base_lang,
                        'nosave': save.base_nosave,
                        # 'option_ids': options,
                        'link_ids': links,
                    }
                    context['base_restoration'] = True
                    base_id = base_obj.create(cr, uid, base_vals, context=context)
                else:
                    execute.log("A corresponding base was found", context)
                    base_id = base_ids[0]
            else:
                execute.log("A base_id was linked in the save", context)
                base_id = save.base_id.id

            vals = self.get_vals(cr, uid, save.id, context=context)
            base_vals = base_obj.get_vals(cr, uid, base_id, context=context)
            if base_vals['app_version_id'] != app_version_ids[0]:
                # if upgrade:
                base_obj.write(cr, uid, [base_id], {'application_version_id': app_version_ids[0]}, context=context)
            context['save_comment'] = 'Before restore ' + save.name
            base_obj.save(cr, uid, [base_id], context=context)
            self.restore_action(cr, uid, vals, context=context)

            base_obj.purge_db(cr, uid, base_vals, context=context)
            ssh, sftp = execute.connect(base_vals['container_fullname'], username=base_vals['apptype_system_user'], context=context)
            for key, database in base_vals['base_databases'].iteritems():
                if vals['database_type'] != 'mysql':
                    execute.execute(ssh, ['createdb', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], base_vals['base_unique_name_']], context)
                    execute.execute(ssh, ['cat', '/base-backup/' + vals['saverepo_name'] + '/' + vals['save_base_dumpfile'], '|', 'psql', '-q', '-h', base_vals['database_server'], '-U', base_vals['service_db_user'], base_vals['base_unique_name_']], context)
                else:
                    ssh_mysql, sftp_mysql = execute.connect(base_vals['database_fullname'], context=context)
                    execute.execute(ssh_mysql, ["mysql -u root -p'" + base_vals['database_root_password'] + "' -se \"create database " + database + ";\""], context)
                    execute.execute(ssh_mysql, ["mysql -u root -p'" + base_vals['database_root_password'] + "' -se \"grant all on " + database + ".* to '" + base_vals['service_db_user'] + "';\""], context)
                    ssh_mysql.close()
                    sftp_mysql.close()
                    execute.execute(ssh, ['mysql', '-h', base_vals['database_server'], '-u', base_vals['service_db_user'], '-p' + base_vals['service_db_password'], database, '<', '/base-backup/' + vals['saverepo_name'] + '/' + database + '.dump'], context)
            self.restore_base(cr, uid, base_vals, context=context)
            base_obj.deploy_links(cr, uid, [base_id], context=context)
            execute.execute(ssh, ['rm', '-rf', '/base-backup/' + vals['saverepo_name']], context)
            ssh.close()
            sftp.close()
            self.end_log(cr, uid, save.id, context=context)
            res = base_id

        self.write(cr, uid, [save.id], {'container_restore_to_name': False, 'container_restore_to_server_id': False, 'base_restore_to_name': False, 'base_restore_to_domain_id': False}, context=context)
    return res