def doUpdate(self, db_update=True, norefresh=None, names=None, set_local=True):
    """Update a dockerized site.

    Makes sure the master/slave containers and the dumper image exist,
    stops the site's container, runs the actual update via ``_doUpdate``
    and restarts the container afterwards.

    Parameters
    ----------
    db_update : bool
        Passed through to ``_doUpdate``.
    norefresh : bool or None
        Passed through to ``_doUpdate``.
    names : list or None
        Unused by this docker variant; kept only for signature
        compatibility.  (BUGFIX: was a mutable default ``[]``.)
    set_local : bool
        Not used yet (as the original docstring stated).
    """
    # self.update_container_info()
    # we need to learn what ip address the local docker db is using;
    # if the containers do not yet exist, we create them (master and slave)
    self.check_and_create_container()
    server_dic = get_remote_server_info(self.opts)
    # we have to decide whether this is a local or a remote update.
    # NOTE(review): remote_data_path is currently only consumed by the
    # commented-out docker_path_map remapping below.
    remote_data_path = server_dic['remote_data_path']
    dumper_image = BASE_INFO.get('docker_dumper_image')

    def _missing_image_msg():
        # Build the "image missing" message exactly once.
        # BUGFIX: the original concatenated bcolors.ENDC into the *path*
        # format argument instead of appending it after the formatted
        # text; also fixed the 'insturctions' typo.
        return (bcolors.FAIL +
                'the %s image is not available. please create it first. '
                'instructions on how to do it, you find in %s/dumper' % (
                    dumper_image, self.default_values['sites_home']) +
                bcolors.ENDC)

    if not dumper_image:
        print(_missing_image_msg())
        # BUGFIX: the original fell through and called
        # self.checkImage(None) after printing this error.
        return
    if not self.checkImage(dumper_image):
        self.createDumperImage()
        if not self.checkImage(dumper_image):
            print(_missing_image_msg())
            return
    # mp = self.default_values.get('docker_path_map')
    # if mp and ACT_USER != 'root':
    #     t, s = mp
    #     remote_data_path = remote_data_path.replace(s, t)
    self.stop_container(self.site_name)
    self._doUpdate(db_update, norefresh, self.site_name,
                   self.opts.verbose and ' -v' or '')
    # if we knew the admin password we could set it here;
    # for non-docker installs the pw is usually 'admin', so we do not use it
    self.start_container(self.site_name)
def doUpdate(self, db_update=True, norefresh=None, names=None, is_local=False, set_local=True):
    """Update one or more local (non-docker) sites.

    For every site in *names* this optionally kills the running
    odoo/flectra process, runs ``_doUpdate``, restarts the server to let
    it migrate the database, calls ``set_local_data`` (to disable
    outgoing mail etc.) and finally terminates the restarted process.

    Parameters
    ----------
    db_update : bool
        Passed through to ``_doUpdate``.
    norefresh : bool or None
        Defaults to ``opts.norefresh`` when None.
    names : list or None
        Site names to update; defaults to ``self.site_names``.
        (BUGFIX: was a mutable default ``[]``.)
    is_local : bool
        Unused here; kept for signature compatibility.
    set_local : bool
        Whether to manage the local server process; forced to False when
        more than one site is updated (restore scenario).
    """
    opts = self.opts
    if not names:
        names = self.site_names
    if norefresh is None:
        norefresh = opts.norefresh
    if len(names) > 1:
        # if len(names) > 1 we are restoring ...
        # would this work at all?
        set_local = False
    for site_name in names:
        # we have to get info about the remote server indirectly
        # as it could be overridden by overrideremote
        if not site_name in list(self.sites.keys()):
            print(bcolors.WARNING)
            print('*' * 80)
            print('%s is not a valid site name' % site_name)
            print(bcolors.ENDC)
            continue
        server_dic = get_remote_server_info(opts)
        # do we need to close all connections first?
        if opts.dataupdate_close_connections:
            if opts.new_target_site:
                use_site_name = opts.new_target_site
            else:
                use_site_name = site_name
            self.close_db_connections_and_delete_db(use_site_name)
        # determine what erp command to execute
        for process_name in PROCESS_NAMES:
            process_info = get_process_id(process_name,
                                          self.default_values['inner'])
            if process_info:
                break
        if set_local:
            # kill the running server process before updating
            if not process_info:
                print('odoo/flectra not running')
            else:
                if not norefresh:
                    p = psutil.Process(process_info[0][0])
                    if p.is_running():
                        p.terminate()
        self._doUpdate(db_update, norefresh, site_name,
                       opts.verbose and ' -v' or '',
                       extra_data=server_dic)
        if set_local:
            # restart the server so it can migrate the freshly loaded db
            os.chdir(self.default_values['inner'])
            prossess_pid = None
            for process_name in PROCESS_NAMES:
                run_cmd = 'bin/%s' % process_name
                if os.path.exists(run_cmd):
                    break
            try:
                if not opts.noupdatedb:  # norefresh:
                    if process_info:
                        p = subprocess.Popen(process_info[0][1],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.STDOUT)
                    else:
                        if process_name != 'start_openerp':
                            p = subprocess.Popen([run_cmd],
                                                 stdout=subprocess.PIPE,
                                                 stderr=subprocess.STDOUT)
                        else:
                            # good old start_openerp needs the venv python
                            p = subprocess.Popen([
                                '%s/bin/python' % self.default_values['inner'],
                                run_cmd
                            ], stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
                    # scan at most 100 lines of server output for a
                    # successful-startup marker
                    lcounter = 0
                    for line in getlines(p.stdout):
                        lcounter += 1
                        if lcounter > 100:
                            break
                        if "running on" in line:
                            print(bcolors.OKGREEN)
                            print("STARTUP OK")
                            print(bcolors.ENDC)
                            break
                        else:
                            print(line)
                            if 'Address already in use' in line:
                                # show who is holding the default port
                                p = subprocess.Popen(
                                    ['lsof', '-i', ':8069'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
                                result = p.communicate()
                                print(result)
                                break
                    prossess_pid = p.pid
            except Exception:
                # BUGFIX: narrowed from a bare except: (which also
                # swallowed KeyboardInterrupt/SystemExit).
                # NOTE(review): the message is misleading — os.chdir runs
                # *before* this try block; what actually failed is the
                # Popen/startup handling above.
                print('could not chdir to:%s' % self.default_values['inner'])
                pass
            if not opts.noupdatedb:
                try:
                    # make sure we do not start sending out emails and such
                    self.set_local_data()
                except Exception:
                    # we probably could not log in
                    pass
                if prossess_pid:
                    p = psutil.Process(prossess_pid, )
                    print('about to terminate %s' % p.cmdline()[-1])
                    if p.is_running():
                        p.terminate()
                        p.kill()  # just to be sure ..
                process_info = get_process_id(
                    PROCESS_NAMES_DIC[process_name],
                    self.default_values['inner'])
                if process_info:
                    p = psutil.Process(process_info[0][0])
                    # BUGFIX: the original joined the *characters* of the
                    # last cmdline element; join the cmdline parts instead
                    print('about to terminate %s' % ','.join(p.cmdline()))
                    if p.is_running():
                        try:
                            p.terminate()
                        except Exception:
                            # BUGFIX: original used '%' (incomplete format
                            # spec) which raised ValueError instead of
                            # printing the message
                            print('failed to terminate: %s' %
                                  ','.join(p.cmdline()))
def doTransfer(self):
    """Transfer data from one docker account to another.

    The following steps have to be executed:
    - dump the source
    - copy the source to the target, changing the folder in the target
    - stopping the target container
    - restoring the source dump into the target
    - restarting the target container

    The transfer is always done on localhost, but this will change.
    """
    opts = self.opts
    for site_name in self.site_names:
        slave_db_data = SITES[site_name]
        # we have to get info about the remote server indirectly
        # as it could be overridden by overrideremote
        server_dic = get_remote_server_info(opts)
        if not server_dic:
            return
        if not site_name in self.get_instance_list():
            print('*' * 80)
            print('site %s does not exist or is not initialized' % site_name)
            print('run bin/c.sh -D %s' % site_name)
            if len(self.site_names) > 1:
                continue
            return
        remote_url = server_dic.get('remote_url')
        remote_user = server_dic.get('remote_user')
        remote_data_path = server_dic.get('remote_data_path')
        dpath = '%s/%s/dump/%s.dmp' % (BASE_INFO['erp_server_data_path'],
                                       site_name, site_name)
        # get info about main site
        if opts.transferdocker:
            docker_info = slave_db_data.get('docker')
            if not docker_info or not docker_info.get('container_name'):
                print('*' * 80)
                print(
                    'no docker info found for %s, or container_name not set'
                    % site_name)
                if len(self.site_names) > 1:
                    continue
                return
        slave_info = slave_db_data.get('slave_info')
        if not slave_info:
            print('*' * 80)
            print('no slave info found for %s' % site_name)
            if len(self.site_names) > 1:
                continue
            return
        master_name = slave_info.get('master_site')
        # BUGFIX: check for a missing master_name *before* the membership
        # test; the original order produced a misleading "does not exist"
        # diagnostic when master_site was not configured at all.
        if not master_name:
            print('*' * 80)
            print('master_site not provided for %s' % site_name)
            if len(self.site_names) > 1:
                continue
            return
        if not master_name in self.get_instance_list():
            print('*' * 80)
            print('master_site %s does not exist or is not initialized'
                  % master_name)
            print('run bin/c.sh -D %s' % master_name)
            if len(self.site_names) > 1:
                continue
            return
        master_db_data = SITES[master_name]
        master_server_dic = get_remote_server_info(opts, master_name)
        master_remote_url = 'localhost'  # server_dic.get('remote_url')
        master_remote_user = server_dic.get('remote_user')
        master_remote_data_path = server_dic.get('remote_data_path')
        # update local master file, but not local database
        self.doUpdate(db_update=False, names=[master_name])
        # rsync -avzC --delete .../afbs/filestore/afbs/ .../afbstest/filestore/afbstest/
        ddiC = {
            'base_path': self.default_values['sites_home'],
            'master_name': master_name,
            'master_db_name': master_db_data['db_name'],
            'slave_name': site_name,
            'slave_db_name': slave_db_data['db_name']
        }
        # make sure the directory for the rsync target exists
        rsync_target = '%(base_path)s/%(slave_name)s/filestore/%(slave_name)s/' % ddiC
        if not os.path.exists(rsync_target):
            os.makedirs(rsync_target)
        cmd_lines = [
            'rsync -avzC --delete %(base_path)s/%(master_name)s/dump/%(master_name)s.dmp %(base_path)s/%(slave_name)s/dump/%(slave_name)s.dmp' % ddiC,
            'rsync -avzC --delete %(base_path)s/%(master_name)s/filestore/%(master_db_name)s/ %(base_path)s/%(slave_name)s/filestore/%(slave_db_name)s/' % ddiC,
        ]
        # now we have to decide whether docker needs to be used
        if opts.transferdocker:
            # stop local docker
            stopdocker = 'docker stop %s' % docker_info.get(
                'container_name')
            cmd_lines += [stopdocker]
        # execute transfer
        # NOTE(review): 'user' and 'pw' were undefined in the original
        # (guaranteed NameError; the author's own comment asked about it).
        # Fall back to the remote credentials from server_dic —
        # TODO confirm against run_commands' expected arguments.
        user = remote_user
        pw = server_dic.get('remote_pw')
        self.run_commands(cmd_lines, user, pw)
        # update database
        self.doUpdate(names=[site_name], norefresh=True)
        if opts.transferdocker:
            # restart local docker.
            # BUGFIX: the original appended the restart command to the
            # already-executed cmd_lines and re-ran the whole list,
            # repeating the rsyncs; run only the restart here.
            startdocker = 'docker restart %s' % docker_info.get(
                'container_name')
            self.run_commands([startdocker], user, pw)
def _run_shell(self, cmdline):
    """Run *cmdline* through the shell, echo it, and print its output
    when --verbose is set.  Returns the (stdout, stderr) tuple."""
    p = subprocess.Popen(cmdline, stdout=subprocess.PIPE, shell=True)
    print(cmdline)
    result = p.communicate()
    if self.opts.verbose:
        print(result)
    return result

def dump_instance(self):
    """Dump the local site's database and optionally copy the dump plus
    its filestore to a target (local or remote) site.

    Step 1: pg_dump the running database into
    ``<erp_server_data_path>/<site>/dump/<site>.dmp`` — only when the
    locally running odoo actually serves that database.
    Step 2 (``-ip`` / ``opts.use_ip_target``): rsync filestore and dump
    to the target site, either on localhost or via ssh to the target ip.

    Exits the process on fatal conditions, mirroring the original
    behavior.
    """
    # -dump -ip 10.42.0.140 redo2oo -v
    opts = self.opts
    dbname = self.site_name
    dpath = '%s/%s/dump' % (BASE_INFO['erp_server_data_path'], dbname)
    print(bcolors.WARNING)
    print('*' * 80)
    # step one, create local dump
    if os.path.exists(dpath):
        odoo = self.get_odoo(verbose=True)
        import odoorpc
        try:
            runnig_db = odoo.env.db
        except (odoorpc.error.InternalError, AttributeError):
            runnig_db = 'unknown'
        if odoo and runnig_db == dbname:
            # SECURITY(review): the password rides in the shell command
            # line (visible in `ps`); prefer passing PGPASSWORD via the
            # subprocess env= argument.
            cmdline = "PGPASSWORD=%s /usr/bin/pg_dump -h %s -U %s -Fc %s > %s/%s.dmp" % \
                (self.db_password, self.db_host, self.db_user,
                 dbname, dpath, dbname)
            print(cmdline)
            p = subprocess.Popen(cmdline, stdout=subprocess.PIPE,
                                 shell=True)
            p.communicate()
            print('dumped:', dpath)
        elif odoo:
            # odoo runs, but serves a different database
            if not opts.force:
                print((bcolors.FAIL))
                print(('*' * 80))
                print((
                    'site %s is running, not %s, I can not determin what to so. leaving!'
                    % (runnig_db, dbname)))
                if opts.new_target_site:
                    print((
                        'stop the the running odoo or use the option -F (force) if you just want to copy %s to %s'
                        % (dbname, opts.new_target_site)))
                print((bcolors.ENDC))
                sys.exit()
            # NOTE(review): message kept verbatim, though odoo *is*
            # running here (just with another db) — confirm the wording.
            print('odoo is not running, so NO! dump was created')
        else:
            print('odoo is not running, so NO! dump was created')
    else:
        print('not existing:', dpath)
        print(bcolors.ENDC)
        sys.exit()
    # if we want to copy the dumped stuff to a remote site, do it now
    if opts.use_ip_target:
        # we want to move the data to some remote server,
        # so we have to look up what path we need remotely.
        # this probably only works if we have root permission on the target
        target_site_name = dbname
        if opts.new_target_site:
            # is this a valid site?
            if not opts.new_target_site in list(self.sites.keys()):
                print('not a valid site:', opts.new_target_site)
                print(bcolors.ENDC)
                sys.exit()
            target_site_name = opts.new_target_site
        server_dic = get_remote_server_info(opts)
        remote_data_path = server_dic['remote_data_path']
        remote_user = server_dic['remote_user']
        # we have to copy the local filestore to the remote filestore
        lfst_path = '%s/%s/filestore/%s' % (
            BASE_INFO['erp_server_data_path'], dbname, dbname)
        rfst_path = '%s/%s/filestore/%s' % (
            remote_data_path, target_site_name, target_site_name)
        ipt = opts.use_ip_target
        if ipt in ['localhost', '127.0.0.1']:
            # "remote" target lives on this very machine: plain rsync
            rfst_path = '%s/%s/filestore/%s' % (
                BASE_INFO['erp_server_data_path'], target_site_name,
                target_site_name)
            dpath = '%s/%s/dump' % (BASE_INFO['erp_server_data_path'],
                                    dbname)
            print('*' * 80)
            print('will copy the site data to %s' % rfst_path)
            self._run_shell('rsync -av %s/ %s/ --delete'
                            % (lfst_path, rfst_path))
            # now copy dumped db
            local_dump = '%s/%s.dmp' % (dpath, dbname)
            remote_dump = '%s/%s/dump/%s.dmp' % (
                BASE_INFO['erp_server_data_path'], target_site_name,
                target_site_name)
            self._run_shell('rsync -av %s %s' % (local_dump, remote_dump))
        else:
            # real remote host: rsync over ssh as remote_user
            print('*' * 80)
            print('will copy the site data to %s@%s:/%s'
                  % (remote_user, opts.use_ip_target, rfst_path))
            self._run_shell('rsync -av %s/ %s@%s:%s/ --delete' % (
                lfst_path, remote_user, opts.use_ip_target, rfst_path))
            # now copy dumped db
            local_dump = '%s/%s.dmp' % (dpath, dbname)
            remote_dump = '%s/%s/dump/%s.dmp' % (
                remote_data_path, target_site_name, target_site_name)
            self._run_shell('rsync -av %s %s@%s:%s' % (
                local_dump, remote_user, opts.use_ip_target, remote_dump))
    print(bcolors.ENDC)