def _get_sites(self):
    """Locate and import the ``sites_list`` module and cache its data.

    On success sets on the instance:
      - ``sites_list_path``: directory the module was loaded from
      - ``SITES_G`` / ``SITES_L``: the global and local site registries

    First tries the packaged ``config.sites_list``; if that import fails
    for any reason, falls back to importing ``sites_list`` from the
    directory configured in ``BASE_INFO['sitesinfo_path']``.  A failure
    of the fallback propagates to the caller.
    """
    try:
        # If the sites_list does not exist yet, the import below may
        # create it and may even call sys.exit afterwards (see the old
        # monkey-patching notes in the history), so we deliberately
        # catch BaseException -- not just Exception -- to also stop a
        # SystemExit from escaping and to fall through to the fallback.
        from config import sites_list
    except BaseException:
        from config import BASE_INFO
        # Make the configured sites-info directory importable and retry
        # the import from there.  Any failure here is a genuine
        # configuration error and is allowed to propagate.
        sys.path.append(
            os.path.dirname(BASE_INFO.get('sitesinfo_path')))
        import sites_list
    self.sites_list_path = os.path.dirname(sites_list.__file__)
    self.SITES_G = sites_list.SITES_G
    self.SITES_L = sites_list.SITES_L
def get_base_info(base_info, base_defaults):
    """Prompt the user for every base setting and record the answers.

    For each key in *base_defaults* the value already stored in
    BASE_INFO (falling back to the built-in default) is offered to the
    user, and the answer is written into *base_info*.
    """
    for key, (name, explanation, fallback) in list(base_defaults.items()):
        # prefer the value as stored in BASE_INFO over the built-in default
        offered = BASE_INFO.get(key, fallback)
        base_info[key] = get_single_value(name, explanation, offered)
def editor(self):
    """Return the editor command to use.

    Precedence: BASE_INFO['site_editor'], then the EDITOR environment
    variable, then '/bin/nano' as the last resort.
    """
    return (BASE_INFO.get("site_editor")
            or os.environ.get("EDITOR")
            or "/bin/nano")
def editor(self):
    """Pick the editor to invoke.

    Order of precedence: the ``site_editor`` entry in BASE_INFO, the
    EDITOR environment variable, and finally ``/bin/nano``.
    """
    chosen = BASE_INFO.get('site_editor')
    if not chosen:
        # no editor configured for the workbench, ask the environment
        chosen = os.environ.get('EDITOR')
    return chosen if chosen else '/bin/nano'
def pull(self, auto='check'):
    """Run ``git pull`` inside the sites-list checkout.

    With auto='check' (the default) the pull only happens when
    BASE_INFO['sites_autopull'] is set.  The previous working directory
    is restored afterwards.
    """
    previous_cwd = os.getcwd()
    # nothing to do when the sites-list checkout was never located
    if not hasattr(self, 'sites_list_path'):
        return
    if auto == 'check' and not BASE_INFO.get('sites_autopull', ''):
        return
    os.chdir(self.sites_list_path)
    # run git with a minimal PATH so the system binary is used
    proc = subprocess.Popen(
        'git pull',
        stdout=PIPE,
        env=dict(os.environ, PATH='/usr/bin'),
        shell=True)
    proc.communicate()
    os.chdir(previous_cwd)
def doUpdate(self, db_update=True, norefresh=None, names=None, set_local=True):
    """Update the site's database from the remote server via the docker
    dumper image, restarting the site container around the update.

    Keyword Arguments:
        db_update {bool} -- whether the database itself is updated (default: {True})
        norefresh -- passed through to _doUpdate unchanged (default: {None})
        names {list} -- not used here; kept for interface compatibility.
            (was a shared mutable default ``[]``, now defaults to None)
        set_local {bool} -- not used yet (default: {True})
    """
    if names is None:
        names = []
    # if the containers do not yet exist, create them (master and slave)
    self.check_and_create_container()
    server_dic = get_remote_server_info(self.opts)
    # we have to decide, whether this is a local or remote update
    remote_data_path = server_dic['remote_data_path']
    dumper_image = BASE_INFO.get('docker_dumper_image')
    # the same failure message is needed in two places below
    missing_msg = (
        bcolors.FAIL
        + 'the %s image is not available. please create it first. '
          'instructions on how to do it, you find in %s/dumper' % (
              dumper_image, self.default_values['sites_home'])
        + bcolors.ENDC)
    if not dumper_image:
        # no dumper image configured at all -> we can not proceed
        print(missing_msg)
        return
    if not self.checkImage(dumper_image):
        # try to build the image once, then give up if it is still missing
        self.createDumperImage()
        if not self.checkImage(dumper_image):
            print(missing_msg)
            return
    # stop the site container while the database is being replaced
    self.stop_container(self.site_name)
    self._doUpdate(db_update, norefresh, self.site_name,
                   self.opts.verbose and ' -v' or '')
    # if we know admins password, we set it
    # for non docker pw is usualy admin, so we do not use it
    self.start_container(self.site_name)
def check_pull(self, auto='check'):
    """automatically update sites-list according to settings config file.

    Runs ``git pull`` inside the sites-list checkout and restores the
    previous working directory afterwards.

    Keyword Arguments:
        auto {str} -- should we first check config? (default: {'check'})
    """
    actual = os.getcwd()
    # nothing to pull when the sites-list checkout was never located
    if not hasattr(self, 'sites_list_path'):
        return
    if auto == 'check':
        # only pull automatically when the config opts in
        if not BASE_INFO.get('sites_autopull', ''):
            return
    os.chdir(self.sites_list_path)
    # run git with a minimal PATH so the system binary is used
    p = subprocess.Popen('git pull',
                         stdout=PIPE,
                         env=dict(os.environ, PATH='/usr/bin'),
                         shell=True)
    p.communicate()
    os.chdir(actual)
def check_and_create_sites_repo(self, force = False):
    """Make sure the sites repository configured in BASE_INFO exists.

    If ``sitesinfo_url`` is 'localhost', a skeleton sites list with one
    demo global and one demo local site is created on disk and the
    program exits so the user can inspect the result.  Otherwise, when
    the configured path does not exist, the repository is cloned via git.

    Keyword Arguments:
        force {bool} -- currently unused  (default: {False})

    Returns:
        str -- the configured sites-list path, or '' when not configured
    """
    # check whether sites repo defined in BASEINFO exists
    # if not download and install it
    sites_list_path = BASE_INFO.get('sitesinfo_path')
    if not sites_list_path:
        return ''  # not yet configured
    sites_list_url = BASE_INFO.get('sitesinfo_url')
    if sites_list_url == 'localhost':
        # the parent directory of the sites list must already exist
        bp = '/' + '/'.join([p for p in sites_list_path.split('/') if p][:-1])
        if not os.path.exists(bp):
            print(LOCALSITESLIST_BASEPATH_MISSING % bp)
        p1 = sites_list_path
        # there could be a siteslist folder existing but without
        # its inner structure (how??, but it happens!)
        must_create = False
        for n in ['__init__.py', 'sites_global', 'sites_local']:
            if not os.path.exists(os.path.normpath('%s/%s' % (p1, n))):
                must_create = True
                break
        if not os.path.exists(p1) or must_create:
            os.makedirs(p1, exist_ok=True)
            # add __init__.py
            open('%s/__init__.py' % p1, 'w').write(SITES_LIST_INI)
            # the site template; localhost replaces the placeholder ip
            template = open('%s/templates/newsite.py' % self.base_path, 'r').read()
            template = template.replace('xx.xx.xx.xx', 'localhost')
            # default values for the demo sites
            defaults = {
                'site_name' : 'demo_global',
                'marker' : self.marker,
                'base_sites_home' : '/home/%s/erp_workbench' % ACT_USER,
                'erp_provider' : PROJECT_DEFAULTS.get('erp_provider', 'odoo'),
                'erp_version' : PROJECT_DEFAULTS.get('erp_version', '12'),
                'erp_minor' : PROJECT_DEFAULTS.get('erp_minor', '12'),
                'erp_nightly' : PROJECT_DEFAULTS.get('erp_nightly', '12'),
                'base_url' : 'demo_global',
                'local_user_mail' : '*****@*****.**',
                'remote_server' : 'localhost',
                'docker_port' : 8800,
                'docker_long_poll_port' : 18800,
                'docker_hub_name' : DOCKER_DEFAULTS.get('docker_hub_name', ''),
                'erp_image_version' : DOCKER_DEFAULTS.get('erp_image_version', ''),
            }
            # create global sites
            os.mkdir('%s/sites_global' % p1)
            __ini__data = open(
                '%s/templates/sites_list__init__.py' % self.base_path).read()
            open('%s/sites_global/__init__.py' % p1, 'w').write(__ini__data)
            open('%s/sites_global/demo_global.py' % p1, 'w').write(
                SITES_GLOBAL_TEMPLATE % (
                    'demo_global', template % defaults))
            # create local sites
            os.mkdir('%s/sites_local' % p1)
            # the local registry uses SITES_L instead of SITES_G
            __ini__data = __ini__data.replace('SITES_G', 'SITES_L')
            open('%s/sites_local/__init__.py' % p1, 'w').write(__ini__data)
            defaults['site_name'] = 'demo_local'
            open('%s/sites_local/demo_local.py' % p1, 'w').write(
                SITES_GLOBAL_TEMPLATE % (
                    'demo_local', template % defaults))
            print(LOCALSITESLIST_CREATED % (
                os.path.normpath('%s/sites_global/demo_global.py' % p1),
                os.path.normpath('%s/sites_local/demo_local.py' % p1)))
            # stop here so the user can inspect the generated skeleton
            sys.exit()
    elif not os.path.exists(sites_list_path):
        # try to git clone sites_list_url
        act = os.getcwd()
        dp = '/' + '/'.join([p for p in sites_list_path.split('/') if p][:-1])
        os.chdir(dp)
        cmd_lines = ['git clone %s ' % sites_list_url]
        for cmd_line in cmd_lines:
            # run git with a minimal PATH so the system binary is used
            p = subprocess.Popen(
                cmd_line,
                stdout=PIPE,
                env=dict(os.environ, PATH='/usr/bin'),
                shell=True)
            p.communicate()
        print(LOCALSITESLIST_CLONED % (sites_list_url, os.getcwd()))
        os.chdir(act)
    return sites_list_path
def checkout_sa(opts):
    """
    get addons from repository

    Walks the 'addons' entries of the site named by opts.name, clones or
    updates every addon repository into the site's addons folder and, for
    grouped downloads, symlinks the individual modules into place.

    @opts : options as entered by the user

    Returns:
        dict -- {'failed': [...], 'need_reload': [...]} (currently never
        filled in by this function), or None when there is nothing to do.
    """
    if not opts.name:
        # need a site_name to do anythin sensible
        return
    from .git_check import gitcheck, argopts
    result = {'failed': [], 'need_reload': []}
    site_addons = []
    # a site may be declared locally or globally; the local entry wins
    is_local = SITES_LOCAL.get(opts.name) is not None
    _s = SITES.get(opts.name)
    if not _s:
        return
    if is_local:
        _s = SITES_LOCAL.get(opts.name)
    site_addons = _s.get('addons', [])
    skip_list = _s.get('skip', {}).get('addons', [])
    flag_info = _s.get('tags', {})
    dev_list = []
    # whether we want to override branches
    use_branch = opts.use_branch
    # we need to construct a dictonary with path elements to fix
    # the access urls according to the way we want to access the code repositories
    # we construct an sa_dic with
    # {'gitlab.redcor.ch': 'ssh://[email protected]:10022/', 'github...', 'access url', ..}
    sa_string = BASE_INFO.get('repo_mapper', '')
    if sa_string.endswith('/'):
        sa_string = sa_string[:-1]
    sa_dic = {}
    if sa_string:
        parts = sa_string.split(',')
        for part in parts:
            if '=' in part:
                pp = part.split('=')
                sa_dic[pp[0]] = pp[1]
    # restrict list of modules to update
    only_these_modules = []
    if opts.module_update:
        only_these_modules = opts.module_update.split(',')
    downloaded = []  # list of downloaded modules, shown when -v
    ubDic = {}  # dic with branch per module
    for site_addon in site_addons:
        if (not site_addon.get('url')) or (not site_addon):
            continue
        names = find_addon_names(site_addon)  # name
        # e.g. 'ssh://[email protected]:10022//afbs/afbs_extra_data.git'
        url = site_addon['url'] % sa_dic
        addon_name = site_addon.get('addon_name',
                                    url.split('/')[-1].split('.git')[0])
        # if we want to handle only some modules
        if only_these_modules:
            if addon_name and addon_name not in only_these_modules:
                continue
            only_these_modules.pop(only_these_modules.index(addon_name))
            if not only_these_modules:
                only_these_modules = ['']  # so it is not empty
        # if the addon is in the project folders addon_path, we assume it is
        # under developement, and we do not download it
        temp_target = os.path.normpath(
            '%s/%s/%s/%s_addons/%%s' % (BASE_INFO['project_path'],
                                        opts.name, opts.name, opts.name))
        target = os.path.normpath(
            '%s/%s/addons' % (BASE_INFO['erp_server_data_path'], opts.name))
        # when we have a folder full of addons, as it is the case with OCA modules
        # they will be downloaded to download_target
        download_target = ''
        if 'target' in site_addon:
            download_target = '%s/%s' % (target, site_addon['target'])
            download_target = os.path.normpath(download_target)
        if 'group' in site_addon:
            target = '%s/%s' % (target, site_addon['group'])
            target = os.path.normpath(target)
        # if we want to use a branch
        if use_branch or flag_info.get(addon_name):
            # if we are checking out a flag, there is no use
            # bother whether a branch exists, as we will end
            # positioning our self on comit which is pointed to
            # by the flag
            if use_branch and use_branch.startswith('all'):
                # we want to check whether a module
                # has a branch, if yes use it, if not
                # use the branch from the site description
                branch = use_branch.split(':')[-1]
                target = download_target or target
                if repo_has_branch(target, branch):
                    url = site_addon['url'] % sa_dic
                    ubDic[url] = branch
            elif use_branch:
                # we use a branch for selected addons
                if not ubDic:
                    # we need do do this only once
                    for binfo in use_branch.split(','):
                        if binfo:
                            bl = binfo.split(':')
                            if len(bl) == 2:
                                ubDic[bl[0]] = bl[1]
        # get the branch either ..
        # - from the -b option
        # - the addon description
        # - from the flag
        # - or default to master
        branch = ubDic.get(
            # is there a branch asked for
            addon_name,
            # do we have a branch for the url
            ubDic.get(
                url,
                # is there a flag for the addon
                flag_info.get(
                    addon_name,
                    # or does the site description use a branch
                    # finally as default, use master
                    site_addon.get('branch', 'master'))))
        downloaded.append([addon_name, branch])
        if not dev_list or addon_name in dev_list:
            real_target = download_target or target
            cpath = os.getcwd()
            gr = GitRepo(real_target, url)
            # GitRepo can not easily tell actual branch
            try:
                # when we create this module, there is not yet anything
                if os.path.exists(real_target):
                    actual_branch = str(git.Repo(real_target).active_branch)
                else:
                    actual_branch = None
            except TypeError as e:
                # when the repo does not exist yet
                # or the head is detached pointing to a flag
                # NOTE(review): e.message is a Python-2 idiom; on Python 3
                # this line itself raises AttributeError -- confirm intent
                message = e.message
                if 'detached' in message:
                    # "HEAD is a detached symbolic reference as it points to '669bc1f5949bc028f2a75c3e6e20fab9f20f2cfd'"
                    # NOTE(review): eval() here only strips the quotes around
                    # the sha taken from the exception text
                    sha = eval(e.message.split()[-1])
                    try:
                        if sha == repo_get_tag_sha(real_target,
                                                   flag_info.get(addon_name),
                                                   opts.verbose):
                            actual_branch = flag_info.get(addon_name)
                        # NOTE(review): when the shas differ, actual_branch
                        # stays unbound here -- potential NameError below
                    except ValueError as e:
                        if 'does not exist' in str(e):
                            pass
                        else:
                            raise
                else:
                    actual_branch = None
            # do we need to checkout a tag?
            if not os.path.exists(real_target) or branch != actual_branch:
                # create sandbox and check out
                try:
                    gr(branch)
                except subprocess.CalledProcessError as e:
                    print(bcolors.FAIL)
                    print('*' * 80)
                    print('target:', real_target)
                    print('actual_branch:', actual_branch)
                    print('target branch:', branch)
                    print(str(e))
                    print('*' * 80)
                    print(bcolors.ENDC)
                    continue
            os.chdir(real_target)
            #argopts['verbose'] = True
            argopts['checkremote'] = True
            return_counts = {}
            # if needed we reset the remote url
            reset_git_to = opts.__dict__.get(
                'reset_git'
            ) and url or ''  # this option does not exist, robert okt 2018
            set_git_remote(gr, reset_git_to)
            action_needed = gitcheck(return_counts)
            os.chdir(cpath)
            if action_needed:
                # what action is needed we find in return_counts
                if return_counts.get('to_pull'):
                    gr(branch)
        for name in names:
            # we have to download all modules, also the ones in the skiplist
            # we only should not install them
            # if name and name in skip_list:
            #     continue
            # if we did download the files to a target directory, we must create symlinks
            # from the target directory to the real_target
            if download_target:
                # make sure that target exists. this is not the case when we land here the first time
                if not os.path.exists(target):
                    os.mkdir(target)
                if not os.path.exists('%s/%s' % (target, name)):
                    # check if name exists in download_target
                    if os.path.exists('%s/%s' % (download_target, name)):
                        # construct the link
                        os.symlink('%s/%s' % (download_target, name),
                                   '%s/%s' % (target, name))
                    else:
                        # hopalla, nothing here to link to
                        # this is an error!
                        print(bcolors.FAIL)
                        print(('*' * 80))
                        print('%s/%s' % (download_target, name), 'does not exist')
                        print(bcolors.ENDC)
    if only_these_modules and only_these_modules[0]:
        # leftovers: modules asked for on the command line but never found
        print(bcolors.WARNING)
        print(('*' * 80))
        print(
            '%s where not handled, maybe these are submodules and you should name it in its addons block'
            % only_these_modules)
        print(bcolors.ENDC)
    if opts.verbose:
        print((bcolors.OKGREEN))
        print(('*' * 80))
        for d in downloaded:
            print(d)
        print((bcolors.ENDC))
    return result
shell=shell) print(p.communicate()) if __name__ == '__main__': usage = "drefresh.py -h for help on usage" parser = ArgumentParser(usage=usage) parser.add_argument("-n", "--no-docker", action="store_true", dest="no_docker", default=False, help="do not use docker") parser.add_argument("-s", "--skip", action="store", dest="skip", default='', help="skip list of addons") opts = parser.parse_args() args, unknownargs = parser.parse_known_args() if not unknownargs: print('need name of site') sys.exit() site_name = unknownargs[0] if not SITES.get(site_name): '%s is not a know site' % site_name sys.exit() pp = os.path.normpath('%s/%s' % (BASE_INFO.get('erp_server_data_path'), site_name)) if not os.path.exists(pp) and os.path.isdir(pp): print('%s does not point to a valid site sctructure' % pp) sys.exit() main(args, site_name) #
def __init__(self, opts, sites=SITES, url='unix://var/run/docker.sock', use_tunnel=False):
    """Collect all docker related info for the selected site.

    Connects to the local docker daemon, looks up the database and the
    site container and caches connection data (ip addresses, ports,
    credentials) on the instance.

    Arguments:
        opts -- parsed command line options
        sites -- dict of all known site descriptions (default: SITES)
        url {str} -- docker daemon socket url
        use_tunnel {bool} -- accepted but not used in this constructor
    """
    # defaults; may be overridden below from site description or options
    self.docker_db_admin_pw = DOCKER_DEFAULTS['dockerdbpw']
    self.docker_db_admin = DOCKER_DEFAULTS['dockerdbuser']
    super(DockerHandler, self).__init__(opts, sites)
    try:
        from docker import Client
    except ImportError:
        print('*' * 80)
        print('could not import docker')
        print('please run bin/pip install -r install/requirements.txt')
        return
    # reuse a cached docker client if a previous instance created one
    cli = self.default_values.get('docker_client')
    self.url = url
    if not cli:
        from docker import Client
        cli = Client(base_url=self.url)
        self.default_values['docker_client'] = cli
    if not self.site:
        return
    # when we are creating the db container
    # make sure the registry exists
    self.update_docker_info()
    # ----------------------
    # get the db container
    # ----------------------
    # the name of the database container is by default db
    docker_info = self.site['docker']
    self.docker_db_admin_pw = docker_info.get(
        'db_admin_pw', DOCKER_DEFAULTS['dockerdbpw'])
    if not self.opts.dockerdbname:
        # NOTE(review): db_container_name is only bound in this branch;
        # if opts.dockerdbname is set, the assignment below raises
        # NameError -- confirm whether that option is ever combined
        # with this handler
        db_container_name = docker_info.get(
            'db_container_name', DOCKER_DEFAULTS['dockerdb_container_name'])
    self.db_container_name = db_container_name
    # update the docker registry so we get info about the db_container_name
    self.update_container_info()
    # get the dbcontainer
    db_container_list = cli.containers(filters={'name': db_container_name})
    if db_container_list:
        db_container = db_container_list[0]
    else:
        return  # either db container was missing or some other problem
    # the ip address to access the db container
    self.docker_db_ip = db_container['NetworkSettings']['Networks'][
        'bridge']['IPAddress']
    # the db container allows access to the postgres server running within
    # trough a port that has been defined when the container has been created
    self.postgres_port = BASE_INFO.get('docker_postgres_port')
    # todo should we check whether the postgres port is accessible??
    # ----------------------
    # get the sites container
    # ----------------------
    container_name = docker_info['container_name']
    # 'erp_*' keys are preferred; 'odoo_*' keys are the legacy fallback
    self.docker_rpc_port = docker_info.get('erp_port',
                                           docker_info.get('odoo_port'))
    long_polling_port = docker_info.get('erp_longpoll',
                                        docker_info.get('odoo_longpoll'))
    if not long_polling_port:
        # convention: long polling defaults to rpc port + 10000
        long_polling_port = int(self.docker_rpc_port) + 10000
    self.docker_long_polling_port = long_polling_port
    self.registry = self.default_values['docker_registry'].get(
        container_name)
    try:
        self.docker_rpc_host = self.registry['NetworkSettings'][
            'IPAddress']
    except:
        # registry entry missing or incomplete -> assume local host
        self.docker_rpc_host = 'localhost'
    # --------------------------------------------------
    # get the credential to log into the db container
    # --------------------------------------------------
    # by default the odoo docker user db is 'odoo'
    self.docker_db_admin = docker_info.get('db_admin', 'odoo')
    if self.opts.dockerdbuser:
        self.docker_db_admin = self.opts.dockerdbuser or DOCKER_DEFAULTS[
            'dockerdbuser']
    # by default the odoo docker db user's pw is 'odoo'
    #self.docker_db_admin_pw = DOCKER_DEFAULTS['dockerdbpw']
    if self.opts.dockerdbpw:
        self.docker_db_admin_pw = self.opts.dockerdbpw or DOCKER_DEFAULTS[
            'dockerdbpw']
    # --------------------------------------------------
    # get the credential to log into the sites container
    # --------------------------------------------------
    docker_rpc_user = self.opts.drpcuser
    if not docker_rpc_user:
        docker_rpc_user = DOCKER_DEFAULTS['dockerrpcuser']
    self.docker_rpc_user = docker_rpc_user
    docker_rpc_user_pw = self.opts.drpcuserpw
    if not docker_rpc_user_pw:
        # no password was provided by an option
        # we try whether we can learn it from the site itself
        docker_rpc_user_pw = self.site.get('erp_admin_pw')
        if not docker_rpc_user_pw:
            docker_rpc_user_pw = DOCKER_DEFAULTS['dockerrpcuserpw']
    self.docker_rpc_user_pw = docker_rpc_user_pw
def check_and_create_container(self, container_name='', rename_container=False, pull_image=False, update_container=False):
    """create a new docker container or manage an existing one

    Keyword Arguments:
        container_name {str} -- name of the container, mandatory (default: {''})
        rename_container {bool} -- rename the container by adding a
            time-stamp to its name (default: {False})
        pull_image {bool} -- pull an actual image from dockerhub (default: {False})
        update_container {bool} -- create a container, that runs
            etc/runodoo.sh as entrypoint. --stop-after-init (default: {False})

    Raises:
        ValueError -- when the name does not belong to a known site
    """
    name = self.site_name or container_name
    site = self.site
    # remember where the docker binary lives for later template expansion
    BASE_INFO['docker_command'] = shutil.which('docker')
    if name == 'db':
        # the db container has no site description; fake a minimal one
        self.update_docker_info('db')
        site = {
            'docker': {
                'container_name': 'db',
                'erp_port': 'db',
                'erp_longpoll': 'db',
                'erp_image_version': 'db',
            }
        }
        erp_port = ''
        long_polling_port = ''
    else:
        site = self.site
        if not site:
            raise ValueError('%s is not a known site' % name)
    docker_info = site['docker']
    if not container_name:
        # get info on the docker container to use
        #'docker' : {
        #    'erp_image_version': 'odoo:9.0',
        #    'container_name' : 'afbs',
        #    'erp_port' : '8070',
        #},
        container_name = docker_info['container_name']
        erp_port = docker_info['erp_port']
        if erp_port == '??':
            print(DOCKER_INVALID_PORT % (name, name))
            return ()
        long_polling_port = docker_info.get('erp_longpoll')
        if long_polling_port == '??':
            print(DOCKER_INVALID_PORT % (name, name))
            return ()
        if not long_polling_port:
            # convention: long polling port defaults to rpc port + 10000
            long_polling_port = int(erp_port) + 10000
    if pull_image:
        image = docker_info['erp_image_version']
        if image:
            self.pull_image(image)
        return
    if rename_container:
        # park the old container under a time-stamped name
        self.stop_container(container_name)
        n = str(datetime.datetime.now()).replace(':', '_').replace(
            '.', '_').replace(' ', '_').replace('-', '_')
        self.rename_container(container_name, '%s.%s' % (container_name, n))
    # if we are running as user root, we make sure that the folders
    # that are accessed from within odoo belong to the respective user
    # we do that before we start the container, so it has immediat access
    if os.geteuid() == 0:
        # cd to the site folder, preserve old folder
        act_pwd = os.getcwd()
        t_folder = os.path.normpath(
            '%s/%s' % (BASE_INFO['erp_server_data_path'], name))
        try:
            os.chdir(t_folder)
            user_and_group = docker_info.get('external_user_group_id',
                                             '104:107')
            cmdlines = [
                ['/bin/chown', user_and_group, 'log'],
                ['/bin/chown', user_and_group, 'filestore', '-R'],
            ]
            for c in cmdlines:
                os.system(' '.join(c))
            #self.run_commands(cmdlines, self.user, pw='')
            os.chdir(act_pwd)
        except OSError:
            pass  # no such folder
    # the docker registry was created by update_docker_info
    # if this registry does not contain a description for container_name
    # we have to create it
    info_dic = {
        'erp_port': erp_port,
        'erp_longpoll': long_polling_port,
        'site_name': name,
        'container_name': container_name,
        'remote_data_path': self.site and self.site.get(
            'remote_server', {}).get('remote_data_path', '') or '',
        'erp_image_version': docker_info.get(
            'erp_image_version', docker_info.get('odoo_image_version')),
        'erp_server_data_path': BASE_INFO.get(
            'erp_server_data_path', docker_info.get('odoo_server_data_path')),
    }
    # make sure we have valid elements
    for k, v in info_dic.items():
        if k == 'erp_image_version':
            # avoid empty image version with only tag
            # NOTE(review): raises AttributeError when v is None -- confirm
            v = v.split(':')[0]
        if not v:
            print(bcolors.FAIL)
            print('*' * 80)
            print(
                'the value for %s is not set but is needed to create a docker container.'
                % k)
            print('*' * 80)
            print(bcolors.ENDC)
            sys.exit()
    if update_container:
        # create a container that runs etc/odoorunner.sh as entrypoint
        from templates.docker_templates import docker_template_update
        self._create_container(docker_template_update, info_dic)
    elif rename_container or self.default_values.get('docker_registry') \
            and not self.default_values['docker_registry'].get(container_name) \
            or (container_name == 'db'):
        if container_name != 'db':
            from templates.docker_templates import docker_template, flectra_docker_template
            if site.get('erp_provider', 'odoo') == 'flectra':
                docker_template = flectra_docker_template
            self._create_container(docker_template, info_dic)
        else:
            # we need a postgres version
            pg_version = self.use_postgres_version
            if not pg_version:
                print(bcolors.FAIL)
                print('*' * 80)
                print(
                    'you must define a postgres version like 10.0 in config/docker.yaml'
                )
                print('*' * 80)
                print(bcolors.ENDC)
                sys.exit()
            # here we need to decide , whether we run flectra or odoo
            # NOTE(review): the flectra branch imports flectra_docker_template,
            # yet docker_db_template is referenced below either way -- for
            # flectra this raises NameError; confirm intended behaviour
            if site.get('erp_provider') == 'flectra':
                from templates.docker_templates import flectra_docker_template
            else:
                from templates.docker_templates import docker_db_template
            BASE_INFO['postgres_version'] = pg_version
            docker_template = docker_db_template % BASE_INFO
            try:
                self.run_commands([docker_template], user=self.user, pw='')
            except:
                pass  # did exist allready ??
            if self.opts.verbose:
                print(docker_template)
    else:
        if self.opts.verbose:
            print('container %s allready running' % name)
def add_options_create(parser, result_dic):
    """add options to the create parser

    Registers every option of the 'create' sub-command on a
    ParserHandler wrapper; options flagged with need_name=True require
    the user to also supply a site name.

    Arguments:
        parser {argparse instance} -- instance to which arguments should be added
        result_dic -- dict the ParserHandler records its bookkeeping in
    """
    parser_manage = ParserHandler(parser, result_dic)
    parser_manage.add_argument(
        "-c", "--create",
        action="store_true",
        dest="create",
        default=False,
        help='create new site structure in %s. Name must be provided' %
        BASE_INFO.get('project_path', os.path.expanduser('projects')),
        need_name=True)
    parser_manage.add_argument(
        "-D", "--directories",
        action="store_true",
        dest="directories",
        default=False,
        help=
        'create local directories for site %s. option -n must be set and valid. This option is seldomly needed. Normaly directories are created when needed'
        % BASE_INFO.get('erp_server_data_path', BASE_PATH),
        need_name=True)
    parser_manage.add_argument(
        "--DELETELOCAL",
        action="store_true",
        dest="delete_site_local",
        default=False,
        help=
        """Delete all elements of a locally installed project. Name must be provided.\n
        This includes (for Proj_Mame):\n
        - ooda/Proj_NAME folders\n
        - ~/projecty/Proj_Name folder\n
        - virtualenv Proj_Name\n
        - database Proj_Name
        """,
        need_name=True)
    parser_manage.add_argument(
        "-lo", "--listownmodules",
        action="store_true",
        dest="listownmodules",
        default=False,
        help=
        'list installable modules from sites.py sites description. Name must be provided',
        need_name=True)
    parser_manage.add_argument(
        "-io", "--installown",
        action="store_true",
        dest="installown",
        default=False,
        help='install all modules listed as addons',
        need_name=True)
    parser_manage.add_argument(
        "-uo", "--updateown",
        action="store",
        dest="updateown",
        default='',
        help=
        'update modules listed as addons, pass a comma separated list (no spaces) or all',
        need_name=True)
    parser_manage.add_argument(
        "-ro", "--removeown",
        action="store",
        dest="removeown",
        default='',
        help=
        'remove modules listed as addons, pass a comma separated list (no spaces) or all',
        need_name=True)
    parser_manage.add_argument(
        "-I", "--install_erp_modules",
        action="store_true",
        dest="install_erp_modules",
        default=False,
        help='install modules listed as addons',
        need_name=True)
    parser_manage.add_argument(
        "-ls", "--list",
        action="store_true",
        dest="list_sites",
        default=False,
        help='list available sites')
    parser_manage.add_argument(
        "-s", "--single-step",
        action="store_true",
        dest="single_step",
        default=False,
        help=
        'load modules one after the other. MUCH! slower, but problems are easier to spot'
    )
    parser_manage.add_argument(
        "-u", "--dataupdate",
        action="store_true",
        dest="dataupdate",
        default=False,
        help=
        'update local server from remote server. Automatically set local data')
    parser_manage.add_argument(
        "-uu", "--dataupdate-close-conections",
        action="store_true",
        dest="dataupdate_close_connections",
        default=False,
        help=
        'update local server from remote server, Force close of all connection to the db'
    )
    parser_manage.add_argument(
        "-dump", "--dump-local",
        action="store_true",
        dest="dump_local",
        default=False,
        help="""
        dump database data into the servers dump folder. does not use docker. \n
        You can use the option -ipt (ip-target) to dump the site to a remote server.\n
        Using the option -NTS (new-target-site) you can define to what target site the data is dumped.
        """,
        need_name=True)
    parser_manage.add_argument(
        "-M", "--module-update",
        action="store",
        dest="module_update",
        help=
        'Pull modules listed for a site from the repository. Provide comma separated list, no spaces. Name must be provided',
        need_name=True)
    parser_manage.add_argument(
        "-m", "--modules-update",
        action="store_true",
        dest="modules_update",
        default=False,
        help=
        'Pull all modules listed for a site from the repository. Name must be provided',
        need_name=True)
    parser_manage.add_argument(
        "-b", "--use-branch",
        action="store",
        dest="use_branch",
        help=
        """use branch for addon. pass a comma separated list of addon:branch,addon:branch ..
        use all:.. if you want to use the branch for all modules.
        It will only be applied if the branch exists for the module"""
    )
    # options -ip and -ipt moved to parent_parser
    parser_manage.add_argument(
        "-NTS", "--new-target-site",
        action="store",
        dest="new_target_site",
        help="""
        copy the source site identified by name to the TARGET site.
        This mainly renames the dump file and the target folder inside filestore.
        The target site should be existing and running on the target server.
        The command does not check this!!!!
        If you want to copy a local site to an other local site do it like this:
        bin/c -dump SOURCE -NTS TARGET -ipt localhost
        """,
        need_name=True)
def main(opts, parsername, need_names_dic): """ """ # default_handler = SiteCreator try: import wingdbstub except: pass # if the name ends with /, cut it off if opts.name.endswith('/'): opts.name = opts.name[:-1] opts.subparser_name = parsername if parsername == 'create': handler = SiteCreator(opts, SITES) elif parsername == 'support': handler = SupportHandler(opts, SITES) elif parsername == 'remote': handler = RemoteHandler(opts, SITES) elif parsername == 'docker': handler = DockerHandler(opts, SITES) elif parsername == 'mail': handler = MailHandler(opts, SITES) else: handler = SiteCreator(opts, SITES) # ckeck whether the used option needs a name to work handler.check_name(need_names_dic=need_names_dic) # ckeck wheter the path to the sites-list has to be adapted # if yes, flag it to the user. To do the actual update # the user has to use the support optin --fix-sites-list from config import sites_handler sites_handler.fix_sites_list() # ckeck wheter the the sites-list has to be autoloaded sites_handler.check_pull() # ---------------------- # create commands # ---------------------- if parsername == 'create': # create # ------ # builds or updates a server structure # to do so, it does a number of steps # - creates the needed folders in $ERP_SERVER_DATA # - creates a build structure in $PROJECT_HOME/$SITENAME/$SITENAME # where $PROJECT_HOME is read from the config file. # - copies and sets up all files from skeleton directory to the build structure # this is done executing create_new_project and do_copy # - builds a virtualenv environment in the build structure # - prepares to builds an erp server within the build structure by # execution bin/build_erp within the build structure. 
# Within this bild environment the erp's module path will be set # that it points to the usual erp-workbench directories within the build substructure # and also to the directories within erp_workbench as dictated by the # various modules installed from interpreting the site declaration # in sites.py # - add a "private" addons folder within the build structure called # $SITENAME_addons. This folder is also added to the erp-site's addon path. # - set the data_dir to point to $ERP_SERVER_DATA/$SITENAME/filestore # # modules_update # ------------- if opts.create or opts.modules_update or opts.module_update: info_dic = { 'project_path': handler.default_values['inner'], 'erp_version': BASE_INFO.get('erp_version', BASE_INFO.get('odoo_version')), 'site_name': handler.site_name, 'project_type': PROJECT_DEFAULTS.get('project_type'), } if opts.create: existed = handler.create_or_update_site() if existed: if not opts.quiet: print() print('%s site allredy existed' % handler.site_name) print(SITE_EXISTED % info_dic) else: if handler.site_name: if not opts.quiet: print() print('%s site created' % handler.site_name) print(SITE_NEW % info_dic) # create the folder structure within the datafoler defined in the config # this also creates the config file used by a docker server within the # newly created folders handler.create_folders(quiet=True) create_server_config(handler) did_run_a_command = True # make sure project was added to bash_aliases handler.add_aliases() # checkout repositories checkout_sa(opts) # list_sites # ---------- # list_sites lists all existing sites both from global and local sites if opts.list_sites: list_sites(SITES, opts.quiet) did_run_a_command = True return # listownmodules # -------------- # list the modules that are declared within the selected site # installown install all erp modules declared in the selected site # todo: why are the two following options combined here??? !!!!!!!!!!!! 
if opts.listownmodules or opts.install_erp_modules: handler.install_own_modules() did_run_a_command = True return # delete_site_local # -------- # delete_site_local removes a site and all project files if opts.delete_site_local: handler.delete_site_local() did_run_a_command = True return # installown or updateown or removeown # ------------------------------------ # installown install all modules declared in the selected site # updateown updates one or all modules declared in the selected site # removeown removes one or all modules declared in the selected site # # to be able to execute do this, the target server has to be running. # this server is accessed uding odoo's rpc_api. # to do so, info on user, that should access the running server needs # to be collected. the following values # read from either the config data or can be set using command line options. # --- database --- # - db_user : the user to access the servers database # to check what modules are allready installed the servers database # has to be accessed. # option: "-dbu", "--dbuser". # default: logged in user # - db_password # option: "-p", "--dbpw". # default: admin # - dbhost: the host on which the database is running # option: "-dbh", "--dbhost" # default: localhost. # --- user accessing the running odoo server --- # - rpcuser: the login user to access the odoo server # option: "-rpcu", "--rpcuser" # default: admin. # - rpcpw: the login password to access the odoo server # option: "-P", "--rpcpw" # default: admin. # - rpcport: the the odoo server is running at # option: "-PO", "--port" # default: 8069. if opts.installown or opts.updateown or opts.removeown: handler.install_own_modules() did_run_a_command = True # dataupdate # ---------- # these options are used to copy a running remote server to a lokal # odoo instance # # dataupdate: # ----------- # this copies both an odoo db and the related file data structure from # a remote server to a locally existing (buildout created) server. 
# the needed info is gathered from diverse sources: # local_data.py # ------------- # - DB_USER: the user name with which to access the local database # default: the logged in user. # - DB_PASSWORD: the password to access the local database server # default: odoo # If the option -p --password is used, the password in local_data is # overruled. # remote data: # ------------ # to collect data on the remote server the key remote_server is used # to get info from sites.py for $SITENAME # - remote_url : the servers url # - remote_data_path : COLLECT it from ODOO_SERVER_DATA ?? # local_data.REMOTE_SERVERS: # --------------------------- # from this dictonary information on the remote server is collected # this is done looking up 'remote_url' in local_data.REMOTE_SERVERS. # - remote_user: user to acces the remote server with # - remote_pw : password to access the remote user with. should normaly the empty # as it is best only to use a public key. # - remote_data_path: how the odoo erverdata can be access on the remote server # ??? should be created automatically # sites_pw.py: # ------------ # the several password used for the services to be acces on the odoo instance, # the remote server or on the mail server can be mixed in from # sites_pw.py. # !!!! sites_pw.py should be kept separate, and should not be version controlled with the rest !!! 
# # it executes these steps: # - it executes a a command in a remote remote server in a remote shell # this command starts a temporary docker container and dumps the # database of the source server to its dump folder which is: # $REMOTE_URL:$ODOO_SERVER_DATA/$SITENAME/dump/$SITENAME.dmp # - rsync this file to: # localhost:$ODOO_SERVER_DATA/$SITENAME/dump/$SITENAME.dmp # - drop the local database $SITENAME # - create the local database $SITENAME # - restore the local datbase $SITENAME from localhost:$ODOO_SERVER_DATA/$SITENAME/dump/$SITENAME.dmp # - rsync the remote filestore to the local filestore: # which is done with a command similar to: # rsync -av $REMOTEUSER@$REMOTE_URL:$ODOO_SERVER_DATA/$SITENAME/filestore/ localhost:$ODOO_SERVER_DATA/$SITENAME/filestore/ # # run_local_docker # ---------------- # when the option -L --local_docker is used, data is copied from a docker container # running on localhost if opts.dataupdate or opts.dataupdate_close_connections: # def __init__(self, opts, default_values, site_name, foldernames=FOLDERNAMES) set_local = True handler.doUpdate(db_update=not opts.noupdatedb, norefresh=opts.norefresh, set_local=set_local) did_run_a_command = True if opts.dump_local: # def __init__(self, opts, default_values, site_name, foldernames=FOLDERNAMES) handler.dump_instance() did_run_a_command = True # ---------------------- # docker commands # ---------------------- if parsername == 'docker': # docker_create_container # ----------------------- # it creates and starts a docker container # the created container collects info from sites.py for $SITENAME # it uses the data found with the key "docker" # it collects these data: # - container_name: name of the container to create. # must be unique for each remote server # - odoo_image_version: name of the docker image used to build # the container # - odoo_port: port on which to the running odoo server within the # container can be reached. 
must be unique for each remote server if opts.docker_create_container: # "docker -dc", "--create_container", handler.check_and_create_container() did_run_a_command = True if opts.docker_create_update_container: # "docker -dcu", "--create_update_container", handler.check_and_create_container(update_container=True) did_run_a_command = True if opts.docker_create_db_container: # "docker -dcdb", "--create_db_container", handler.check_and_create_container(container_name='db') did_run_a_command = True # build image # ---------- # build docker image used by a site if opts.docker_build_image: handler.build_image() did_run_a_command = True return if opts.build_dumper_image: handler.build_dumper_image() did_run_a_command = True return # installown or updateown or removeown # ------------------------------------ # installown install all modules declared in the selected site # updateown updates one or all modules declared in the selected site # removeown removes one or all modules declared in the selected site # ----------> see create commands if opts.dinstallown or opts.dupdateown or opts.dremoveown or opts.dinstall_erp_modules: #handler = dockerHandler(opts, default_values, site_name) handler.docker_install_own_modules() did_run_a_command = True # dataupdate_docker # ------------------------------- # these options are used to copy a running remote server to a lokal # odoo instance # # see explanation create->dataupdate # run_local_docker # ---------------- # when the option -L --local_docker is used, data is copied from a docker container # running on localhost if opts.dataupdate_docker: # def __init__(self, opts, default_values, site_name, foldernames=FOLDERNAMES) set_local = True handler.doUpdate(db_update=not opts.noupdatedb, norefresh=opts.norefresh, set_local=set_local) did_run_a_command = True if opts.dump_local_docker: # def __init__(self, opts, default_values, site_name, foldernames=FOLDERNAMES) handler.dump_instance() did_run_a_command = True # start or restart 
docker if opts.docker_restart_container or opts.docker_start_container or opts.docker_stop_container: if opts.docker_start_container: handler.start_container() elif opts.docker_restart_container: handler.restart_container() else: handler.stop_container() did_run_a_command = True # ---------------------- # support commands # ---------------------- if parsername == 'support': # add_site # -------- # add_site adds a site description to the sites.py file # add_site_local adds a site description to the sites_local.py file if opts.add_site or opts.add_site_local: handler.add_site_to_sitelist() did_run_a_command = True return # drop_site # -------- # drop_site removes a site description from the sites.py file if opts.drop_site: handler.drop_site() did_run_a_command = True return # edit_site, edit_server # ---------------------- # Lets the user edit the content of config/localdat.py to edit a server # description, or change the server description in LOCALDATA['sitesinfo_path'] if opts.edit_site or opts.edit_server: if opts.edit_site: handler.check_name() handler.edit_site_or_server() did_run_a_command = True return # add_server # ---------- # add_server_to_server_list # add new server info to localdat # ---------------------------------- if opts.add_server: handler.add_server_to_server_list() did_run_a_command = True return
def add_site_to_sitelist(self):
    """Add a new site description to the global or local site list.

    The target name is read from ``self.opts.name``.  Supported forms:
      - ``name``            plain site name
      - ``name::template``  copy the description of an existing site
      - ``name:subsite``    add to a specific sub-site-list

    Returns a dict describing what happened (mainly for testing):
    ``{'existed': ...}``, ``{'added': ...}``, ``{'error': ...}`` or
    ``{'pvals': ...}``.  Calls ``sys.exit()`` on invalid / ambiguous input.
    """
    from config import sites_handler
    # check if user wants to copy an existing site (name::template syntax)
    template = ''
    #!!!!!!!!!!!!!!!!!!! does not work anymore
    opts = self.opts
    if '::' in self.opts.name:
        name, template = self.opts.name.split('::')
        self.site_names = [name]
        self.opts.name = name
    #site_name = self.site_name
    #name = site_name
    # site names may only contain [A-Za-z0-9_]
    has_forbidden_chars = re.compile(r'[^A-Za-z0-9_]').search
    # if we have only one sub-site-list, we can take this one.
    # if there are several, the user has to tell which one
    siteinfos = BASE_INFO.get('siteinfos')
    siteinfo_names = siteinfos and list(siteinfos.keys()) or []
    # split "name:subsite" into its parts; subsite defaults to ''
    site_name, subsite_name = (opts.name.split(':') + [''])[:2]
    # make sure all other processes pick the right name
    self.site_names = [site_name]
    self.default_values['site_name'] = site_name
    if len(siteinfo_names) == 1:
        # if we have exactly one subsite, use it
        subsite_name = siteinfo_names[0]
    elif not subsite_name in siteinfo_names:
        # ambiguous target: the user must name the sub-site-list explicitly
        print(bcolors.FAIL)
        print('*' * 80)
        print('There is more than one place to add the new site')
        print(
            'You have to add the subsite name to the sitename after a colon'
        )
        if site_name:
            print('like %s:%s' % (site_name, siteinfo_names[0]))
        else:
            print('like new_site_name:localhost')
        print('valid site-list-names are %s' % siteinfo_names)
        print(bcolors.ENDC)
        sys.exit()
    if not site_name or has_forbidden_chars(site_name):
        print(bcolors.FAIL)
        print('*' * 80)
        print('The name %s contains forbidden charaters or is too short' %
              opts.name)
        print('only [A-Za-z0-9_] allowed')
        print(bcolors.ENDC)
        sys.exit()
    self.default_values['marker'] = MARKER
    # was a version option used?
    if opts.erp_version:
        erp_version = opts.erp_version
        # split "12.0"-style version into major and minor part; the
        # appended '.0' provides a default minor when none was given
        parts = erp_version.split('.') + ['.0']
        erp_version, erp_minor = parts[:2]
        if erp_minor == '0':
            erp_minor = '.0'
        self.default_values['erp_version'] = erp_version
        self.default_values['erp_minor'] = erp_minor
        self.default_values['erp_nightly'] = '%s%s' % (erp_version,
                                                       erp_minor)
    # if the site already exists, we bail out
    if self.sites.get(self.site_name):
        print("site %s allready defined" % self.site_name)
        return {
            'existed': self.site_name,
            'site': self.sites[self.site_name]
        }
    # make sure the variables for the docker port and remote site are set
    docker_port = DOCKER_DEFAULTS.get('docker_port', 9000)
    docker_long_poll_port = 19000
    if opts.docker_port:
        try:
            docker_port = int(opts.docker_port)
            # long-poll port is conventionally the web port + 10000
            docker_long_poll_port = docker_port + 10000
        except Exception as e:
            print((bcolors.FAIL))
            print(('*' * 80))
            print(('%s is not a valid port number' % opts.docker_port))
            print((bcolors.ENDC))
            return {'error': e}
    else:
        docker_port = self.default_values.get('docker_port', docker_port)
        docker_long_poll_port = self.default_values.get(
            'docker_long_poll_port', docker_long_poll_port)
    self.default_values['docker_port'] = docker_port
    self.default_values['docker_long_poll_port'] = docker_long_poll_port
    # docker hub
    self.default_values['docker_hub_name'] = DOCKER_DEFAULTS.get(
        'docker_hub_name', 'dockerhubname missing')
    self.default_values['erp_image_version'] = PROJECT_DEFAULTS.get(
        'project_type', 'odoo')
    if opts.remote_server:
        self.default_values['remote_server'] = opts.remote_server
    else:
        self.default_values['remote_server'] = self.default_values.get(
            'remote_server', '127.0.0.1')
    if opts.add_site:
        # ---- add to the GLOBAL sites list ----
        # before we can construct a site description we need a file with site values
        pvals = {}  # dict to get link to the preset-vals-file
        # preset_values = self.get_preset_values(pvals)
        if 1:  #preset_values:
            result = sites_handler.add_site_global(
                handler=self, template_name=template,
                sublist=subsite_name)  #, preset_values=preset_values)
            if result:
                if not opts.quiet:
                    print("%s added to sites.py" % self.site_name)
            else:
                if pvals.get('pvals_path'):
                    print((bcolors.WARNING))
                    print(('*' * 80))
                    print('a file with values for the new site was created')
                    print((pvals.get('pvals_path')))
                    print(
                        'please edit and adapt it. It will be incorporated in the site description'
                    )
                    print('and will be used to set the site values')
                    print(('*' * 80))
                    print((bcolors.ENDC))
            # no preset stuff yet
            return {
                'added': self.site_name,
                'site': result,  #self.sites[self.site_name],
            }
        # NOTE(review): everything below in this branch is unreachable --
        # the `if 1:` block above always returns.  Kept for reference.
        # before we can construct a site description we need a file with site values
        if opts.use_preset:
            pvals = {}  # dict to get link to the preset-vals-file
            preset_values = self.get_preset_values.get_preset_values(pvals)
            if preset_values:
                result = sites_handler.add_site_global(
                    handler=self,
                    template_name=template,
                    preset_values=preset_values)
            else:
                if pvals.get('pvals_path'):
                    print((bcolors.WARNING))
                    print(('*' * 80))
                    print(
                        'a file with values for the new site was created')
                    print((pvals.get('pvals_path')))
                    print(
                        'please edit and adapt it. It will be incorporated in the site description'
                    )
                    print('and will be used to set the site values')
                    print(('*' * 80))
                    print((bcolors.ENDC))
                else:
                    print((bcolors.FAIL))
                    print(('*' * 80))
                    print(
                        'could not read or generate a file with default values'
                    )
                    print(('*' * 80))
                    print((bcolors.ENDC))
                return {'pvals': 'still ??'}
        else:
            result = sites_handler.add_site_global(
                handler=self,
                template_name=template)  #preset_values=preset_values)
        if result:
            print("%s added to sites.py" % self.site_name)
        return {'added': self.site_name, 'result': result, 'type': 'G'}
    elif opts.add_site_local:
        # ---- add to the LOCAL sites list ----
        # we read untill we find an empty }
        # before we can construct a site description we need a file with site values
        if opts.__dict__.get('use_preset'):
            pvals = {}  # dict to get link to the preset-vals-file
            preset_values = self.preset_handler.get_preset_values(
                pvals, is_local=True)
            result = sites_handler.add_site_local(
                handler=self,
                template_name=template,
                preset_values=preset_values)
        else:
            result = sites_handler.add_site_local(
                handler=self,
                template_name=template)  #, preset_values=preset_values)
        if result:
            print("%s added to sites.py (local)" % self.site_name)
        return {'added': self.site_name, 'result': result, 'type': 'L'}
    return {'error': 'should not have come here'}
def test_editor(self):
    # The handler is expected to expose the editor configured under
    # 'site_editor' in BASE_INFO (assumes `editor` resolves to that
    # value on access -- TODO confirm it is a property).
    from config import BASE_INFO
    configured_editor = BASE_INFO.get('site_editor')
    self.assertEqual(configured_editor, self.handler.editor)
def _doUpdate(self, db_update=True, norefresh=None, site_name='',
              verbose='', extra_data={}):
    """Pull database dump and filestore for *site_name* from its remote
    server and restore them into the local site structure.

    db_update  : when true, drop/recreate/restore the local database after
                 the remote files have been rsynced.
    norefresh  : when true, skip the remote dump/rsync step and use the
                 already-local files.
    site_name  : name of the site to update.
    verbose    : verbose-flag string passed through to the shell scripts.
    extra_data : optional overrides for remote_user / remote_url /
                 remote_data_path / remote_pw; each falls back to the
                 matching attribute on self.
                 NOTE(review): mutable default argument -- only read here,
                 so it works, but fragile if ever mutated.
    """
    opts = self.opts
    try:
        # we want to make sure the local directories exist
        self.create_folders(site_name, quiet=True)
    except AttributeError:
        pass
    # resolve connection values: explicit extra_data entries win over
    # the attributes configured on self
    if 'remote_user' in extra_data:
        remote_user = extra_data['remote_user']
    else:
        remote_user = self.remote_user
    #remote_data_path = self.remote_data_path
    if 'remote_url' in extra_data:  # self.opts.use_ip:
        remote_url = extra_data['remote_url']
    else:
        remote_url = self.remote_url
    # remote_data_path = self.remote_data_path # server_info['remote_data_path']
    if 'remote_data_path' in extra_data:
        remote_data_path = extra_data['remote_data_path']
    else:
        remote_data_path = self.remote_data_path
    #
    if 'remote_pw' in extra_data:
        remote_pw = extra_data['remote_pw']
    else:
        remote_pw = self.db_password
    # make sure we are in odo_instances so we find the scripts
    actual_pwd = os.getcwd()
    os.chdir(self.sites_home)
    # self.remote_user #server_info.get('user', 'remote_user') # da is ein wischi waschi ..
    user = remote_user
    pw = remote_pw  # self.db_password
    # check if we have to copy things to a new target
    use_site_name = site_name
    if opts.new_target_site:
        use_site_name = opts.new_target_site
    # path where the downloaded dump is expected afterwards
    dpath = '%s/%s/dump/%s.dmp' % (self.data_path, use_site_name,
                                   use_site_name)
    if not norefresh:
        # ---------------------------
        # updatedb.sh
        # ---------------------------
        """
        #!/bin/sh
        # updatedb.sh executes the script dodump.sh on a remote server
        # dodump.sh creates a temporary docker container that dumps a servers database
        # into this servers data folder within erp_workbench
        # parameters:
        # $1 : site name
        # $2 : server url
        # $3 : remote_data_path like /root/erp_workbench
        # $4 : login name on remote server
        # $5 : local path to odo server data
        # $6 : local path to odo_instances
        # $7 : vebose flag
        c1="ssh $4@$2 'bash -s' < $6/scripts/dodump.sh $1 $3 $7"
        echo "-1-" $c1
        $c1
        c2="rsync -avzC --delete $4@$2:$3/$1/filestore/ $5/$1/filestore/"
        echo "-2-" $c2
        $c2
        c3="rsync -avzC --delete $4@$2:$3/$1/dump/ $5/$1/dump/"
        echo "-3-" $c3
        $c3
        """
        # ---------------------------
        # dodump.sh
        # ---------------------------
        """
        #!/bin/sh
        # dodump.sh dumps a site's database into its folder
        # the folder is /root/erp_workbench/$1/dump where $1 represents the site's name
        # dodump creates a temporary docker container that dumps a servers database
        # it is called by updatedb.sh and executed on the remote computer
        # $1 : name of the server       updatedb.$1
        # $2 : path to the location of odo_instances   updatedb.$3
        #      on the remote server
        # $3 : verbose flag             updatedb.$7
        echo '----------- running dodump ----------------'
        FILE=$2/dumper/rundumper.py
        echo "FILE:$FILE"
        echo $HOSTNAME
        if [ -f "$FILE" ]
        then
            {
                echo 'calling python' $FILE $1 $3
                python $FILE $1 -d $3
            }
        else
            {
                echo 'kein rundumper'
                sudo docker run -v $2:/mnt/sites --rm=true --link db:db dbdumper -d $1
            }
        fi
        """
        # run the remote dump and rsync dump + filestore to localhost
        os.system('%s/scripts/updatedb.sh %s %s %s %s %s %s %s' % (
            self.default_values['sites_home'],  # no param
            site_name,  # param 1
            remote_url,  # param 2
            remote_data_path,  # param 3
            remote_user,  # param 4
            self.erp_server_data_path,  # param 5
            self.sites_home,  # param 6
            verbose,  # param 7
        ))
        # if remote user is not root we first have to copy things where we can access it
        if remote_user != 'root':
            """
            dodump_remote.sh is run on the reote server, and copies everything
            to a place, where it can be accessed by user that is logged in to
            the remote server.
            Assuming that the remote server is: 82.220.39.73
            Assuming that the remote data is at: /root/erp_workbench
            the local user loggs in to the remote server as: odooprojects
            the remote user has its odoo data in: /home/odooprojects/erp_workbench
            the server name for which we want to copy the data is: afbstest
            then on the REMOTE server we have to execute the following commands:
            rsync -av /root/erp_workbench/afbstest/filestore/ /home/odooprojects/erp_workbench/afbstest/filestore/
            rsync -av /root/erp_workbench/afbstest/dump/ /home/odooprojects/erp_workbench/afbstest/dump/
            chmod a+rw /home/odooprojects/erp_workbench/afbstest/* -R
            the above commands will be executed on the REMOTE machine by calling:
            #sudo $5/scripts/site_syncer.py $1 $2 $3 $4 $5
            sudo /root/odoo_sites/scripts/site_syncer.py afbstest 82.220.39.73 /home/odoprojects/erp_workbench odoprojects /root/odoo_sites
            #!/bin/sh
            # dodump_remote.sh rsyncs a remote site in /root/odoo_sites/SITENAME
            # to /home/someuser/odoo_sites/SITENAME, so we can rsync it from there
            # the folder is /root/erp_workbench/$1/dump where $1 represents the site's name
            # parameters:
            # $1 : site name
            # $2 : server url
            # $3 : remote_data_path to the server data like /root/odoo_server_data
            # $4 : login name on remote server
            # $5 : path to roots instance home on the remote server (/root/erp_workbench)
            echo sudo $5/scripts/site_syncer.py $1 $2 $3 $4 $5
            sudo $5/scripts/site_syncer.py $1 $2 $3 $4 $5
            """
            # this calls the remote site_syncer.py script
            # it copies needed files to the users home and changes ownership
            remote_user_data_path = remote_data_path  # self.remote_user_data_path
            os.system(
                '%s/scripts/updatedb_remote.sh %s %s %s %s %s' %
                (self.default_values['sites_home'], site_name, remote_url,
                 remote_user_data_path, remote_user,
                 self.remote_sites_home
                 ))  # where scripts/site_syncer.py is to be found
        # -----------------------------------------------
        # rsync the remote files to the local directories
        # -----------------------------------------------
        """
        #!/bin/sh
        # updatedb.sh executes the script dodump on a remote server
        # dodump creates a temporary docker container that dumps a servers database
        # into this servers data folder within erp_workbench
        # parameters:
        # $1 : site name
        # $2 : server url
        # $3 : remote_data_path like /root/erp_workbench
        # $4 : login name on remote server
        # $5 : erp_server_data_path
        # $6 : target site name
        echo ssh $4@$2 'bash -s' < scripts/dodump.sh $1
        ssh $4@$2 'bash -s' < scripts/dodump.sh $1
        echo rsync -avzC --delete $4@$2:/$3/$1/filestore/$1 $5/$6/filestore/$6
        rsync -avzC --delete $4@$2:/$3/$1/filestore/$1 $5/$6/filestore/$6
        echo rsync -avzC --delete $4@$2:/$3/$1/dump/$1.dmp $5/$6/dump/$6.dmp
        rsync -avzC --delete $4@$2:/$3/$1/dump/$1.dmp $5/$6/dump/$6.dmp
        """
        if remote_user != 'root':
            remote_user_data_path = remote_data_path
            #remote_data_path = self.remote_user_data_path
        os.system('%s/scripts/rsync_remote_local.sh %s %s %s %s %s %s' % (
            self.default_values['sites_home'],
            site_name,
            remote_url,
            remote_data_path,
            remote_user,
            BASE_INFO['erp_server_data_path'],
            use_site_name,
        ))
    if not os.path.exists(dpath):
        # nothing was downloaded -> nothing to restore
        print(
            '-------------------------------------------------------')
        print('%s not found' % dpath)
        print(
            '-------------------------------------------------------')
        return
    try:
        if self.opts.backup:
            # no need to update database
            return
    except AttributeError:
        pass
    if db_update:
        # make sure the needed directories exist
        fp = '%s/%s/filestore' % (self.data_path, use_site_name)
        # NOTE(review): this condition can never be true -- a path cannot
        # both not exist and be a directory.  Probably meant
        # `not (os.path.exists(fp) and os.path.isdir(fp))` -- confirm.
        if not os.path.exists(fp) and os.path.isdir(fp):
            print(bcolors.FAIL +
                  '%s is not yet created, can not be updated' %
                  use_site_name + bcolors.ENDC)
            return
        pw = self.login_info['db_password']
        user = self.login_info['db_user']
        shell = False
        # mac needs absolute path to psql
        where = os.path.split(which('psql'))[0]
        wd = which('docker')
        dumper_image_name = BASE_INFO.get('docker_dumper_image')
        if wd:
            whered = os.path.split(wd)[0]
        else:
            whered = ''
        # restore via a temporary dumper container linked to the db container
        if whered:
            cmd_lines_docker = [[
                '%s/docker run -v %s:/mnt/sites -v %s/dumper/:/mnt/sites/dumper --rm=true --link db:db -it %s -r %s'
                % (whered, BASE_INFO['erp_server_data_path'], BASE_PATH,
                   dumper_image_name, use_site_name)
            ]]
        else:
            cmd_lines_docker = [[
                'docker run -v %s:/mnt/sites -v %s/dumper/:/mnt/sites/dumper --rm=true --link db:db -it %s -r %s'
                % (BASE_INFO['erp_server_data_path'], BASE_PATH,
                   dumper_image_name, use_site_name)
            ]]
        # if we know admins password, we set it
        # for non docker pw is usualy admin, so we do not use it
        #adminpw = self.sites[self.site_name].get('erp_admin_pw')
        # if adminpw:
        #cmd_lines_docker += [['%s/psql' % where, '-U', user, '-d', site_name, '-c', "update res_users set password='******' where login='******';" % adminpw]]
        cmd_lines_no_docker = [
            # delete the local database(s)
            [
                '%s/psql' % where, '-U', user, '-d', 'postgres', '-c',
                "drop database IF EXISTS %s;" % use_site_name
            ],
            # create database again
            [
                '%s/psql' % where, '-U', user, '-d', 'postgres', '-c',
                "create database %s;" % use_site_name
            ],
            # do the actual reading of the database
            # the database will have thae same name as on the remote server
            [
                '%s/pg_restore' % where, '-O', '-U', user, '-d',
                use_site_name, dpath
            ],
            # set standard password
            [
                '%s/psql' % where, '-U', user, '-d', use_site_name, '-c',
                "update res_users set password='******' where login='******';"
            ],
        ]
        # always make the synced data writable for everybody
        cmd_lines = [
            {
                'cmd_line': [
                    'chmod', 'a+rw',
                    '%s/%s/filestore' %
                    (BASE_INFO['erp_server_data_path'], use_site_name)
                ],
                'is_builtin': True
            },
            {
                'cmd_line': [
                    'chmod', 'a+rw',
                    '%s/%s/filestore/' %
                    (BASE_INFO['erp_server_data_path'], use_site_name),
                    '-R'
                ],
                'is_builtin': True
            },
            {
                'cmd_line': [
                    'chmod', 'a+rw',
                    '%s/%s/log' %
                    (BASE_INFO['erp_server_data_path'], use_site_name)
                ],
                'is_builtin': True
            },
        ]
        if self.opts.subparser_name == 'docker' and self.opts.dataupdate_docker:  # or self.opts.transferdocker:
            cmd_lines = cmd_lines_docker + cmd_lines
            shell = True
        else:
            cmd_lines = cmd_lines_no_docker + cmd_lines
        self.run_commands(cmd_lines, shell=shell, user=user, pw=pw)
    # go back where we have been
    os.chdir(actual_pwd)
def check_and_create_sites_repo(self, force=False):
    """Make sure the site-list repositories configured in BASE_INFO exist.

    For every entry in BASE_INFO['siteinfos'] (name -> url):
      - url == 'localhost': create the list locally via _create_sites_rep
      - otherwise: git-clone the url below sitesinfo_path if missing
    When new lists were added, the outer __init__.py is rewritten and the
    process exits via sys.exit() (so the lists get re-imported on the
    next run).

    force : unused in this body -- kept for interface compatibility.
    Returns the site-list directory path, or '' when not yet configured.
    """
    # check whether sites repo defined in BASEINFO exists
    # if not download and install it
    must_exit = False
    must_update_ini = False
    sitelist_names = []
    sites_list_path = BASE_INFO.get('sitesinfo_path')
    if not sites_list_path:
        return ''  # not yet configured
    # create sitelisth path
    os.makedirs(sites_list_path, exist_ok=True)
    siteinfos = BASE_INFO.get('siteinfos', [])
    if siteinfos:
        for sitelist_name, sites_list_url in list(siteinfos.items()):
            #sites_list_url = BASE_INFO.get('sitesinfo_url')
            sitelist_names.append(sitelist_name)
            running_path = os.path.normpath(
                '%s/%s' % (sites_list_path, sitelist_name))
            if sites_list_url == 'localhost':
                # local-only list: create it in place
                must_exit = self._create_sites_rep(running_path)
                # when we create the site-list, we must also create the ini file
                if not os.path.exists('%s/__init__.py' % sites_list_path):
                    must_update_ini = True
            elif not os.path.exists(running_path):
                # try to git clone sites_list_url
                must_update_ini = True
                act = os.getcwd()
                #dp = '/' + '/'.join([p for p in running_path.split('/') if p][:-1])
                os.chdir(sites_list_path)
                cmd_line = [
                    'git clone %s %s' % (sites_list_url, sitelist_name)
                ]
                p = subprocess.Popen(cmd_line,
                                     stdout=PIPE,
                                     stderr=PIPE,
                                     env=dict(os.environ,
                                              PATH='/usr/bin'),
                                     shell=True)
                result = p.communicate()
                if p.returncode:
                    print(bcolors.FAIL)
                    print('Error:')
                    print('The commandline %s produced an error' %
                          cmd_line)
                    print(
                        'please check if the sites_list in config/config.yaml is properly formated'
                    )
                    # stderr is bytes; print it line by line
                    for part in result[1].split(b'\n'):
                        print(part.decode("utf-8"))
                    print(bcolors.ENDC)
                    # clean up
                    # NOTE(review): os.unlink only removes files; if a failed
                    # clone left a directory here this raises -- confirm
                    # whether shutil.rmtree was intended.
                    if os.path.exists(running_path):
                        os.unlink(running_path)
                else:
                    print(bcolors.WARNING)
                    print(LOCALSITESLIST_CLONED %
                          (sites_list_url, os.getcwd()))
                os.chdir(act)
                # now create missing elements
                if not p.returncode:
                    must_exit = self._create_sites_rep(running_path)
    # create outer inifile if needed
    if must_update_ini:
        ini = SITES_LIST_OUTER_HEAD
        for sn in sitelist_names:
            ini += (SITES_LIST_OUTER_LINE % {'file_name': sn})
        with open('%s/__init__.py' % sites_list_path, 'w') as f:
            f.write(ini)
        sys.exit()
    if must_exit:
        sys.exit()
    return sites_list_path
dest="no_docker", default=False, help="do not use docker", ) parser.add_argument( "-s", "--skip", action="store", dest="skip", default="", help="skip list of addons", ) opts = parser.parse_args() args, unknownargs = parser.parse_known_args() if not unknownargs: print("need name of site") sys.exit() site_name = unknownargs[0] if not SITES.get(site_name): "%s is not a know site" % site_name sys.exit() pp = os.path.normpath("%s/%s" % (BASE_INFO.get("erp_server_data_path"), site_name)) if not os.path.exists(pp) and os.path.isdir(pp): print("%s does not point to a valid site sctructure" % pp) sys.exit() main(args, site_name) #
def main(opts, parsername, need_names_dic): """ """ # default_handler = SiteCreator try: import wingdbstub except: pass opts.subparser_name = parsername if parsername == 'create': handler = SiteCreator(opts, SITES) elif parsername == 'support': handler = SupportHandler(opts, SITES) elif parsername == 'remote': handler = RemoteHandler(opts, SITES) elif parsername == 'docker': handler = DockerHandler(opts, SITES) elif parsername == 'mail': handler = MailHandler(opts, SITES) else: handler = SiteCreator(opts, SITES) # ckeck whether the used option needs a name to work handler.check_name(need_names_dic=need_names_dic) # ---------------------- # create commands # ---------------------- if parsername == 'create': # create # ------ # builds or updates a server structure # to do so, it does a number of steps # - creates the needed folders in $ERP_SERVER_DATA # - creates a build structure in $PROJECT_HOME/$SITENAME/$SITENAME # where $PROJECT_HOME is read from the config file. # - copies and sets up all files from skeleton directory to the build structure # this is done executing create_new_project and do_copy # - builds a virtualenv environment in the build structure # - prepares to builds an erp server within the build structure by # execution bin/build_erp within the build structure. # Within this bild environment the erp's module path will be set # that it points to the usual erp-workbench directories within the build substructure # and also to the directories within erp_workbench as dictated by the # various modules installed from interpreting the site declaration # in sites.py # - add a "private" addons folder within the build structure called # $SITENAME_addons. This folder is also added to the erp-site's addon path. 
# - set the data_dir to point to $ERP_SERVER_DATA/$SITENAME/filestore # # modules_update # ------------- if opts.create or opts.modules_update or opts.module_update: info_dic = { 'project_path': handler.default_values['inner'], 'erp_version': BASE_INFO.get('erp_version'), 'site_name': handler.site_name } if opts.create: existed = handler.create_or_update_site() if existed: if not opts.quiet: print() print('%s site allredy existed' % handler.site_name) print(SITE_EXISTED % info_dic) else: if handler.site_name: if not opts.quiet: print() print('%s site created' % handler.site_name) print(SITE_NEW % info_dic) # create the folder structure within the datafoler defined in the config # this also creates the config file used by a docker server within the # newly created folders handler.create_folders(quiet=True) create_server_config(handler) did_run_a_command = True # make sure project was added to bash_aliases handler.add_aliases() # checkout repositories checkout_sa(opts) # list_sites # ---------- # list_sites lists all existing sites both from global and local sites if opts.list_sites: list_sites(SITES, opts.quiet) did_run_a_command = True return # listownmodules # -------------- # list the modules that are declared within the selected site # installown install all erp modules declared in the selected site # todo: why are the two following options combined here??? !!!!!!!!!!!! 
if opts.listownmodules or opts.install_erp_modules: handler.install_own_modules() did_run_a_command = True return # delete_site_local # -------- # delete_site_local removes a site and all project files if opts.delete_site_local: handler.delete_site_local() did_run_a_command = True return # installown or updateown or removeown # ------------------------------------ # installown install all modules declared in the selected site # updateown updates one or all modules declared in the selected site # removeown removes one or all modules declared in the selected site # # to be able to execute do this, the target server has to be running. # this server is accessed uding odoo's rpc_api. # to do so, info on user, that should access the running server needs # to be collected. the following values # read from either the config data or can be set using command line options. # --- database --- # - db_user : the user to access the servers database # to check what modules are allready installed the servers database # has to be accessed. # option: "-dbu", "--dbuser". # default: logged in user # - db_password # option: "-p", "--dbpw". # default: admin # - dbhost: the host on which the database is running # option: "-dbh", "--dbhost" # default: localhost. # --- user accessing the running odoo server --- # - rpcuser: the login user to access the odoo server # option: "-rpcu", "--rpcuser" # default: admin. # - rpcpw: the login password to access the odoo server # option: "-P", "--rpcpw" # default: admin. # - rpcport: the the odoo server is running at # option: "-PO", "--port" # default: 8069. 
if opts.installown or opts.updateown or opts.removeown: handler.install_own_modules() did_run_a_command = True # ---------------------- # docker commands # ---------------------- if parsername == 'docker': # docker_create_container # ----------------------- # it creates and starts a docker container # the created container collects info from sites.py for $SITENAME # it uses the data found with the key "docker" # it collects these data: # - container_name: name of the container to create. # must be unique for each remote server # - odoo_image_version: name of the docker image used to build # the container # - odoo_port: port on which to the running odoo server within the # container can be reached. must be unique for each remote server if opts.docker_create_container: # "docker -dc", "--create_container", handler.check_and_create_container() did_run_a_command = True if opts.docker_create_update_container: # "docker -dcu", "--create_update_container", handler.check_and_create_container(update_container=True) did_run_a_command = True if opts.docker_create_db_container: # "docker -dcdb", "--create_db_container", handler.check_and_create_container(container_name='db') did_run_a_command = True # build image # ---------- # build docker image used by a site if opts.docker_build_image: handler.build_image() did_run_a_command = True return # installown or updateown or removeown # ------------------------------------ # installown install all modules declared in the selected site # updateown updates one or all modules declared in the selected site # removeown removes one or all modules declared in the selected site # ----------> see create commands if opts.dinstallown or opts.dupdateown or opts.dremoveown or opts.dinstallodoomodules: #handler = dockerHandler(opts, default_values, site_name) handler.docker_install_own_modules() did_run_a_command = True # ---------------------- # support commands # ---------------------- if parsername == 'support': # add_site # -------- # 
add_site adds a site description to the sites.py file # add_site_local adds a site description to the sites_local.py file if opts.add_site or opts.add_site_local: handler.add_site_to_sitelist() did_run_a_command = True return # drop_site # -------- # drop_site removes a site description from the sites.py file if opts.drop_site: handler.drop_site() did_run_a_command = True return # edit_site, edit_server # ---------------------- # Lets the user edit the content of config/localdat.py to edit a server # description, or change the server description in LOCALDATA['sitesinfo_path'] if opts.edit_site or opts.edit_server: if opts.edit_site: handler.check_name() handler.edit_site_or_server() did_run_a_command = True return