def test_pylint(self):
    # Run pylint over the Odoo root plus any out-of-tree addons, enabling
    # only the message codes in self.ENABLED_CODES and flagging imports of
    # self.BAD_MODULES; the test fails on any reported message.
    if pylint is None:
        self._skip_test('please install pylint')
    if LooseVersion(getattr(pylint, '__version__', '0.0.1')) < LooseVersion('1.6.4'):
        self._skip_test('please upgrade pylint to >= 1.6.4')
    paths = [tools.config['root_path']]
    for module in get_modules():
        module_path = get_module_path(module)
        # Core addons live under <root>/addons and are already covered by
        # root_path; only out-of-tree addons need to be added explicitly.
        if not module_path.startswith(
                join(tools.config['root_path'], 'addons')):
            paths.append(module_path)
    options = [
        '--disable=all',
        '--enable=%s' % ','.join(self.ENABLED_CODES),
        '--reports=n',
        "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
        '--deprecated-modules=%s' % ','.join(self.BAD_MODULES),
    ]
    try:
        with open(devnull, 'w') as devnull_file:
            process = subprocess.Popen(['pylint'] + options + paths,
                                       stdout=subprocess.PIPE,
                                       stderr=devnull_file)
    except (OSError, IOError):
        self._skip_test('pylint executable not found in the path')
    else:
        out = process.communicate()[0]
        if process.returncode:
            # NOTE(review): "\n" + out assumes text output; on Python 3
            # communicate() returns bytes unless universal_newlines is
            # set — confirm the target interpreter.
            self.fail(msg="\n" + out)
def _write_file(self, module_name, version, content, filename="upgrade_analysis.txt"):
    """Write ``content`` to the analysis file for ``module_name``/``version``.

    Core Odoo modules are written under the configured ``upgrade_path``;
    other modules go into their own ``migrations/<version>`` directory.

    :param module_name: technical name of the module being analysed
    :param version: target version (subdirectory name)
    :param content: text to write
    :param filename: analysis file name inside the version directory
    :return: an ``"ERROR: ..."`` message string on failure, ``None`` on success
    """
    module = self.env["ir.module.module"].search([("name", "=", module_name)])[0]
    if module.is_odoo_module:
        if not self.upgrade_path:
            return (
                "ERROR: no upgrade_path set when writing analysis of %s\n"
                % module_name
            )
        full_path = os.path.join(self.upgrade_path, module_name, version)
    else:
        full_path = os.path.join(
            get_module_path(module_name), "migrations", version
        )
    if not os.path.exists(full_path):
        try:
            os.makedirs(full_path)
        except OSError:  # os.error is just an alias of OSError
            return "ERROR: could not create migrations directory %s:\n" % (
                full_path
            )
    logfile = os.path.join(full_path, filename)
    try:
        # Context manager guarantees the handle is closed even when the
        # write itself fails (the previous version leaked the handle if
        # f.write() raised between open() and close()).
        with open(logfile, "w") as f:
            _logger.debug("Writing analysis to %s", logfile)
            f.write(content)
    except Exception:
        return "ERROR: could not open file %s for writing:\n" % logfile
    return None
def deploy_ssh_config(self):
    # Refresh the ~/.ssh/config entry for each node: strip any stale block
    # for the host with the bundled sed.sh helper, then append a fresh
    # "Host" section pointing at the node's public IP, SSH port and login.
    for node in self:
        name = node.fulldomain
        ssh_config = os.path.join(self.home_directory, '.ssh', 'config')
        sed = os.path.join(
            modules.get_module_path('clouder'),
            'res', 'sed.sh',
        )
        # sed.sh removes the previous "Host <name> ... #END <name>" block.
        self.execute_local([sed, name, ssh_config])
        self.execute_write_file(ssh_config, 'Host %s' % name)
        self.execute_write_file(
            ssh_config, '\n HostName %s' % node.public_ip,
        )
        self.execute_write_file(
            ssh_config, '\n Port %s' % node.ssh_port,
        )
        self.execute_write_file(
            ssh_config, '\n User %s' % (node.login or 'root'),
        )
        self.execute_write_file(
            ssh_config, '\n IdentityFile ~/.ssh/keys/%s' % name,
        )
        # Marker consumed by sed.sh on the next refresh.
        self.execute_write_file(
            ssh_config, '\n#END %s\n' % name,
        )
def deploy_shinken_node(self, nrpe):
    """ Deploy the configuration file to watch the node performances. """
    node = nrpe.node_id
    # NOTE(review): the '******' username literals look redacted in this
    # source; restore the real values before use.
    # Push the shinken host template, then patch its placeholders in place.
    self.send(
        modules.get_module_path('clouder_template_shinken') +
        '/res/node-shinken.config',
        node.shinken_configfile, username='******')
    self.execute([
        'sed', '-i', '"s/IP/' + node.ip + '/g"',
        node.shinken_configfile], username='******')
    self.execute([
        'sed', '-i', '"s/NAME/' + node.name + '/g"',
        node.shinken_configfile], username='******')
    self.execute([
        'sed', '-i', '"s/SSHPORT/' + str(node.ssh_port) + '/g"',
        node.shinken_configfile], username='******')
    self.execute([
        'sed', '-i',
        '"s/NRPEPORT/' + nrpe.ports['nrpe']['hostport'] + '/g"',
        node.shinken_configfile], username='******')
    # Reload shinken so the new host definition is picked up.
    self.execute(['/usr/local/shinken/bin/init.d/shinken', 'reload'],
                 username='******')
def deploy_build(self):
    """ Configure nginx. """
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'piwik':
        # Push the nginx vhost template, substitute its placeholders,
        # enable the site and reload nginx.
        ssh = self.connect(self.service_id.service_id.fullname)
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.send(ssh, modules.get_module_path(
            'clouder_template_piwik') + '/res/nginx.config', config_file)
        self.execute(ssh, ['sed', '-i', '"s/BASE/' + self.name + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i',
                           '"s/DOMAIN/' + self.domain_id.name + '/g"',
                           config_file])
        # The filesystem path must be escaped for use inside the sed expr.
        self.execute(ssh, ['sed', '-i',
                           '"s/PATH/' +
                           self.service_id.full_localpath_files
                           .replace('/', r'\/') + '/g"', config_file])
        self.execute(ssh, ['ln', '-s',
                           '/etc/nginx/sites-available/' + self.fullname,
                           '/etc/nginx/sites-enabled/' + self.fullname])
        self.execute(ssh, ['/etc/init.d/nginx', 'reload'])
        ssh.close()
    return res
def main():
    """Dispatch the odoo CLI.

    Parses an optional leading ``--addons-path=`` option, imports addons
    shipping a ``cli/`` directory so their subcommands register, then runs
    the requested command (defaulting to the legacy ``server``).
    """
    argv = sys.argv[1:]
    # The only shared option is '--addons-path='; parse it alone so module
    # discovery below can see the extra addons (no logger setup yet).
    if len(argv) > 1 and argv[0].startswith('--addons-path=') and not argv[1].startswith("-"):
        odoo.tools.config._parse_config([argv[0]])
        argv = argv[1:]
    command = "server"  # legacy default when no subcommand is given
    # TODO: find a way to properly discover addons subcommands without importing the world
    if argv and not argv[0].startswith("-"):
        # Import every addon with a cli/ directory; silence logging while
        # the world gets imported.
        logging.disable(logging.CRITICAL)
        for module in get_modules():
            if isdir(joinpath(get_module_path(module), 'cli')):
                __import__('odoo.addons.' + module)
        logging.disable(logging.NOTSET)
        command, argv = argv[0], argv[1:]
    if command not in commands:
        sys.exit('Unknown command %r' % (command,))
    commands[command]().run(argv)
def setUp(self):
    """Locate the module_auto_update addon record and its directory."""
    super(TestModuleUpgrade, self).setUp()
    addon = 'module_auto_update'
    domain = [('name', '=', addon)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(addon)
def load_custom_module_terms(self, mods, langs):
    # This operation is time-consuming, so keep it separate from the
    # system's original flow.  (translated from Chinese)
    # res = super(IrTranslation, self).load_module_terms(modules, langs)
    #
    # Load the "lmch extra" translation file of each module, in dependency
    # order, for every requested language.
    mod_dict = {
        mod.name: mod.dependencies_id.mapped('name')
        for mod in mods
    }
    # Process dependencies before dependants so overrides apply last.
    modules = topological_sort(mod_dict)
    res_lang = self.env['res.lang'].sudo()
    for lang in langs:
        res_lang.load_lang(lang)
    for module_name in modules:
        modpath = get_module_path(module_name)
        if not modpath:
            continue
        for lang in langs:
            context = dict(self._context)
            lang_code = tools.get_iso_codes(lang)
            lmch_extra_file = get_lmch_extra_file(module_name, lang_code)
            if lmch_extra_file:
                _logger.info(
                    u'模块 %s: loading lmch extra translation file (%s) for language %s',
                    module_name, lang_code, lang)
                tools.trans_load(self._cr, lmch_extra_file, lang,
                                 verbose=False, module_name=module_name,
                                 context=context)
    return True
def test_pylint(self):
    # Run pylint over the Odoo root plus any out-of-tree addons with only
    # self.ENABLED_CODES active; the test fails on any reported message.
    if pylint is None:
        self._skip_test("please install pylint")
    if LooseVersion(getattr(pylint, "__version__", "0.0.1")) < LooseVersion("1.6.4"):
        self._skip_test("please upgrade pylint to >= 1.6.4")
    paths = [tools.config["root_path"]]
    for module in get_modules():
        module_path = get_module_path(module)
        # Core addons under <root>/addons are already covered by root_path.
        if not module_path.startswith(join(tools.config["root_path"], "addons")):
            paths.append(module_path)
    options = [
        "--disable=all",
        "--enable=%s" % ",".join(self.ENABLED_CODES),
        "--reports=n",
        "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
    ]
    try:
        with open(devnull, "w") as devnull_file:
            process = subprocess.Popen(["pylint"] + options + paths,
                                       stdout=subprocess.PIPE,
                                       stderr=devnull_file)
    except (OSError, IOError):
        self._skip_test("pylint executable not found in the path")
    else:
        out = process.communicate()[0]
        if process.returncode:
            # NOTE(review): "\n" + out assumes text output; on Python 3
            # communicate() returns bytes unless universal_newlines — confirm.
            self.fail(msg="\n" + out)
def deploy(self):
    """ Configure the domain in the bind service, if configured. """
    super(ClouderDomain, self).deploy()
    if self.dns_id and self.dns_id.application_id.type_id.name == 'bind':
        # Push the zone-file template and substitute its placeholders.
        self.dns_id.send(
            modules.get_module_path('clouder_template_dns') +
            '/res/bind.config', self.configfile)
        self.dns_id.execute([
            'sed', '-i', '"s/DOMAIN/' + self.name + '/g"',
            self.configfile])
        self.dns_id.execute([
            'sed', '-i',
            '"s/IP/' + self.dns_id.node_id.public_ip + '/g"',
            self.configfile])
        # Append the zone declaration to named.conf, line by line, ending
        # with an "//END <name>" marker used to locate the block later.
        self.dns_id.execute([
            "echo 'zone \"" + self.name + "\" {' >> /etc/bind/named.conf"])
        self.dns_id.execute([
            'echo "type master;" >> /etc/bind/named.conf'])
        self.dns_id.execute([
            "echo 'file \"/etc/bind/db." + self.name +
            "\";' >> /etc/bind/named.conf"])
        self.dns_id.execute(['echo "notify yes;" >> /etc/bind/named.conf'])
        self.dns_id.execute(['echo "};" >> /etc/bind/named.conf'])
        self.dns_id.execute([
            'echo "//END ' + self.name + '" >> /etc/bind/named.conf'])
        # Bump the zone serial so secondaries pick up the change.
        self.refresh_serial()
def deploy_build(self):
    """ Configure nginx. """
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'dolibarr':
        # Push the nginx vhost template, substitute its placeholders,
        # enable the site and reload nginx.
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.service_id.send(
            modules.get_module_path('clouder_template_dolibarr') +
            '/res/nginx.config', config_file)
        self.service_id.execute(['sed', '-i',
                                 '"s/BASE/' + self.name + '/g"',
                                 config_file])
        self.service_id.execute([
            'sed', '-i',
            '"s/DOMAIN/' + self.domain_id.name + '/g"',
            config_file])
        self.service_id.execute([
            'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
            '/etc/nginx/sites-enabled/' + self.fullname])
        self.service_id.execute(['/etc/init.d/nginx', 'reload'])
    return res
def main():
    """Dispatch the odoo CLI: parse an optional leading ``--addons-path=``,
    discover addon-provided subcommands, and run the requested command
    (defaulting to the legacy ``server``).
    """
    args = sys.argv[1:]
    # The only shared option is '--addons-path=' needed to discover additional
    # commands from modules
    if len(args) > 1 and args[0].startswith('--addons-path=') and not args[1].startswith("-"):
        # parse only the addons-path, do not setup the logger...
        odoo.tools.config._parse_config([args[0]])
        args = args[1:]
    # Default legacy command
    command = "server"
    # TODO: find a way to properly discover addons subcommands without importing the world
    # Subcommand discovery
    if len(args) and not args[0].startswith("-"):
        logging.disable(logging.CRITICAL)
        for module in get_modules():
            if isdir(joinpath(get_module_path(module), 'cli')):
                __import__('odoo.addons.' + module)
        logging.disable(logging.NOTSET)
        command = args[0]
        args = args[1:]
    if command in commands:
        o = commands[command]()
        o.run(args)
    else:
        # Fix: an unknown command previously fell through silently and the
        # process exited 0; report it and exit non-zero, consistent with
        # the sibling implementation of this entry point.
        sys.exit('Unknown command %r' % (command,))
def test_pylint(self):
    # Run pylint over the Odoo root plus out-of-tree addons with only
    # ENABLED_CODES active, the bad-builtin checker for BAD_FUNCTIONS,
    # and deprecated-module detection for BAD_MODULES.
    if pylint is None:
        self._skip_test('please install pylint')
    if LooseVersion(getattr(pylint, '__version__', '0.0.1')) < LooseVersion('1.6.4'):
        self._skip_test('please upgrade pylint to >= 1.6.4')
    paths = [tools.config['root_path']]
    for module in get_modules():
        module_path = get_module_path(module)
        # Core addons under <root>/addons are already covered by root_path.
        if not module_path.startswith(join(tools.config['root_path'], 'addons')):
            paths.append(module_path)
    options = [
        '--disable=all',
        '--enable=%s' % ','.join(self.ENABLED_CODES),
        '--reports=n',
        "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
        '--load-plugins=pylint.extensions.bad_builtin',
        '--bad-functions=%s' % ','.join(self.BAD_FUNCTIONS),
        '--deprecated-modules=%s' % ','.join(self.BAD_MODULES)
    ]
    try:
        process = subprocess.Popen(['pylint'] + options + paths,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
    except (OSError, IOError):
        self._skip_test('pylint executable not found in the path')
    else:
        out, err = process.communicate()
        if process.returncode:
            # NOTE(review): "\n" + out assumes text pipes; on Python 3
            # communicate() yields bytes unless universal_newlines — confirm.
            self.fail("\n" + out + "\n" + err)
def button_save_translation(self):
    # Export this module's translations under <module>/i18n/: one POT
    # template plus one PO file per installed translatable language
    # (en_US excluded, as it is the source language).
    format_ = 'po'
    i18n_path = os.path.join(get_module_path(self.name), 'i18n')
    if not os.path.isdir(i18n_path):
        os.mkdir(i18n_path)
    lang_obj = self.env['res.lang']
    condition = [('translatable', '=', True), ('code', '!=', 'en_US')]
    langs = lang_obj.search(condition)
    # (filename, lang_code) pairs; lang False means "export the template".
    files = [('%s.pot' % self.name, False)]
    for lang in langs:
        iso_code = get_iso_codes(lang.code)
        filename = '%s.%s' % (iso_code, format_)
        files.append((filename, lang.code))
    for filename, lang in files:
        path = os.path.join(i18n_path, filename)
        # NOTE(review): text mode 'w' — some Odoo versions expect a binary
        # buffer for trans_export; confirm against the target version.
        with open(path, 'w') as buf:
            tools.trans_export(lang, [self.name], buf, format_, self.env.cr)
    return True
def _get_desc(self):
    # Compute ``description_html`` for each module: prefer the module's
    # static/description/index.html (rewriting relative asset links to the
    # module's static URL), else render the manifest description through
    # docutils' publish_string.
    for module in self:
        if not module.name:
            module.description_html = False
            continue
        module_path = modules.get_module_path(module.name, display_warning=False)  # avoid to log warning for fake community module
        if module_path:
            path = modules.check_resource_path(module_path, 'static/description/index.html')
        if module_path and path:
            with tools.file_open(path, 'rb') as desc_file:
                doc = desc_file.read()
                html = lxml.html.document_fromstring(doc)
                for element, attribute, link, pos in html.iterlinks():
                    # Rebase relative src attributes (no scheme, not
                    # already static/) onto /<module>/static/description/.
                    if element.get('src') and not '//' in element.get('src') and not 'static/' in element.get('src'):
                        element.set('src', "/%s/static/description/%s" % (module.name, element.get('src')))
                module.description_html = tools.html_sanitize(lxml.html.tostring(html))
        else:
            # Fallback: render the manifest description (apps keep their
            # store page instead, hence the application check).
            overrides = {
                'embed_stylesheet': False,
                'doctitle_xform': False,
                'output_encoding': 'unicode',
                'xml_declaration': False,
                'file_insertion_enabled': False,
            }
            output = publish_string(
                source=module.description if not module.application and module.description else '',
                settings_overrides=overrides, writer=MyWriter())
            module.description_html = tools.html_sanitize(output)
def __init__(self, api_id, api_key):
    """Store the TaxCloud credentials and build a SOAP client from the
    WSDL bundled with the account_taxcloud module."""
    self.api_login_id = api_id
    self.api_key = api_key
    wsdl_file = modules.get_module_path(
        'account_taxcloud') + '/api/taxcloud.wsdl'
    self.client = Client('file:///%s' % wsdl_file)
    self.factory = self.client.type_factory('ns0')
def collect(ctx):
    # Upload every addon's static/ directory to the configured S3 bucket
    # as publicly readable, cache-controlled assets at <module>/static/...
    logger = (ctx.obj['logger'])
    from odoo.modules import get_modules, get_module_path
    from odoo.tools.osutil import listdir
    from odooku.backends import get_backend
    s3_backend = get_backend('s3')
    for module in get_modules():
        # Modules whose name collides with a reserved S3 key are skipped.
        if module in RESERVED:
            logger.warning("Module name %s clashes with a reserved key", module)
            continue
        static_dir = os.path.join(get_module_path(module), 'static')
        if os.path.exists(static_dir):
            for filename in listdir(static_dir, True):
                path = os.path.join(static_dir, filename)
                url = os.path.join(module, 'static', filename)
                logger.info("Uploading %s", url)
                s3_backend.client.upload_file(
                    path, s3_backend.bucket, url,
                    ExtraArgs={
                        'ACL': 'public-read',
                        'CacheControl': ('max-age=%d, public' %
                                         (s3_backend.cache_time))
                    })
def test_pylint(self):
    """Run pylint (>= 1.6.4) over the Odoo root plus any out-of-tree
    addons, enabling only the codes in ``self.ENABLED_CODES``; the test
    fails if pylint reports anything."""
    if pylint is None:
        self._skip_test('please install pylint')
    if LooseVersion(getattr(pylint, '__version__', '0.0.1')) < LooseVersion('1.6.4'):
        self._skip_test('please upgrade pylint to >= 1.6.4')
    # Lint the server tree, plus every addon living outside <root>/addons.
    addons_root = join(tools.config['root_path'], 'addons')
    paths = [tools.config['root_path']]
    paths += [
        p for p in (get_module_path(m) for m in get_modules())
        if not p.startswith(addons_root)
    ]
    options = [
        '--disable=all',
        '--enable=%s' % ','.join(self.ENABLED_CODES),
        '--reports=n',
        "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
    ]
    try:
        with open(devnull, 'w') as sink:
            proc = subprocess.Popen(['pylint'] + options + paths,
                                    stdout=subprocess.PIPE, stderr=sink)
    except (OSError, IOError):
        self._skip_test('pylint executable not found in the path')
    else:
        output = proc.communicate()[0]
        if proc.returncode:
            self.fail("\n" + output)
def deploy_post(self):
    """ Configure the ldap node. """
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'openldap':
        ssh = self.connect(self.fullname)
        # Preseed debconf so slapd can be reconfigured non-interactively
        # with the configured password/organization/domain.
        self.execute(ssh, [
            'echo "slapd slapd/internal/generated_adminpw password ' +
            self.options['password']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, [
            'echo "slapd slapd/password2 password ' +
            self.options['password']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, [
            'echo "slapd slapd/internal/adminpw password ' +
            self.options['password']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, [
            'echo "slapd slapd/password1 password ' +
            self.options['password']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, [
            'echo "slapd shared/organization string ' +
            self.options['organization']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, [
            'echo "slapd slapd/domain string ' +
            self.options['domain']['value'] + '"',
            '|', 'debconf-set-selections'])
        self.execute(ssh, ['dpkg-reconfigure', '-f', 'noninteractive',
                           'slapd'])
        config_file = '/etc/ldap/schema/' + \
            self.options['domain']['value'] + '.ldif'
        self.send(ssh, modules.get_module_path('clouder_template_ldap') +
                  '/res/ldap.ldif', config_file)
        # Build the "dc=a,dc=b" base DN from the dotted domain name.
        domain_dc = ''
        for dc in self.options['domain']['value'].split('.'):
            if domain_dc:
                domain_dc += ','
            domain_dc += 'dc=' + dc
        self.execute(ssh, ['sed', '-i',
                           r'"s/\$DOMAIN/' + domain_dc + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i',
                           r'"s/\$PASSWORD/' +
                           self.options['password']['value'] + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i',
                           r'"s/\$ORGANIZATION/' +
                           self.options['organization']['value'] + '/g"',
                           config_file])
        # Point phpldapadmin at the real base DN instead of the example.
        self.execute(ssh, ['sed', '-i',
                           '"s/dc=example,dc=com/' + domain_dc + '/g"',
                           '/etc/phpldapadmin/config.php'])
        ssh.close()
        # Restart so slapd picks up the new settings, then load the schema.
        self.start()
        ssh = self.connect(self.fullname)
        self.execute(ssh, ['ldapadd', '-Y', 'EXTERNAL', '-H', 'ldapi:///',
                           '-f', config_file])
        ssh.close()
def deploy_link(self):
    """ Deploy the configuration file to watch the base. """
    super(ClouderBaseLink, self).deploy_link()
    if self.name.type_id.name == 'shinken':
        # Pick the template matching the base's backup setting.
        config_file = 'base-shinken'
        if not self.base_id.auto_backup:
            config_file = 'base-shinken-no-backup'
        # NOTE(review): the '******' username literals look redacted in
        # this source; restore the real values before use.
        self.target.send(
            modules.get_module_path('clouder_template_shinken') +
            '/res/' + config_file + '.config',
            self.base_id.shinken_configfile, username='******')
        # Substitute the placeholders of the pushed template in place.
        self.target.execute([
            'sed', '-i',
            '"s/BACKUPIP/' +
            self.base_id.backup_ids[0].node_id.ip + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/PORT/' +
            self.base_id.backup_ids[0].ports['nrpe']['hostport'] + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/METHOD/' +
            self.base_id.backup_ids[0].backup_method + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/TYPE/base/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/UNIQUE_NAME/' + self.base_id.fullname + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/DATABASES/' + self.base_id.databases_comma + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/BASE/' + self.base_id.name + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/DOMAIN/' + self.base_id.fulldomain + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        # Reload shinken so the new watch definition takes effect.
        self.target.execute(
            ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
            username='******')
def gitlab_ressource(self, type, name, project_id='', data=None):
    # Create or update a GitLab resource through the REST API and return
    # the decoded response.  Supported kinds: 'group' (create if missing),
    # 'variable' (POST or PUT depending on existence), 'file' (commit a
    # bundled template file to the project's master branch).
    # NOTE(review): `type` shadows the builtin, and `res` stays unbound
    # (NameError at the final return) when `type` is none of the handled
    # kinds — confirm callers only pass supported kinds.
    if not data:
        data = {}
    path = ''
    if type == 'group':
        path = '/groups'
    if type == 'group':
        flag = False
        data['path'] = name
        groups = self.request(self.gitlab_url + path,
                              headers=self.gitlab_headers).json()
        for group in groups:
            if group['path'] == name:
                res = group
                flag = True
        if not flag:
            res = self.request(self.gitlab_url + path,
                               headers=self.gitlab_headers,
                               method='post', data=data).json()
    if type == 'variable':
        data['key'] = name
        # POST when the variable does not yet exist, PUT otherwise.
        if self.request(self.gitlab_url + '/projects/' + project_id +
                        '/variables/' + name,
                        headers=self.gitlab_headers).status_code != 200:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/variables',
                               headers=self.gitlab_headers,
                               method='post', data=data).json()
        else:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/variables/' + name,
                               headers=self.gitlab_headers,
                               method='put', data=data).json()
    if type == 'file':
        with open(
                modules.get_module_path(
                    'clouder_template_' +
                    self.service_id.application_id.type_id.name) +
                '/res/' + name,
                'rb') as file:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/repository/files',
                               headers=self.gitlab_headers,
                               method='post',
                               data={
                                   'file_path': name,
                                   'branch_name': 'master',
                                   'commit_message': 'Add ' + name,
                                   'content': file.read()
                               })
    return res
def setUp(self):
    """Enable the deprecated-features parameter and locate the
    module_auto_update addon record and its directory."""
    super(TestModuleUpgrade, self).setUp()
    addon = 'module_auto_update'
    self.env["ir.config_parameter"].set_param(PARAM_DEPRECATED, "1")
    domain = [('name', '=', addon)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(addon)
def deploy_build(self):
    """
    Build the drupal by calling drush site-install,
    and installing the specified modules and themes.
    """
    from odoo import modules
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'drupal':
        # nginx vhost: push template, substitute placeholders, enable site.
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.service_id.send(
            modules.get_module_path('clouder_template_drupal') +
            '/res/nginx.config', config_file)
        self.service_id.execute(
            ['sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
        self.service_id.execute([
            'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
            config_file
        ])
        self.service_id.execute([
            'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
            '/etc/nginx/sites-enabled/' + self.fullname
        ])
        self.service_id.execute(['/etc/init.d/nginx', 'reload'])
        #
        # NOTE(review): the '******' literals below look redacted in this
        # source; the '--account-pass=' token run is not valid Python as
        # written — restore the original credential expression before use.
        self.service_id.execute([
            'drush', '-y', 'si',
            '--db-url=' + self.service_id.db_type + '://' +
            self.service_id.db_user + ':' + self.service_id.db_password +
            '@' + self.service_id.db_node + '/' + self.fullname_,
            '--account-mail=' + self.admin_email,
            '--account-name=' + self.admin_name,
            '--account-pass='******'--sites-subdir=' + self.fulldomain,
            'minimal'
        ], path='/var/www/drupal', username='******')
        if self.application_id.options['install_modules']['value']:
            # NOTE(review): this local `modules` shadows the imported
            # odoo.modules for the rest of the method.
            modules = self.application_id.options['install_modules'][
                'value'].split(',')
            for module in modules:
                self.service_id.execute(['drush', '-y', 'en', module],
                                        path='/var/www/drupal/sites/' +
                                        self.fulldomain,
                                        username='******')
        if self.application_id.options['theme']['value']:
            theme = self.application_id.options['theme']['value']
            self.service_id.execute(['drush', '-y', 'pm-enable', theme],
                                    path='/var/www/drupal/sites/' +
                                    self.fulldomain,
                                    username='******')
            self.service_id.execute([
                'drush', 'vset', '--yes', '--exact', 'theme_default', theme
            ], path='/var/www/drupal/sites/' + self.fulldomain,
                username='******')
    return res
def deploy_build(self):
    """
    Build the drupal by calling drush site-install,
    and installing the specified modules and themes.
    """
    from odoo import modules
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'drupal':
        # nginx vhost: push template, substitute placeholders, enable site.
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.service_id.send(
            modules.get_module_path('clouder_template_drupal') +
            '/res/nginx.config', config_file)
        self.service_id.execute([
            'sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
        self.service_id.execute([
            'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
            config_file])
        self.service_id.execute([
            'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
            '/etc/nginx/sites-enabled/' + self.fullname])
        self.service_id.execute(['/etc/init.d/nginx', 'reload'])
        #
        # NOTE(review): the '******' literals below look redacted in this
        # source; the '--account-pass=' token run is not valid Python as
        # written — restore the original credential expression before use.
        self.service_id.execute([
            'drush', '-y', 'si',
            '--db-url=' + self.service_id.db_type + '://' +
            self.service_id.db_user + ':' + self.service_id.db_password +
            '@' + self.service_id.db_node + '/' + self.fullname_,
            '--account-mail=' + self.admin_email,
            '--account-name=' + self.admin_name,
            '--account-pass='******'--sites-subdir=' + self.fulldomain,
            'minimal'],
            path='/var/www/drupal', username='******')
        if self.application_id.options['install_modules']['value']:
            # NOTE(review): this local `modules` shadows the imported
            # odoo.modules for the rest of the method.
            modules = self.application_id.options['install_modules'][
                'value'].split(',')
            for module in modules:
                self.service_id.execute([
                    'drush', '-y', 'en', module],
                    path='/var/www/drupal/sites/' + self.fulldomain,
                    username='******')
        if self.application_id.options['theme']['value']:
            theme = self.application_id.options['theme']['value']
            self.service_id.execute([
                'drush', '-y', 'pm-enable', theme],
                path='/var/www/drupal/sites/' + self.fulldomain,
                username='******')
            self.service_id.execute([
                'drush', 'vset', '--yes', '--exact', 'theme_default',
                theme],
                path='/var/www/drupal/sites/' + self.fulldomain,
                username='******')
    return res
def _compute_is_odoo_module(self):
    """Flag each module whose repository directory is literally named
    "addons" (i.e. it lives in the core Odoo addons tree)."""
    for record in self:
        path = get_module_path(record.name)
        if not path:
            record.is_odoo_module = False
            continue
        repo_dir = os.path.dirname(path)
        record.is_odoo_module = os.path.basename(repo_dir) == "addons"
def deploy_link(self):
    """ Deploy the configuration file to watch the service. """
    super(ClouderContainerLink, self).deploy_link()
    if self.name.type_id.name == 'shinken':
        # Only services with automatic backup enabled get a shinken watch.
        if self.service_id.auto_backup:
            config_file = 'service-shinken'
            # NOTE(review): the '******' username literals look redacted
            # in this source; restore the real values before use.
            self.target.send(
                modules.get_module_path('clouder_template_shinken') +
                '/res/' + config_file + '.config',
                self.service_id.shinken_configfile, username='******')
            self.target.execute([
                'sed', '-i',
                '"s/BACKUPIP/' +
                self.service_id.backup_ids[0].node_id.ip + '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            self.target.execute([
                'sed', '-i',
                '"s/PORT/' +
                self.service_id.backup_ids[0].ports['nrpe']['hostport'] +
                '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            self.target.execute([
                'sed', '-i',
                '"s/METHOD/' +
                self.service_id.backup_ids[0].backup_method + '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            self.target.execute([
                'sed', '-i', '"s/TYPE/service/g"',
                self.service_id.shinken_configfile
            ], username='******')
            # NOTE(review): BACKUPIP is substituted a second time here —
            # redundant after the first substitution; confirm intent.
            self.target.execute([
                'sed', '-i',
                '"s/BACKUPIP/' +
                self.service_id.backup_ids[0].node_id.ip + '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            self.target.execute([
                'sed', '-i',
                '"s/UNIQUE_NAME/' + self.service_id.fullname + '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            self.target.execute([
                'sed', '-i',
                '"s/HOST/' + self.service_id.node_id.name + '/g"',
                self.service_id.shinken_configfile
            ], username='******')
            # Reload shinken so the new watch definition takes effect.
            self.target.execute(
                ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
                username='******')
def test_dunderinit(self):
    """ Test that __init__.py exists in Odoo modules, otherwise they won't
    get packaged"""
    to_check = [m for m in get_modules() if m not in WHITELIST]
    for mod in to_check:
        init_path = Path(get_module_path(mod), '__init__.py')
        self.assertTrue(init_path.is_file(),
                        "Missing `__init__.py ` in module %s" % mod)
    _logger.info('%s modules checked', len(to_check))
def _compute_upgrade_path(self):
    """Return the --upgrade-path configuration option or the `scripts`
    directory in `openupgrade_scripts` if available
    """
    upgrade_path = config.get("upgrade_path", False)
    if not upgrade_path:
        scripts_module = get_module_path(
            "openupgrade_scripts", display_warning=False
        )
        if scripts_module:
            upgrade_path = os.path.join(scripts_module, "scripts")
    self.upgrade_path = upgrade_path
def deploy_post(self):
    # Install the wikicompare helper script and apply the revisioning
    # PostgreSQL patch on drupal/wkc services tagged 'exec'.
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'drupal'\
            and self.application_id.code == 'wkc' \
            and self.application_id.check_tags(['exec']):
        # NOTE(review): the '******' username literals look redacted in
        # this source; restore the real values before use.
        self.send(modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/wikicompare.script',
            '/var/www/drupal/wikicompare.script', username='******')
        self.send(modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/patch/revisioning_postgres.patch',
            '/var/www/drupal/revisioning_postgres.patch',
            username='******')
        # Patch the revisioning module in place for PostgreSQL support.
        self.execute([
            'patch', '-p0', '-d',
            '/var/www/drupal/sites/all/modules/revisioning/', '<',
            '/var/www/drupal/revisioning_postgres.patch'],
            username='******')
def gitlab_ressource(self, type, name, project_id='', data=None):
    # Create or update a GitLab resource through the REST API and return
    # the decoded response.  Supported kinds: 'group' (create if missing),
    # 'variable' (POST or PUT depending on existence), 'file' (commit a
    # bundled template file to the project's master branch).
    # NOTE(review): `type` shadows the builtin, and `res` stays unbound
    # (NameError at the final return) when `type` is none of the handled
    # kinds — confirm callers only pass supported kinds.
    if not data:
        data = {}
    path = ''
    if type == 'group':
        path = '/groups'
    if type == 'group':
        flag = False
        data['path'] = name
        groups = self.request(
            self.gitlab_url + path, headers=self.gitlab_headers).json()
        for group in groups:
            if group['path'] == name:
                res = group
                flag = True
        if not flag:
            res = self.request(
                self.gitlab_url + path, headers=self.gitlab_headers,
                method='post', data=data).json()
    if type == 'variable':
        data['key'] = name
        # POST when the variable does not yet exist, PUT otherwise.
        if self.request(
                self.gitlab_url + '/projects/' + project_id +
                '/variables/' + name,
                headers=self.gitlab_headers).status_code != 200:
            res = self.request(
                self.gitlab_url + '/projects/' + project_id + '/variables',
                headers=self.gitlab_headers, method='post',
                data=data).json()
        else:
            res = self.request(
                self.gitlab_url + '/projects/' + project_id +
                '/variables/' + name,
                headers=self.gitlab_headers, method='put',
                data=data).json()
    if type == 'file':
        with open(modules.get_module_path(
                'clouder_template_' +
                self.service_id.application_id.type_id.name) +
                '/res/' + name, 'rb') as file:
            res = self.request(
                self.gitlab_url + '/projects/' + project_id +
                '/repository/files',
                headers=self.gitlab_headers, method='post',
                data={'file_path': name,
                      'branch_name': 'master',
                      'commit_message': 'Add ' + name,
                      'content': file.read()})
    return res
def setUp(self):
    """Locate the module_auto_update addon, its directory, and a SHA1
    checksum of that directory (compiled files excluded)."""
    super(TestModule, self).setUp()
    addon = 'module_auto_update'
    domain = [('name', '=', addon)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(addon)
    self.own_checksum = dirhash(
        self.own_dir_path,
        'sha1',
        excluded_extensions=['pyc', 'pyo'],
    )
def confirm_button(self):
    """Export open customer invoices with a faktur number that have not
    yet been exported (``faktur_keluaran_exported`` False) to the
    e-Faktur output CSV, marking each as exported.  The closing UserError
    doubles as the "done" notification.  (docstring translated)
    :return:
    """
    cr = self.env.cr
    headers = [
        'FK', 'KD_JENIS_TRANSAKSI', 'FG_PENGGANTI', 'NOMOR_FAKTUR',
        'MASA_PAJAK', 'TAHUN_PAJAK', 'TANGGAL_FAKTUR', 'NPWP', 'NAMA',
        'ALAMAT_LENGKAP', 'JUMLAH_DPP', 'JUMLAH_PPN', 'JUMLAH_PPNBM',
        'ID_KETERANGAN_TAMBAHAN', 'FG_UANG_MUKA', 'UANG_MUKA_DPP',
        'UANG_MUKA_PPN', 'UANG_MUKA_PPNBM', 'REFERENSI'
    ]
    mpath = get_module_path('c10i_account_faktur_pajak')
    # NOTE(review): binary mode 'wb' with csv.writer writing str rows is
    # Python-2-only; Python 3 needs mode 'w' with newline=''.
    csvfile = open(mpath + '/static/format_faktur_keluaran.csv', 'wb')
    csvwriter = csv.writer(csvfile, delimiter=',')
    csvwriter.writerow([h.upper() for h in headers])
    onv_obj = self.env['account.invoice']
    # An explicit invoice selection wins over the default search.
    if self.invoice_ids:
        invoices = self.invoice_ids
    else:
        invoices = onv_obj.search([('state', '=', 'open'),
                                   ('faktur_keluaran_id', '!=', False),
                                   ('type', '=', 'out_invoice'),
                                   ('faktur_keluaran_exported', '=', False)
                                   ])
    company = self.env.user.company_id.partner_id
    i = 0
    # Fixed header rows 2-3, then one row group per invoice (rows 4-6,
    # one row-6 line per invoice line).
    self.baris2(headers, csvwriter)
    self.baris3(headers, csvwriter)
    for invoice in invoices:
        self.baris4(headers, csvwriter, invoice)
        self.baris5(headers, csvwriter, company)
        for line in invoice.invoice_line_ids:
            self.baris6(headers, csvwriter, line)
        invoice.faktur_keluaran_exported = True
        i += 1
    # NOTE(review): indentation reconstructed — the commit is assumed to
    # run once after the loop; confirm against the original layout.
    cr.commit()
    csvfile.close()
    raise UserError("Export %s record(s) Done!" % i)
def setUp(self):
    """Locate the addon under test, hash its directory (respecting the
    installed languages), and record whether it is writeable."""
    super(TestModule, self).setUp()
    self.own_module = self.env["ir.module.module"].search(
        [("name", "=", MODULE_NAME)]
    )
    self.own_dir_path = get_module_path(MODULE_NAME)
    lang_codes = self.env["res.lang"].search([]).mapped("code")
    self.own_checksum = addon_hash(
        self.own_dir_path,
        exclude_patterns=DEFAULT_EXCLUDE_PATTERNS.split(","),
        keep_langs=lang_codes,
    )
    self.own_writeable = os.access(self.own_dir_path, os.W_OK)
def setUp(self):
    """Locate the addon under test, hash its directory (respecting the
    installed languages), and record whether it is writeable."""
    super(TestModule, self).setUp()
    domain = [('name', '=', MODULE_NAME)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(MODULE_NAME)
    lang_codes = self.env['res.lang'].search([]).mapped('code')
    self.own_checksum = addon_hash(
        self.own_dir_path,
        exclude_patterns=DEFAULT_EXCLUDE_PATTERNS.split(','),
        keep_langs=lang_codes,
    )
    self.own_writeable = os.access(self.own_dir_path, os.W_OK)
def confirm_button(self):
    """Export partners with an NPWP that have not yet been exported
    (``is_efaktur_exported`` False) to the e-Faktur partner CSV, marking
    each as exported.  The closing UserError doubles as the "done"
    notification.  (docstring translated)
    :return:
    """
    cr = self.env.cr
    headers = [
        'LT', 'NPWP', 'NAMA', 'JALAN', 'BLOK', 'NOMOR', 'RT', 'RW',
        'KECAMATAN', 'KELURAHAN', 'KABUPATEN', 'PROPINSI', 'KODE_POS',
        'NOMOR_TELEPON'
    ]
    mpath = get_module_path('vit_efaktur')
    # NOTE(review): binary mode 'wb' with csv.writer writing str rows is
    # Python-2-only; Python 3 needs mode 'w' with newline=''.
    csvfile = open(mpath + '/static/partner.csv', 'wb')
    csvwriter = csv.writer(csvfile, delimiter=',')
    csvwriter.writerow([h.upper() for h in headers])
    partner = self.env['res.partner']
    partners = partner.search([('is_efaktur_exported', '=', False),
                               ('npwp', '!=', False)])
    i = 0
    for part in partners:
        # The export wants the NPWP as bare digits.
        npwp = part.npwp.replace(".", "").replace("-", "")
        data = {
            'LT': 'LT',
            'NPWP': npwp,
            'NAMA': part.name,
            'JALAN': part.street or '',
            'BLOK': part.blok or '',
            'NOMOR': part.nomor or '',
            'RT': part.rt or '',
            'RW': part.rw or '',
            'KECAMATAN': part.kecamatan_id.name or '',
            'KELURAHAN': part.kelurahan_id.name or '',
            'KABUPATEN': part.kecamatan_id.kota_id.name or '',
            'PROPINSI': part.state_id.name or '',
            'KODE_POS': part.zip or '',
            'NOMOR_TELEPON': part.phone or ''
        }
        csvwriter.writerow([data[v] for v in headers])
        part.is_efaktur_exported = True
        part.date_efaktur_exported = time.strftime("%Y-%m-%d %H:%M:%S")
        i += 1
    # NOTE(review): indentation reconstructed — the commit is assumed to
    # run once after the loop; confirm against the original layout.
    cr.commit()
    csvfile.close()
    raise UserError("Export %s record(s) Done!" % i)
def confirm_button(self):
    """Export products not yet exported (``is_efaktur_exported`` False)
    to an in-memory e-Faktur CSV, mark them exported, and return a form
    action exposing the file as a base64 download.  (docstring translated)
    :return:
    """
    cr = self.env.cr
    headers = ['OB', 'KODE_OBJEK', 'NAMA', 'HARGA_SATUAN']
    # Kept for the commented-out on-disk export below; otherwise unused.
    mpath = get_module_path('vit_efaktur')
    # csvfile = open(mpath + '/static/product.csv', 'wb')
    csvfile = StringIO()
    csvwriter = csv.writer(csvfile, delimiter=',')
    csvwriter.writerow([h.upper() for h in headers])
    product = self.env['product.template']
    products = product.search([('is_efaktur_exported', '=', False)])
    i = 0
    for prod in products:
        data = {
            'OB': 'OB',
            'KODE_OBJEK': prod.default_code or '',
            'NAMA': prod.name,
            'HARGA_SATUAN': prod.list_price
        }
        csvwriter.writerow([data[v] for v in headers])
        prod.is_efaktur_exported = True
        prod.date_efaktur_exported = time.strftime("%Y-%m-%d %H:%M:%S")
        i += 1
    cr.commit()
    # csvfile.close()
    # raise UserError("Export %s record(s) Done!" % i)
    self.export_file = base64.b64encode(csvfile.getvalue().encode())
    self.export_filename = 'Export-%s.csv' % time.strftime("%Y%m%d_%H%M%S")
    # Reopen this wizard's form so the user can download the file.
    return {
        'name': "Export E-Faktur Complete, total %s records" % i,
        'type': 'ir.actions.act_window',
        'res_model': 'vit.efaktur_product',
        'view_mode': 'form',
        'view_type': 'form',
        'res_id': self.id,
        'views': [(False, 'form')],
        'target': 'new',
    }
def confirm_button(self):
    """Export open purchase invoices (faktur masukan) to CSV.

    Writes ``static/fpm.csv`` inside the ``vit_efaktur`` module (rows
    produced by :meth:`baris2`), flags each exported invoice, commits,
    and reports the record count to the user via ``UserError``.

    :raises UserError: always at the end — used here as a completion
        message after the commit has persisted the flags.
    """
    cr = self.env.cr
    headers = [
        'FM', 'KD_JENIS_TRANSAKSI', 'FG_PENGGANTI', 'NOMOR_FAKTUR',
        'MASA_PAJAK', 'TAHUN_PAJAK', 'TANGGAL_FAKTUR', 'NPWP', 'NAMA',
        'ALAMAT_LENGKAP', 'JUMLAH_DPP', 'JUMLAH_PPN', 'JUMLAH_PPNBM',
        'IS_CREDITABLE'
    ]
    mpath = get_module_path('vit_efaktur')
    # Fixed: the file was opened 'wb', but csv.writer requires a text
    # stream on Python 3; newline='' is the documented csv file mode.
    # The context manager also closes the file if a row fails.
    with open(mpath + '/static/fpm.csv', 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile, delimiter=',')
        csvwriter.writerow([h.upper() for h in headers])
        onv_obj = self.env['account.invoice']
        invoices = onv_obj.search([
            ('is_efaktur_exported', '=', False),
            ('state', '=', 'open'),
            ('efaktur_masukan', '!=', ''),
            ('type', '=', 'in_invoice'),
        ])
        i = 0
        for invoice in invoices:
            # row content is delegated to the shared formatter
            self.baris2(headers, csvwriter, invoice)
            invoice.is_efaktur_exported = True
            invoice.date_efaktur_exported = time.strftime(
                "%Y-%m-%d %H:%M:%S")
            i += 1
        # commit before the UserError below so the flags survive the
        # rollback that raising normally triggers
        cr.commit()
    raise UserError("Export %s record(s) Done!" % i)
def deploy_link(self):
    """Deploy the shinken configuration file that watches the base.

    Sends the monitoring template to the shinken target, then replaces
    every placeholder in it with the base's backup/identity values and
    reloads shinken.
    """
    super(ClouderBaseLink, self).deploy_link()
    if not self.target \
            or self.target.application_id.type_id.name != 'shinken':
        return
    base = self.base_id
    config_file = \
        'base-shinken' if base.auto_backup else 'base-shinken-no-backup'
    self.target.send(
        modules.get_module_path('clouder_template_shinken') +
        '/res/' + config_file + '.config',
        base.shinken_configfile, username='******')
    backup = base.backup_ids[0]
    # One sed per placeholder, in the same order as before.
    replacements = [
        ('BACKUPIP', backup.node_id.ip),
        ('PORT', backup.ports['nrpe']['hostport']),
        ('METHOD', backup.backup_method),
        ('TYPE', 'base'),
        ('UNIQUE_NAME', base.fullname),
        ('DATABASES', base.db_names_comma),
        ('BASE', base.name),
        ('DOMAIN', base.fulldomain),
    ]
    for placeholder, value in replacements:
        self.target.execute([
            'sed', '-i',
            '"s/' + placeholder + '/' + value + '/g"',
            base.shinken_configfile], username='******')
    self.target.execute(
        ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
        username='******')
def deploy_link(self):
    """Deploy the shinken configuration file that watches the service.

    Sends the monitoring template to the shinken target, substitutes
    the backup/identity placeholders and reloads shinken.
    """
    super(ClouderContainerLink, self).deploy_link()
    if self.target \
            and self.target.application_id.type_id.name == 'shinken':
        # TODO(review): config_file is only bound when auto_backup is
        # set; without it the send() below raises NameError. The base
        # link has a '...-no-backup' fallback template — confirm
        # whether a 'service-shinken-no-backup' equivalent exists.
        if self.service_id.auto_backup:
            config_file = 'service-shinken'
        self.target.send(
            modules.get_module_path('clouder_template_shinken') +
            '/res/' + config_file + '.config',
            self.service_id.shinken_configfile, username='******')
        self.target.execute([
            'sed', '-i', '"s/BACKUPIP/' +
            self.service_id.backup_ids[0].node_id.ip + '/g"',
            self.service_id.shinken_configfile], username='******')
        self.target.execute([
            'sed', '-i', '"s/PORT/' +
            self.service_id.backup_ids[0].ports['nrpe']['hostport'] +
            '/g"',
            self.service_id.shinken_configfile], username='******')
        self.target.execute([
            'sed', '-i', '"s/METHOD/' +
            self.service_id.backup_ids[0].backup_method + '/g"',
            self.service_id.shinken_configfile], username='******')
        self.target.execute([
            'sed', '-i', '"s/TYPE/service/g"',
            self.service_id.shinken_configfile], username='******')
        # Fixed: removed a duplicated BACKUPIP sed that ran here — the
        # placeholder was already replaced by the first substitution,
        # so the second call was a copy-paste no-op.
        self.target.execute([
            'sed', '-i', '"s/UNIQUE_NAME/' +
            self.service_id.fullname + '/g"',
            self.service_id.shinken_configfile], username='******')
        self.target.execute([
            'sed', '-i', '"s/HOST/' +
            self.service_id.node_id.name + '/g"',
            self.service_id.shinken_configfile], username='******')
        self.target.execute(
            ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
            username='******')
def load_module_terms(self, modules, langs):
    """ Load PO files of the given modules for the given languages.

    For each (module, language) pair the load order is:
    1. the base language file (``es.po`` before ``es_CL.po``), so that
       sub-language terms can override base-language terms;
    2. the main ``i18n/<lang>.po`` file;
    3. the manual ``i18n_extra/<lang>.po`` overrides (eg: for l10n_be).

    :param modules: iterable of module names whose i18n folders to load
    :param langs: iterable of language codes to load and activate
    :return: True
    """
    # make sure the given languages are active
    res_lang = self.env['res.lang'].sudo()
    for lang in langs:
        res_lang.load_lang(lang)
    # load i18n files
    for module_name in modules:
        modpath = get_module_path(module_name)
        if not modpath:
            # module not found on disk: nothing to load
            continue
        for lang in langs:
            # fresh context per (module, lang): 'overwrite' must not
            # leak from one language to the next
            context = dict(self._context)
            lang_code = tools.get_iso_codes(lang)
            base_lang_code = None
            if '_' in lang_code:
                base_lang_code = lang_code.split('_')[0]

            # Step 1: for sub-languages, load base language first (e.g. es_CL.po is loaded over es.po)
            if base_lang_code:
                base_trans_file = get_module_resource(
                    module_name, 'i18n', base_lang_code + '.po')
                if base_trans_file:
                    _logger.info(
                        'module %s: loading base translation file %s '
                        'for language %s',
                        module_name, base_lang_code, lang)
                    tools.trans_load(
                        self._cr, base_trans_file, lang, verbose=False,
                        module_name=module_name, context=context)
                    # make sure the requested translation will override
                    # the base terms later
                    context['overwrite'] = True

                # i18n_extra folder is for additional translations
                # handle manually (eg: for l10n_be)
                base_trans_extra_file = get_module_resource(
                    module_name, 'i18n_extra', base_lang_code + '.po')
                if base_trans_extra_file:
                    _logger.info(
                        'module %s: loading extra base translation '
                        'file %s for language %s',
                        module_name, base_lang_code, lang)
                    tools.trans_load(
                        self._cr, base_trans_extra_file, lang,
                        verbose=False, module_name=module_name,
                        context=context)
                    # make sure the requested translation will override
                    # the base terms later
                    context['overwrite'] = True

            # Step 2: then load the main translation file, possibly
            # overriding the terms coming from the base language
            trans_file = get_module_resource(
                module_name, 'i18n', lang_code + '.po')
            if trans_file:
                _logger.info(
                    'module %s: loading translation file (%s) '
                    'for language %s',
                    module_name, lang_code, lang)
                tools.trans_load(
                    self._cr, trans_file, lang, verbose=False,
                    module_name=module_name, context=context)
            elif lang_code != 'en_US':
                # en_US is the source language: missing file is normal
                _logger.info(
                    'module %s: no translation for language %s',
                    module_name, lang_code)

            trans_extra_file = get_module_resource(
                module_name, 'i18n_extra', lang_code + '.po')
            if trans_extra_file:
                _logger.info(
                    'module %s: loading extra translation file (%s) '
                    'for language %s',
                    module_name, lang_code, lang)
                tools.trans_load(
                    self._cr, trans_extra_file, lang, verbose=False,
                    module_name=module_name, context=context)
    return True
def test_pylint(self):
    """Run pylint over the server and the out-of-tree addons.

    Skips when pylint is missing or too old for the running Python;
    fails the test when pylint reports any enabled message.
    """
    if pylint is None:
        self._skip_test('please install pylint')
    minimum = LooseVersion(
        '1.7.0' if sys.version_info >= (3, 6) else '1.6.4')
    installed = LooseVersion(getattr(pylint, '__version__', '0.0.1'))
    if installed < minimum:
        self._skip_test('please upgrade pylint to >= %s' % minimum)

    # lint the server tree plus every addon living outside it
    addons_root = join(tools.config['root_path'], 'addons')
    paths = [tools.config['root_path']]
    for module in get_modules():
        candidate = get_module_path(module)
        if not candidate.startswith(addons_root):
            paths.append(candidate)

    options = [
        '--rcfile=%s' % os.devnull,
        '--disable=all',
        '--enable=%s' % ','.join(self.ENABLED_CODES),
        '--reports=n',
        "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
        '--load-plugins=pylint.extensions.bad_builtin,_odoo_checkers',
        '--bad-functions=%s' % ','.join(self.BAD_FUNCTIONS),
        '--deprecated-modules=%s' % ','.join(self.BAD_MODULES)
    ]

    # expose the local checker plugin to the pylint subprocess
    child_env = dict(
        os.environ,
        PYTHONPATH=HERE + os.pathsep + os.environ.get('PYTHONPATH', ''),
    )
    try:
        executable = tools.which('pylint')
        child = subprocess.Popen(
            [executable] + options + paths,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=child_env,
        )
    except (OSError, IOError):
        self._skip_test('pylint executable not found in the path')
    else:
        stdout, stderr = child.communicate()
        if child.returncode:
            details = (b"\n" + stdout + b"\n" + stderr) \
                .decode('utf-8').strip()
            self.fail("pylint test failed:\n" + details)
def deploy_post(self):
    """Add the general shinken configuration files after deployment.

    Only applies to shinken 'data' containers: installs the general
    service config, injects the sysadmin mail address and removes the
    default localhost host file.
    """
    super(ClouderContainer, self).deploy_post()
    app = self.application_id
    if app.type_id.name != 'shinken' or not app.check_tags(['data']):
        return
    clouder_cfg = '/usr/local/shinken/etc/services/clouder.cfg'
    self.send(
        modules.get_module_path('clouder_template_shinken') +
        '/res/general-shinken.config',
        clouder_cfg, username='******')
    self.execute([
        'sed', '-i',
        '"s/SYSADMIN_MAIL/' + self.email_sysadmin + '/g"',
        clouder_cfg], username='******')
    self.execute(
        ['rm', '/usr/local/shinken/etc/hosts/localhost.cfg'],
        username='******')
def update_list(self):
    """Synchronize module records with the modules found on disk.

    Existing records are refreshed from their manifest; modules seen
    on disk for the first time get a new 'uninstalled' record.
    Dependencies, exclusions and category are refreshed either way.

    :return: ``[updated_count, added_count]``
    """
    updated, added = 0, 0
    default_version = modules.adapt_version('1.0')
    known = {
        record.name: record
        for record in self.with_context(lang=None).search([])
    }

    # iterate through detected modules and update/create them in db
    for name in modules.get_modules():
        record = known.get(name)
        terp = self.get_module_info(name)
        values = self.get_values_from_terp(terp)
        if record:
            changes = {}
            for key, value in values.items():
                current = getattr(record, key)
                incoming = tools.ustr(value) \
                    if isinstance(value, pycompat.string_types) else value
                if (current or incoming) and incoming != current:
                    changes[key] = value
            if terp.get('installable', True) \
                    and record.state == 'uninstallable':
                changes['state'] = 'uninstalled'
            if parse_version(terp.get('version', default_version)) > \
                    parse_version(record.latest_version or default_version):
                updated += 1
            if changes:
                record.write(changes)
        else:
            if not modules.get_module_path(name):
                continue
            if not terp or not terp.get('installable', True):
                continue
            record = self.create(
                dict(name=name, state='uninstalled', **values))
            added += 1
        record._update_dependencies(terp.get('depends', []))
        record._update_exclusions(terp.get('excludes', []))
        record._update_category(terp.get('category', 'Uncategorized'))
    return [updated, added]
def hook_deploy(self, ports, volumes):
    """
    Deploy the service in the node.

    For OpenShift runners: builds port and volume descriptors, fills
    them into the service.config template via sed on the runner, then
    creates the service with ``oc create``.
    """
    res = super(ClouderContainer, self).hook_deploy(ports, volumes)
    if self.node_id.runner_id.application_id.type_id.name == 'openshift':
        # Build a JSON-like ports array by string concatenation.
        # FIXME(review): consecutive port entries are not separated by
        # a comma, so the result is malformed for more than one port —
        # confirm against the service.config template.
        # NOTE(review): assumes port.local_port / port.hostport are
        # strings; integers would make '+' raise TypeError — confirm.
        ports_dict = '['
        for port in ports:
            ports_dict += '{"name": "' + port.name + '", '
            ports_dict += '"protocol": "' + \
                (port.udp and 'UDP' or 'TCP') + '",'
            ports_dict += '"port": ' + port.local_port + ','
            ports_dict += '"targetPort": ' + port.hostport + ','
            ports_dict += '"nodePort": 0}'
        ports_dict += ']'
        volume_mounts_dict = []
        volumes_dict = []
        for volume in volumes:
            volume_mounts_dict.append(
                {
                    'name': volume.name,
                    'mountPath': volume.localpath,
                    # FIXME(review): '??' is a placeholder key left in
                    # the mount spec, carrying volume.hostpath — confirm
                    # the intended key against the template.
                    '??': volume.hostpath,
                    'readonly': volume.readonly
                }
            )
            volumes_dict.append(
                {
                    'name': volume['name'],
                    'emptyDir': {
                        'medium': ''
                    }
                }
            )
        _logger.info('%s', ports_dict.replace('\"', '\\"'))
        runner = self.node_id.runner_id
        service_file = '/tmp/config'
        runner.send(
            modules.get_module_path('clouder_runner_openshift') +
            '/res/service.config', service_file)
        runner.execute([
            'sed', '-i',
            '"s/CONTAINER_NAME/' + self.name + '/g"', service_file])
        # slashes in the image path must be escaped for sed
        runner.execute([
            'sed', '-i',
            '"s/IMAGE_NAME/' +
            self.image_version_id.fullpath_localhost.replace(
                '/', r'\/') + '/g"', service_file])
        # quotes are escaped so the JSON survives the shell/sed pass
        runner.execute([
            'sed', '-i',
            '"s/PORTS/' + ports_dict.replace('\"', '\\"') + '/g"',
            service_file])
        # NOTE(review): str() on these dicts yields Python repr (single
        # quotes), not JSON — confirm the template consumer accepts it.
        runner.execute([
            'sed', '-i',
            '"s/VOLUME_MOUNTS/' + str(volume_mounts_dict) + '/g"',
            service_file])
        runner.execute([
            'sed', '-i',
            '"s/VOLUMES/' + str(volumes_dict) + '/g"', service_file])
        runner.execute(['oc', 'create', '-f', service_file])
        runner.execute(['rm', service_file])
    return res
def install_from_urls(self, urls):
    """Download, extract and install modules from the apps server.

    :param urls: mapping of module name -> download URL; a falsy URL
        means the local version is already current. The special key for
        the product name (e.g. 'openerp') replaces the whole server.
    :return: the action returned by ``button_immediate_install``, or a
        client 'home' action when a server restart was triggered.
    :raises AccessDenied: if the caller is not a system user or a URL
        points outside the configured apps server.
    :raises UserError: when a module download fails.
    """
    if not self.env.user.has_group('base.group_system'):
        raise AccessDenied()
    apps_server = urlparse.urlparse(self.get_apps_server())
    OPENERP = odoo.release.product_name.lower()
    tmp = tempfile.mkdtemp()
    _logger.debug('Install from url: %r', urls)
    try:
        # 1. Download & unzip missing modules
        for module_name, url in urls.iteritems():
            if not url:
                continue    # nothing to download, local version is already the last one
            up = urlparse.urlparse(url)
            # only fetch from the configured apps server (anti-spoofing)
            if up.scheme != apps_server.scheme \
                    or up.netloc != apps_server.netloc:
                raise AccessDenied()
            try:
                _logger.info(
                    'Downloading module `%s` from OpenERP Apps',
                    module_name)
                content = urllib2.urlopen(url).read()
            except Exception:
                _logger.exception(
                    'Failed to fetch module %s', module_name)
                raise UserError(
                    _('The `%s` module appears to be unavailable at '
                      'the moment, please try again later.')
                    % module_name)
            else:
                zipfile.ZipFile(StringIO(content)).extractall(tmp)
                assert os.path.isdir(os.path.join(tmp, module_name))

        # 2a. Copy/Replace module source in addons path
        for module_name, url in urls.iteritems():
            if module_name == OPENERP or not url:
                # OPENERP is special case, handled below, and no URL
                # means local module
                continue
            module_path = modules.get_module_path(
                module_name, downloaded=True, display_warning=False)
            # keep a backup so a failed move is recoverable
            bck = backup(module_path, False)
            _logger.info(
                'Copy downloaded module `%s` to `%s`',
                module_name, module_path)
            shutil.move(os.path.join(tmp, module_name), module_path)
            if bck:
                shutil.rmtree(bck)

        # 2b. Copy/Replace server+base module source if downloaded
        if urls.get(OPENERP):
            # special case. it contains the server and the base module.
            # extract path is not the same
            base_path = os.path.dirname(modules.get_module_path('base'))

            # copy all modules in the SERVER/openerp/addons directory
            # to the new "openerp" module (except base itself)
            for d in os.listdir(base_path):
                if d != 'base' and os.path.isdir(
                        os.path.join(base_path, d)):
                    destdir = os.path.join(
                        tmp, OPENERP, 'addons',
                        d)  # XXX 'openerp' subdirectory ?
                    shutil.copytree(os.path.join(base_path, d), destdir)

            # then replace the server by the new "base" module
            server_dir = tools.config['root_path']  # XXX or dirname()
            bck = backup(server_dir)
            _logger.info(
                'Copy downloaded module `openerp` to `%s`', server_dir)
            shutil.move(os.path.join(tmp, OPENERP), server_dir)
            # NOTE(review): the server backup is deliberately kept
            # (removal left commented out below) — confirm intent.
            #if bck:
            #    shutil.rmtree(bck)

        self.update_list()

        with_urls = [
            module_name for module_name, url in urls.iteritems() if url]
        downloaded = self.search([('name', 'in', with_urls)])
        installed = self.search(
            [('id', 'in', downloaded.ids), ('state', '=', 'installed')])

        to_install = self.search(
            [('name', 'in', urls.keys()), ('state', '=', 'uninstalled')])
        post_install_action = to_install.button_immediate_install()

        if installed:
            # in this case, force server restart to reload python code...
            self._cr.commit()
            odoo.service.server.restart()
            return {
                'type': 'ir.actions.client',
                'tag': 'home',
                'params': {'wait': True},
            }
        return post_install_action
    finally:
        shutil.rmtree(tmp)
def send_drush_file(self):
    """Upload this module's drush makefile to the web root."""
    makefile = modules.get_module_path(
        'clouder_template_drupal_wikicompare') + '/res/drush.make'
    self.send(makefile, '/var/www/drush.make', username='******')
def send_drush_file(self):
    """Upload this module's drush makefile to the web root."""
    # local import kept: module-level `modules` is not guaranteed here
    from odoo import modules
    makefile = modules.get_module_path(
        'clouder_template_drupal') + '/res/drush.make'
    self.send(makefile, '/var/www/drush.make', username='******')
def deploy_link(self):
    """
    Configure the proxy to redirect to the application port.

    Sends the nginx templates to the proxy target, substitutes the
    BASE/DOMAIN/SERVER/PORT/PROTOCOL placeholders, installs the SSL
    certificate (from the base, the domain, or self-signed as a
    fallback), links the site config and reloads nginx.
    """
    super(ClouderBaseLink, self).deploy_link()
    if self.name.type_id.name == 'proxy':
        base = self.base_id
        if not base.ssl_only:
            configfile = 'proxy.config'
        else:
            configfile = 'proxy-sslonly.config'
        target = self.target
        module_path = modules.get_module_path(
            'clouder_template_' + base.application_id.type_id.name)
        proxy_module_path = modules.get_module_path(
            'clouder_template_proxy'
        )
        # Always transfer proxy and ssl settings
        for config in ['nginx-ssl', 'nginx-proxy']:
            target.send(
                os.path.join(
                    proxy_module_path, 'res', '%s.config' % config,
                ),
                os.path.join('/etc/nginx/conf.d', config),
            )
        # Prefer an application-specific template; fall back to the
        # generic proxy one when none is shipped.
        use_generic = True
        if module_path:
            configtemplate = module_path + '/res/' + configfile
            if self.local_file_exist(configtemplate):
                target.send(configtemplate, base.nginx_configfile)
                use_generic = False
        if use_generic:
            target.send(
                proxy_module_path + '/res/' + configfile,
                base.nginx_configfile)
        if base.is_root:
            # append the root-domain snippet to the site config
            target.send(
                os.path.join(
                    proxy_module_path, 'res', 'proxy-root.config',
                ),
                '%s-root' % base.nginx_configfile,
            )
            target.execute([
                'cat', base.nginx_configfile + '-root',
                '>>', base.nginx_configfile])
            target.execute(['rm', base.nginx_configfile + '-root'])
        target.execute([
            'sed', '-i', '"s/BASE/' + base.name + '/g"',
            base.nginx_configfile])
        target.execute([
            'sed', '-i', '"s/DOMAIN/' + base.fulldomain + '/g"',
            base.nginx_configfile])
        node = base.service_id.node_id.private_ip
        port_key = 'hostport'  # renamed from `type` (shadowed builtin)
        if self.runner == 'swarm':
            node = base.service_id.host
            port_key = 'local_port'
        # NOTE(review): if the service exposes neither 'http' nor
        # 'https', protocol/port stay unbound and the sed below raises
        # NameError — confirm every proxied service defines one.
        if 'http' in base.service_id.ports:
            protocol = 'http'
            port = base.service_id.ports['http'][port_key]
        if 'https' in base.service_id.ports:
            # https wins when both are exposed
            protocol = 'https'
            port = base.service_id.ports['https'][port_key]
        target.execute([
            'sed', '-i', '"s/SERVER/' + node + '/g"',
            base.nginx_configfile])
        target.execute([
            'sed', '-i', '"s/PORT/' + port + '/g"',
            base.nginx_configfile])
        target.execute([
            'sed', '-i', '"s/PROTOCOL/' + protocol + '/g"',
            base.nginx_configfile])
        self.nginx_config_update(target)
        cert_file = '/etc/ssl/certs/' + base.fulldomain + '.crt'
        key_file = '/etc/ssl/private/' + base.fulldomain + '.key'
        if base.cert_cert and base.cert_key:
            target.execute([
                'echo', '"' + base.cert_cert + '"', '>', cert_file])
            target.execute([
                'echo', '"' + base.cert_key + '"', '>', key_file])
        elif base.domain_id.cert_cert \
                and base.domain_id.cert_key:
            target.execute([
                'echo', '"' + base.domain_id.cert_cert + '"',
                '>', cert_file])
            # Fixed: previously wrote domain_id.domain_cert_key, which
            # is not the field the condition above checks.
            target.execute([
                'echo', '"' + base.domain_id.cert_key + '"',
                '>', key_file])
        else:
            # Self-signed fallback. Fixed: '-keyout' used to carry a
            # leading space, producing an invalid openssl argument.
            target.execute([
                'openssl', 'req', '-x509', '-nodes',
                '-days', '365', '-newkey', 'rsa:2048',
                '-out', cert_file, '-keyout', key_file, '-subj',
                '"/C=FR/L=Paris/O=' + base.domain_id.organisation +
                '/CN=' + base.name + '.' + base.domain_id.name + '"'])
        target.execute([
            'ln', '-s', base.nginx_configfile,
            '/etc/nginx/sites-enabled/' + base.fullname])
        target.execute(['nginx', '-s', 'reload'])
def build_image(
        self, model, node, runner=False, expose_ports=None, salt=True):
    """Build the Docker image for *model* on *node*.

    :param model: the record the image is built for; a
        'clouder.service' gets a timestamped build name, anything else
        uses its ``fullpath``.
    :param node: target node; replaced by ``model.salt_master`` when
        building through salt.
    :param runner: optional runner record; building only proceeds when
        absent or of type 'docker'.
    :param expose_ports: optional list of ports appended as an EXPOSE
        line in the Dockerfile.
    :param salt: when True, stage the build under the salt tree
        instead of a temp dir (and skip the local `docker build`).
    :return: the image name when built here, else the super() result.
    """
    if not expose_ports:
        expose_ports = []
    res = super(ClouderImage, self).build_image(
        model, node, runner=runner, expose_ports=expose_ports, salt=salt)
    if not runner or runner.application_id.type_id.name == 'docker':
        # timestamped name so successive service builds don't collide
        path = '%s-%s' % (
            model.name,
            datetime.now().strftime('%Y%m%d.%H%M%S'),
        )
        if model._name == 'clouder.service':
            name = path
        else:
            name = model.fullpath
        if salt:
            build_dir = os.path.join(
                '/srv', 'salt', 'services', 'build_%s' % model.name,
            )
            # salt builds happen on the salt master, not on `node`
            node = model.salt_master
        else:
            build_dir = self.env['clouder.model']._get_directory_tmp(name)
        # start from a clean build directory
        node.execute(['rm', '-rf', build_dir])
        node.execute(['mkdir', '-p', build_dir])
        if self.type_id:
            # pick the sources shipped with the matching template module
            if self.type_id.name in [
                'backup', 'salt-master', 'salt-minion'
            ]:
                sources_path = os.path.join(
                    modules.get_module_path('clouder'),
                    'sources',
                )
            else:
                module_path = modules.get_module_path(
                    'clouder_template_%s' % self.type_id.name
                )
                # module_path may be falsy when the template module
                # is not installed; sources_path is then falsy too
                sources_path = module_path and os.path.join(
                    module_path, 'sources'
                )
            if sources_path and self.env['clouder.model'].local_dir_exist(
                sources_path
            ):
                node.send_dir(
                    sources_path,
                    os.path.join(build_dir, 'sources'),
                )
        docker_file = os.path.join(build_dir, 'Dockerfile')
        # double quotes in the dockerfile body are escaped so the
        # shell echo below keeps them intact
        node.execute([
            'echo "%s" >> "%s"' % (
                self.computed_dockerfile.replace('"', r'\"'),
                docker_file,
            ),
        ])
        if expose_ports:
            node.execute([
                'echo "EXPOSE %s" >> "%s"' % (
                    ' '.join(expose_ports),
                    docker_file,
                ),
            ])
        if not salt:
            # non-salt path builds immediately and cleans up after
            node.execute([
                'docker', 'build', '--pull', '-t', name, build_dir,
            ])
            node.execute(['rm', '-rf', build_dir])
        return name
    return res
def generate_cert_exec(self):
    """
    Generate a new certificate.

    When a proxy link exists, runs a Let's Encrypt webroot challenge
    on the proxy: temporarily installs an nginx config serving the
    challenge webroot, calls certbot, stores the resulting key/cert
    (and renewal date / DH params) on the base, then removes the
    temporary config and redeploys the proxy link.
    """
    res = super(ClouderBase, self).generate_cert_exec()
    proxy_links = self._get_proxy_links()
    if proxy_links:
        proxy_link = proxy_links[0]
        proxy = proxy_link.target
        # drop the current link config before the temporary one
        proxy_link.purge_link()
        webroot = '/var/www/' + self.fullname + '-certs'
        proxy.execute(['mkdir -p ' + webroot])
        # temporary nginx site serving the ACME challenge webroot
        proxy.send(
            modules.get_module_path(
                'clouder_template_proxy'
            ) + '/res/nginx.config',
            self.nginx_configfile)
        proxy.execute([
            'ln', '-s', self.nginx_configfile,
            '/etc/nginx/sites-enabled/' + self.fullname])
        proxy.execute([
            'sed', '-i', '"s/BASE/' + self.name + '/g"',
            self.nginx_configfile])
        domain = self.fulldomain
        if self.is_root:
            # root bases also answer on the www-style sub name
            domain = domain + ' ' + self.name + '.' + self.fulldomain
        proxy.execute([
            'sed', '-i', '"s/DOMAIN/' + domain + '/g"',
            self.nginx_configfile])
        proxy.execute([
            'sed', '-i', '"s/REPO/' + self.fullname + '/g"',
            self.nginx_configfile])
        proxy.execute(['nginx', '-s', 'reload'])
        # certbot wants each extra name behind its own -d flag
        domain = self.fulldomain
        if self.is_root:
            domain = domain + ' -d ' + self.name + '.' + self.fulldomain
        proxy.execute([
            'certbot certonly --webroot -w ' + webroot +
            ' -d ' + domain + ' -m ' + proxy.email_sysadmin +
            ' --agree-tos'])
        key = proxy.execute([
            'cat', '/etc/letsencrypt/live/' +
            self.fulldomain + '/privkey.pem'])
        cert = proxy.execute([
            'cat', '/etc/letsencrypt/live/' +
            self.fulldomain + '/fullchain.pem'])
        if key:
            # only persist when the challenge actually produced a key
            self.write({
                'cert_key': key,
                'cert_cert': cert,
                'cert_renewal_date': fields.Datetime.to_string(
                    datetime.now() + self.DELTA_CERT_RENEW
                ),
                'dh_param': self._create_dh_param(proxy),
            })
        # tear down the temporary challenge site
        proxy.execute([
            'rm', '/etc/nginx/sites-enabled/' + self.fullname])
        proxy.execute(['rm', self.nginx_configfile])
        proxy.execute(['nginx', '-s', 'reload'])
        proxy.execute(['rm -rf ' + webroot])
        # restore the real proxy configuration
        proxy_link.deploy_link()
    return res