def build_application(self):
    """ Patch some files in the archive.

    Only applies to the Drupal application with code 'wkc' (wikicompare):
    uploads the wikicompare helper script and a PostgreSQL patch for the
    'revisioning' module into the build archive, then applies the patch.
    """
    super(ClouderApplicationVersion, self).build_application()
    if self.application_id.type_id.name == 'drupal'\
            and self.application_id.code == 'wkc':
        ssh = self.connect(self.archive_id.fullname)
        # Upload the wikicompare helper script into the archive root.
        self.send(ssh, modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/wikicompare.script',
            self.full_archivepath + '/wikicompare.script')
        # Upload the revisioning patch adding PostgreSQL support.
        self.send(ssh, modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/patch/revisioning_postgres.patch',
            self.full_archivepath + '/revisioning_postgres.patch')
        # NOTE(review): the '<' redirection assumes execute() runs the
        # command through a remote shell -- confirm.
        self.execute(ssh, ['patch', '-p0', '-d',
                           self.full_archivepath +
                           '/sites/all/modules/revisioning/', '<',
                           self.full_archivepath +
                           '/revisioning_postgres.patch'])
        ssh.close()
    #
    # if [[ $name == 'dev' ]]
    # then
    # patch -p0 -d $archive_path/$app/${app}-${name}/archive/sites/all/
    # themes/wikicompare_theme/ < $openerp_path/clouder/clouder/apps/
    # drupal/patch/dev_zen_rebuild_registry.patch
    # fi
    return
def set_module_type_repository(self):
    """Classify the module and record its repository directory name.

    Sets ``self.type`` to one of 'official', 'oca', 'specific' or
    'community', and ``self.repository`` to the short (last path
    component) name of the directory containing the module, or False
    when the module path cannot be resolved.
    """
    # Renamed the local from ``type`` to stop shadowing the builtin.
    module_type = False
    module_path = False
    web_addons_path = modules.get_module_path('web')
    # The directory holding the stock 'web' module is taken as the
    # official addons path.
    official_addons_path = os.path.split(web_addons_path)[0]
    # for the official module, we could loop on addons_path
    # and take the path that contains the 'web' module -> we know it's
    full_module_path = modules.get_module_path(self.name)
    if full_module_path:
        module_path = os.path.split(full_module_path)[0]
    # Hoisted: the original repeated this isinstance() check three times.
    name_is_str = isinstance(self.name, (str, unicode))
    if self.name == 'base':
        module_type = 'official'
    elif module_path and module_path == official_addons_path:
        module_type = 'official'
    elif self.author and 'Odoo Community Association' in self.author:
        module_type = 'oca'
    elif name_is_str and self.name.endswith('_profile'):
        module_type = 'specific'
    elif name_is_str and self.name.startswith(
            self._get_specific_prefix_module()):
        module_type = 'specific'
    else:
        module_type = 'community'
    # Another way here to find specific: an explicit list overrides the
    # classification above.
    if name_is_str and self.name in self._get_list_specific_modules():
        module_type = 'specific'
    # set short path
    if module_path:
        module_path = os.path.split(module_path)[1]
    self.type = module_type
    self.repository = module_path
def view_url(self, recipe=None, recipe_ref=None, **post):
    """Serve either a raw addon file or a recipe-rendered placeholder.

    When ``url`` is given it is resolved against the addons directory of
    the module named by its first path segment; otherwise the recipe is
    run on the stock web placeholder image.
    """
    url = post.get('url', '')
    if url.startswith('/'):
        url = url[1:]
    if recipe_ref:
        # e.g. 'imagemagick.my_recipe'
        recipe = request.env.ref(recipe_ref)
    if url:
        module = url.split('/')[0]
        addons_dir = '/'.join(get_module_path(module).split('/')[0:-1])
        return recipe.send_file(url=addons_dir + '/' + url)
    placeholder = Image(
        filename=get_module_path('web') + '/static/src/img/placeholder.png')
    fmt = recipe.image_format if recipe.image_format else 'png'
    return http.send_file(
        StringIO(recipe.run(placeholder).make_blob(format=fmt)))
def _modules(self):
    """Aggregate module names and their containing directories.

    Writes ``self.modules`` (comma-separated module names over all tasks)
    and ``self.git_projects`` (comma-separated, de-duplicated names of the
    directories containing those modules).
    """
    module_names = []
    project_dirs = set()
    for task in self.task_ids:
        for module in task.module_ids:
            module_names.append(module.name)
            # Hoisted: the original called get_module_path() twice per
            # module; the set already de-duplicates, so the explicit
            # membership check was redundant. Second-to-last path
            # component ~ the git project (repository) directory.
            project_dirs.add(get_module_path(module.name).split('/')[-2])
    self.git_projects = ','.join(project_dirs)
    self.modules = ','.join(module_names)
def deploy_post(self):
    """Post-deployment hook for the Drupal 'wkc' (wikicompare) app.

    Pushes the wikicompare script and the revisioning PostgreSQL patch
    into the container, then applies the patch in place.
    """
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'drupal'\
            and self.application_id.code == 'wkc':
        self.send(modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/wikicompare.script',
            '/var/www/drupal/wikicompare.script', username='******')
        self.send(modules.get_module_path(
            'clouder_template_drupal_wikicompare') +
            '/res/patch/revisioning_postgres.patch',
            '/var/www/drupal/revisioning_postgres.patch', username='******')
        # NOTE(review): '<' assumes execute() runs through a shell -- confirm.
        self.execute(['patch', '-p0', '-d',
                      '/var/www/drupal/sites/all/modules/revisioning/', '<',
                      '/var/www/drupal/revisioning_postgres.patch'],
                     username='******')
def run(self, image, **kwargs):
    # return a image with specified recipe
    #
    # Evaluation context: recipe parameters first (generic, then
    # device-type-specific overrides), then a fixed set of helpers.
    kwargs.update({p.name: p.value for p in self.param_ids})
    kwargs.update({p.name: p.value for p in self.param_ids.filtered(
        lambda p: p.device_type == request.session.get(
            'device_type', 'md'))})  # get parameters from recipe
    # TODO: Remove time import once caching is working
    import time
    # NOTE(review): 'company' is only used by the commented-out logo
    # entries below; kept so they can be re-enabled.
    company = request.website.company_id if request.website \
        else self.env.user.company_id
    kwargs.update({
        'time': time,
        'Image': Image,
        'Color': Color,
        'display': display,
        'Drawing': Drawing,
        'image': image,
        '_logger': _logger,
        'user': self.env['res.users'].browse(self._uid),
        'record': kwargs.get('record', None),
        'http': http,
        'request': request,
        'website': request.website,
        #~ 'logo': Image(blob=company.logo.decode('base64')),
        #~ 'logo_web': Image(blob=company.logo_web.decode('base64')),
    })
    try:
        # NOTE(review): the mode=/nocopy= keywords suggest this is Odoo's
        # safe_eval rather than the builtin -- confirm the import; with the
        # builtin eval this would execute arbitrary stored code.
        eval(self.recipe, kwargs, mode='exec', nocopy=True)
    except ValueError:
        # Only ValueError is caught; other recipe errors propagate.
        e = sys.exc_info()
        _logger.error('ImageMagick Recipe: %s' % ''.join(
            traceback.format_exception(e[0], e[1], e[2])))
    # Recipe result ('res'), else the input image, else the placeholder.
    return kwargs.get('res', image or None) or Image(
        filename=get_module_path('web') + '/static/src/img/placeholder.png')
def deploy_shinken_server(self, nrpe):
    """
    Deploy the configuration file to watch the server performances.

    Uploads the server-shinken template, substitutes the IP / NAME /
    SSHPORT / NRPEPORT placeholders with sed, then reloads shinken.
    """
    server = nrpe.server_id
    self.send(modules.get_module_path('clouder_template_shinken') +
              '/res/server-shinken.config',
              server.shinken_configfile, username='******')
    self.execute([
        'sed', '-i', '"s/IP/' + server.ip + '/g"',
        server.shinken_configfile
    ], username='******')
    self.execute([
        'sed', '-i', '"s/NAME/' + server.name + '/g"',
        server.shinken_configfile
    ], username='******')
    self.execute([
        'sed', '-i', '"s/SSHPORT/' + str(server.ssh_port) + '/g"',
        server.shinken_configfile
    ], username='******')
    self.execute([
        'sed', '-i',
        '"s/NRPEPORT/' + nrpe.ports['nrpe']['hostport'] + '/g"',
        server.shinken_configfile
    ], username='******')
    # Reload shinken so the new host definition is picked up.
    self.execute(['/usr/local/shinken/bin/init.d/shinken', 'reload'],
                 username='******')
def credit_profile_form(self, stmt=None):
    """Serve the static credit-profile PDF shipped with gt_order_mgnt
    as an attachment download.

    :param stmt: unused, kept for route compatibility.
    :return: an HTTP response with Content-Type application/pdf.
    """
    pdf = modules.get_module_path(
        'gt_order_mgnt') + "/credit_profile_pdf/credit_profile_pdf.pdf"
    # Context manager closes the handle deterministically; the original
    # leaked it until garbage collection.
    with open(pdf, 'rb') as f:
        pdf_content = f.read()
    response = request.make_response(
        pdf_content,
        headers=[('Content-Type', 'application/pdf'),
                 ('Content-Disposition',
                  'attachment; filename=credit_profile_pdf.pdf;')])
    return response
    # (a commented-out variant serving /import_csv/coldcalling.csv under
    # /coldcalling_import_formate/ was removed; see VCS history)
def deploy_build(self):
    """
    Configure nginx.

    For 'wordpress' bases: installs the nginx vhost template, substitutes
    the BASE / DOMAIN / PATH placeholders, enables the site and reloads
    nginx inside the service's container.
    """
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'wordpress':
        ssh = self.connect(self.service_id.container_id.fullname)
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.send(ssh,
                  modules.get_module_path('clouder_template_wordpress') +
                  '/res/nginx.config', config_file)
        self.execute(ssh, ['sed', '-i', '"s/BASE/' + self.name + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i',
                           '"s/DOMAIN/' + self.domain_id.name + '/g"',
                           config_file])
        # Escape '/' in the filesystem path so it survives the sed pattern.
        self.execute(ssh, ['sed', '-i',
                           '"s/PATH/' +
                           self.service_id.full_localpath_files
                           .replace('/', '\/') + '/g"', config_file])
        # Enable the vhost by symlinking it into sites-enabled.
        self.execute(ssh, ['ln', '-s',
                           '/etc/nginx/sites-available/' + self.fullname,
                           '/etc/nginx/sites-enabled/' + self.fullname])
        self.execute(ssh, ['/etc/init.d/nginx', 'reload'])
        ssh.close()
    return res
def install_pip_dependencies(self):
    """Install the pip requirements of every module in the recordset.

    Modules without a requirements file are skipped. Always returns True.
    """
    for record in self:
        requirements = get_req_file(modules.get_module_path(record.name))
        if not requirements:
            continue
        install_pip_requirements(requirements)
    return True
def setUp(self):
    """Locate this addon's ir.module.module record and its directory."""
    super(TestModuleUpgrade, self).setUp()
    module_name = 'module_auto_update'
    domain = [('name', '=', module_name)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(module_name)
def download(self, cr, uid, ids, download=True, context=None):
    """Download newer module zips from each module's url and install them.

    For every module with a url whose filename-embedded version is newer
    than the installed one: fetch the zip, write it into the downloaded
    addons dir, refresh the module metadata/dependencies/category from its
    manifest, and zipimport it.

    :return: list of urls that were (or would be, if download=False) fetched.
    """
    res = []
    for mod in self.browse(cr, uid, ids, context=context):
        if not mod.url:
            continue
        # Version is parsed out of the zip filename, e.g. 'name-1.2.zip'.
        match = re.search('-([a-zA-Z0-9\._-]+)(\.zip)', mod.url, re.I)
        version = '0'
        if match:
            version = match.group(1)
        # Skip when the installed version is already current.
        if parse_version(mod.installed_version or '0') >= \
                parse_version(version):
            continue
        res.append(mod.url)
        if not download:
            continue
        # NOTE(review): no TLS/certificate validation guarantees with
        # urllib.urlopen (Python 2) -- the zip content is trusted as-is.
        zip_content = urllib.urlopen(mod.url).read()
        fname = addons.get_module_path(str(mod.name)+'.zip',
                                       downloaded=True)
        try:
            with open(fname, 'wb') as fp:
                fp.write(zip_content)
        except Exception:
            _logger.exception('Error when trying to create module '
                              'file %s', fname)
            raise orm.except_orm(
                _('Error'),
                _('Can not create the module file:\n %s') % (fname,))
        # Refresh stored metadata from the freshly downloaded manifest.
        terp = self.get_module_info(mod.name)
        self.write(cr, uid, mod.id, self.get_values_from_terp(terp))
        cr.execute('DELETE FROM ir_module_module_dependency ' \
                   'WHERE module_id = %s', (mod.id,))
        self._update_dependencies(cr, uid, mod, terp.get('depends', []))
        self._update_category(cr, uid, mod,
                              terp.get('category', 'Uncategorized'))
        # Import module
        zimp = zipimport.zipimporter(fname)
        zimp.load_module(mod.name)
    return res
def create(self, cr, uid, ids, data, context=None):
    """Render the Scribus report for ``ids`` and return it as a PDF.

    Renders one .sla per record, converts each to PDF with scribus-ng under
    xvfb, merges them, and returns ``(pdf_bytes, 'pdf')``. When the report
    type is 'scribus_sla', returns the raw sla of the FIRST record instead.
    """
    pool = registry(cr.dbname)
    merger = PdfFileMerger()
    outfiles = []
    for p in pool.get(self.model).read(cr, uid, ids):
        outfiles.append(self.newfilename())
        sla = self.render(cr, uid, p, data.get('template') or self.template)
        if self.report_type == 'scribus_sla':
            # Raw sla requested: drop the reserved output file and return.
            os.unlink(outfiles[-1])
            return (sla.read(), 'sla')
        # SECURITY(review): shell command built by string interpolation;
        # a filename containing shell metacharacters would be executed.
        command = "xvfb-run -a scribus-ng -ns -g %s -py %s -pa -o %s" % (
            sla.name,
            os.path.join(get_module_path('report_scribus'), 'scribus.py'),
            outfiles[-1])
        _logger.info(command)
        # NOTE(review): exit status 'res' is never checked; failure is only
        # detected via the missing/empty output file below.
        res = os.system(command)
        sla.close()
        if not os.path.exists(outfiles[-1]) or os.stat(
                outfiles[-1]).st_size == 0:
            raise MissingError(
                'There are something wrong with the template or scribus installation'
            )
        # file() is Python-2 only.
        merger.append(PdfFileReader(file(outfiles[-1], 'rb')))
    outfile = tempfile.NamedTemporaryFile(mode='w+b', suffix='.pdf')
    merger.write(outfile.name)
    for filename in outfiles:
        os.unlink(filename)
    outfile.seek(0)
    pdf = outfile.read()
    outfile.close()
    return (pdf, 'pdf')
def _file_read(self, cr, uid, fname, bin_size=False):
    """Read an attachment from the filestore, with placeholder fallback.

    On any read failure, serves (and, when the
    'attachment_default_image.create_image' parameter is set, caches into
    the filestore slot) a placeholder image instead.

    :param bin_size: when True return the file size instead of content.
    :return: base64 content string, or an int size when bin_size is True.
    """
    full_path = self._full_path(cr, uid, fname)
    r = ''
    try:
        if bin_size:
            r = os.path.getsize(full_path)
        else:
            # Python-2 base64 codec on the raw bytes.
            r = open(full_path, 'rb').read().encode('base64')
    except:
        # NOTE(review): bare except -- any failure (missing file,
        # permissions, ...) falls through to the placeholder path.
        try:
            env = api.Environment(cr, uid, {})
            create_image = int(env['ir.config_parameter'].get_param(
                'attachment_default_image.create_image', '0'))
            path = '%s/placeholder.png' % get_module_path(
                'attachment_default_image')
            _logger.warn('\npath: %s\n', path)
            if create_image:
                # Copy the placeholder into the filestore slot so the
                # next read succeeds directly.
                r = open(path, 'rb').read()
                dir_path = '/'.join(full_path.split('/')[:-1])
                if not os.path.isdir(dir_path):
                    os.mkdir(dir_path)
                r_new = open(full_path, 'wb')
                r_new.write(r)
                r_new.close()
            if bin_size:
                r = os.path.getsize(path)
            else:
                r = (r or open(path, 'rb').read()).encode('base64')
        except:
            # NOTE(review): 'path' may be unbound here if
            # get_module_path itself raised -- this log call would then
            # raise a NameError of its own.
            _logger.exception(
                "_read_file reading %s. Placeholder image also failed (%s).",
                full_path, path)
        _logger.exception(
            "Using placeholder image (_read_file reading %s)", full_path)
    return r
def main():
    """Dispatch an openerp CLI subcommand (default: the legacy 'server')."""
    args = sys.argv[1:]

    # The only shared option is '--addons-path=' needed to discover
    # additional commands from modules.
    if len(args) > 1 and args[0].startswith('--addons-path=') \
            and not args[1].startswith("-"):
        # parse only the addons-path, do not setup the logger...
        openerp.tools.config._parse_config([args[0]])
        args = args[1:]

    command = "server"  # default legacy command

    # Subcommand discovery: import every addon shipping a 'cli' directory.
    # TODO: find a way to discover addons subcommands without importing
    # the world.
    if args and not args[0].startswith("-"):
        logging.disable(logging.CRITICAL)
        for module in get_modules():
            if isdir(joinpath(get_module_path(module), 'cli')):
                __import__('openerp.addons.' + module)
        logging.disable(logging.NOTSET)
        command, args = args[0], args[1:]

    if command in commands:
        o = commands[command]()
        o.run(args)
def open_documentation(self):
    """Render this record's asciidoc file to PDF and return a download URL.

    The record's ``path`` is '<module>/<subdirs>/<file>'; the file is
    rendered with asciidoctor-pdf into /tmp, attached to the record, and
    an act_url action pointing at the attachment is returned.
    """
    for rec in self:
        split_path = rec.path.split(os.sep)
        module_name = len(split_path) > 1 and split_path[0] or ''
        file_name_total = len(split_path) > 1 and split_path[-1] or ''
        file_name_split = file_name_total and file_name_total.split(
            os.extsep) or False
        file_name = len(file_name_split) > 1 and file_name_split[0] or ''
        module_path = modules.get_module_path(module_name)
        path_from_module = len(split_path) > 1 and os.sep.join(
            split_path[1:]) or ''
        total_path = os.sep.join([module_path, path_from_module])
        # SECURITY(review): shell=True with an interpolated path -- a path
        # containing shell metacharacters would be executed; prefer an
        # argv list with shell=False.
        cmd = subprocess.Popen("asciidoctor-pdf -D /tmp " + total_path,
                               stderr=subprocess.STDOUT, shell=True,
                               stdout=subprocess.PIPE)
        cmd.wait()
        with open('/tmp/' + file_name + '.pdf', 'rb') as file:
            content = file.read()
        rec.remove_attachments()
        attachment = rec.create_attachment(
            base64.encodestring(content), rec.name + '.pdf')
        url = "/web/binary/saveas?model=ir.attachment&field=datas&id=%s&filename_field=name" % attachment.id
        # NOTE(review): returns on the first record, so only one record of
        # a multi-record set is ever processed.
        return {
            "type": "ir.actions.act_url",
            "url": url,
            "target": "self"
        }
def deploy(self):
    """
    Configure the domain in the bind container, if configured.

    Uploads the zone-file template, substitutes DOMAIN/IP, appends the
    zone declaration to named.conf line by line, then reloads bind9.
    """
    if self.dns_id:
        ssh = self.connect(self.dns_id.fullname)
        self.send(ssh, modules.get_module_path('clouder_template_bind') +
                  '/res/bind.config', self.configfile)
        self.execute(ssh, ['sed', '-i', '"s/DOMAIN/' + self.name + '/g"',
                           self.configfile])
        self.execute(ssh, ['sed', '-i',
                           '"s/IP/' + self.dns_id.server_id.ip + '/g"',
                           self.configfile])
        # Append the zone declaration to named.conf, one line at a time.
        self.execute(ssh, [
            "echo 'zone \"" + self.name + "\" {' >> /etc/bind/named.conf"])
        self.execute(ssh, ['echo "type master;" >> /etc/bind/named.conf'])
        # NOTE(review): hard-coded secondary/transfer IP -- presumably an
        # OVH DNS slave; confirm before reuse.
        self.execute(ssh, ['echo "allow-transfer {213.186.33.199;};" '
                           '>> /etc/bind/named.conf'])
        self.execute(ssh, ["echo 'file \"/etc/bind/db." + self.name +
                           "\";' >> /etc/bind/named.conf"])
        self.execute(ssh, ['echo "notify yes;" >> /etc/bind/named.conf'])
        self.execute(ssh, ['echo "};" >> /etc/bind/named.conf'])
        # End marker used elsewhere to locate/remove this zone block.
        self.execute(ssh, [
            'echo "//END ' + self.name + '" >> /etc/bind/named.conf'])
        self.execute(ssh, ['/etc/init.d/bind9', 'reload'])
        ssh.close()
def download(self, cr, uid, ids, download=True, context=None):
    """Download newer module zips from each module's url and install them.

    Same flow as the legacy downloader: compare the filename-embedded
    version against the installed one, fetch the zip, store it in the
    downloaded addons dir, refresh manifest metadata/dependencies/category,
    and zipimport the module.

    :return: list of urls that were (or would be, if download=False) fetched.
    """
    res = []
    default_version = modules.adapt_version('1.0')
    for mod in self.browse(cr, uid, ids, context=context):
        if not mod.url:
            continue
        # Version parsed from the zip filename, e.g. 'name-1.2.zip'.
        match = re.search('-([a-zA-Z0-9\._-]+)(\.zip)', mod.url, re.I)
        version = default_version
        if match:
            version = match.group(1)
        if parse_version(mod.installed_version) >= parse_version(version):
            continue
        res.append(mod.url)
        if not download:
            continue
        # NOTE(review): Python-2 urllib, no certificate validation.
        zip_content = urllib.urlopen(mod.url).read()
        fname = modules.get_module_path(str(mod.name) + '.zip',
                                        downloaded=True)
        try:
            with open(fname, 'wb') as fp:
                fp.write(zip_content)
        except Exception:
            _logger.exception('Error when trying to create module '
                              'file %s', fname)
            raise orm.except_orm(
                _('Error'),
                _('Can not create the module file:\n %s') % (fname,))
        # Refresh stored metadata from the freshly downloaded manifest.
        terp = self.get_module_info(mod.name)
        self.write(cr, uid, mod.id, self.get_values_from_terp(terp))
        cr.execute('DELETE FROM ir_module_module_dependency '
                   'WHERE module_id = %s', (mod.id,))
        self._update_dependencies(cr, uid, mod, terp.get('depends', []))
        self._update_category(cr, uid, mod,
                              terp.get('category', 'Uncategorized'))
        # Import module
        zimp = zipimport.zipimporter(fname)
        zimp.load_module(mod.name)
    return res
def deploy_build(self):
    """
    Configure nginx.

    For 'piwik' bases: installs the nginx vhost template, substitutes the
    BASE / DOMAIN / PATH placeholders, enables the site and reloads nginx
    inside the service's container.
    """
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'piwik':
        ssh = self.connect(self.service_id.container_id.fullname)
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.send(
            ssh,
            modules.get_module_path('clouder_template_piwik') +
            '/res/nginx.config', config_file)
        self.execute(
            ssh,
            ['sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
        self.execute(ssh, [
            'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
            config_file
        ])
        # Escape '/' in the filesystem path for the sed pattern.
        self.execute(ssh, [
            'sed', '-i', '"s/PATH/' +
            self.service_id.full_localpath_files.replace('/', '\/') +
            '/g"', config_file
        ])
        # Enable the vhost by symlinking it into sites-enabled.
        self.execute(ssh, [
            'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
            '/etc/nginx/sites-enabled/' + self.fullname
        ])
        self.execute(ssh, ['/etc/init.d/nginx', 'reload'])
        ssh.close()
    return res
def main():
    """Entry point: parse shared options, then run one CLI subcommand."""
    args = sys.argv[1:]

    # The only shared option is '--addons-path=' needed to discover
    # additional commands from modules.
    if len(args) > 1 and args[0].startswith('--addons-path=') \
            and not args[1].startswith("-"):
        # parse only the addons-path, do not setup the logger...
        openerp.tools.config._parse_config([args[0]])
        args = args[1:]

    command = "server"  # default legacy command

    # Subcommand discovery: import every addon shipping a 'cli' directory.
    # TODO: find a way to discover addons subcommands without importing
    # the world.
    if args and not args[0].startswith("-"):
        logging.disable(logging.CRITICAL)
        for module in get_modules():
            if isdir(joinpath(get_module_path(module), 'cli')):
                __import__('openerp.addons.' + module)
        logging.disable(logging.NOTSET)
        command, args = args[0], args[1:]

    if command in commands:
        o = commands[command]()
        o.run(args)
def button_save_translation(self):
    """Export this module's translations into its i18n/ directory.

    Writes one .pot template plus one .po per translatable language
    (excluding en_US). Always returns True.
    """
    export_format = 'po'
    i18n_path = os.path.join(get_module_path(self.name), 'i18n')
    if not os.path.isdir(i18n_path):
        os.mkdir(i18n_path)
    langs = self.env['res.lang'].search(
        [('translatable', '=', True), ('code', '!=', 'en_US')])
    # (filename, lang_code) pairs; lang_code False means the .pot template.
    files = [('%s.pot' % self.name, False)]
    files += [('%s.%s' % (get_iso_codes(lang.code), export_format),
               lang.code) for lang in langs]
    for filename, lang_code in files:
        target = os.path.join(i18n_path, filename)
        with open(target, 'w') as buf:
            tools.trans_export(lang_code, [self.name], buf, export_format,
                               self.env.cr)
    return True
def gitlab_ressource(self, type, name, project_id='', data=None):
    """Create or update a gitlab resource.

    :param type: 'group', 'variable' or 'file'.
    :param name: group path / variable key / file name under the template's
        res/ directory.
    :param project_id: gitlab project id (for 'variable' and 'file').
    :param data: extra request payload; mutated with 'prefix'/'key'.
    :return: the gitlab API response (dict or response object), or None
        for an unknown ``type``.
    """
    # Fixed mutable default argument: a shared dict would leak the
    # 'prefix'/'key' entries between calls.
    if data is None:
        data = {}
    # Initialized so an unknown type returns None instead of raising
    # NameError at the final return.
    res = None
    path = ''
    if type == 'group':
        path = '/groups'
    if type == 'group':
        found = False
        data['prefix'] = name
        groups = self.request(self.gitlab_url + path,
                              headers=self.gitlab_headers).json()
        # Reuse the group when one with this path already exists.
        for group in groups:
            if group['path'] == name:
                res = group
                found = True
        if not found:
            res = self.request(self.gitlab_url + path,
                               headers=self.gitlab_headers,
                               method='post', data=data)
    if type == 'variable':
        data['key'] = name
        # Create when the variable is missing, update otherwise.
        if self.request(self.gitlab_url + '/projects/' + project_id +
                        '/variables/' + name,
                        headers=self.gitlab_headers).status_code != 200:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/variables', headers=self.gitlab_headers,
                               method='post', data=data).json()
        else:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/variables/' + name,
                               headers=self.gitlab_headers,
                               method='put', data=data).json()
    if type == 'file':
        # Commit the template resource file onto the project's master.
        with open(modules.get_module_path(
                'clouder_template_' +
                self.container_id.application_id.type_id.name) +
                '/res/' + name, 'rb') as file:
            res = self.request(self.gitlab_url + '/projects/' + project_id +
                               '/repository/files',
                               headers=self.gitlab_headers, method='post',
                               data={'file_path': name,
                                     'branch_name': 'master',
                                     'commit_message': 'Add ' + name,
                                     'content': file.read()})
    return res
def deploy_link(self):
    """
    Deploy the configuration file to watch the base.

    Installs the base-shinken template (or the -nosave variant when
    autosave is off), substitutes its placeholders with sed, then reloads
    shinken.
    """
    super(ClouderBaseLink, self).deploy_link()
    if self.name.name.code == 'shinken':
        config_file = 'base-shinken'
        if not self.base_id.autosave:
            config_file = 'base-shinken-nosave'
        self.target.send(
            modules.get_module_path('clouder_template_shinken') +
            '/res/' + config_file + '.config',
            self.base_id.shinken_configfile, username='******')
        # Placeholder substitutions, all against the first backup target.
        self.target.execute([
            'sed', '-i',
            '"s/BACKUPIP/' + self.base_id.backup_ids[0].server_id.ip +
            '/g"', self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/PORT/' +
            self.base_id.backup_ids[0].ports['nrpe']['hostport'] + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/METHOD/' + self.base_id.backup_ids[0].backup_method +
            '/g"', self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/TYPE/base/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/UNIQUE_NAME/' + self.base_id.fullname + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/DATABASES/' + self.base_id.databases_comma + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/BASE/' + self.base_id.name + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/DOMAIN/' + self.base_id.fulldomain + '/g"',
            self.base_id.shinken_configfile
        ], username='******')
        # Reload shinken so the new service definition is picked up.
        self.target.execute(
            ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
            username='******')
def _get_logo(img):
    """Return the named Axfood logo base64-encoded, or None if unreadable.

    Best-effort: any failure (missing module, missing file, ...) is
    logged at debug level and yields None, exactly as before.
    """
    try:
        logo_path = os.path.join(get_module_path('edi_gs1_axfood'),
                                 'static', 'img', img)
        return open(logo_path, 'rb').read().encode('base64')
    except:
        # Bare except kept on purpose: the original deliberately treats
        # every failure as "no logo".
        _logger.debug("Image not found for Axfood Chain: '%s'" % img)
        return None
def setUp(self):
    """Enable the deprecated-behaviour parameter and locate this addon."""
    super(TestModuleUpgrade, self).setUp()
    module_name = 'module_auto_update'
    self.env["ir.config_parameter"].set_param(PARAM_DEPRECATED, "1")
    domain = [('name', '=', module_name)]
    self.own_module = self.env['ir.module.module'].search(domain)
    self.own_dir_path = get_module_path(module_name)
def data_to_img(self, data):
    """Return a wand Image built from base64 ``data``.

    Falls back to the stock web placeholder image when ``data`` is empty.
    """
    if data:
        return Image(blob=data.decode('base64'))
    # Derive the addons directory from the path of the 'web' module
    # (same resolution steps as before), then point at the placeholder.
    module_name = '/web/static/src/img/foo.png'.split('/')[1]
    addons_dir = '/'.join(get_module_path(module_name).split('/')[0:-1])
    return Image(filename=addons_dir + '/web/static/src/img/placeholder.png')
def _get_logo(role):
    """Return the base64-encoded logo for a Bergendahls ``role``.

    :param role: 'CITY GROSS', 'M.A.T' or 'MATREBELLERNA'.
    :return: base64 string, or None for an unknown role (the original
        raised UnboundLocalError in that case).
    """
    logos = {
        'CITY GROSS': 'citygross.png',
        'M.A.T': 'mat.png',
        'MATREBELLERNA': 'matrebellen.png',
    }
    img = logos.get(role)
    if img is None:
        return None
    # Context manager closes the handle; the original leaked it.
    with open(os.path.join(get_module_path('edi_gs1_bergendahls'),
                           'static', 'img', img), 'rb') as fh:
        return fh.read().encode('base64')
def deploy_post(self):
    """
    Configure the ldap server.

    Preseeds every debconf question slapd asks (passwords, organization,
    domain), reconfigures slapd non-interactively, uploads and customizes
    an ldif schema, points phpldapadmin at the new base dn, restarts the
    container and loads the schema with ldapadd.
    """
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'openldap':
        ssh = self.connect(self.fullname)
        # Preseed debconf so dpkg-reconfigure below runs unattended.
        self.execute(ssh, [
            'echo "slapd slapd/internal/generated_adminpw password ' +
            self.options['password']['value'] + '"', '|',
            'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/password2 password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/internal/adminpw password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/password1 password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd shared/organization string ' +
                           self.options['organization']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/domain string ' +
                           self.options['domain']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['dpkg-reconfigure', '-f', 'noninteractive',
                           'slapd'])
        config_file = '/etc/ldap/schema/' + \
            self.options['domain']['value'] + '.ldif'
        self.send(ssh, modules.get_module_path('clouder_template_ldap') +
                  '/res/ldap.ldif', config_file)
        # Build 'dc=a,dc=b' from the dotted domain name.
        domain_dc = ''
        for dc in self.options['domain']['value'].split('.'):
            if domain_dc:
                domain_dc += ','
            domain_dc += 'dc=' + dc
        self.execute(ssh, ['sed', '-i', '"s/\$DOMAIN/' + domain_dc +
                           '/g"', config_file])
        self.execute(ssh, ['sed', '-i', '"s/\$PASSWORD/' +
                           self.options['password']['value'] + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i', '"s/\$ORGANIZATION/' +
                           self.options['organization']['value'] + '/g"',
                           config_file])
        # Point phpldapadmin at the new base dn.
        self.execute(ssh, ['sed', '-i', '"s/dc=example,dc=com/' +
                           domain_dc + '/g"',
                           '/etc/phpldapadmin/config.php'])
        ssh.close()
        self.start()
        # Reconnect after the restart to load the customized schema.
        ssh = self.connect(self.fullname)
        self.execute(ssh, ['ldapadd', '-Y', 'EXTERNAL', '-H', 'ldapi:///',
                           '-f', config_file])
        ssh.close()
def deploy_link(self):
    """
    Deploy the configuration file to watch the container.

    Installs the container-shinken template, substitutes its placeholders
    with sed, then reloads shinken.
    """
    super(ClouderContainerLink, self).deploy_link()
    if self.name.type_id.name == 'shinken':
        # Fix: config_file was only assigned when autosave was set, so the
        # non-autosave case crashed with UnboundLocalError. Mirrors the
        # base-link logic which falls back to a '-nosave' template.
        config_file = 'container-shinken'
        if not self.container_id.autosave:
            # TODO(review): confirm this template ships with the module.
            config_file = 'container-shinken-nosave'
        self.target.send(
            modules.get_module_path('clouder_template_shinken') +
            '/res/' + config_file + '.config',
            self.container_id.shinken_configfile, username='******')
        self.target.execute([
            'sed', '-i', '"s/BACKUPIP/' +
            self.container_id.backup_ids[0].server_id.ip + '/g"',
            self.container_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/PORT/' +
            self.container_id.backup_ids[0].ports['nrpe']['hostport'] +
            '/g"', self.container_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/METHOD/' +
            self.container_id.backup_ids[0].backup_method + '/g"',
            self.container_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i', '"s/TYPE/container/g"',
            self.container_id.shinken_configfile
        ], username='******')
        # (a second, identical BACKUPIP substitution was removed: after the
        # first pass the placeholder is gone, so it was a no-op)
        self.target.execute([
            'sed', '-i',
            '"s/UNIQUE_NAME/' + self.container_id.fullname + '/g"',
            self.container_id.shinken_configfile
        ], username='******')
        self.target.execute([
            'sed', '-i',
            '"s/HOST/' + self.container_id.server_id.name + '/g"',
            self.container_id.shinken_configfile
        ], username='******')
        # Reload shinken so the new host definition is picked up.
        self.target.execute(
            ['/usr/local/shinken/bin/init.d/shinken', 'reload'],
            username='******')
def compute_coverage_state(self, force_product_ids=None):
    """Recompute coverage dates/states of purchase order lines.

    Runs the covering_dates_query.sql shipped with this module for the
    products of the recordset (or ``force_product_ids`` when given) and
    writes the resulting date_required / limit_order_date / covering_*
    values on each matching purchase.order.line.
    """
    module_path = modules.get_module_path('purchase_planning_improved')
    product_ids = force_product_ids or []
    if not force_product_ids:
        products = self.mapped('product_id')
        if not products:
            return
        product_ids = products.ids
    with open(module_path + '/sql/' + 'covering_dates_query.sql') as sql_file:
        # The SQL file takes a single tuple-of-ids parameter.
        self.env.cr.execute(sql_file.read(), (tuple(product_ids), ))
    for result_line in self.env.cr.dictfetchall():
        line = self.env['purchase.order.line'].search([
            ('id', '=', result_line['pol_id'])
        ])
        if line.product_id.type == 'product':
            real_need_date = result_line['real_need_date'] or False
            # Schedule date derived from the real need date, then the
            # latest acceptable order date derived from that schedule.
            date_required = real_need_date and self.env['procurement.order']. \
                _get_purchase_schedule_date(procurement=False,
                                            company=line.order_id.company_id,
                                            ref_product=line.product_id,
                                            ref_location=line.order_id.location_id,
                                            ref_date=real_need_date) or False
            limit_order_date = date_required and self.env['procurement.order']. \
                with_context(force_partner_id=line.order_id.partner_id.id). \
                _get_purchase_order_date(procurement=False,
                                         company=line.order_id.company_id,
                                         schedule_date=date_required,
                                         ref_product=line.product_id) or False
            limit_order_date = limit_order_date and fields.Datetime.to_string(
                limit_order_date) or False
            date_required = date_required and fields.Datetime.to_string(
                date_required) or False
            dict_pol = {
                'date_required': date_required,
                'limit_order_date': limit_order_date,
                'covering_date': result_line['covering_date'] or False,
                'covering_state': result_line['covering_date'] and
                'coverage_computed' or 'all_covered',
                'to_delete': result_line['to_delete'],
                'opmsg_reduce_qty': result_line['opmsg_reduce_qty'] or 0,
            }
        else:
            # Non-stockable products cannot be coverage-computed.
            dict_pol = {
                'date_required': False,
                'limit_order_date': False,
                'covering_date': False,
                'covering_state': 'unknown_coverage',
                'to_delete': False,
                'opmsg_reduce_qty': line.product_qty,
            }
        line.write(dict_pol)
def deploy_build(self):
    """
    Build the drupal by calling drush site-install, and installing the
    specified modules and themes.
    """
    from openerp import modules
    res = super(ClouderBase, self).deploy_build()
    if self.application_id.type_id.name == 'drupal':
        config_file = '/etc/nginx/sites-available/' + self.fullname
        self.container_id.send(
            modules.get_module_path('clouder_template_drupal') +
            '/res/nginx.config', config_file)
        self.container_id.execute(
            ['sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
        self.container_id.execute([
            'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
            config_file
        ])
        # Enable the vhost and reload nginx.
        self.container_id.execute([
            'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
            '/etc/nginx/sites-enabled/' + self.fullname
        ])
        self.container_id.execute(['/etc/init.d/nginx', 'reload'])
        #
        # NOTE(review): the bare '******' token below is a credentials
        # redaction artifact that breaks the syntax; the original
        # presumably read "'--account-pass=' + <admin password>,
        # '--sites-subdir=' + self.fulldomain". Restore before running.
        self.container_id.execute([
            'drush', '-y', 'si',
            '--db-url=' + self.container_id.db_type + '://' +
            self.container_id.db_user + ':' +
            self.container_id.db_password + '@' +
            self.container_id.db_server + '/' + self.fullname_,
            '--account-mail=' + self.admin_email,
            '--account-name=' + self.admin_name,
            '--account-pass='******'--sites-subdir=' + self.fulldomain,
            'minimal'
        ], path='/var/www/drupal', username='******')
        # Install the modules listed in the application options, if any.
        # NOTE(review): the local 'modules' below shadows the
        # 'from openerp import modules' above; get_module_path is no
        # longer reachable past this point.
        if self.application_id.options['install_modules']['value']:
            modules = self.application_id.options['install_modules'][
                'value'].split(',')
            for module in modules:
                self.container_id.execute(
                    ['drush', '-y', 'en', module],
                    path='/var/www/drupal/sites/' + self.fulldomain,
                    username='******')
        # Enable and set the default theme, if configured.
        if self.application_id.options['theme']['value']:
            theme = self.application_id.options['theme']['value']
            self.container_id.execute(
                ['drush', '-y', 'pm-enable', theme],
                path='/var/www/drupal/sites/' + self.fulldomain,
                username='******')
            self.container_id.execute([
                'drush', 'vset', '--yes', '--exact', 'theme_default', theme
            ], path='/var/www/drupal/sites/' + self.fulldomain,
                username='******')
    return res
def deploy_post(self):
    """
    Configure the ldap server.

    Preseeds debconf, reconfigures slapd non-interactively, uploads and
    customizes an ldif schema, points phpldapadmin at the new base dn,
    restarts the container and loads the schema with ldapadd.
    """
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'openldap':
        ssh = self.connect(self.fullname)
        # Preseed debconf so dpkg-reconfigure below runs unattended.
        self.execute(ssh, [
            'echo "slapd slapd/internal/generated_adminpw password ' +
            self.options['password']['value'] + '"', '|',
            'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/password2 password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/internal/adminpw password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/password1 password ' +
                           self.options['password']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd shared/organization string ' +
                           self.options['organization']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['echo "slapd slapd/domain string ' +
                           self.options['domain']['value'] + '"',
                           '|', 'debconf-set-selections'])
        self.execute(ssh, ['dpkg-reconfigure', '-f', 'noninteractive',
                           'slapd'])
        config_file = '/etc/ldap/schema/' + \
            self.options['domain']['value'] + '.ldif'
        self.send(ssh, modules.get_module_path('clouder_ldap') +
                  '/res/ldap.ldif', config_file)
        # Build 'dc=a,dc=b' from the dotted domain name.
        # Fix: the original iterated self.options['value'], which is not a
        # valid option key -- every other access in this method (and the
        # sibling template) uses self.options['domain']['value'].
        domain_dc = ''
        for dc in self.options['domain']['value'].split('.'):
            if domain_dc:
                domain_dc += ','
            domain_dc += 'dc=' + dc
        self.execute(ssh, ['sed', '-i', '"s/\$DOMAIN/' + domain_dc +
                           '/g"', config_file])
        self.execute(ssh, ['sed', '-i', '"s/\$PASSWORD/' +
                           self.options['password']['value'] + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i', '"s/\$ORGANIZATION/' +
                           self.options['organization']['value'] + '/g"',
                           config_file])
        # Point phpldapadmin at the new base dn.
        self.execute(ssh, ['sed', '-i', '"s/dc=example,dc=com/' +
                           domain_dc + '/g"',
                           '/etc/phpldapadmin/config.php'])
        ssh.close()
        self.start()
        # Fix: reconnect with self.fullname, matching the first connect
        # above -- 'self.container' is not defined on this model.
        ssh = self.connect(self.fullname)
        self.execute(ssh, ['ldapadd', '-Y', 'EXTERNAL', '-H', 'ldapi:///',
                           '-f', config_file])
        ssh.close()
def _image(self):
    """Compute a demo image by running this recipe on a configured test
    image (seeding a default on first use). Best-effort: any failure
    leaves self.image untouched.
    """
    try:
        url = self.env['ir.config_parameter'].get_param(
            'imagemagick.test_image')
        if not url:
            # Seed the default test image on first use, then re-read it.
            self.env['ir.config_parameter'].set_param(
                'imagemagick.test_image',
                'website/static/src/img/fields.jpg')
            url = self.env['ir.config_parameter'].get_param(
                'imagemagick.test_image')
        # Resolve the addons dir from the module named by the url's first
        # segment, run the recipe, store the jpg as base64 (Python 2).
        self.image = self.run(self.url_to_img(
            '/'.join(get_module_path(url.split('/')[0]).split('/')[0:-1]) +
            '/' + url)).make_blob(format='jpg').encode('base64')
    except:
        # NOTE(review): deliberate best-effort, but the bare except also
        # swallows KeyboardInterrupt/SystemExit -- consider narrowing.
        pass
def purge(self):
    """ Remove the keys from the filesystem and the ssh config. """
    ssh_config = self.home_directory + '/.ssh/config'
    sed_script = modules.get_module_path('clouder') + '/res/sed.sh'
    # Strip this host's entry from the user's ssh config.
    self.execute_local([sed_script, self.name, ssh_config])
    # Drop the key material itself.
    key_path = self.home_directory + '/.ssh/keys/' + self.name
    self.execute_local(['rm', '-rf', key_path])
def attachment_to_img(self, attachment):
    """Return a wand Image for an attachment.

    URL-backed attachments are resolved against the addons directory of
    the module named by the url's first segment; otherwise the inline
    base64 ``datas`` is decoded.
    """
    if attachment.url:
        # make image url as /module_path/attachment_url and use it as
        # a filesystem path
        module_name = attachment.url.split('/')[1]
        addons_dir = '/'.join(
            get_module_path(module_name).split('/')[0:-1])
        return Image(filename=addons_dir + attachment.url)
    return Image(blob=attachment.datas.decode('base64'))
def _get_logo(role):
    """Return the base64-encoded logo for a Bergendahls ``role``.

    :param role: 'CITY GROSS', 'M.A.T' or 'MATREBELLERNA'.
    :return: base64 string, or None for an unknown role (the original
        raised UnboundLocalError in that case).
    """
    logos = {
        'CITY GROSS': 'citygross.png',
        'M.A.T': 'mat.png',
        'MATREBELLERNA': 'matrebellen.png',
    }
    img = logos.get(role)
    if img is None:
        return None
    # Context manager closes the handle; the original leaked it.
    with open(os.path.join(get_module_path('edi_gs1_bergendahls'),
                           'static', 'img', img), 'rb') as fh:
        return fh.read().encode('base64')
def build_application(self):
    """Build the odoo archive.

    Creates the extra/parts directories inside the archive, runs every
    'git' line of the application buildfile, then applies the bundled
    http.patch to odoo's openerp/http.py.
    """
    super(ClouderApplicationVersion, self).build_application()
    if self.application_id.type_id.name == 'odoo':
        ssh = self.connect(self.archive_id.fullname)
        self.execute(ssh, ['mkdir', '-p', self.full_archivepath + '/extra'])
        self.execute(ssh, ['mkdir', '-p', self.full_archivepath + '/parts'])
        # Only the git lines of the buildfile are executed; every other
        # line is ignored.
        for command in self.application_id.buildfile.split('\n'):
            if command.startswith('git'):
                self.execute(ssh, [command], path=self.full_archivepath)
        # NOTE: support for anybox buildout recipes was dropped here
        # because it was slow without real added value.
        self.send(
            ssh, modules.get_module_path('clouder_template_odoo') +
            '/res/http.patch',
            self.full_archivepath + '/parts/http.patch')
        self.execute(ssh, [
            'patch',
            self.full_archivepath + '/parts/odoo/openerp/http.py', '<',
            self.full_archivepath + '/parts/http.patch'
        ])
        ssh.close()
    return
def deploy_post_service(self):
    """Update the odoo configuration file and supervisor conf.

    Uploads the openerp.config template into the service directory,
    fills in the addons path (core addons plus every directory found
    under files/extra), database credentials and port via sed, then
    registers the service in /opt/odoo/supervisor.conf.
    """
    super(ClouderService, self).deploy_post_service()
    if self.container_id.application_id.type_id.name == 'odoo':
        ssh = self.connect(
            self.container_id.fullname,
            username=self.container_id.application_id.type_id.system_user)
        config_file = '/opt/odoo/' + self.name + '/etc/config'
        self.execute(ssh, ['mkdir', '-p',
                           '/opt/odoo/' + self.name + '/etc'])
        self.send(ssh, modules.get_module_path('clouder_template_odoo') +
                  '/res/openerp.config', config_file)
        # Core addons first, then one comma-terminated entry per extra
        # repository found on the remote filesystem.
        addons_path = '/opt/odoo/' +\
            self.name + '/files/parts/odoo/addons,'
        sftp = ssh.open_sftp()
        for extra_dir in sftp.listdir(
                '/opt/odoo/' + self.name + '/files/extra'):
            addons_path += '/opt/odoo/' + self.name +\
                '/files/extra/' + extra_dir + ','
        sftp.close()
        # '/' is escaped below because sed uses it as the delimiter.
        self.execute(ssh, ['sed', '-i', '"s/ADDONS_PATH/' +
                           addons_path.replace('/', '\/') + '/g"',
                           config_file])
        self.execute(ssh, ['sed', '-i',
                           '"s/APPLICATION/' +
                           self.container_id.application_id.code
                           .replace('-', '_') + '/g"', config_file])
        self.execute(ssh, ['sed', '-i', 's/SERVICE/' + self.name + '/g',
                           config_file])
        self.execute(ssh, ['sed', '-i', 's/DATABASE_SERVER/' +
                           self.database_server + '/g', config_file])
        self.execute(ssh, ['sed', '-i', 's/DBUSER/' + self.db_user +
                           '/g', config_file])
        self.execute(ssh, ['sed', '-i', 's/DATABASE_PASSWORD/' +
                           self.database_password + '/g', config_file])
        self.execute(ssh, ['sed', '-i', 's/PORT/' +
                           self.port['localport'] + '/g', config_file])
        self.execute(ssh, ['mkdir', '-p',
                           '/opt/odoo/' + self.name + '/filestore'])
        # Register the service with supervisor so it is (re)started
        # with the container.
        self.execute(ssh, ['echo "[program:' + self.name + ']" '
                           '>> /opt/odoo/supervisor.conf'])
        self.execute(ssh, [
            'echo "command=su odoo -c \'/opt/odoo/' + self.name +
            '/files/parts/odoo/odoo.py -c ' + config_file +
            '\'" >> /opt/odoo/supervisor.conf'])
        ssh.close()
    return
def update_list(self, cr, uid, context=None):
    """Synchronize the module list in database with the addons on disk.

    Updates existing ir.module.module records whose manifest changed,
    creates records for newly discovered modules, refreshes their
    dependencies and category, and triggers ``load_addons`` on the
    wsgi handlers when new modules were found.

    :return: ``[updated_count, added_count]``
    """
    res = [0, 0]  # [update, add]
    default_version = modules.adapt_version('1.0')
    known_mods = self.browse(cr, uid, self.search(cr, uid, []))
    known_mods_names = dict((m.name, m) for m in known_mods)

    # iterate through detected modules and update/create them in db
    for mod_name in modules.get_modules():
        mod = known_mods_names.get(mod_name)
        terp = self.get_module_info(mod_name)
        values = self.get_values_from_terp(terp)
        if mod:
            updated_values = {}
            for key in values:
                old = getattr(mod, key)
                # Normalize manifest strings before comparing with the
                # stored value.
                updated = isinstance(values[key], basestring) and tools.ustr(
                    values[key]) or values[key]
                if (old or updated) and updated != old:
                    updated_values[key] = values[key]
            if terp.get('installable', True) and mod.state == 'uninstallable':
                updated_values['state'] = 'uninstalled'
            if parse_version(terp.get(
                    'version', default_version)) > parse_version(
                    mod.latest_version or default_version):
                res[0] += 1
            if updated_values:
                self.write(cr, uid, mod.id, updated_values)
        else:
            mod_path = modules.get_module_path(mod_name)
            if not mod_path:
                continue
            if not terp or not terp.get('installable', True):
                continue
            # Renamed from `id` to avoid shadowing the builtin.
            new_id = self.create(
                cr, uid, dict(name=mod_name, state='uninstalled', **values))
            mod = self.browse(cr, uid, new_id)
            res[1] += 1
        self._update_dependencies(cr, uid, mod, terp.get('depends', []))
        self._update_category(cr, uid, mod,
                              terp.get('category', 'Uncategorized'))

    # Trigger load_addons if new module have been discovered it exists on
    # wsgi handlers, so they can react accordingly
    if tuple(res) != (0, 0):
        for handler in openerp.service.wsgi_server.module_handlers:
            if hasattr(handler, 'load_addons'):
                handler.load_addons()

    return res
def build_application(self):
    """Build the odoo archive.

    Creates the extra/parts directories inside the archive, runs every
    'git' line of the application buildfile, then applies the bundled
    http.patch to odoo's openerp/http.py.
    """
    super(ClouderApplicationVersion, self).build_application()
    if self.application_id.type_id.name == 'odoo':
        ssh = self.connect(self.archive_id.fullname)
        self.execute(ssh, ['mkdir', '-p', self.full_archivepath + '/extra'])
        self.execute(ssh, ['mkdir', '-p', self.full_archivepath + '/parts'])
        # Only the git lines of the buildfile are executed; every other
        # line is ignored.
        for command in self.application_id.buildfile.split('\n'):
            if command.startswith('git'):
                self.execute(ssh, [command], path=self.full_archivepath)
        # NOTE: support for anybox buildout recipes was dropped here
        # because it was slow without real added value.
        self.send(ssh, modules.get_module_path('clouder_template_odoo') +
                  '/res/http.patch',
                  self.full_archivepath + '/parts/http.patch')
        self.execute(ssh, [
            'patch',
            self.full_archivepath + '/parts/odoo/openerp/http.py', '<',
            self.full_archivepath + '/parts/http.patch'])
        ssh.close()
    return
def deploy_post(self):
    """
    Add a ssmtp file if the container is linked to a postfix,
    and then configure the postfix.
    """
    super(ClouderContainer, self).deploy_post()
    # Containers linked to a postfix relay their outgoing mail
    # through it via ssmtp.
    for link in self.link_ids:
        if link.name.name.code == 'postfix' and link.target:
            ssh = self.connect(self.fullname)
            self.execute(ssh, ['echo "root=' + self.email_sysadmin +
                               '" > /etc/ssmtp/ssmtp.conf'])
            self.execute(ssh, ['echo "mailhub=postfix:25" '
                               '>> /etc/ssmtp/ssmtp.conf'])
            self.execute(ssh, ['echo "rewriteDomain=' + self.fullname +
                               '" >> /etc/ssmtp/ssmtp.conf'])
            self.execute(ssh, ['echo "hostname=' + self.fullname +
                               '" >> /etc/ssmtp/ssmtp.conf'])
            self.execute(ssh, ['echo "FromLineOverride=YES" >> '
                               '/etc/ssmtp/ssmtp.conf'])
            ssh.close()
    # The postfix container itself relays through Mandrill with SASL
    # authentication, and gets the odoo mail gateway script installed.
    if self.application_id.type_id.name == 'postfix':
        ssh = self.connect(self.fullname)
        self.execute(ssh, [
            'echo "relayhost = [smtp.mandrillapp.com]" '
            '>> /etc/postfix/main.cf'])
        self.execute(ssh, [
            'echo "smtp_sasl_auth_enable = yes" >> /etc/postfix/main.cf'])
        self.execute(ssh, [
            'echo "smtp_sasl_password_maps = '
            'hash:/etc/postfix/sasl_passwd" >> /etc/postfix/main.cf'])
        self.execute(ssh, [
            'echo "smtp_sasl_security_options = noanonymous" '
            '>> /etc/postfix/main.cf'])
        self.execute(ssh,
                     ['echo "smtp_use_tls = yes" >> /etc/postfix/main.cf'])
        self.execute(ssh, [
            'echo "mynetworks = 127.0.0.0/8 172.17.0.0/16" '
            '>> /etc/postfix/main.cf'])
        # Mandrill credentials come from the container options.
        self.execute(ssh, [
            'echo "[smtp.mandrillapp.com] ' +
            self.options['mailchimp_username']['value'] + ':' +
            self.options['mailchimp_apikey']['value'] +
            '" > /etc/postfix/sasl_passwd'])
        self.execute(ssh, ['postmap /etc/postfix/sasl_passwd'])
        self.send(ssh, modules.get_module_path('clouder_template_postfix') +
                  '/res/openerp_mailgate.py', '/bin/openerp_mailgate.py')
        self.execute(ssh, ['chmod', '+x', '/bin/openerp_mailgate.py'])
        ssh.close()
def deploy_post(self):
    """Add the general configuration files.

    Installs the clouder service definitions and the backup control
    script in the shinken container, then removes the stock localhost
    host definition.
    """
    super(ClouderContainer, self).deploy_post()
    if self.application_id.type_id.name == 'shinken':
        # NOTE(review): the username appears redacted in the source;
        # confirm the expected shinken system user.
        ssh = self.connect(self.fullname, username='******')
        self.send(ssh, modules.get_module_path('clouder_template_shinken') +
                  '/res/general-shinken.config',
                  '/usr/local/shinken/etc/services/clouder.cfg')
        self.send(ssh,
                  modules.get_module_path('clouder_template_shinken') +
                  '/res/control_backup.sh',
                  '/home/shinken/control_backup.sh')
        self.execute(ssh, ['chmod', '+x',
                           '/home/shinken/control_backup.sh'])
        # The default localhost host would pollute the supervision.
        self.execute(ssh,
                     ['rm', '/usr/local/shinken/etc/hosts/localhost.cfg'])
        ssh.close()
def update_list(self, cr, uid, context=None):
    """Synchronize the module list in database with the addons on disk.

    Updates existing ir.module.module records whose manifest changed,
    creates records for newly discovered modules, refreshes their
    dependencies and category, and triggers ``load_addons`` on the
    wsgi handlers when new modules were found.

    :return: ``[updated_count, added_count]``
    """
    res = [0, 0]  # [update, add]
    default_version = modules.adapt_version("1.0")
    known_mods = self.browse(cr, uid, self.search(cr, uid, []))
    known_mods_names = dict((m.name, m) for m in known_mods)

    # iterate through detected modules and update/create them in db
    for mod_name in modules.get_modules():
        mod = known_mods_names.get(mod_name)
        terp = self.get_module_info(mod_name)
        values = self.get_values_from_terp(terp)
        if mod:
            updated_values = {}
            for key in values:
                old = getattr(mod, key)
                # Normalize manifest strings before comparing with the
                # stored value.
                updated = isinstance(values[key], basestring) and tools.ustr(
                    values[key]) or values[key]
                if (old or updated) and updated != old:
                    updated_values[key] = values[key]
            if terp.get("installable", True) and mod.state == "uninstallable":
                updated_values["state"] = "uninstalled"
            if parse_version(terp.get("version", default_version)) > parse_version(
                mod.latest_version or default_version
            ):
                res[0] += 1
            if updated_values:
                self.write(cr, uid, mod.id, updated_values)
        else:
            mod_path = modules.get_module_path(mod_name)
            if not mod_path:
                continue
            if not terp or not terp.get("installable", True):
                continue
            # Renamed from `id` to avoid shadowing the builtin.
            new_id = self.create(
                cr, uid, dict(name=mod_name, state="uninstalled", **values))
            mod = self.browse(cr, uid, new_id)
            res[1] += 1
        self._update_dependencies(cr, uid, mod, terp.get("depends", []))
        self._update_category(cr, uid, mod,
                              terp.get("category", "Uncategorized"))

    # Trigger load_addons if new module have been discovered it exists on
    # wsgi handlers, so they can react accordingly
    if tuple(res) != (0, 0):
        for handler in openerp.service.wsgi_server.module_handlers:
            if hasattr(handler, "load_addons"):
                handler.load_addons()

    return res
def deploy(self):
    """Deploy the configuration file to watch the server performances.

    Uploads the server-shinken template to the supervision container,
    substitutes the server name into it, then reloads shinken.
    """
    super(ClouderServer, self).deploy()
    if self.supervision_id:
        # NOTE(review): the username appears redacted in the source;
        # confirm the expected shinken system user.
        ssh = self.connect(self.supervision_id.fullname, username='******')
        self.send(ssh, modules.get_module_path('clouder_template_shinken') +
                  '/res/server-shinken.config', self.shinken_configfile)
        self.execute(ssh, ['sed', '-i', '"s/NAME/' + self.name + '/g"',
                           self.shinken_configfile])
        self.execute(ssh, ['/etc/init.d/shinken', 'reload'])
        ssh.close()
def purge_key(self):
    """Remove the key.

    Strips the host entry from the local ssh config, deletes the local
    key pair (private and .pub), then removes the authorized_keys file
    deployed on the server.
    """
    self.execute_local([
        modules.get_module_path('clouder') + '/res/sed.sh',
        self.fullname, self.home_directory + '/.ssh/config'])
    self.execute_local([
        'rm', '-rf',
        self.home_directory + '/.ssh/keys/' + self.fullname])
    self.execute_local([
        'rm', '-rf',
        self.home_directory + '/.ssh/keys/' + self.fullname + '.pub'])
    ssh = self.connect(self.server_id.name)
    self.execute(ssh, [
        'rm', '-rf', '/opt/keys/' + self.fullname + '/authorized_keys'])
    ssh.close()
def deploy_link(self):
    """Deploy the configuration file to watch the base.

    Uploads the base-shinken template (nosave variant when the base
    skips backups) to the shinken target, fills in the TYPE,
    UNIQUE_NAME, DATABASES, BASE, DOMAIN, METHOD and CONTAINER
    placeholders via sed, reloads shinken and pushes the ssh key.
    """
    super(ClouderBaseLink, self).deploy_link()
    if self.name.name.code == 'shinken':
        # NOTE(review): the username appears redacted in the source;
        # confirm the expected shinken system user.
        ssh = self.connect(self.target.fullname, username='******')
        config_file = 'base-shinken'
        if self.base_id.nosave:
            config_file = 'base-shinken-nosave'
        self.send(ssh, modules.get_module_path('clouder_template_shinken') +
                  '/res/' + config_file + '.config',
                  self.base_id.shinken_configfile)
        self.execute(ssh, ['sed', '-i', '"s/TYPE/base/g"',
                           self.base_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/UNIQUE_NAME/' + self.base_id.fullname_ +
            '/g"', self.base_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/DATABASES/' + self.base_id.databases_comma +
            '/g"', self.base_id.shinken_configfile])
        self.execute(ssh, ['sed', '-i', '"s/BASE/' + self.base_id.name +
                           '/g"', self.base_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/DOMAIN/' + self.base_id.domain_id.name +
            '/g"', self.base_id.shinken_configfile])
        # Backup method and container come from the first configured
        # backup of the base.
        self.execute(ssh, [
            'sed', '-i',
            '"s/METHOD/' + self.base_id.backup_ids[0].backup_method +
            '/g"', self.base_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/CONTAINER/' + self.base_id
            .backup_ids[0].fullname + '/g"',
            self.base_id.shinken_configfile])
        self.execute(ssh, ['/etc/init.d/shinken', 'reload'])
        send_key_to_shinken(ssh, self.base_id)
        ssh.close()
def get_certificates(self, cr, uid, ids, context=None):
    """Generate and attach the course certificates PDF.

    Writes one CSV-ish line per non-instructor participant to
    course_data.txt inside the module directory, runs the
    generarcertificados.pl script over it, stores the resulting PDF
    base64-encoded on the record, and reopens the form in a new
    window.
    """
    this = self.browse(cr, uid, ids)[0]
    path_module = modules.get_module_path('management-courses')

    # Find the instructor among the participants (last one wins, as
    # in the original implementation).  Falls back to None instead of
    # raising NameError when no instructor is flagged.
    instructor = None
    for participant in this.participant_ids:
        if participant.is_instructor:
            instructor = participant.name.name

    # Context managers guarantee the handles are closed; the original
    # leaked them on any exception and shadowed the `file` builtin.
    with codecs.open(path_module + "/course_data.txt", "w") as data_file:
        for participant in this.participant_ids:
            if not participant.is_instructor:
                data = "%s,%s,%s,%s,%s,%s:%s\n" % (
                    this.name,
                    time.strftime("%d/%m/%y",
                                  time.strptime(this.start_date, "%Y-%m-%d")),
                    time.strftime("%d/%m/%y",
                                  time.strptime(this.end_date, "%Y-%m-%d")),
                    this.hours,
                    instructor,
                    participant.name.name,
                    participant.cedula_rif)
                data_file.write(data.encode('utf-8'))

    subprocess.call(["perl", path_module + "/generarcertificados.pl"])

    with open(path_module + "/" + this.name + ".pdf", "rb") as pdf_file:
        out = base64.encodestring(pdf_file.read())

    self.write(cr, uid, ids,
               {'state': 'get', 'certificate_pdf': out}, context=context)
    return {
        'type': 'ir.actions.act_window',
        'res_model': 'management.course',
        'view_mode': 'form',
        'view_type': 'form',
        'res_id': this.id,
        'views': [(False, 'form')],
        'target': 'new',
    }
def test_button_save_translation(self):
    """Exporting translations must create one .po file per translatable
    (non en_US) language inside the module's i18n folder."""
    load_language(self.cr, 'fr_FR')
    module = self.env['ir.module.module'].search(
        [('name', '=', 'save_translation_file')])
    module.button_save_translation()
    i18n_path = os.path.join(get_module_path(module.name), 'i18n')
    translatable_langs = self.env['res.lang'].search(
        [('translatable', '=', True), ('code', '!=', 'en_US')])
    for lang in translatable_langs:
        iso_code = get_iso_codes(lang.code)
        po_file = os.path.join(i18n_path, '%s.po' % iso_code)
        self.assertEqual(os.path.isfile(po_file), True,
                         '%s.po must exists' % iso_code)
def deploy_link(self):
    """Deploy the configuration file to watch the container.

    Uploads the container-shinken template (nosave variant when the
    container skips backups) to the shinken target, fills in the
    METHOD, TYPE, CONTAINER, UNIQUE_NAME, HOST and PORT placeholders
    via sed, reloads shinken and pushes the ssh key.
    """
    super(ClouderContainerLink, self).deploy_link()
    if self.name.name.code == 'shinken':
        # NOTE(review): the username appears redacted in the source;
        # confirm the expected shinken system user.
        ssh = self.connect(self.target.fullname, username='******')
        config_file = 'container-shinken'
        if self.container_id.nosave:
            config_file = 'container-shinken-nosave'
        self.send(ssh, modules.get_module_path('clouder_template_shinken') +
                  '/res/' + config_file + '.config',
                  self.container_id.shinken_configfile)
        # Backup method and container come from the first configured
        # backup of the container.
        self.execute(ssh, [
            'sed', '-i',
            '"s/METHOD/' + self.container_id.backup_ids[0].backup_method +
            '/g"', self.container_id.shinken_configfile])
        self.execute(ssh, ['sed', '-i', '"s/TYPE/container/g"',
                           self.container_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/CONTAINER/' + self.container_id.backup_ids[0]
            .fullname + '/g"', self.container_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/UNIQUE_NAME/' + self.container_id.fullname +
            '/g"', self.container_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/HOST/' + self.container_id.server_id.name +
            '/g"', self.container_id.shinken_configfile])
        self.execute(ssh, [
            'sed', '-i', '"s/PORT/' + str(self.container_id.ssh_port) +
            '/g"', self.container_id.shinken_configfile])
        self.execute(ssh, ['/etc/init.d/shinken', 'reload'])
        send_key_to_shinken(ssh, self.container_id)
        ssh.close()
def update_list(self, cr, uid, context=None):
    """Synchronize the module list in database with the addons on disk.

    Updates existing ir.module.module records whose manifest changed,
    creates records for newly discovered modules and refreshes their
    dependencies and category.

    :return: ``[updated_count, added_count]``
    """
    res = [0, 0]  # [update, add]
    default_version = modules.adapt_version("1.0")
    known_mods = self.browse(cr, uid, self.search(cr, uid, []))
    known_mods_names = dict((m.name, m) for m in known_mods)

    # iterate through detected modules and update/create them in db
    for mod_name in modules.get_modules():
        mod = known_mods_names.get(mod_name)
        terp = self.get_module_info(mod_name)
        values = self.get_values_from_terp(terp)
        if mod:
            updated_values = {}
            for key in values:
                old = getattr(mod, key)
                # Normalize manifest strings before comparing with the
                # stored value.
                updated = isinstance(values[key], basestring) and tools.ustr(
                    values[key]) or values[key]
                if (old or updated) and updated != old:
                    updated_values[key] = values[key]
            if terp.get("installable", True) and mod.state == "uninstallable":
                updated_values["state"] = "uninstalled"
            if parse_version(terp.get("version", default_version)) > parse_version(
                mod.latest_version or default_version
            ):
                res[0] += 1
            if updated_values:
                self.write(cr, uid, mod.id, updated_values)
        else:
            mod_path = modules.get_module_path(mod_name)
            if not mod_path:
                continue
            if not terp or not terp.get("installable", True):
                continue
            # Renamed from `id` to avoid shadowing the builtin.
            new_id = self.create(
                cr, uid, dict(name=mod_name, state="uninstalled", **values))
            mod = self.browse(cr, uid, new_id)
            res[1] += 1
        self._update_dependencies(cr, uid, mod, terp.get("depends", []))
        self._update_category(cr, uid, mod,
                              terp.get("category", "Uncategorized"))

    return res
def update_translations(self, cr, uid, ids, filter_lang=None, context=None):
    """Load the .po files of the given installed modules.

    :param filter_lang: language code or list of codes to load; when
        falsy, every translatable res.lang in the database is used.
    """
    if context is None:
        context = {}
    logger = logging.getLogger('i18n')
    if not filter_lang:
        pool = pooler.get_pool(cr.dbname)
        lang_obj = pool.get('res.lang')
        lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)])
        filter_lang = [lang.code
                       for lang in lang_obj.browse(cr, uid, lang_ids)]
    elif not isinstance(filter_lang, (list, tuple)):
        filter_lang = [filter_lang]

    for mod in self.browse(cr, uid, ids):
        if mod.state != 'installed':
            continue
        modpath = addons.get_module_path(mod.name)
        if not modpath:
            # unable to find the module. we skip
            continue
        for lang in filter_lang:
            iso_lang = tools.get_iso_codes(lang)
            f = addons.get_module_resource(mod.name, 'i18n',
                                           iso_lang + '.po')
            context2 = context and context.copy() or {}
            if f and '_' in iso_lang:
                # Derivative language (e.g. fr_BE): load the generic
                # 'fr' file first, then let the derivative overwrite
                # its entries.
                iso_lang2 = iso_lang.split('_')[0]
                f2 = addons.get_module_resource(mod.name, 'i18n',
                                                iso_lang2 + '.po')
                if f2:
                    logger.info('module %s: loading base translation file %s for language %s', mod.name, iso_lang2, lang)
                    tools.trans_load(cr, f2, lang, verbose=False,
                                     context=context)
                    context2['overwrite'] = True
            # Implementation notice: we must first search for the full name of
            # the language derivative, like "en_UK", and then the generic,
            # like "en".
            if (not f) and '_' in iso_lang:
                iso_lang = iso_lang.split('_')[0]
                f = addons.get_module_resource(mod.name, 'i18n',
                                               iso_lang + '.po')
            if f:
                logger.info('module %s: loading translation file (%s) for language %s', mod.name, iso_lang, lang)
                tools.trans_load(cr, f, lang, verbose=False,
                                 context=context2)
            elif iso_lang != 'en':
                logger.warning('module %s: no translation for language %s', mod.name, iso_lang)
    tools.trans_update_res_ids(cr)
def write_file(module, version, contents, filename='openupgrade_analysis.txt'):
    """Write *contents* to migrations/<version>/<filename> inside
    *module*'s directory, creating the directory if needed.

    :return: None on success, or an error message string (the callers
        collect these into the analysis log) on failure.
    """
    module_path = get_module_path(module)
    if not module_path:
        return "ERROR: could not find module path:\n"
    full_path = os.path.join(module_path, 'migrations', version)
    if not os.path.exists(full_path):
        try:
            os.makedirs(full_path)
        except os.error:
            return "ERROR: could not create migrations directory:\n"
    logfile = os.path.join(full_path, filename)
    try:
        f = open(logfile, 'w')
    except Exception:
        return "ERROR: could not open file %s for writing:\n" % logfile
    # BUGFIX: close the handle even if the write itself raises; the
    # original leaked it in that case.
    try:
        f.write(contents)
    finally:
        f.close()
    return None
def update_list(self, cr, uid, context=None):
    """Synchronize the module list in database with the addons on disk.

    Updates existing ir.module.module records whose manifest changed,
    creates records for newly discovered modules and refreshes their
    dependencies and category.

    :return: ``[updated_count, added_count]``
    """
    res = [0, 0]  # [update, add]
    known_mods = self.browse(cr, uid, self.search(cr, uid, []))
    known_mods_names = dict((m.name, m) for m in known_mods)

    # iterate through detected modules and update/create them in db
    for mod_name in addons.get_modules():
        mod = known_mods_names.get(mod_name)
        terp = self.get_module_info(mod_name)
        values = self.get_values_from_terp(terp)
        if mod:
            updated_values = {}
            for key in values:
                old = getattr(mod, key)
                # Normalize manifest strings before comparing with the
                # stored value.
                updated = isinstance(values[key], basestring) and tools.ustr(
                    values[key]) or values[key]
                if old != updated:
                    updated_values[key] = values[key]
            if terp.get('installable', True) and mod.state == 'uninstallable':
                updated_values['state'] = 'uninstalled'
            if parse_version(terp.get('version', '')) > parse_version(
                    mod.latest_version or ''):
                res[0] += 1
            if updated_values:
                self.write(cr, uid, mod.id, updated_values)
        else:
            mod_path = addons.get_module_path(mod_name)
            if not mod_path:
                continue
            if not terp or not terp.get('installable', True):
                continue
            # Renamed from `id` to avoid shadowing the builtin.
            new_id = self.create(
                cr, uid, dict(name=mod_name, state='uninstalled', **values))
            mod = self.browse(cr, uid, new_id)
            res[1] += 1
        self._update_dependencies(cr, uid, mod, terp.get('depends', []))
        self._update_category(cr, uid, mod,
                              terp.get('category', 'Uncategorized'))

    return res
def dic_invoice(self, cr, uid, ids):
    """Return a plain dict describing the invoice(s) for the
    postscript report (fields of the last invoice in *ids* win, as in
    the original implementation).

    BUGFIX: a fresh dict is now built for every invoice line.
    Previously a single ``inv_line`` dict was created outside the loop
    and appended repeatedly, so every entry of ``invoice_lines`` ended
    up holding the values of the last line.
    """
    inv = {}
    path_module = modules.get_module_path("openerp-postscript-reports")
    for invoice in self.browse(cr, uid, ids):
        inv.update(
            {
                "partner_name": invoice.partner_id.name,
                "date_invoice": invoice.date_invoice,
                "fiscal_position_name": invoice.fiscal_position.name,
                "invoice_amount_untaxed": invoice.amount_untaxed,
                "invoice_amount_tax": invoice.amount_tax,
                "invoice_amount_total": invoice.amount_total,
                "invoice_number": invoice.number,
                "path": path_module,
            }
        )
        inv_lines = []
        for invoice_line in invoice.invoice_line:
            inv_lines.append(
                {
                    "invoice_line_name": invoice_line.name,
                    "invoice_line_price_subtotal": invoice_line.price_subtotal,
                    "invoice_line_price_unit": invoice_line.price_unit,
                    "invoice_line_quantity": invoice_line.quantity,
                }
            )
        inv.update({"invoice_lines": inv_lines})
    return inv
def install_from_urls(self, cr, uid, urls, context=None):
    """Download module zips from the apps server and install them.

    :param urls: mapping of module name -> download url; a falsy url
        means the local version is already up to date.
    :raises AccessDenied: when the caller lacks base.group_system or a
        url does not point at the configured apps server.
    """
    if not self.pool['res.users'].has_group(cr, uid, 'base.group_system'):
        raise openerp.exceptions.AccessDenied()
    apps_server = urlparse.urlparse(
        self.get_apps_server(cr, uid, context=context))
    OPENERP = 'openerp'
    tmp = tempfile.mkdtemp()
    _logger.debug('Install from url: %r', urls)
    try:
        # 1. Download & unzip missing modules
        for module_name, url in urls.items():
            if not url:
                continue    # nothing to download, local version is already the last one
            # Only accept downloads coming from the configured server.
            up = urlparse.urlparse(url)
            if up.scheme != apps_server.scheme or up.netloc != apps_server.netloc:
                raise openerp.exceptions.AccessDenied()
            try:
                _logger.info('Downloading module `%s` from OpenERP Apps', module_name)
                content = urllib2.urlopen(url).read()
            except Exception:
                _logger.exception('Failed to fetch module %s', module_name)
                raise osv.except_osv(_('Module not found'),
                                     _('The `%s` module appears to be unavailable at the moment, please try again later.') % module_name)
            else:
                zipfile.ZipFile(StringIO(content)).extractall(tmp)
                assert os.path.isdir(os.path.join(tmp, module_name))
        # 2a. Copy/Replace module source in addons path
        for module_name, url in urls.items():
            if module_name == OPENERP or not url:
                continue    # OPENERP is special case, handled below, and no URL means local module
            module_path = modules.get_module_path(module_name,
                                                  downloaded=True,
                                                  display_warning=False)
            bck = backup(module_path, False)
            _logger.info('Copy downloaded module `%s` to `%s`', module_name, module_path)
            shutil.move(os.path.join(tmp, module_name), module_path)
            if bck:
                shutil.rmtree(bck)
        # 2b. Copy/Replace server+base module source if downloaded
        if urls.get(OPENERP, None):
            # special case. it contains the server and the base module.
            # extract path is not the same
            base_path = os.path.dirname(modules.get_module_path('base'))
            # copy all modules in the SERVER/openerp/addons directory to the
            # new "openerp" module (except base itself)
            for d in os.listdir(base_path):
                if d != 'base' and os.path.isdir(os.path.join(base_path, d)):
                    destdir = os.path.join(tmp, OPENERP, 'addons', d)   # XXX 'openerp' subdirectory ?
                    shutil.copytree(os.path.join(base_path, d), destdir)
            # then replace the server by the new "base" module
            server_dir = openerp.tools.config['root_path']      # XXX or dirname()
            bck = backup(server_dir)
            _logger.info('Copy downloaded module `openerp` to `%s`', server_dir)
            shutil.move(os.path.join(tmp, OPENERP), server_dir)
            #if bck:
            #    shutil.rmtree(bck)
        self.update_list(cr, uid, context=context)
        with_urls = [m for m, u in urls.items() if u]
        downloaded_ids = self.search(cr, uid, [('name', 'in', with_urls)],
                                     context=context)
        already_installed = self.search(cr, uid,
                                        [('id', 'in', downloaded_ids),
                                         ('state', '=', 'installed')],
                                        context=context)
        to_install_ids = self.search(cr, uid,
                                     [('name', 'in', urls.keys()),
                                      ('state', '=', 'uninstalled')],
                                     context=context)
        post_install_action = self.button_immediate_install(
            cr, uid, to_install_ids, context=context)
        if already_installed:
            # in this case, force server restart to reload python code...
            cr.commit()
            openerp.service.server.restart()
            return {
                'type': 'ir.actions.client',
                'tag': 'home',
                'params': {'wait': True},
            }
        return post_install_action
    finally:
        shutil.rmtree(tmp)
def install_from_urls(self, cr, uid, urls, context=None):
    """Download module zips from the apps server and install them.

    :param urls: mapping of module name -> download url; a falsy url
        means the local version is already up to date.
    :raises AccessDenied: when the caller lacks base.group_system or a
        url does not point at the configured apps server.
    """
    if not self.pool["res.users"].has_group(cr, uid, "base.group_system"):
        raise openerp.exceptions.AccessDenied()
    apps_server = urlparse.urlparse(
        self.get_apps_server(cr, uid, context=context))
    OPENERP = openerp.release.product_name.lower()
    tmp = tempfile.mkdtemp()
    _logger.debug("Install from url: %r", urls)
    try:
        # 1. Download & unzip missing modules
        for module_name, url in urls.items():
            if not url:
                continue  # nothing to download, local version is already the last one
            # Only accept downloads coming from the configured server.
            up = urlparse.urlparse(url)
            if up.scheme != apps_server.scheme or up.netloc != apps_server.netloc:
                raise openerp.exceptions.AccessDenied()
            try:
                _logger.info("Downloading module `%s` from OpenERP Apps", module_name)
                content = urllib2.urlopen(url).read()
            except Exception:
                _logger.exception("Failed to fetch module %s", module_name)
                raise UserError(
                    _("The `%s` module appears to be unavailable at the moment, please try again later.")
                    % module_name
                )
            else:
                zipfile.ZipFile(StringIO(content)).extractall(tmp)
                assert os.path.isdir(os.path.join(tmp, module_name))
        # 2a. Copy/Replace module source in addons path
        for module_name, url in urls.items():
            if module_name == OPENERP or not url:
                continue  # OPENERP is special case, handled below, and no URL means local module
            module_path = modules.get_module_path(module_name,
                                                  downloaded=True,
                                                  display_warning=False)
            bck = backup(module_path, False)
            _logger.info("Copy downloaded module `%s` to `%s`", module_name, module_path)
            shutil.move(os.path.join(tmp, module_name), module_path)
            if bck:
                shutil.rmtree(bck)
        # 2b. Copy/Replace server+base module source if downloaded
        if urls.get(OPENERP, None):
            # special case. it contains the server and the base module.
            # extract path is not the same
            base_path = os.path.dirname(modules.get_module_path("base"))
            # copy all modules in the SERVER/openerp/addons directory to the
            # new "openerp" module (except base itself)
            for d in os.listdir(base_path):
                if d != "base" and os.path.isdir(os.path.join(base_path, d)):
                    destdir = os.path.join(tmp, OPENERP, "addons", d)  # XXX 'openerp' subdirectory ?
                    shutil.copytree(os.path.join(base_path, d), destdir)
            # then replace the server by the new "base" module
            server_dir = openerp.tools.config["root_path"]  # XXX or dirname()
            bck = backup(server_dir)
            _logger.info("Copy downloaded module `openerp` to `%s`", server_dir)
            shutil.move(os.path.join(tmp, OPENERP), server_dir)
            # if bck:
            #     shutil.rmtree(bck)
        self.update_list(cr, uid, context=context)
        with_urls = [m for m, u in urls.items() if u]
        downloaded_ids = self.search(cr, uid, [("name", "in", with_urls)],
                                     context=context)
        already_installed = self.search(
            cr, uid, [("id", "in", downloaded_ids),
                      ("state", "=", "installed")], context=context
        )
        to_install_ids = self.search(
            cr, uid, [("name", "in", urls.keys()),
                      ("state", "=", "uninstalled")], context=context
        )
        post_install_action = self.button_immediate_install(
            cr, uid, to_install_ids, context=context)
        if already_installed:
            # in this case, force server restart to reload python code...
            cr.commit()
            openerp.service.server.restart()
            return {"type": "ir.actions.client", "tag": "home",
                    "params": {"wait": True}}
        return post_install_action
    finally:
        shutil.rmtree(tmp)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from openerp.osv import fields, osv from openerp.tools.translate import _ from datetime import datetime import xlrd from openerp import SUPERUSER_ID import os from openerp import modules base_path = os.path.dirname(modules.get_module_path('z_localization')) class Country(osv.osv): _inherit = 'res.country' _columns = { 'default_shipping_id': fields.many2one('delivery.carrier', 'Default Shipping'), } Country() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- # ############################################################################# # # ############################################################################# from osv import fields, osv from tools.translate import _ import xlrd from openerp import SUPERUSER_ID import os from openerp import modules base_path = os.path.dirname(modules.get_module_path('general_base')) class res_bank(osv.osv): _inherit = 'res.bank' def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): if not args: args = [] if not context: context = {} if name: # Be sure name_search is symetric to name_get name = name.split(' / ')[-1] ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context) #Thanh: Search by BIC (Vietcombank,...) if not ids: ids = self.search(cr, uid, [('bic', operator, name)] + args, limit=limit, context=context) #Thanh: Search by BIC (Vietcombank,...) else: