def create(self, vals):
    data_obj = self.env['ir.model.data']
    sequence_ids = data_obj.search([('name', '=', 'seq_is_demande_achat')])
    if len(sequence_ids) > 0:
        sequence_id = sequence_ids[0].res_id
        vals['name'] = self.env['ir.sequence'].get_id(sequence_id, 'id')
    res = super(is_demande_achat, self).create(vals)
    model, action_id = data_obj.get_object_reference('is_plastigray', 'is_demande_achat_action')
    base_url = self.env['ir.config_parameter'].get_param('web.base.url')
    #menu_id = self.env['ir.ui.menu'].search([('name','=', "Demandes d'achat")])
    menu_id = data_obj.get_object_reference('is_plastigray', 'is_mes_demande_achat_menu')
    route = 'login'
    query = dict(db=self._cr.dbname)
    fragment = dict()
    fragment['action'] = action_id
    fragment['view_type'] = 'form'
    fragment['menu_id'] = menu_id[0]
    fragment['model'] = model
    fragment['id'] = res.id
    if fragment:
        query['redirect'] = '/web#' + werkzeug.url_encode(fragment)
    record_url = urljoin(base_url, "/web/%s?%s" % (route, werkzeug.url_encode(query)))
    res.record_url = record_url
    return res
def _prep_input(self, method, data, content_type):
    """Return encoded and packed POST data."""
    if data is None or method != 'POST':
        prepped = {
            'input_stream': None,
            'content_length': None,
            'content_type': None,
        }
        if method == 'GET' and data:
            qs = MultiDict()
            for key, value in to_pairs(data):
                qs.setlistdefault(key).append(value)
            prepped['query_string'] = url_encode(qs)
        return prepped
    else:
        if content_type == 'multipart/form-data':
            data = [(k, _wrap_file(*v)) if isinstance(v, tuple) else (k, v)
                    for k, v in data]
            boundary, payload = encode_multipart(MultiDict(to_pairs(data)))
            content_type = 'multipart/form-data; boundary=' + boundary
        else:
            payload = url_encode(MultiDict(to_pairs(data)))
            content_type = 'application/x-www-form-urlencoded'
        return {
            'input_stream': StringIO(payload),
            'content_length': len(payload),
            'content_type': content_type,
        }
def _notification_get_recipient_groups(self, message, recipients):
    result = super(Holidays, self)._notification_get_recipient_groups(message, recipients)
    app_action = "/mail/workflow?%s" % url_encode({"model": self._name, "res_id": self.id, "signal": "validate"})
    ref_action = "/mail/workflow?%s" % url_encode({"model": self._name, "res_id": self.id, "signal": "refuse"})
    actions = []
    if self.state == "confirm":
        actions.append({"url": app_action, "title": "Approve"})
    if self.state in ["confirm", "validate", "validate1"]:
        actions.append({"url": ref_action, "title": "Refuse"})
    result["group_hr_user"] = {"actions": actions}
    return result
def __call__(self, path=None, **kw):
    if not path:
        path = self.path
    for k, v in self.args.items():
        kw.setdefault(k, v)
    l = []
    for k, v in kw.items():
        if v:
            if isinstance(v, list) or isinstance(v, set):
                l.append(werkzeug.url_encode([(k, i) for i in v]))
            else:
                l.append(werkzeug.url_encode([(k, v)]))
    if l:
        path += '?' + '&'.join(l)
    return path
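# --- Illustrative sketch, not from the original source: the same "keep the current
# filters" URL building done as a free function with plain werkzeug.url_encode.
# The function name is an assumption; it assumes an older Werkzeug where
# werkzeug.url_encode is importable at the top level, as the snippet above does.
import werkzeug

def build_keep_url(path, **kw):
    # multi-valued filters are encoded as repeated parameters, like the helper above
    parts = []
    for k, v in kw.items():
        if v:
            pairs = [(k, i) for i in v] if isinstance(v, (list, set)) else [(k, v)]
            parts.append(werkzeug.url_encode(pairs))
    return path + ('?' + '&'.join(parts) if parts else '')

# build_keep_url('/shop', search='chair', attrib=['1-3', '2-5'])
# -> '/shop?search=chair&attrib=1-3&attrib=2-5'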
def auth(self, **kw):
    # kw:
    #
    #   state: {"p": 1, "r": "%2Fweb%2Flogin%3F", "d": "some-test-3"}
    #   redirect_uri: https://example.odoo.com/auth_oauth/signin
    #   response_type: token
    #   client_id: d885dde2-0168-4650-9a32-ceb058e652a2
    #   debug: False
    #   scope: userinfo
    uri, http_method, body, headers = self._extract_params(request, kw)
    client_redirect = kw.get('redirect')
    user = request.registry['res.users'].browse(request.cr, SUPERUSER_ID, request.uid)
    try:
        scopes, credentials = self._server.validate_authorization_request(
            uri, http_method, body, headers)
    # Errors that should be shown to the user on the provider website
    except errors.FatalClientError as e:
        return self._response_from_error(e)
    # Errors embedded in the redirect URI back to the client
    except errors.OAuth2Error as e:
        return self._response({'Location': e.redirect_uri}, None, 302)
    if user.login == 'public':
        params = {
            'mode': 'login',
            'scope': kw.get('scope'),
            #'debug': 1,
            #'login': ?,
            #'redirect_hostname': TODO,
            'redirect': '/oauth2/auth?%s' % werkzeug.url_encode(kw),
        }
        return self._response({'Location': '/web/login?%s' % werkzeug.url_encode(params)}, None, 302)
    else:
        credentials.update({'user': user})
        try:
            headers, body, status = self._server.create_authorization_response(
                uri, http_method, body, headers, scopes, credentials)
            return self._response(headers, body, status)
        except errors.FatalClientError as e:
            return self._response_from_error(e)
def _validate_recaptcha(self, challenge, response, remote_addr):
    """Performs the actual validation."""
    try:
        private_key = current_app.config["RECAPTCHA_PRIVATE_KEY"]
    except KeyError:
        raise RuntimeError("No RECAPTCHA_PRIVATE_KEY config set")
    data = url_encode(
        {"privatekey": private_key,
         "remoteip": remote_addr,
         "challenge": challenge,
         "response": response}
    )
    response = http.urlopen(RECAPTCHA_VERIFY_SERVER, to_bytes(data))
    if response.code != 200:
        return False
    rv = [l.strip() for l in response.readlines()]
    if rv and rv[0] == to_bytes("true"):
        return True
    if len(rv) > 1:
        error = rv[1]
        if error in self._error_codes:
            raise RuntimeError(self._error_codes[error])
    return False
def __call__(self, field, error=None, **kwargs):
    """Returns the recaptcha input HTML."""
    server = RECAPTCHA_SSL_API_SERVER if self.use_ssl else RECAPTCHA_API_SERVER
    query_options = dict(k=self.public_key)
    if getattr(field, "recaptcha_error", None) is not None:
        query_options["error"] = unicode(field.recaptcha_error)
    query = url_encode(query_options)
    options = {
        "theme": "clean",
        "custom_translations": {
            "visual_challenge": _("Get a visual challenge"),
            "audio_challenge": _("Get an audio challenge"),
            "refresh_btn": _("Get a new challenge"),
            "instructions_visual": _("Type the two words:"),
            "instructions_audio": _("Type what you hear:"),
            "help_btn": _("Help"),
            "play_again": _("Play sound again"),
            "cant_hear_this": _("Download sound as MP3"),
            "incorrect_try_again": _("Incorrect. Try again."),
        },
    }
    options.update(self.options)
    return self.recaptcha_html(server, query, options)
def _validate_recaptcha(self, challenge, response, remote_addr):
    """Performs the actual validation."""
    if current_app.testing:
        return True
    try:
        private_key = current_app.config['RECAPTCHA_PRIVATE_KEY']
    except KeyError:
        raise RuntimeError("No RECAPTCHA_PRIVATE_KEY config set")
    data = url_encode({
        'privatekey': private_key,
        'remoteip': remote_addr,
        'challenge': challenge,
        'response': response
    })
    response = http.urlopen(RECAPTCHA_VERIFY_SERVER, data)
    if response.code != 200:
        return False
    rv = [l.strip() for l in response.readlines()]
    if rv and rv[0] == 'true':
        return True
    if len(rv) > 1:
        error = rv[1]
        if error in self._error_codes:
            raise RuntimeError(self._error_codes[error])
    return False
def validate_recaptcha(challenge, response, remote_ip):
    """Validates the recaptcha.  If the validation fails a
    `RecaptchaValidationFailed` error is raised.
    """
    request = urllib2.Request(
        VERIFY_SERVER,
        data=url_encode(
            {
                "privatekey": settings.RECAPTCHA_PRIVATE_KEY,
                "remoteip": remote_ip,
                "challenge": challenge,
                "response": response,
            }
        ),
    )
    response = urllib2.urlopen(request)
    rv = response.read().splitlines()
    response.close()
    if rv and rv[0] == "true":
        return True
    if len(rv) > 1:
        error = rv[1]
        if error == "invalid-site-public-key":
            raise RuntimeError("invalid public key for recaptcha set")
        if error == "invalid-site-private-key":
            raise RuntimeError("invalid private key for recaptcha set")
        if error == "invalid-referrer":
            raise RuntimeError("key not valid for the current domain")
    return False
def __call__(self, field, error=None, **kwargs):
    """Returns the recaptcha input HTML."""
    try:
        public_key = current_app.config['RECAPTCHA_PUBLIC_KEY']
    except KeyError:
        raise RuntimeError("RECAPTCHA_PUBLIC_KEY config not set")
    query_options = dict(k=public_key)
    if field.recaptcha_error is not None:
        query_options['error'] = text_type(field.recaptcha_error)
    query = url_encode(query_options)
    _ = field.gettext
    options = {
        'theme': 'clean',
        'custom_translations': {
            'visual_challenge': _('Get a visual challenge'),
            'audio_challenge': _('Get an audio challenge'),
            'refresh_btn': _('Get a new challenge'),
            'instructions_visual': _('Type the two words:'),
            'instructions_audio': _('Type what you hear:'),
            'help_btn': _('Help'),
            'play_again': _('Play sound again'),
            'cant_hear_this': _('Download sound as MP3'),
            'incorrect_try_again': _('Incorrect. Try again.'),
        }
    }
    options.update(current_app.config.get('RECAPTCHA_OPTIONS', {}))
    return self.recaptcha_html(query, options)
def get_recaptcha_html(error=None):
    """Returns the recaptcha input HTML."""
    # pick the SSL endpoint when RECAPTCHA_USE_SSL is enabled
    server = settings.RECAPTCHA_USE_SSL and SSL_API_SERVER or API_SERVER
    options = dict(k=settings.RECAPTCHA_PUBLIC_KEY)
    if error is not None:
        options["error"] = unicode(error)
    query = url_encode(options)
    return u"""
    <script type="text/javascript">var RecaptchaOptions = %(options)s;</script>
    <script type="text/javascript" src="%(script_url)s"></script>
    <noscript>
      <div><iframe src="%(frame_url)s" height="300" width="500"></iframe></div>
      <div><textarea name="recaptcha_challenge_field" rows="3" cols="40"></textarea>
      <input type="hidden" name="recaptcha_response_field" value="manual_challenge"></div>
    </noscript>
    """ % dict(
        script_url="%schallenge?%s" % (server, query),
        frame_url="%snoscript?%s" % (server, query),
        options=dumps(
            {
                "theme": "clean",
                "custom_translations": {
                    "visual_challenge": _("Get a visual challenge"),
                    "audio_challenge": _("Get an audio challenge"),
                    "refresh_btn": _("Get a new challenge"),
                    "instructions_visual": _("Type the two words:"),
                    "instructions_audio": _("Type what you hear:"),
                    "help_btn": _("Help"),
                    "play_again": _("Play sound again"),
                    "cant_hear_this": _("Download sound as MP3"),
                    "incorrect_try_again": _("Incorrect. Try again."),
                },
            }
        ),
    )
def url_add(url=None, **kwargs):
    scheme, netloc, path, query, fragments = urlsplit(url or request.url)
    params = url_decode(query)
    for key, value in kwargs.items():
        if value not in params.getlist(key):
            params.add(key, value)
    return Markup(urlunsplit((scheme, netloc, path, url_encode(params), fragments)))
def validate_recaptcha(challenge, response, remote_ip):
    """Validates the recaptcha.  If the validation fails a
    `RecaptchaValidationFailed` error is raised.
    """
    app = get_application()
    request = urllib2.Request(VERIFY_SERVER, data=url_encode({
        'privatekey': app.cfg['recaptcha_private_key'],
        'remoteip': remote_ip,
        'challenge': challenge,
        'response': response
    }))
    response = urllib2.urlopen(request)
    rv = response.read().splitlines()
    response.close()
    if rv and rv[0] == 'true':
        return True
    if len(rv) > 1:
        error = rv[1]
        if error == 'invalid-site-public-key':
            raise RuntimeError('invalid public key for recaptcha set')
        if error == 'invalid-site-private-key':
            raise RuntimeError('invalid private key for recaptcha set')
        if error == 'invalid-referrer':
            raise RuntimeError('key not valid for the current domain')
    return False
def mail_action_new(self, model, res_id, **kwargs):
    if model not in request.env:
        return self._redirect_to_messaging()
    params = {'view_type': 'form', 'model': model}
    if kwargs.get('view_id'):
        params['action'] = kwargs['view_id']
    return werkzeug.utils.redirect('/web?#%s' % url_encode(params))
def _notification_get_recipient_groups(self, message, recipients):
    result = super(Holidays, self)._notification_get_recipient_groups(message, recipients)
    app_action = '/mail/workflow?%s' % url_encode({'model': self._name, 'res_id': self.id, 'signal': 'validate'})
    ref_action = '/mail/workflow?%s' % url_encode({'model': self._name, 'res_id': self.id, 'signal': 'refuse'})
    actions = []
    if self.state == 'confirm':
        actions.append({'url': app_action, 'title': 'Approve'})
    if self.state in ['confirm', 'validate', 'validate1']:
        actions.append({'url': ref_action, 'title': 'Refuse'})
    result['group_hr_user'] = {
        'actions': actions
    }
    return result
def _validate_recaptcha(self, response, remote_addr):
    """Performs the actual validation."""
    try:
        private_key = current_app.config['RECAPTCHA_PRIVATE_KEY']
    except KeyError:
        raise RuntimeError("No RECAPTCHA_PRIVATE_KEY config set")
    data = url_encode({
        'secret': private_key,
        'remoteip': remote_addr,
        'response': response
    })
    http_response = http.urlopen(RECAPTCHA_VERIFY_SERVER, to_bytes(data))
    if http_response.code != 200:
        return False
    json_resp = json.loads(to_unicode(http_response.read()))
    if json_resp["success"]:
        return True
    for error in json_resp.get("error-codes", []):
        if error in RECAPTCHA_ERROR_CODES:
            raise ValidationError(RECAPTCHA_ERROR_CODES[error])
    return False
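# --- Illustrative only, not from the original source: what url_encode produces for
# the reCAPTCHA v2 verification POST body above. The values are placeholders; the
# import path assumes Werkzeug < 2.1, and key order follows dict insertion order
# on Python 3.7+.
from werkzeug.urls import url_encode

body = url_encode({
    'secret': 'PRIVATE-KEY-PLACEHOLDER',
    'remoteip': '203.0.113.5',
    'response': 'g-recaptcha-response-token',
})
# body == 'secret=PRIVATE-KEY-PLACEHOLDER&remoteip=203.0.113.5&response=g-recaptcha-response-token'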
def paypal_validate_data(self, **post):
    """ Paypal IPN: three steps validation to ensure data correctness

     - step 1: return an empty HTTP 200 response -> will be done at the end
       by returning ''
     - step 2: POST the complete, unaltered message back to Paypal (preceded
       by cmd=_notify-validate), with same encoding
     - step 3: paypal send either VERIFIED or INVALID (single word)

    Once data is validated, process it. """
    res = False
    new_post = dict(post, cmd='_notify-validate')
    cr, uid, context = request.cr, request.uid, request.context
    reference = post.get('item_number')
    tx = None
    if reference:
        tx_ids = request.registry['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
        if tx_ids:
            tx = request.registry['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
    paypal_urls = request.registry['payment.acquirer']._get_paypal_urls(cr, uid, tx and tx.acquirer_id and tx.acquirer_id.environment or 'prod', context=context)
    validate_url = paypal_urls['paypal_form_url']
    urequest = urllib2.Request(validate_url, werkzeug.url_encode(new_post))
    uopen = urllib2.urlopen(urequest)
    resp = uopen.read()
    if resp == 'VERIFIED':
        _logger.info('Paypal: validated data')
        res = request.registry['payment.transaction'].form_feedback(cr, SUPERUSER_ID, post, 'paypal', context=context)
    elif resp == 'INVALID':
        _logger.warning('Paypal: answered INVALID on data verification')
    else:
        # resp is the raw string read from urlopen, not a requests response,
        # so log it directly instead of resp.text
        _logger.warning('Paypal: unrecognized paypal answer, received %s instead of VERIFIED or INVALID' % resp)
    return res
def web_auth_signup(self, *args, **kw):
    qcontext = self.get_auth_signup_qcontext()
    if not qcontext.get('token') and not qcontext.get('signup_enabled'):
        raise werkzeug.exceptions.NotFound()
    if 'error' not in qcontext and request.httprequest.method == 'POST':
        try:
            self.do_signup(qcontext)
            # Send an account creation confirmation email
            if qcontext.get('token'):
                user_sudo = request.env['res.users'].sudo().search([('login', '=', qcontext.get('login'))])
                template = request.env.ref('auth_signup.mail_template_user_signup_account_created', raise_if_not_found=False)
                if user_sudo and template:
                    template.sudo().with_context(
                        lang=user_sudo.lang,
                        auth_login=werkzeug.url_encode({'auth_login': user_sudo.email}),
                    ).send_mail(user_sudo.id, force_send=True)
            return super(AuthSignupHome, self).web_login(*args, **kw)
        except UserError as e:
            qcontext['error'] = e.name or e.value
        except (SignupError, AssertionError) as e:
            if request.env["res.users"].sudo().search([("login", "=", qcontext.get("login"))]):
                qcontext["error"] = _("Another user is already registered using this email address.")
            else:
                _logger.error("%s", e)
                qcontext['error'] = _("Could not create a new account.")
    response = request.render('auth_signup.signup', qcontext)
    response.headers['X-Frame-Options'] = 'DENY'
    return response
def __call__(self, command, *args, **kw):
    transform = _transformers[kw.pop('transform', 'unicode')]
    return_list = kw.pop('list', False)
    assert not kw, 'Unknown keyword argument.'
    payload = {'cmd': command, 'sessionId': self._session_id}
    for idx, arg in enumerate(args):
        payload[str(idx + 1)] = arg
    request = Request(self._server_url, url_encode(payload), {
        'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8'})
    logger.debug('selenium(%s, %r)', command, args)
    response = urlopen(request).read()
    if not response.startswith('OK'):
        raise RuntimeError(response.encode('utf-8'))
    if response == 'OK':
        return
    data = response[3:]
    if return_list:
        rows = list(csv.reader(StringIO(data)))
        return [transform(col) for col in rows[0]]
    else:
        return transform(data)
def avatar(self, size=30):
    if not app.cfg.user_use_gravatar:
        return None
    from MoinMoin.themes import get_current_theme
    from flask.ext.themes import static_file_url
    theme = get_current_theme()
    email = self.email
    if not email:
        return static_file_url(theme, theme.info.get('default_avatar', 'img/default_avatar.png'))
    param = {}
    param['gravatar_id'] = hashlib.md5(email.lower()).hexdigest()
    param['default'] = static_file_url(theme,
                                       theme.info.get('default_avatar', 'img/default_avatar.png'),
                                       True)
    param['size'] = str(size)
    # TODO: use same protocol of Moin site (might be https instead of http)
    gravatar_url = "http://www.gravatar.com/avatar.php?"
    gravatar_url += werkzeug.url_encode(param)
    return gravatar_url
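# --- Self-contained sketch, not from the original source: the same Gravatar URL
# construction outside MoinMoin. The default-avatar URL is a placeholder and the
# import assumes Werkzeug < 2.1.
import hashlib
from werkzeug.urls import url_encode

def gravatar_url(email, size=30, default='https://example.com/img/default_avatar.png'):
    params = {
        'gravatar_id': hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest(),
        'default': default,
        'size': str(size),
    }
    return 'https://www.gravatar.com/avatar.php?' + url_encode(params)

# gravatar_url('user@example.com')
# -> 'https://www.gravatar.com/avatar.php?gravatar_id=...&default=...&size=30'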
def mail_action_new(self, model, res_id, **kwargs):
    if model not in request.env:
        return self._redirect_to_messaging()
    params = {"view_type": "form", "model": model}
    if kwargs.get("action_id"):
        params["action"] = kwargs["action_id"]
    return werkzeug.utils.redirect("/web?#%s" % url_encode(params))
def inline_object_repl(self, stack, object, object_url=None, object_item=None,
                       object_text=None, object_args=None):
    """Handles objects included in the page."""
    if object_args:
        args = parse_arguments(object_args).keyword  # XXX needs different parsing
    else:
        args = {}
    if object_item is not None:
        query = url_encode(args, charset=CHARSET, encode_keys=True)
        att = 'attachment:'  # moin 1.9 needed this for an attached file
        if object_item.startswith(att):
            object_item = '/' + object_item[len(att):]  # now we have a subitem
        target = Iri(scheme='wiki.local', path=object_item, query=query, fragment=None)
        text = object_item
        attrib = {xinclude.href: target}
        element = xinclude.include(attrib=attrib)
        stack.top_append(element)
    else:
        target = Iri(object_url)
        text = object_url
        attrib = {xlink.href: target}
        if object_text is not None:
            attrib[moin_page.alt] = object_text
        element = moin_page.object(attrib)
        stack.push(element)
        if object_text:
            self.parse_inline(object_text, stack, self.inlinedesc_re)
        else:
            stack.top_append(text)
        stack.pop()
def modify_query(**new_values):
    args = request.args.copy()
    for key, value in new_values.items():
        args[key] = value
    return "{}?{}".format(request.path, url_encode(args))
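# --- Hedged usage sketch, not from the original source: a helper like modify_query
# is typically exposed to Jinja templates so links can rebuild the current URL with
# selected query parameters changed. The Flask app object here is an assumption,
# and modify_query (with its request/url_encode imports) is taken from the snippet above.
from flask import Flask

app = Flask(__name__)
app.add_template_global(modify_query)
# In a template: <a href="{{ modify_query(page=2, sort='name') }}">page 2</a>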
def handle_oauth2_response(self):
    """Handles an oauth2 authorization response.  The return value of
    this method is forwarded as first argument to the handling view
    function.
    """
    remote_args = {
        'code': request.args.get('code'),
        'client_id': self.consumer_key,
        'client_secret': self.consumer_secret,
        'redirect_uri': session.get(self.name + '_oauthredir')
    }
    remote_args.update(self.access_token_params)
    if self.access_token_method == 'POST':
        resp, content = self._client.request(self.access_token_url,
                                             self.access_token_method,
                                             url_encode(remote_args))
    elif self.access_token_method == 'GET':
        url = add_query(self.expand_url(self.access_token_url), remote_args)
        resp, content = self._client.request(url, self.access_token_method)
    else:
        raise OAuthException('Unsupported access_token_method: ' +
                             self.access_token_method)
    data = parse_response(resp, content)
    if resp['status'] != '200':
        raise OAuthException('Invalid response from ' + self.name, data)
    return data
def _validate_recaptcha(self, challenge, response, remote_addr):
    """Performs the actual validation."""
    private_key = get_config('tipfy.ext.wtforms', 'recaptcha_private_key')
    result = urlfetch.fetch(url=RECAPTCHA_VERIFY_SERVER,
                            method=urlfetch.POST,
                            headers={'Content-Type': 'application/x-www-form-urlencoded'},
                            payload=url_encode({
                                'privatekey': private_key,
                                'remoteip': remote_addr,
                                'challenge': challenge,
                                'response': response
                            }))
    if result.status_code != 200:
        return False
    rv = [l.strip() for l in result.content.splitlines()]
    if rv and rv[0] == 'true':
        return True
    if len(rv) > 1:
        error = rv[1]
        if error in self._error_codes:
            raise RuntimeError(self._error_codes[error])
    return False
def ogone_form_generate_values(self, values):
    base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
    ogone_tx_values = dict(values)
    param_plus = {
        'return_url': ogone_tx_values.pop('return_url', False)
    }
    temp_ogone_tx_values = {
        'PSPID': self.ogone_pspid,
        'ORDERID': values['reference'],
        'AMOUNT': float_repr(float_round(values['amount'], 2) * 100, 0),
        'CURRENCY': values['currency'] and values['currency'].name or '',
        'LANGUAGE': values.get('partner_lang'),
        'CN': values.get('partner_name'),
        'EMAIL': values.get('partner_email'),
        'OWNERZIP': values.get('partner_zip'),
        'OWNERADDRESS': values.get('partner_address'),
        'OWNERTOWN': values.get('partner_city'),
        'OWNERCTY': values.get('partner_country') and values.get('partner_country').code or '',
        'OWNERTELNO': values.get('partner_phone'),
        'ACCEPTURL': urls.url_join(base_url, OgoneController._accept_url),
        'DECLINEURL': urls.url_join(base_url, OgoneController._decline_url),
        'EXCEPTIONURL': urls.url_join(base_url, OgoneController._exception_url),
        'CANCELURL': urls.url_join(base_url, OgoneController._cancel_url),
        'PARAMPLUS': url_encode(param_plus),
    }
    if self.save_token in ['ask', 'always']:
        temp_ogone_tx_values.update({
            'ALIAS': 'ODOO-NEW-ALIAS-%s' % time.time(),  # something unique
            'ALIASUSAGE': values.get('alias_usage') or self.ogone_alias_usage,
        })
    shasign = self._ogone_generate_shasign('in', temp_ogone_tx_values)
    temp_ogone_tx_values['SHASIGN'] = shasign
    ogone_tx_values.update(temp_ogone_tx_values)
    return ogone_tx_values
def ogone_s2s_do_transaction(self, **kwargs):
    # TODO: create tx with s2s type
    account = self.acquirer_id
    reference = self.reference or "ODOO-%s-%s" % (datetime.datetime.now().strftime('%y%m%d_%H%M%S'), self.partner_id.id)
    param_plus = {
        'return_url': kwargs.get('return_url', False)
    }
    data = {
        'PSPID': account.ogone_pspid,
        'USERID': account.ogone_userid,
        'PSWD': account.ogone_password,
        'ORDERID': reference,
        'AMOUNT': int(self.amount * 100),
        'CURRENCY': self.currency_id.name,
        'OPERATION': 'SAL',
        'ECI': 9,  # Recurring (from eCommerce)
        'ALIAS': self.payment_token_id.acquirer_ref,
        'RTIMEOUT': 30,
        'PARAMPLUS': url_encode(param_plus),
        'EMAIL': self.partner_id.email or '',
        'CN': self.partner_id.name or '',
    }
    if request:
        data['REMOTE_ADDR'] = request.httprequest.remote_addr
    if kwargs.get('3d_secure'):
        data.update({
            'FLAG3D': 'Y',
            'LANGUAGE': self.partner_id.lang or 'en_US',
        })
        for url in 'accept decline exception'.split():
            key = '{0}_url'.format(url)
            val = kwargs.pop(key, None)
            if val:
                key = '{0}URL'.format(url).upper()
                data[key] = val
    data['SHASIGN'] = self.acquirer_id._ogone_generate_shasign('in', data)
    direct_order_url = 'https://secure.ogone.com/ncol/%s/orderdirect.asp' % (self.acquirer_id.environment)
    logged_data = data.copy()
    logged_data.pop('PSWD')
    _logger.info("ogone_s2s_do_transaction: Sending values to URL %s, values:\n%s", direct_order_url, pformat(logged_data))
    result = requests.post(direct_order_url, data=data).content
    try:
        tree = objectify.fromstring(result)
        _logger.info('ogone_s2s_do_transaction: Values received:\n%s', etree.tostring(tree, pretty_print=True, encoding='utf-8'))
    except etree.XMLSyntaxError:
        # invalid response from ogone
        _logger.exception('Invalid xml response from ogone')
        _logger.info('ogone_s2s_do_transaction: Values received:\n%s', result)
        raise
    return self._ogone_s2s_validate_tree(tree)
def authorize(self, callback=None, state=None):
    """Returns a redirect response to the remote authorization URL with
    the signed callback given.  The callback must be `None` in which
    case the application will most likely switch to PIN based
    authentication or use a remotely stored callback URL.
    Alternatively it's an URL on the system that has to be decorated as
    :meth:`authorized_handler`.
    """
    if self.request_token_url:
        token = self.generate_request_token(callback)[0]
        url = '%s?oauth_token=%s' % (self.expand_url(self.authorize_url),
                                     url_quote(token))
        url = add_query(url, self.authorize_params)
    else:
        assert callback is not None, 'Callback is required OAuth2'
        # This is for things like facebook's oauth.  Since we need the
        # callback for the access_token_url we need to keep it in the
        # session.
        params = dict(self.request_token_params)
        params['redirect_uri'] = callback
        params['client_id'] = self.consumer_key
        params['state'] = url_encode(state or {})
        session[self.name + '_oauthredir'] = callback
        url = add_query(self.expand_url(self.authorize_url), params)
    return redirect(url)
def new_database(self, **post):
    params = post.copy()
    state = simplejson.loads(post.get('state'))
    if not state.get('p'):
        state['p'] = request.env.ref('saas_client.saas_oauth_provider').id
        params['state'] = simplejson.dumps(state)
    return werkzeug.utils.redirect('/auth_oauth/signin?%s' % werkzeug.url_encode(params))
def create_template(self):
    assert len(self) == 1, 'This method is applied only for single record'
    # TODO use create_new_database function
    plan = self[0]
    state = {
        'd': plan.template_id.name,
        'demo': plan.demo and 1 or 0,
        'addons': [],
        'lang': plan.lang,
        'tz': plan.tz,
        'is_template_db': 1,
    }
    client_id = plan.template_id.client_id
    plan.template_id.server_id = plan.server_id
    params = plan.server_id._request_params(path='/saas_server/new_database',
                                            state=state,
                                            client_id=client_id)[0]
    access_token = plan.template_id.oauth_application_id._get_access_token(create=True)
    params.update({
        'token_type': 'Bearer',
        'access_token': access_token,
        'expires_in': 3600,
    })
    url = '{scheme}://{saas_server}:{port}{path}?{params}'.format(
        scheme=plan.server_id.request_scheme,
        saas_server=plan.server_id.name,
        port=plan.server_id.request_port,
        path='/saas_server/new_database',
        params=werkzeug.url_encode(params))
    res = requests.get(url, verify=(plan.server_id.request_scheme == 'https' and plan.server_id.verify_ssl))
    if not res.ok:
        msg = """Status Code - %s
Reason - %s
URL - %s """ % (res.status_code, res.reason, res.url)
        raise Warning(msg)
    return self.action_sync_server()
def partners(self, country=None, grade=None, page=0, **post): country_all = post.pop('country_all', False) partner_obj = request.env['res.partner'] country_obj = request.env['res.country'] search = post.get('search', '') base_partner_domain = [('is_company', '=', True), ('grade_id', '!=', False), ('website_published', '=', True)] if not request.env['res.users'].has_group('website.group_website_publisher'): base_partner_domain += [('grade_id.website_published', '=', True)] if search: base_partner_domain += ['|', ('name', 'ilike', search), ('website_description', 'ilike', search)] # group by grade grade_domain = list(base_partner_domain) if not country and not country_all: country_code = request.session['geoip'].get('country_code') if country_code: country = country_obj.search([('code', '=', country_code)], limit=1) if country: grade_domain += [('country_id', '=', country.id)] grades = partner_obj.sudo().read_group( grade_domain, ["id", "grade_id"], groupby="grade_id") grades_partners = partner_obj.sudo().search_count(grade_domain) # flag active grade for grade_dict in grades: grade_dict['active'] = grade and grade_dict['grade_id'][0] == grade.id grades.insert(0, { 'grade_id_count': grades_partners, 'grade_id': (0, _("All Categories")), 'active': bool(grade is None), }) # group by country country_domain = list(base_partner_domain) if grade: country_domain += [('grade_id', '=', grade.id)] countries = partner_obj.sudo().read_group( country_domain, ["id", "country_id"], groupby="country_id", orderby="country_id") countries_partners = partner_obj.sudo().search_count(country_domain) # flag active country for country_dict in countries: country_dict['active'] = country and country_dict['country_id'] and country_dict['country_id'][0] == country.id countries.insert(0, { 'country_id_count': countries_partners, 'country_id': (0, _("All Countries")), 'active': bool(country is None), }) # current search if grade: base_partner_domain += [('grade_id', '=', grade.id)] if country: base_partner_domain += [('country_id', '=', country.id)] # format pager if grade and not country: url = '/partners/grade/' + slug(grade) elif country and not grade: url = '/partners/country/' + slug(country) elif country and grade: url = '/partners/grade/' + slug(grade) + '/country/' + slug(country) else: url = '/partners' url_args = {} if search: url_args['search'] = search if country_all: url_args['country_all'] = True partner_count = partner_obj.sudo().search_count(base_partner_domain) pager = request.website.pager( url=url, total=partner_count, page=page, step=self._references_per_page, scope=7, url_args=url_args) # search partners matching current search parameters partner_ids = partner_obj.sudo().search( base_partner_domain, order="grade_sequence DESC, implemented_count DESC, display_name ASC, id ASC", offset=pager['offset'], limit=self._references_per_page) partners = partner_ids.sudo() google_map_partner_ids = ','.join(str(p.id) for p in partners) google_maps_api_key = request.website.google_maps_api_key values = { 'countries': countries, 'current_country': country, 'grades': grades, 'current_grade': grade, 'partners': partners, 'google_map_partner_ids': google_map_partner_ids, 'pager': pager, 'searches': post, 'search_path': "%s" % werkzeug.url_encode(post), 'google_maps_api_key': google_maps_api_key, } return request.render("website_crm_partner_assign.index", values, status=partners and 200 or 404)
def shop(self, page=0, category=None, search='', **post): cr, uid, context, pool = (request.cr, request.uid, request.context, request.registry) values = {} child_prod_id = [] domain = request.website.sale_product_domain() if search: domain += [ '|', '|', '|', ('name', 'ilike', search), ('description', 'ilike', search), ('description_sale', 'ilike', search), ('product_variant_ids.default_code', 'ilike', search) ] if category: domain += [('public_categ_ids', 'child_of', int(category))] attrib_list = request.httprequest.args.getlist('attrib') attrib_values = [map(int, v.split('-')) for v in attrib_list if v] attrib_set = set([v[1] for v in attrib_values]) if attrib_values: attrib = None ids = [] for value in attrib_values: if not attrib: attrib = value[0] ids.append(value[1]) elif value[0] == attrib: ids.append(value[1]) else: domain += [('attribute_line_ids.value_ids', 'in', ids)] attrib = value[0] ids = [value[1]] if attrib: domain += [('attribute_line_ids.value_ids', 'in', ids)] keep = QueryURL('/shop', category=category and int(category), search=search, attrib=attrib_list) if not context.get('pricelist'): pricelist = self.get_pricelist() context['pricelist'] = int(pricelist) else: pricelist = pool.get('product.pricelist').browse( cr, uid, context['pricelist'], context) product_obj = pool.get('product.template') # Brand's product search if post.get('brand'): product_designer_obj = pool.get('product.brand') brand_ids = product_designer_obj.search( cr, SUPERUSER_ID, [('id', '=', int(post.get('brand')))]) domain = [('product_brand_id', 'in', brand_ids)] url = '/shop' product_count = product_obj.search_count(cr, uid, domain, context=context) if search: post['search'] = search if category: category = pool['product.public.category'].browse(cr, uid, int(category), context=context) url = '/shop/category/%s' % slug(category) pager = request.website.pager(url=url, total=product_count, page=page, step=PPG, scope=7, url_args=post) product_ids = product_obj.search( cr, uid, domain, limit=PPG, offset=pager['offset'], order='website_published desc, website_sequence desc', context=context) products = product_obj.browse(cr, uid, product_ids, context=context) style_obj = pool['product.style'] style_ids = style_obj.search(cr, uid, [], context=context) styles = style_obj.browse(cr, uid, style_ids, context=context) category_obj = pool['product.public.category'] category_ids = category_obj.search(cr, uid, [], context=context) categories = category_obj.browse(cr, uid, category_ids, context=context) categs = filter(lambda x: not x.parent_id, categories) if category: selected_id = int(category) child_prod_ids = category_obj.search( cr, uid, [('parent_id', '=', selected_id)], context=context) children_ids = category_obj.browse(cr, uid, child_prod_ids) values.update({'child_list': children_ids}) attributes_obj = request.registry['product.attribute'] attributes_ids = attributes_obj.search(cr, uid, [], context=context) attributes = attributes_obj.browse(cr, uid, attributes_ids, context=context) from_currency = pool.get('product.price.type')._get_field_currency( cr, uid, 'list_price', context) to_currency = pricelist.currency_id compute_currency = lambda price: pool['res.currency']._compute( cr, uid, from_currency, to_currency, price, context=context) values.update({ 'search': search, 'category': category, 'attrib_values': attrib_values, 'attrib_set': attrib_set, 'pager': pager, 'pricelist': pricelist, 'products': products, 'bins': table_compute().process(products), 'rows': PPR, 'styles': styles, 'categories': categs, 
'attributes': attributes, 'compute_currency': compute_currency, 'keep': keep, 'style_in_product': lambda style, product: style.id in [s.id for s in product.website_style_ids], 'attrib_encode': lambda attribs: werkzeug.url_encode([('attrib', i) for i in attribs]) }) return request.website.render('website_sale.products', values)
def shop(self, page=0, category=None, search='', ppg=False, **post): cr, uid, context, pool = request.cr, request.uid, request.context, request.registry if ppg: try: ppg = int(ppg) except ValueError: ppg = PPG post["ppg"] = ppg else: ppg = PPG attrib_list = request.httprequest.args.getlist('attrib') attrib_values = [map(int, v.split("-")) for v in attrib_list if v] attributes_ids = set([v[0] for v in attrib_values]) attrib_set = set([v[1] for v in attrib_values]) domain = self._get_search_domain(search, category, attrib_values) keep = QueryURL('/shop', category=category and int(category), search=search, attrib=attrib_list) if not context.get('pricelist'): pricelist = self.get_pricelist() context['pricelist'] = int(pricelist) else: pricelist = pool.get('product.pricelist').browse(cr, uid, context['pricelist'], context) url = "/shop" if search: post["search"] = search if category: category = pool['product.public.category'].browse(cr, uid, int(category), context=context) url = "/shop/category/%s" % slug(category) if attrib_list: post['attrib'] = attrib_list style_obj = pool['product.style'] style_ids = style_obj.search(cr, uid, [], context=context) styles = style_obj.browse(cr, uid, style_ids, context=context) category_obj = pool['product.public.category'] category_ids = category_obj.search(cr, uid, [('parent_id', '=', False)], context=context) categs = category_obj.browse(cr, uid, category_ids, context=context) product_obj = pool.get('product.template') parent_category_ids = [] if category: parent_category_ids = [category.id] current_category = category while current_category.parent_id: parent_category_ids.append(current_category.parent_id.id) current_category = current_category.parent_id product_count = product_obj.search_count(cr, uid, domain, context=context) pager = request.website.pager(url=url, total=product_count, page=page, step=ppg, scope=7, url_args=post) product_ids = product_obj.search(cr, uid, domain, limit=ppg, offset=pager['offset'], order='website_published desc, website_sequence desc', context=context) products = product_obj.browse(cr, uid, product_ids, context=context) attributes_obj = request.registry['product.attribute'] if product_ids: attributes_ids = attributes_obj.search(cr, uid, [('attribute_line_ids.product_tmpl_id', 'in', product_ids)], context=context) attributes = attributes_obj.browse(cr, uid, attributes_ids, context=context) from_currency = pool['res.users'].browse(cr, uid, uid, context=context).company_id.currency_id to_currency = pricelist.currency_id compute_currency = lambda price: pool['res.currency']._compute(cr, uid, from_currency, to_currency, price, context=context) values = { 'search': search, 'category': category, 'attrib_values': attrib_values, 'attrib_set': attrib_set, 'pager': pager, 'pricelist': pricelist, 'products': products, 'bins': table_compute().process(products, ppg), 'rows': PPR, 'styles': styles, 'categories': categs, 'attributes': attributes, 'compute_currency': compute_currency, 'keep': keep, 'parent_category_ids': parent_category_ids, 'style_in_product': lambda style, product: style.id in [s.id for s in product.website_style_ids], 'attrib_encode': lambda attribs: werkzeug.url_encode([('attrib',i) for i in attribs]), } if category: values['main_object'] = category return request.website.render("website_sale.products", values)
class SaasPortalPlan(models.Model): _name = 'saas_portal.plan' name = fields.Char('Plan', required=True) summary = fields.Char('Summary') template_id = fields.Many2one('saas_portal.database', 'Template', ondelete='restrict') demo = fields.Boolean('Install Demo Data') maximum_allowed_dbs_per_partner = fields.Integer( help='maximum allowed non-trial databases per customer', require=True, default=0) maximum_allowed_trial_dbs_per_partner = fields.Integer( help='maximum allowed trial databases per customer', require=True, default=0) max_users = fields.Char('Initial Max users', default='0', help='leave 0 for no limit') total_storage_limit = fields.Integer('Total storage limit (MB)', help='leave 0 for no limit') block_on_expiration = fields.Boolean('Block clients on expiration', default=False) block_on_storage_exceed = fields.Boolean('Block clients on storage exceed', default=False) def _get_default_lang(self): return self.env.user.lang def _default_tz(self): return self.env.user.tz lang = fields.Selection(scan_languages(), 'Language', default=_get_default_lang) tz = fields.Selection(_tz_get, 'TimeZone', default=_default_tz) sequence = fields.Integer('Sequence') state = fields.Selection([('draft', 'Draft'), ('confirmed', 'Confirmed')], 'State', compute='_compute_get_state', store=True) expiration = fields.Integer('Expiration (hours)', help='time to delete database. Use for demo') _order = 'sequence' grace_period = fields.Integer('Grace period (days)', help='initial days before expiration') dbname_template = fields.Char( 'DB Names', help= 'Used for generating client database domain name. Use %i for numbering. Ignore if you use manually created db names', placeholder='crm-%i.odoo.com') server_id = fields.Many2one('saas_portal.server', string='SaaS Server', ondelete='restrict', help='User this saas server or choose random') website_description = fields.Html('Website description') logo = fields.Binary('Logo') on_create = fields.Selection([ ('login', 'Log into just created instance'), ], string="Workflow on create", default='login') on_create_email_template = fields.Many2one( 'mail.template', default=lambda self: self.env.ref( 'saas_portal.email_template_create_saas')) @api.multi @api.depends('template_id.state') def _compute_get_state(self): for plan in self: if plan.template_id.state == 'template': plan.state = 'confirmed' else: plan.state = 'draft' @api.multi def _new_database_vals(self, vals): self.ensure_one() vals['max_users'] = vals.get('max_users', self.max_users) vals['total_storage_limit'] = vals.get('total_storage_limit', self.total_storage_limit) vals['block_on_expiration'] = vals.get('block_on_expiration', self.block_on_expiration) vals['block_on_storage_exceed'] = vals.get( 'block_on_storage_exceed', self.block_on_storage_exceed) return vals @api.multi def _prepare_owner_user_data(self, user_id): """ Prepare the dict of values to update owner user data in client instalnce. This method may be overridden to implement custom values (making sure to call super() to establish a clean extension chain). 
""" self.ensure_one() owner_user = self.env['res.users'].browse(user_id) or self.env.user owner_user_data = { 'user_id': owner_user.id, 'login': owner_user.login, 'name': owner_user.name, 'email': owner_user.email, 'password_crypt': owner_user.password_crypt, } return owner_user_data @api.multi def _get_expiration(self, trial): self.ensure_one() trial_hours = trial and self.expiration initial_expiration_datetime = datetime.now() trial_expiration_datetime = (initial_expiration_datetime + timedelta( hours=trial_hours)).strftime(DEFAULT_SERVER_DATETIME_FORMAT) return trial and trial_expiration_datetime or initial_expiration_datetime.strftime( DEFAULT_SERVER_DATETIME_FORMAT) @api.multi def create_new_database(self, **kwargs): return self._create_new_database(**kwargs) @api.multi def _create_new_database(self, dbname=None, client_id=None, partner_id=None, user_id=None, notify_user=True, trial=False, support_team_id=None, async=None): self.ensure_one() p_client = self.env['saas_portal.client'] p_server = self.env['saas_portal.server'] server = self.server_id if not server: server = p_server.get_saas_server() # server.action_sync_server() if not partner_id and user_id: user = self.env['res.users'].browse(user_id) partner_id = user.partner_id.id if not trial and self.maximum_allowed_dbs_per_partner != 0: db_count = p_client.search_count([('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', False)]) if db_count >= self.maximum_allowed_dbs_per_partner: raise MaximumDBException( "Limit of databases for this plan is %(maximum)s reached" % {'maximum': self.maximum_allowed_dbs_per_partner}) if trial and self.maximum_allowed_trial_dbs_per_partner != 0: trial_db_count = p_client.search_count([('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', True)]) if trial_db_count >= self.maximum_allowed_trial_dbs_per_partner: raise MaximumTrialDBException( "Limit of trial databases for this plan is %(maximum)s reached" % {'maximum': self.maximum_allowed_trial_dbs_per_partner}) client_expiration = self._get_expiration(trial) vals = { 'name': dbname or self.generate_dbname(), 'server_id': server.id, 'plan_id': self.id, 'partner_id': partner_id, 'trial': trial, 'support_team_id': support_team_id, 'expiration_datetime': client_expiration, } client = None if client_id: vals['client_id'] = client_id client = p_client.search([('client_id', '=', client_id)]) vals = self._new_database_vals(vals) if client: client.write(vals) else: client = p_client.create(vals) client_id = client.client_id owner_user_data = self._prepare_owner_user_data(user_id) state = { 'd': client.name, 'public_url': client.public_url, 'e': client_expiration, 'r': client.public_url + 'web', 'h': client.host, 'owner_user': owner_user_data, 't': client.trial, } if self.template_id: state.update({'db_template': self.template_id.name}) scope = ['userinfo', 'force_login', 'trial', 'skiptheuse'] req, req_kwargs = server._request_server( path='/saas_server/new_database', state=state, client_id=client_id, scope=scope, ) res = requests.Session().send(req, **req_kwargs) if res.status_code != 200: raise Warning( _('Error on request: %s\nReason: %s \n Message: %s') % (req.url, res.reason, res.content)) data = simplejson.loads(res.text) params = { 'state': data.get('state'), 'access_token': client.oauth_application_id._get_access_token(user_id, create=True), } url = '{url}?{params}'.format(url=data.get('url'), params=werkzeug.url_encode(params)) auth_url = url # send email if 
there is mail template record template = self.on_create_email_template if template and notify_user: # we have to have a user in this place (how to user without a user?) user = self.env['res.users'].browse(user_id) client.with_context(user=user).message_post_with_template( template.id, composition_mode='comment') client.send_params_to_client_db() # TODO make async call of action_sync_server here # client.server_id.action_sync_server() client.sync_client() return { 'url': url, 'id': client.id, 'client_id': client_id, 'auth_url': auth_url }
def events(self, page=1, **searches): Event = request.env['event.event'] EventType = request.env['event.type'] searches.setdefault('date', 'all') searches.setdefault('type', 'all') searches.setdefault('country', 'all') domain_search = {} def sdn(date): return fields.Datetime.to_string( date.replace(hour=23, minute=59, second=59)) def sd(date): return fields.Datetime.to_string(date) today = datetime.today() dates = [ ['all', _('Next Events'), [("date_end", ">", sd(today))], 0], [ 'today', _('Today'), [("date_end", ">", sd(today)), ("date_begin", "<", sdn(today))], 0 ], [ 'week', _('This Week'), [("date_end", ">=", sd(today + relativedelta(days=-today.weekday()))), ("date_begin", "<", sdn(today + relativedelta(days=6 - today.weekday())))], 0 ], [ 'nextweek', _('Next Week'), [("date_end", ">=", sd(today + relativedelta(days=7 - today.weekday()))), ("date_begin", "<", sdn(today + relativedelta(days=13 - today.weekday())))], 0 ], [ 'month', _('This month'), [("date_end", ">=", sd(today.replace(day=1))), ("date_begin", "<", (today.replace(day=1) + relativedelta(months=1)).strftime('%Y-%m-%d 00:00:00'))], 0 ], [ 'nextmonth', _('Next month'), [("date_end", ">=", sd(today.replace(day=1) + relativedelta(months=1))), ("date_begin", "<", (today.replace(day=1) + relativedelta(months=2)).strftime('%Y-%m-%d 00:00:00'))], 0 ], [ 'old', _('Old Events'), [("date_end", "<", today.strftime('%Y-%m-%d 00:00:00'))], 0 ], ] # search domains current_date = None current_type = None current_country = None for date in dates: if searches["date"] == date[0]: domain_search["date"] = date[2] if date[0] != 'all': current_date = date[1] if searches["type"] != 'all': current_type = EventType.browse(int(searches['type'])) domain_search["type"] = [("event_type_id", "=", int(searches["type"]))] if searches["country"] != 'all' and searches["country"] != 'online': current_country = request.env['res.country'].browse( int(searches['country'])) domain_search["country"] = [ '|', ("country_id", "=", int(searches["country"])), ("country_id", "=", False) ] elif searches["country"] == 'online': domain_search["country"] = [("country_id", "=", False)] def dom_without(without): domain = [('state', "in", ['draft', 'confirm', 'done'])] for key, search in domain_search.items(): if key != without: domain += search return domain # count by domains without self search for date in dates: if date[0] != 'old': date[3] = Event.search_count(dom_without('date') + date[2]) domain = dom_without('type') types = Event.read_group(domain, ["id", "event_type_id"], groupby=["event_type_id"], orderby="event_type_id") types.insert( 0, { 'event_type_id_count': sum([int(type['event_type_id_count']) for type in types]), 'event_type_id': ("all", _("All Categories")) }) domain = dom_without('country') countries = Event.read_group(domain, ["id", "country_id"], groupby="country_id", orderby="country_id") countries.insert( 0, { 'country_id_count': sum([ int(country['country_id_count']) for country in countries ]), 'country_id': ("all", _("All Countries")) }) step = 10 # Number of events per page event_count = Event.search_count(dom_without("none")) pager = request.website.pager(url="/event", url_args={ 'date': searches.get('date'), 'type': searches.get('type'), 'country': searches.get('country') }, total=event_count, page=page, step=step, scope=5) order = 'date_begin' if searches.get('date', 'all') == 'old': order = 'date_begin desc' if searches[ "country"] != 'all': # if we are looking for a specific country order = 'is_online, ' + order # show physical events first 
order = 'website_published desc, ' + order events = Event.search(dom_without("none"), limit=step, offset=pager['offset'], order=order) values = { 'current_date': current_date, 'current_country': current_country, 'current_type': current_type, 'event_ids': events, # event_ids used in website_event_track so we keep name as it is 'dates': dates, 'types': types, 'countries': countries, 'pager': pager, 'searches': searches, 'search_path': "?%s" % werkzeug.url_encode(searches), } return request.render("website_event.index", values)
def mail_action_view(self, model=None, res_id=None, message_id=None):
    """ Generic access point from notification emails. The heuristic to
        choose where to redirect the user is the following :

     - find a public URL
     - if none found
       - users with a read access are redirected to the document
       - users without read access are redirected to the Messaging
       - not logged users are redirected to the login page
    """
    uid = request.session.uid
    if message_id:
        try:
            message = request.env['mail.message'].sudo().browse(int(message_id)).exists()
        except:
            message = request.env['mail.message']
        if message:
            model, res_id = message.model, message.res_id
        else:
            # either a wrong message_id, either someone trying ids -> just go to messaging
            return self._redirect_to_messaging()
    elif res_id and isinstance(res_id, basestring):
        res_id = int(res_id)

    # no model / res_id, meaning no possible record -> redirect to login
    if not model or not res_id or model not in request.env:
        return self._redirect_to_messaging()

    # find the access action using sudo to have the details about the access link
    RecordModel = request.env[model]
    record_sudo = RecordModel.sudo().browse(res_id).exists()
    if not record_sudo:
        # record does not seem to exist -> redirect to login
        return self._redirect_to_messaging()

    record_action = record_sudo.get_access_action()
    # the record has an URL redirection: use it directly
    if record_action['type'] == 'ir.actions.act_url':
        return werkzeug.utils.redirect(record_action['url'])
    # other choice: act_window (no support of anything else currently)
    elif not record_action['type'] == 'ir.actions.act_window':
        return self._redirect_to_messaging()

    # the record has a window redirection: check access rights
    if not RecordModel.sudo(uid).check_access_rights('read', raise_exception=False):
        return self._redirect_to_messaging()
    try:
        RecordModel.sudo(uid).browse(res_id).exists().check_access_rule('read')
    except AccessError:
        return self._redirect_to_messaging()

    query = {}
    url_params = {
        'view_type': record_action['view_type'],
        'model': model,
        'id': res_id,
        'active_id': res_id,
        'view_id': record_sudo.get_formview_id(),
        'action': record_action.get('id'),
    }
    url = '/web?%s#%s' % (url_encode(query), url_encode(url_params))
    return werkzeug.utils.redirect(url)
def modify_query(key, value):
    args = request.args.copy()
    args[key] = value
    return '{}?{}'.format(request.path, url_encode(args))
def _redirect_to_messaging(cls):
    url = '/web#%s' % url_encode({'action': 'mail.action_discuss'})
    return werkzeug.utils.redirect(url)
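# --- Illustrative only, not from the original source: the redirect target the helper
# above builds. The import assumes Werkzeug < 2.1.
from werkzeug.urls import url_encode

print('/web#%s' % url_encode({'action': 'mail.action_discuss'}))
# -> /web#action=mail.action_discuss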
def expense_post_payment(self):
    self.ensure_one()
    context = dict(self._context or {})
    active_ids = context.get('active_ids', [])
    expense_sheet = self.env['hr.expense.sheet'].browse(active_ids)

    # Create payment and post it
    payment = self.env['account.payment'].create(self._get_payment_vals())
    payment.post()

    # Save the reference to the payment so it can be checked when reverting.
    expense_sheet.payment_ref = payment.id

    # Log the payment in the chatter
    body = (_("A payment of %s %s with the reference <a href='/mail/view?%s'>%s</a> related to your expense %s has been made.")
            % (payment.amount, payment.currency_id.symbol,
               url_encode({'model': 'account.payment', 'res_id': payment.id}),
               payment.name, expense_sheet.name))
    expense_sheet.message_post(body=body)

    # Reconcile the payment and the expense, i.e. lookup on the payable account move lines
    account_move_lines_to_reconcile = self.env['account.move.line']
    for line in payment.move_line_ids + expense_sheet.account_move_id.line_ids:
        if line.account_id.internal_type == 'payable':
            account_move_lines_to_reconcile |= line
    account_move_lines_to_reconcile.reconcile()

    return {'type': 'ir.actions.act_window_close'}
def add_query(url, args):
    if not args:
        return url
    return url + ('?' in url and '&' or '?') + url_encode(args)
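# --- Hedged usage example, not from the original source: expected behaviour of the
# add_query helper above, assuming it and its url_encode import are in scope.
assert add_query('https://example.com/cb', {}) == 'https://example.com/cb'
assert add_query('https://example.com/cb', {'state': 'xyz'}) == 'https://example.com/cb?state=xyz'
assert add_query('https://example.com/cb?a=1', {'b': 2}) == 'https://example.com/cb?a=1&b=2'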
def _redirect_to_messaging(self):
    messaging_action = request.env['mail.thread']._get_inbox_action_xml_id()
    url = '/web#%s' % url_encode({'action': messaging_action})
    return werkzeug.utils.redirect(url)
def shop(self, page=0, ppg=False, house_type=False, gemeente=False, min_price=False, max_price=False, slaapkamers=False, soort_bebouwing=False, tuin=False, terras=False, garage=False, zwembad=False, lift=False, **post): old_sold = super(website_houses, self).shop(page=page, ppg=PPG, post=post) old_qcontext = old_sold.qcontext product_obj = http.request.env["product.template"] attribute_obj = http.request.env["xx.house.attribute"] filtered_products_list = product_obj.search([]) if house_type: filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('xx_house_type', '=', house_type) ]) post["house_type"] = house_type if gemeente: gemeente_string = gemeente post["gemeente"] = gemeente gem = "Ditisgeenechtegemeente" zip = "" words = gemeente_string.split() for word in words: if product_obj.search([('xx_city', 'ilike', word)]): gem = word else: if product_obj.search([('xx_zip', '=ilike', word)]): zip = word filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), '|', ('xx_zip', '=ilike', zip), ('xx_city', 'ilike', gem) ]) if min_price: filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('xx_current_price', '>=', min_price) ]) post["min_price"] = min_price if max_price: filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('xx_current_price', '<=', max_price) ]) post["max_price"] = max_price if slaapkamers: if slaapkamers == "4": attribute_list = attribute_obj.search([('name', 'ilike', "slaapkamer"), ('xx_value', '>=', 5)]) else: attribute_list = attribute_obj.search([ ('name', 'ilike', "slaapkamer"), ('xx_value', '=', int(slaapkamers) + 1) ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["slaapkamers"] = slaapkamers if soort_bebouwing: filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('xx_building_type', '=', soort_bebouwing) ]) post["soort_bebouwing"] = soort_bebouwing if tuin: attribute_list = attribute_obj.search([ ('name', 'ilike', tuin), '!', ('xx_value', 'ilike', 'nee') ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["tuin"] = tuin if terras: attribute_list = attribute_obj.search([ ('name', 'ilike', terras), '!', ('xx_value', 'ilike', 'nee') ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["terras"] = terras if garage: attribute_list = attribute_obj.search([ ('name', 'ilike', garage), '!', ('xx_value', 'ilike', 'nee') ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["garage"] = garage if zwembad: attribute_list = attribute_obj.search([ ('name', 'ilike', zwembad), '!', ('xx_value', 'ilike', 'nee') ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) 
filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["zwembad"] = zwembad if lift: attribute_list = attribute_obj.search([ ('name', 'ilike', lift), '!', ('xx_value', 'ilike', 'nee') ]) house_id_set = set() for attr in attribute_list: house_id_set.add(attr.xx_house.id) house_id_list = list(house_id_set) filtered_products_list = product_obj.search([ ('id', 'in', filtered_products_list._ids), ('id', 'in', house_id_list) ]) post["lift"] = lift new_product_count = len(filtered_products_list._ids) pager = request.website.pager(url="/shop", total=new_product_count, page=page, step=PPG, scope=7, url_args=post) new_product_ids = product_obj.search( [('id', 'in', filtered_products_list._ids)], limit=PPG, offset=pager['offset'], order='website_published desc, website_sequence desc') house_type = [] for h_type in http.request.env['xx.house.type'].search([]): house_type.append(h_type.name) values = { 'search': old_qcontext.get('search'), 'category': old_qcontext.get('category'), 'attrib_values': old_qcontext.get('attrib_values'), 'attrib_set': old_qcontext.get('attrib_set'), 'pager': pager, 'pricelist': old_qcontext.get('pricelist'), 'products': new_product_ids, 'bins': table_compute().process(new_product_ids, PPG), 'rows': PPR, 'styles': old_qcontext.get('styles'), 'categories': old_qcontext.get('categs'), 'attributes': old_qcontext.get('attributes'), 'compute_currency': old_qcontext.get('compute_currency'), 'keep': old_qcontext.get('keep'), 'parent_category_ids': old_qcontext.get('parent_category_ids'), 'style_in_product': lambda style, product: style.id in [s.id for s in product.website_style_ids], 'attrib_encode': lambda attribs: werkzeug.url_encode([('attrib', i) for i in attribs]), 'house_types': house_type } return request.website.render("website_sale.products", values)
def kanban(self, cr, uid, ids, model, domain, column, template, step=None, scope=None, orderby=None, context=None): step = step and int(step) or 10 scope = scope and int(scope) or 5 orderby = orderby or "name" get_args = dict(request.httprequest.args or {}) model_obj = self.pool[model] relation = model_obj._columns.get(column)._obj relation_obj = self.pool[relation] get_args.setdefault('kanban', "") kanban = get_args.pop('kanban') kanban_url = "?%s&kanban=" % werkzeug.url_encode(get_args) pages = {} for col in kanban.split(","): if col: col = col.split("-") pages[int(col[0])] = int(col[1]) objects = [] for group in model_obj.read_group(cr, uid, domain, ["id", column], groupby=column): obj = {} # browse column relation_id = group[column][0] obj['column_id'] = relation_obj.browse(cr, uid, relation_id) obj['kanban_url'] = kanban_url for k, v in pages.items(): if k != relation_id: obj['kanban_url'] += "%s-%s" % (k, v) # pager number = model_obj.search(cr, uid, group['__domain'], count=True) obj['page_count'] = int(math.ceil(float(number) / step)) obj['page'] = pages.get(relation_id) or 1 if obj['page'] > obj['page_count']: obj['page'] = obj['page_count'] offset = (obj['page'] - 1) * step obj['page_start'] = max( obj['page'] - int(math.floor((scope - 1) / 2)), 1) obj['page_end'] = min(obj['page_start'] + (scope - 1), obj['page_count']) # view data obj['domain'] = group['__domain'] obj['model'] = model obj['step'] = step obj['orderby'] = orderby # browse objects object_ids = model_obj.search(cr, uid, group['__domain'], limit=step, offset=offset, order=orderby) obj['object_ids'] = model_obj.browse(cr, uid, object_ids) objects.append(obj) values = { 'objects': objects, 'range': range, 'template': template, } return request.website._render("website.kanban_contain", values)
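# A small illustrative sketch (not part of the controller above; names are
# hypothetical) of the "kanban" query parameter it handles: the remaining GET
# arguments are re-encoded with werkzeug's url_encode and each kanban column
# keeps its own page number as a "<column_id>-<page>" token.
from werkzeug.urls import url_encode  # removed in Werkzeug 2.1+

get_args = {'order': 'name'}      # hypothetical leftover GET arguments
kanban = "3-2,7-1"                # value popped from the query string
pages = {}
for col in kanban.split(","):
    if col:
        column_id, page = col.split("-")
        pages[int(column_id)] = int(page)          # {3: 2, 7: 1}
kanban_url = "?%s&kanban=" % url_encode(get_args)  # "?order=name&kanban="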
def current_url(**new_values):
    args = request.args.copy()
    for key, value in new_values.items():
        args[key] = value
    return '{}?{}'.format(request.path, url_encode(args))
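# Hypothetical usage of current_url() above, assuming a Werkzeug/Flask-style
# request for '/shop?page=2&order=name': overriding one argument keeps the rest
# (parameter order in the output may vary).
current_url(page=3)        # -> '/shop?page=3&order=name'
current_url(sort='price')  # -> '/shop?page=2&order=name&sort=price'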
def get_url(page):
    _url = "%s/page/%s" % (url, page) if page > 1 else url
    if url_args:
        _url = "%s?%s" % (_url, werkzeug.url_encode(url_args))
    return _url
def _request(self, **kwargs):
    self.ensure_one()
    params = self._request_params(**kwargs)
    url = '/oauth2/auth?%s' % werkzeug.url_encode(params)
    return url
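# Illustrative only: assuming _request_params() returns a flat dict like the
# one below (keys are made up for the example), the helper above yields a
# relative OAuth2 authorization URL with the values percent-encoded.
# Assumes the old werkzeug.url_encode alias used throughout these snippets (Werkzeug < 2.1).
params = {'response_type': 'token', 'client_id': 'abc123', 'scope': 'userinfo'}
'/oauth2/auth?%s' % werkzeug.url_encode(params)
# -> '/oauth2/auth?response_type=token&client_id=abc123&scope=userinfo'
# (parameter order may vary with the Werkzeug version)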
def response_wrap(*args, **kw): # ~ _logger.warn('\n\npath: %s\n' % request.httprequest.path) if routing.get('key'): # Function that returns a raw string for key making # Format {path}{session}{etc} key_raw = routing['key'](kw).format(path=request.httprequest.path, session='%s' % {k:v for k,v in request.session.items() if len(k)<40}, device_type='%s' % request.session.get('device_type','md'), # xs sm md lg context='%s' % {k:v for k,v in request.env.context.items() if not k == 'uid'}, context_uid='%s' % {k:v for k,v in request.env.context.items()}, uid=request.env.context.get('uid'), logged_in='1' if request.env.context.get('uid') > 0 else '0', db=request.env.cr.dbname, lang=request.env.context.get('lang'), country='%s' % request.env.user.partner_id.commercial_partner_id.country_id.code, post='%s' % kw, xmlid='%s' % kw.get('xmlid'), version='%s' % kw.get('version'), is_user='******' if (request.website and request.website.is_user()) else '0', employee='1' if request.env.ref('base.group_user') in request.env.user.groups_id else '0', publisher='1' if request.env.ref('base.group_website_publisher') in request.env.user.groups_id else '0', designer='1' if request.env.ref('base.group_website_designer') in request.env.user.groups_id else '0', ).encode('latin-1', 'replace') #~ raise Warning(request.env['res.users'].browse(request.uid).group_ids) key = str(MEMCACHED_HASH(key_raw)) else: key_raw = ('%s,%s,%s' % (request.env.cr.dbname,request.httprequest.path,request.env.context)).encode('latin-1', 'replace') # Default key key = str(MEMCACHED_HASH(key_raw)) # ~ _logger.warn('\n\ndefault key_raw: %s\nkey: %s\n' % (key_raw, key)) ############### Key is ready if 'cache_invalidate' in kw.keys(): kw.pop('cache_invalidate',None) mc_delete(key) page_dict = None error = None try: page_dict = mc_load(key) except MemcacheClientError as e: error = "MemcacheClientError %s " % e _logger.warn(error) except MemcacheUnknownCommandError as e: error = "MemcacheUnknownCommandError %s " % e _logger.warn(error) except MemcacheIllegalInputError as e: error = "MemcacheIllegalInputError %s " % e _logger.warn(error) except MemcacheServerError as e: error = "MemcacheServerError %s " % e _logger.warn(error) except MemcacheUnknownError as e: error = clean_text(str(e)) _logger.warn("MemcacheUnknownError %s key: %s path: %s" % (eror, key, request.httprequest.path)) return werkzeug.wrappers.Response(status=500,headers=[ ('X-CacheKey',key), ('X-CacheError','MemcacheUnknownError %s' %error), ('X-CacheKeyRaw',key_raw), ('Server','Odoo %s Memcached %s' % (common.exp_version().get('server_version'), MEMCACHED_VERSION)), ]) except MemcacheUnexpectedCloseError as e: error = "MemcacheUnexpectedCloseError %s " % e _logger.warn(error) except Exception as e: err = sys.exc_info() # ~ error = "Memcached Error %s key: %s path: %s %s" % (e,key,request.httprequest.path, ''.join(traceback.format_exception(err[0], err[1], err[2]))) error = clean_text(''.join(traceback.format_exception(err[0], err[1], err[2]))) _logger.warn("Memcached Error %s key: %s path: %s" % (error, key, request.httprequest.path)) error = clean_text(str(e)) return werkzeug.wrappers.Response(status=500,headers=[ ('X-CacheKey',key), ('X-CacheError','Memcached Error %s' % error), ('X-CacheKeyRaw',key_raw), ('Server','Odoo %s Memcached %s' % (common.exp_version().get('server_version'), MEMCACHED_VERSION)), ]) if page_dict and not page_dict.get('db') == request.env.cr.dbname: _logger.warn('Database violation key=%s stored for=%s env db=%s ' % 
(key,page_dict.get('db'),request.env.cr.dbname)) page_dict = None # Blacklist if page_dict and any([p if p in request.httprequest.path else '' for p in kw.get('blacklist','').split(',')]): page_dict = None if 'cache_viewkey' in kw.keys(): if page_dict: res = mc_meta(key) view_meta = '<h2>Metadata</h2><table>%s</table>' % ''.join(['<tr><td>%s</td><td>%s</td></tr>' % (k,v) for k,v in res['page_dict'].items() if not k == 'page']) view_meta += 'Page Len : %.2f Kb<br>' % res['size'] #~ view_meta += 'Chunks : %s<br>' % ', '.join([len(c) for c in res['chunks']]) #~ view_meta += 'Chunks : %s<br>' % res['chunks'] return http.Response('<h1>Key <a href="/mcpage/%s">%s</a></h1>%s' % (key,key,view_meta)) else: if error: error = '<h1>Error</h1><h2>%s</h2>' % error return http.Response('%s<h1>Key is missing %s</h1>' % (error if error else '',key)) if routing.get('add_key') and not 'cache_key' in kw.keys(): #~ raise Warning(args,kw,request.httprequest.args.copy()) args = request.httprequest.args.copy() args['cache_key'] = key return werkzeug.utils.redirect('{}?{}'.format(request.httprequest.path, url_encode(args)), 302) max_age = routing.get('max_age',600) # 10 minutes cache_age = routing.get('cache_age',24 * 60 * 60) # One day s_maxage = routing.get('s_maxage',max_age) page = None if not page_dict: # MISS. Render the page. # ~ _logger.warn('\n\nMISS. Rendering the page.\n') page_dict = {} controller_start = timer() response = f(*args, **kw) # calls original controller render_start = timer() if routing.get('content_type'): response.headers['Content-Type'] = routing.get('content_type') #~ if isinstance(response.headers,list) and isinstance(response.headers[0],tuple): #~ _logger.error('response is list and tuple') #~ header_dict = {h[0]: h[1] for h in response.headers} #~ header_dict['Content-Type'] = routing.get('content_type') #~ response.headers = [(h[0],h[1]) for h in header_dict.items()] if response.template: #~ _logger.error('template %s values %s response %s' % (response.template,response.qcontext,response.response)) page = response.render() else: page = ''.join(response.response) flush_type = routing['flush_type'](kw).lower().encode('ascii', 'replace').replace(' ', '-') if routing.get('flush_type', None) else "" page_dict = { 'max-age': max_age, 'cache-age':cache_age, 'private': routing.get('private',False), 'key_raw': key_raw, 'render_time': '%.3f sec' % (timer()-render_start), 'controller_time': '%.3f sec' % (render_start-controller_start), 'path': request.httprequest.path, 'db': request.env.cr.dbname, 'page': base64.b64encode(page), 'date': http_date(), 'module': f.__module__, 'status_code': response.status_code, 'flush_type': flush_type, 'headers': response.headers, } if routing.get('no_cache'): page_dict['ETag'] = '%s' % MEMCACHED_HASH(page) # ~ _logger.warn('\n\npath: %s\nstatus_code: %s\nETag: %s\n' % (page_dict.get('path'), page_dict.get('status_code'), page_dict.get('ETag'))) mc_save(key, page_dict, cache_age) if flush_type: add_flush_type(request.cr.dbname, flush_type) #~ raise Warning(f.__module__,f.__name__,route()) else: # HIT in cache. ETag not checked yet... 
request_dict = {h[0]: h[1] for h in request.httprequest.headers} #~ _logger.warn('Page Exists If-None-Match %s Etag %s' %(request_dict.get('If-None-Match'), page_dict.get('ETag'))) if request_dict.get('If-None-Match') and (request_dict.get('If-None-Match') == page_dict.get('ETag')): # HIT with correct ETag header = [ ('X-CacheETag', page_dict.get('ETag')), ('X-CacheKey', key), ('X-Cache', 'from cache'), ('X-CacheKeyRaw', key_raw), ('X-CacheController', page_dict.get('controller_time')), ('X-CacheRender', page_dict.get('render_time')), ('X-CacheCacheAge', cache_age), ('Server', 'Odoo %s Memcached %s' % (common.exp_version().get('server_version'), MEMCACHED_VERSION)), ] # TODO: Do we need this? Odoo provides its own ETag for some files, which cause mixups. # Might be that these headers are needed when downloading files in /web (we had some such problem earlier). header += [(k,v) for k,v in page_dict.get('headers',[(None,None)])] # ~ _logger.warn('returns 304 headers %s' % header) if page_dict.get('status_code') in [301, 302, 307, 308]: # ~ _logger.warn('\n\nHIT, but weird status_code: %s\n' % page_dict.get('status_code')) return werkzeug.wrappers.Response(status=page_dict['status_code'],headers=header) # ~ _logger.warn('\n\nHIT. ETag matches: %s\n' % request_dict.get('If-None-Match')) return werkzeug.wrappers.Response(status=304,headers=header) # HIT, but ETag does not match. # ~ _logger.warn('\n\nHIT. ETag did not match.\n') response = http.Response(base64.b64decode(page_dict.get('page'))) # always create a new response (drop response from controller) # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control # https://developers.google.com/web/fundamentals/performance/optimizing-content-efficiency/http-caching # https://jakearchibald.com/2016/caching-best-practices/ #~ if page_dict.get('headers') and isinstance(page_dict['headers'],dict): #~ _logger.error('respnse headers dict') #~ for k,v in page_dict['headers'].items(): #~ response.headers.add(k,v) #response.headers[k] = v #~ if page_dict.get('headers') and isinstance(page_dict['headers'],list): #~ _logger.error('respnse headers list') #~ response.headers = {h[0]: h[1] for h in response.headers} # ~ _logger.warn('\n\nclean headers: %s\n' % response.headers) if page_dict.get('headers'): for k,v in page_dict['headers'].items(): #~ response.headers.add(k,v) response.headers[k] = v # ~ _logger.warn('\n\ndirty headers: %s\n' % response.headers) response.headers['Cache-Control'] ='max-age=%s,s-maxage=%s,%s' % (max_age, s_maxage, ','.join([keyword for keyword in ['no-store', 'immutable', 'no-transform', 'no-cache', 'must-revalidate', 'proxy-revalidate'] if routing.get(keyword.replace('-', '_'))] + [routing.get('private', 'public')])) # private: must not be stored by a shared cache. 
if page_dict.get('ETag'): response.headers['ETag'] = page_dict.get('ETag') response.headers['X-CacheKey'] = key response.headers['X-Cache'] = 'newly rendered' if page else 'from cache' response.headers['X-CacheKeyRaw'] = key_raw response.headers['X-CacheController'] = page_dict.get('controller_time') response.headers['X-CacheRender'] = page_dict.get('render_time') response.headers['X-CacheCacheAge'] = cache_age response.headers['X-CacheBlacklist'] = kw.get('blacklist','') response.headers['Date'] = page_dict.get('date',http_date()) response.headers['Server'] = 'Odoo %s Memcached %s' % (common.exp_version().get('server_version'), MEMCACHED_VERSION) response.status_code = page_dict.get('status_code', 200) # ~ _logger.warn('\n\nfinal headers: %s\n' % response.headers) # ~ _logger.warn('\n%s\n' % ''.join(traceback.format_stack())) return response
class SaasPortalPlan(models.Model): _name = 'saas_portal.plan' name = fields.Char('Plan', required=True) summary = fields.Char('Summary') template_id = fields.Many2one('saas_portal.database', 'Template', ondelete='restrict') demo = fields.Boolean('Install Demo Data') maximum_allowed_dbs_per_partner = fields.Integer( help='maximum allowed non-trial databases per customer', require=True, default=0) maximum_allowed_trial_dbs_per_partner = fields.Integer( help='maximum allowed trial databases per customer', require=True, default=0) max_users = fields.Char('Initial Max users', default='0') total_storage_limit = fields.Integer('Total storage limit (MB)') block_on_expiration = fields.Boolean('Block clients on expiration', default=False) block_on_storage_exceed = fields.Boolean('Block clients on storage exceed', default=False) def _get_default_lang(self): return self.env.lang def _default_tz(self): return self.env.user.tz lang = fields.Selection(scan_languages(), 'Language', default=_get_default_lang) tz = fields.Selection(_tz_get, 'TimeZone', default=_default_tz) sequence = fields.Integer('Sequence') state = fields.Selection([('draft', 'Draft'), ('confirmed', 'Confirmed')], 'State', compute='_get_state', store=True) expiration = fields.Integer('Expiration (hours)', help='time to delete database. Use for demo') _order = 'sequence' dbname_template = fields.Char( 'DB Names', help= 'Used for generating client database domain name. Use %i for numbering. Ignore if you use manually created db names', placeholder='crm-%i.odoo.com') server_id = fields.Many2one('saas_portal.server', string='SaaS Server', ondelete='restrict', help='User this saas server or choose random') website_description = fields.Html('Website description') logo = fields.Binary('Logo') @api.one @api.depends('template_id.state') def _get_state(self): if self.template_id.state == 'template': self.state = 'confirmed' else: self.state = 'draft' @api.one def _new_database_vals(self, vals): vals['max_users'] = self.max_users vals['total_storage_limit'] = self.total_storage_limit vals['block_on_expiration'] = self.block_on_expiration vals['block_on_storage_exceed'] = self.block_on_storage_exceed return vals @api.multi def create_new_database(self, **kwargs): return self._create_new_database(**kwargs) @api.multi def _create_new_database(self, dbname=None, client_id=None, partner_id=None, user_id=None, notify_user=False, trial=False, support_team_id=None, async=None): self.ensure_one() server = self.server_id if not server: server = self.env['saas_portal.server'].get_saas_server() server.action_sync_server() if not partner_id and user_id: user = self.env['res.users'].browse(user_id) partner_id = user.partner_id.id if not trial and self.maximum_allowed_dbs_per_partner != 0: db_count = self.env['saas_portal.client'].search_count([ ('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', False) ]) if db_count >= self.maximum_allowed_dbs_per_partner: raise MaximumDBException( "Limit of databases for this plan is %(maximum)s reached" % {'maximum': self.maximum_allowed_dbs_per_partner}) if trial and self.maximum_allowed_trial_dbs_per_partner != 0: trial_db_count = self.env['saas_portal.client'].search_count([ ('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', True) ]) if trial_db_count >= self.maximum_allowed_trial_dbs_per_partner: raise MaximumTrialDBException( "Limit of trial databases for this plan is %(maximum)s reached" % {'maximum': 
self.maximum_allowed_trial_dbs_per_partner}) vals = { 'name': dbname or self.generate_dbname()[0], 'server_id': server.id, 'plan_id': self.id, 'partner_id': partner_id, 'trial': trial, 'support_team_id': support_team_id, } client = None if client_id: vals['client_id'] = client_id client = self.env['saas_portal.client'].search([('client_id', '=', client_id)]) vals = self._new_database_vals(vals)[0] if client: client.write(vals) else: client = self.env['saas_portal.client'].create(vals) client_id = client.client_id scheme = server.request_scheme port = server.request_port if user_id: owner_user = self.env['res.users'].browse(user_id) else: owner_user = self.env.user owner_user_data = { 'user_id': owner_user.id, 'login': owner_user.login, 'name': owner_user.name, 'email': owner_user.email, } trial_expiration_datetime = ( datetime.strptime(client.create_date, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(hours=self.expiration)).strftime( DEFAULT_SERVER_DATETIME_FORMAT) # for trial state = { 'd': client.name, 'e': trial and trial_expiration_datetime or client.create_date, 'r': '%s://%s:%s/web' % (scheme, client.name, port), 'owner_user': owner_user_data, 't': client.trial, } if self.template_id: state.update({'db_template': self.template_id.name}) scope = ['userinfo', 'force_login', 'trial', 'skiptheuse'] url = server._request_server( path='/saas_server/new_database', scheme=scheme, port=port, state=state, client_id=client_id, scope=scope, )[0] res = requests.get(url, verify=(self.server_id.request_scheme == 'https' and self.server_id.verify_ssl)) if res.status_code != 200: # TODO /saas_server/new_database show more details here raise exceptions.Warning('Error %s' % res.status_code) data = simplejson.loads(res.text) params = { 'state': data.get('state'), 'access_token': client.oauth_application_id._get_access_token(user_id, create=True), } url = '{url}?{params}'.format(url=data.get('url'), params=werkzeug.url_encode(params)) # send email if notify_user: template = self.env.ref('saas_portal.email_template_create_saas') client.message_post_with_template(template.id, composition_mode='comment') if trial: client.expiration_datetime = trial_expiration_datetime client.send_params_to_client_db() client.server_id.action_sync_server() return {'url': url, 'id': client.id, 'client_id': client_id}
def new_database(self, **post):
    return werkzeug.utils.redirect('/auth_oauth/signin?%s' % werkzeug.url_encode(post))
def _payson_send_post(self, url, post):
    """Send a POST to the given url with the correct headers."""
    self.ensure_one()
    # HTTP headers are used to specify API credentials and the content type.
    # The following headers must be submitted with each request to Payson:
    headers = {
        'PAYSON-SECURITY-USERID': self.acquirer_id.payson_agent_id,    # Required. Your API User ID (AgentId).
        'PAYSON-SECURITY-PASSWORD': self.acquirer_id.payson_key,       # Required. Your API Password (MD5-key).
        'Content-Type': 'application/x-www-form-urlencoded',           # Required.
    }
    if self.acquirer_id.payson_application_id:
        # Optional. Your Application ID (only applicable if you have received one).
        headers['PAYSON-APPLICATION-ID'] = self.acquirer_id.payson_application_id
    try:
        if self.acquirer_id.environment == 'test':
            request_url = 'https://test-%s' % url
        else:
            request_url = 'https://%s' % url
        payson_response = urllib2.urlopen(
            urllib2.Request(request_url, data=werkzeug.url_encode(post), headers=headers),
            timeout=10).read()
    except Exception:
        return False
    return payson_response
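# Sketch of the request body the method above sends (the field names below are
# made up, not taken from Payson's API): werkzeug.url_encode turns the post
# dict into the form-encoded payload announced by the Content-Type header.
post = {'returnUrl': 'https://shop.example.com/payson/return', 'memo': 'Order42'}
werkzeug.url_encode(post)
# -> 'returnUrl=https%3A%2F%2Fshop.example.com%2Fpayson%2Freturn&memo=Order42'
# (exact escaping and key order depend on the Werkzeug version)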
def url_rewrite(url=None, **kwargs):
    scheme, netloc, path, query, fragments = urlsplit(url or request.url)
    params = url_decode(query)
    for key, value in kwargs.items():
        params.setlist(key, value if isinstance(value, (list, tuple)) else [value])
    return Markup(urlunsplit((scheme, netloc, path, url_encode(params), fragments)))
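# Minimal usage sketch for url_rewrite() above (URL and parameters are
# illustrative): one query parameter is replaced while the rest of the URL is
# kept; url_decode() returns a MultiDict, so list values become repeated keys.
url_rewrite('https://example.com/shop?page=1&order=name', page=2)
# -> Markup('https://example.com/shop?page=2&order=name')
url_rewrite('https://example.com/shop', attrib=['1-2', '1-3'])
# -> Markup('https://example.com/shop?attrib=1-2&attrib=1-3')
# (parameter order may vary with the Werkzeug version)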
def inline_link_repl(self, stack, link, link_url=None, link_item=None, link_args=u'', external_link_url=None, alt_text=u''): """Handle all kinds of links.""" link_text = '' link_args_list = [] # Remove the first pipe/space, example of link_args : |arg1|arg2 or " arg1 arg2" parsed_args = self.parse_args(link_args[1:]) query = None if parsed_args.keyword: query = url_encode(parsed_args.keyword, charset=CHARSET, encode_keys=True, sort=True) # Take the last of positional parameters as link_text(caption) if parsed_args.positional: link_text = parsed_args.positional.pop() if link_item is not None: if '#' in link_item: path, fragment = link_item.rsplit('#', 1) else: path, fragment = link_item, None target = Iri(scheme='wiki.local', path=path, query=query, fragment=fragment) text = link_item else: if link_url and len(link_url.split(':')) > 0 and link_url.split( ':')[0] == 'File': object_item = ':'.join(link_url.split(':')[1:]) args = parsed_args.keyword if object_item is not None: if 'do' not in args: # by default, we want the item's get url for transclusion of raw data: args['do'] = 'get' query = url_encode(args, charset=CHARSET, encode_keys=True, sort=True) target = Iri(scheme='wiki.local', path=object_item, query=query, fragment=None) text = object_item else: target = Iri(scheme='wiki.local', path=link_url) text = link_url if not link_text: link_text = text attrib = {xlink.href: target} attrib[moin_page.alt] = link_text element = moin_page.object(attrib) stack.push(element) if link_text: self.preprocessor.push() self.parse_inline(link_text, stack, self.inlinedesc_re) self.preprocessor.pop() else: stack.top_append(text) stack.pop() return target = Iri(scheme='wiki.local', path=link_url) text = link_url if external_link_url: target = Iri(external_link_url) text = alt_text element = moin_page.a(attrib={xlink.href: target}) stack.push(element) if link_text: self.preprocessor.push() self.parse_inline(link_text, stack, self.inlinedesc_re) self.preprocessor.pop() else: stack.top_append(text) stack.pop()
if self.template_id:
    state.update({'db_template': self.template_id.name})
scope = ['userinfo', 'force_login', 'trial', 'skiptheuse']
req, req_kwargs = server._request_server(path='/saas_server/new_database',
                                         state=state,
                                         client_id=client_id,
                                         scope=scope,)
res = requests.Session().send(req, **req_kwargs)
if res.status_code != 200:
    raise Warning('Error on request: %s\nReason: %s \n Message: %s' % (req.url, res.reason, res.content))
data = simplejson.loads(res.text)
params = {
    'state': data.get('state'),
    'access_token': client.oauth_application_id._get_access_token(user_id, create=True),
}
url = '{url}?{params}'.format(url=data.get('url'), params=werkzeug.url_encode(params))
# send email
if notify_user:
    template = self.env.ref('saas_portal.email_template_create_saas')
    client.message_post_with_template(template.id, composition_mode='comment')
client.send_params_to_client_db()
# TODO make async call of action_sync_server here
# client.server_id.action_sync_server()
return {'url': url, 'id': client.id, 'client_id': client_id}

@api.one
def generate_dbname(self, raise_error=True):
    if not self.dbname_template:
def _create_new_database(self, dbname=None, client_id=None, partner_id=None, user_id=None, notify_user=True, trial=False, support_team_id=None, async_=None): self.ensure_one() p_client = self.env['saas_portal.client'] p_server = self.env['saas_portal.server'] server = self.server_id if not server: server = p_server.get_saas_server() # server.action_sync_server() if not partner_id and user_id: user = self.env['res.users'].browse(user_id) partner_id = user.partner_id.id if not trial and self.maximum_allowed_dbs_per_partner != 0: db_count = p_client.search_count([('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', False)]) if db_count >= self.maximum_allowed_dbs_per_partner: raise MaximumDBException("Limit of databases for this plan is %(maximum)s reached" % { 'maximum': self.maximum_allowed_dbs_per_partner}) if trial and self.maximum_allowed_trial_dbs_per_partner != 0: trial_db_count = p_client.search_count([('partner_id', '=', partner_id), ('state', '=', 'open'), ('plan_id', '=', self.id), ('trial', '=', True)]) if trial_db_count >= self.maximum_allowed_trial_dbs_per_partner: raise MaximumTrialDBException("Limit of trial databases for this plan is %(maximum)s reached" % { 'maximum': self.maximum_allowed_trial_dbs_per_partner}) client_expiration = self._get_expiration(trial) vals = {'name': dbname or self.generate_dbname(), 'server_id': server.id, 'plan_id': self.id, 'partner_id': partner_id, 'trial': trial, 'support_team_id': support_team_id, 'expiration_datetime': client_expiration, } client = None if client_id: vals['client_id'] = client_id client = p_client.search( [('client_id', '=', client_id)]) vals = self._new_database_vals(vals) if client: client.write(vals) else: client = p_client.create(vals) client_id = client.client_id owner_user_data = self._prepare_owner_user_data(user_id) state = { 'd': client.name, 'public_url': client.public_url, 'e': client_expiration, 'r': client.public_url + 'web', 'h': client.host, 'owner_user': owner_user_data, 't': client.trial, } if self.template_id: state.update({'db_template': self.template_id.name}) scope = ['userinfo', 'force_login', 'trial', 'skiptheuse'] req, req_kwargs = server._request_server(path='/saas_server/new_database', state=state, client_id=client_id, scope=scope,) res = requests.Session().send(req, **req_kwargs) if res.status_code != 200: raise Warning(_('Error on request: %s\nReason: %s \n Message: %s') % ( req.url, res.reason, res.content)) data = simplejson.loads(res.text) params = { 'state': data.get('state'), 'access_token': client.oauth_application_id._get_access_token(user_id, create=True), } url = '{url}?{params}'.format(url=data.get( 'url'), params=werkzeug.url_encode(params)) auth_url = url # send email if there is mail template record template = self.on_create_email_template if template and notify_user: # we have to have a user in this place (how to user without a user?) user = self.env['res.users'].browse(user_id) client.with_context(user=user).message_post_with_template( template.id, composition_mode='comment') client.send_params_to_client_db() # TODO make async call of action_sync_server here # client.server_id.action_sync_server() client.sync_client() return {'url': url, 'id': client.id, 'client_id': client_id, 'auth_url': auth_url}
                                         scope=scope, )
res = requests.Session().send(req, **req_kwargs)
if res.status_code != 200:
    raise Warning('Error on request: %s\nReason: %s \n Message: %s' % (req.url, res.reason, res.content))
data = simplejson.loads(res.text)
params = {
    'state': data.get('state'),
    'access_token': client.oauth_application_id._get_access_token(user_id, create=True),
}
url = '{url}?{params}'.format(url=data.get('url'), params=werkzeug.url_encode(params))
# send email
if notify_user:
    template = self.env.ref('saas_portal.email_template_create_saas')
    client.message_post_with_template(template.id, composition_mode='comment')
if trial:
    client.expiration_datetime = trial_expiration_datetime
client.send_params_to_client_db()
# TODO make async call of action_sync_server here
# client.server_id.action_sync_server()
return {'url': url, 'id': client.id, 'client_id': client_id}
def ogone_s2s_do_transaction(self, **kwargs): # TODO: create tx with s2s type account = self.acquirer_id reference = self.reference or "ODOO-%s-%s" % (datetime.datetime.now( ).strftime('%y%m%d_%H%M%S'), self.partner_id.id) param_plus = {'return_url': kwargs.get('return_url', False)} data = { 'PSPID': account.ogone_pspid, 'USERID': account.ogone_userid, 'PSWD': account.ogone_password, 'ORDERID': reference, 'AMOUNT': int(self.amount * 100), 'CURRENCY': self.currency_id.name, 'OPERATION': 'SAL', 'ECI': 9, # Recurring (from eCommerce) 'ALIAS': self.payment_token_id.acquirer_ref, 'RTIMEOUT': 30, 'PARAMPLUS': url_encode(param_plus), 'EMAIL': self.partner_id.email or '', 'CN': self.partner_id.name or '', } if request: data['REMOTE_ADDR'] = request.httprequest.remote_addr if kwargs.get('3d_secure'): data.update({ 'FLAG3D': 'Y', 'LANGUAGE': self.partner_id.lang or 'en_US', }) for url in 'accept decline exception'.split(): key = '{0}_url'.format(url) val = kwargs.pop(key, None) if val: key = '{0}URL'.format(url).upper() data[key] = val data['SHASIGN'] = self.acquirer_id._ogone_generate_shasign('in', data) direct_order_url = 'https://secure.ogone.com/ncol/%s/orderdirect.asp' % ( 'prod' if self.acquirer_id.state == 'enabled' else 'test') logged_data = data.copy() logged_data.pop('PSWD') _logger.info( "ogone_s2s_do_transaction: Sending values to URL %s, values:\n%s", direct_order_url, pformat(logged_data)) result = requests.post(direct_order_url, data=data).content try: tree = objectify.fromstring(result) _logger.info( 'ogone_s2s_do_transaction: Values received:\n%s', etree.tostring(tree, pretty_print=True, encoding='utf-8')) except etree.XMLSyntaxError: # invalid response from ogone _logger.exception('Invalid xml response from ogone') _logger.info('ogone_s2s_do_transaction: Values received:\n%s', result) raise return self._ogone_s2s_validate_tree(tree)
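# Illustrative side note on the PARAMPLUS field built above: the dict is sent
# as a single url-encoded string, so its values can later be recovered with
# werkzeug's url_decode (the URL below is made up; assumes url_encode/url_decode
# from werkzeug.urls, removed in Werkzeug 2.1+).
param_plus = {'return_url': 'https://shop.example.com/payment/ogone/return'}
encoded = url_encode(param_plus)   # 'return_url=https%3A%2F%2Fshop.example.com%2F...'
url_decode(encoded)['return_url']  # -> 'https://shop.example.com/payment/ogone/return'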
def authorize(self, req, **query): # First, validate client_id and redirect_uri params. try: client = self.__validate_client(req, **query) redirect_uri = self.__validate_redirect_uri(client, req, **query) except OAuthException as e: # If those are not valid, we must not redirect back to the client # - instead, we display a message to the user return req.render('galicea_openid_connect.error', {'exception': e}) scopes = query['scope'].split(' ') if query.get('scope') else [] is_openid_request = 'openid' in scopes # state, if present, is just mirrored back to the client response_params = {} if 'state' in query: response_params['state'] = query['state'] response_mode = query.get('response_mode') try: if response_mode and response_mode not in ['query', 'fragment']: response_mode = None raise OAuthException( 'The only supported response_modes are \'query\' and \'fragment\'', OAuthException.INVALID_REQUEST) if 'response_type' not in query: raise OAuthException( 'response_type param is missing', OAuthException.INVALID_REQUEST, ) response_type = query['response_type'] if response_type not in RESPONSE_TYPES_SUPPORTED: raise OAuthException( 'The only supported response_types are: {}'.format( ', '.join(RESPONSE_TYPES_SUPPORTED)), OAuthException.UNSUPPORTED_RESPONSE_TYPE, ) except OAuthException as e: response_params['error'] = e.type response_params['error_description'] = e.message return self.__redirect(redirect_uri, response_params, response_mode or 'query') if not response_mode: response_mode = 'query' if response_type == 'code' else 'fragment' user = req.env.user # In case user is not logged in, we redirect to the login page and come back needs_login = user.login == 'public' # Also if they didn't authenticate recently enough if 'max_age' in query and http.request.session.get( 'auth_time', 0) + int(query['max_age']) < time.time(): needs_login = True if needs_login: params = { 'force_auth_and_redirect': '/oauth/authorize?{}'.format(werkzeug.url_encode(query)) } return self.__redirect('/web/login', params, 'query') response_types = response_type.split() extra_claims = { 'sid': http.request.httprequest.session.sid, } if 'nonce' in query: extra_claims['nonce'] = query['nonce'] if 'code' in response_types: # Generate code that can be used by the client server to retrieve # the token. It's set to be valid for 60 seconds only. # TODO: The spec says the code should be single-use. We're not enforcing # that here. payload = { 'redirect_uri': redirect_uri, 'client_id': client.client_id, 'user_id': user.id, 'scopes': scopes, 'exp': int(time.time()) + 60 } payload.update(extra_claims) key = self.__get_authorization_code_jwk(req) response_params['code'] = jwt_encode(payload, key) if 'token' in response_types: access_token = req.env['galicea_openid_connect.access_token'].sudo( ).retrieve_or_create(user.id, client.id).token response_params['access_token'] = access_token response_params['token_type'] = 'bearer' digest = hashes.Hash(hashes.SHA256(), backend=default_backend()) digest.update(access_token.encode('ascii')) at_hash = digest.finalize() extra_claims['at_hash'] = base64.urlsafe_b64encode( at_hash[:16]).strip('=') if 'id_token' in response_types: response_params['id_token'] = self.__create_id_token( req, user.id, client, extra_claims) return self.__redirect(redirect_uri, response_params, response_mode)
def _request(self, **kwargs):
    params = self._request_params(**kwargs)[0]
    url = '/oauth2/auth?%s' % werkzeug.url_encode(params)
    return url
def _redirect_to_record(cls, model, res_id, access_token=None, **kwargs): # access_token and kwargs are used in the portal controller override for the Send by email or Share Link # to give access to the record to a recipient that has normally no access. uid = request.session.uid user = request.env['res.users'].sudo().browse(uid) cids = False # no model / res_id, meaning no possible record -> redirect to login if not model or not res_id or model not in request.env: return cls._redirect_to_messaging() # find the access action using sudo to have the details about the access link RecordModel = request.env[model] record_sudo = RecordModel.sudo().browse(res_id).exists() if not record_sudo: # record does not seem to exist -> redirect to login return cls._redirect_to_messaging() # the record has a window redirection: check access rights if uid is not None: if not RecordModel.with_user(uid).check_access_rights( 'read', raise_exception=False): return cls._redirect_to_messaging() try: # We need here to extend the "allowed_company_ids" to allow a redirection # to any record that the user can access, regardless of currently visible # records based on the "currently allowed companies". cids = request.httprequest.cookies.get('cids', str(user.company_id.id)) cids = [int(cid) for cid in cids.split(',')] try: record_sudo.with_user(uid).with_context( allowed_company_ids=cids).check_access_rule('read') except AccessError: # In case the allowed_company_ids from the cookies (i.e. the last user configuration # on his browser) is not sufficient to avoid an ir.rule access error, try to following # heuristic: # - Guess the supposed necessary company to access the record via the method # _get_mail_redirect_suggested_company # - If no company, then redirect to the messaging # - Merge the suggested company with the companies on the cookie # - Make a new access test if it succeeds, redirect to the record. Otherwise, # redirect to the messaging. suggested_company = record_sudo._get_mail_redirect_suggested_company( ) if not suggested_company: raise AccessError() cids += [suggested_company.id] record_sudo.with_user(uid).with_context( allowed_company_ids=cids).check_access_rule('read') except AccessError: return cls._redirect_to_messaging() else: record_action = record_sudo.get_access_action(access_uid=uid) else: record_action = record_sudo.get_access_action() if record_action[ 'type'] == 'ir.actions.act_url' and record_action.get( 'target_type') != 'public': return cls._redirect_to_messaging() record_action.pop('target_type', None) # the record has an URL redirection: use it directly if record_action['type'] == 'ir.actions.act_url': return werkzeug.utils.redirect(record_action['url']) # other choice: act_window (no support of anything else currently) elif not record_action['type'] == 'ir.actions.act_window': return cls._redirect_to_messaging() url_params = { 'model': model, 'id': res_id, 'active_id': res_id, 'action': record_action.get('id'), } view_id = record_sudo.get_formview_id() if view_id: url_params['view_id'] = view_id if cids: url_params['cids'] = ','.join([str(cid) for cid in cids]) url = '/web?#%s' % url_encode(url_params) return werkzeug.utils.redirect(url)
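# Illustrative example of the redirect URL assembled at the end of the method
# above (the ids and action below are made up): the record pointer is
# url-encoded into the fragment that the Odoo web client reads after '/web?#'.
url_params = {'model': 'sale.order', 'id': 42, 'active_id': 42, 'action': 133, 'cids': '1,3'}
'/web?#%s' % url_encode(url_params)
# -> '/web?#model=sale.order&id=42&active_id=42&action=133&cids=1%2C3'
# (parameter order may vary with the Werkzeug version)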
def shop(self, page=0, category=None, search='', brand=None, **post): values = {} domain = request.website.sale_product_domain() if search: domain += [ '|', '|', '|', ('name', 'ilike', search), ('description', 'ilike', search), ('description_sale', 'ilike', search), ('product_variant_ids.default_code', 'ilike', search) ] if category: domain += [('public_categ_ids', 'child_of', int(category))] attrib_list = request.httprequest.args.getlist('attrib') attrib_values = [map(int, v.split('-')) for v in attrib_list if v] attrib_set = set([v[1] for v in attrib_values]) if attrib_values: attrib = None ids = [] for value in attrib_values: if not attrib: attrib = value[0] ids.append(value[1]) elif value[0] == attrib: ids.append(value[1]) else: domain += [('attribute_line_ids.value_ids', 'in', ids)] attrib = value[0] ids = [value[1]] if attrib: domain += [('attribute_line_ids.value_ids', 'in', ids)] keep = QueryURL('/shop', category=category and int(category), search=search, attrib=attrib_list) pricelist_context = dict(request.env.context) if not pricelist_context.get('pricelist'): pricelist = request.website.get_current_pricelist() pricelist_context['pricelist'] = pricelist.id else: pricelist = request.env['product.pricelist'].browse( pricelist_context['pricelist']) product_obj = request.env['product.template'] # Brand's product search if brand: values.update({'brand': brand}) product_designer_obj = request.env['product.brand'] brand_ids = product_designer_obj.search([('id', '=', int(brand))]) domain += [('product_brand_id', 'in', brand_ids.ids)] url = '/shop' product_count = product_obj.search_count(domain) if search: post['search'] = search if category: category = request.env['product.public.category'].\ browse(int(category)) url = '/shop/category/%s' % slug(category) pager = request.website.\ pager(url=url, total=product_count, page=page, step=PPG, scope=7, url_args=post) products = product_obj.\ search(domain, limit=PPG, offset=pager['offset'], order='website_published desc, website_sequence desc') style_obj = request.env['product.style'] styles = style_obj.search([]) category_obj = request.env['product.public.category'] categories = category_obj.search([]) categs = filter(lambda x: not x.parent_id, categories) if category: selected_id = int(category) children_ids = category_obj.\ search([('parent_id', '=', selected_id)]) values.update({'child_list': children_ids}) attributes_obj = request.env['product.attribute'] attributes = attributes_obj.search([]) from_currency = request.env.user.company_id.currency_id to_currency = pricelist.currency_id compute_currency = self.currency_compute(from_currency, to_currency) values.update({ 'search': search, 'category': category, 'attrib_values': attrib_values, 'attrib_set': attrib_set, 'pager': pager, 'pricelist': pricelist, 'products': products, 'bins': TableCompute().process(products), 'rows': PPR, 'styles': styles, 'categories': categs, 'attributes': attributes, 'compute_currency': compute_currency, 'keep': keep, 'style_in_product': lambda style, product: style.id in [s.id for s in product.website_style_ids], 'attrib_encode': lambda attribs: werkzeug.url_encode([('attrib', i) for i in attribs]) }) return request.render('website_sale.products', values)
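# Quick illustration of the attrib_encode lambda used above: url_encode also
# accepts a list of (key, value) pairs, so a repeated key is emitted once per
# value, which matches what request.httprequest.args.getlist('attrib') reads back.
attribs = ['1-3', '2-5']
werkzeug.url_encode([('attrib', i) for i in attribs])
# -> 'attrib=1-3&attrib=2-5'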