def get_page(url):
    """Fetch ``url`` and return a dict of response metadata, body text, and parsed HTML."""
    response = requests.get(url)
    doc = {
        'original_url': url,
        'final_url': response.url,
        'status_code': response.status_code,
        'text': response.text,
    }
    if doc['text']:
        doc['html'] = html_fromstring(doc['text'])
    return doc
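# Minimal usage sketch for get_page above, assuming `requests` and lxml are
# available under these names; the URL is a placeholder.
import requests
from lxml.html import fromstring as html_fromstring

if __name__ == '__main__':
    page = get_page('https://example.com/')
    print(page['status_code'], page['final_url'])
    if 'html' in page:
        # The parsed tree is an lxml HtmlElement, so ElementPath queries work on it.
        print(page['html'].findtext('.//title'))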
def html(self):
    # Parse the response body once and cache the wrapped tree on the instance.
    if not hasattr(self, '_html'):
        self._html = TreeInterface(
            html_fromstring(
                self.response.content.encode(self.response.charset or 'utf-8'),
                base_url=self.response.url,
            )
        )
    return self._html
def get_body_readonly_elements(self, res_id):
    # Pick the template-rendering method that matches the running Odoo version.
    if MAJOR_ODOO_VERSION < (10, 0):
        _super = super(EmailTemplate, self).generate_email_batch
    else:
        _super = super(EmailTemplate, self).generate_email
    msg_dict = _super([self.res_id], fields=['body_html'])
    template_body = msg_dict.get(res_id, {}).get('body_html')
    if not template_body:
        return []
    html_element = html_fromstring(template_body)
    return html_element.find_class('readonly')
def _add_rec_mp_field_mapping(cls, mp_field_mappings=None):
    if not mp_field_mappings:
        mp_field_mappings = []
    marketplace = 'blibli'
    mp_field_mapping = {
        'logistics_code': ('code', None),
        'logistics_name': ('name', None),
        'is_selected': ('selected', None),
        'geolocation': ('geolocation', None),
        'info_additional': (
            'information/additional',
            lambda env, r: html_fromstring(r).text_content() if r else False,
        ),
        'info_highlight': (
            'information/highlighted',
            lambda env, r: html_fromstring(r).text_content() if r else False,
        ),
    }
    mp_field_mappings.append((marketplace, mp_field_mapping))
    super(MPBlibliLogistic, cls)._add_rec_mp_field_mapping(mp_field_mappings)
def _build_model_attributes(cls, pool):
    cls._rec_mp_field_mapping = dict(
        cls._rec_mp_field_mapping,
        **{
            'blibli': {
                'mp_external_id': ('code', None),
                'logistics_code': ('code', None),
                'logistics_name': ('name', None),
                'is_selected': ('selected', None),
                'geolocation': ('geolocation', None),
                'info_additional': (
                    'information/additional',
                    lambda env, r: html_fromstring(r).text_content() if r else False,
                ),
                'info_highlight': (
                    'information/highlighted',
                    lambda env, r: html_fromstring(r).text_content() if r else False,
                ),
            }
        })
    super(MPBlibliLogistic, cls)._build_model_attributes(pool)
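# Standalone sketch of what the mapping lambdas above do: strip markup from a
# marketplace HTML field. The sample payload and the `env=None` placeholder are
# made up for illustration.
from lxml.html import fromstring as html_fromstring

def html_to_text(env, r):
    return html_fromstring(r).text_content() if r else False

print(html_to_text(None, '<p>Free <b>insurance</b> included</p>'))  # Free insurance included
print(html_to_text(None, ''))  # False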
def clean_html(self, html):
    """Apply ``Cleaner`` to HTML string or document and return a cleaned string or document."""
    result_type = type(html)
    if isinstance(html, six.string_types):
        doc = html_fromstring(html)
    else:
        doc = copy.deepcopy(html)
    self(doc)
    if issubclass(result_type, six.binary_type):
        return tostring(doc, encoding='utf-8')
    elif issubclass(result_type, six.text_type):
        return tostring(doc, encoding='unicode')
    else:
        return doc
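# Quick standalone check of the string-in/string-out behaviour implemented
# above, using lxml's stock Cleaner (an assumption about which Cleaner is being
# subclassed; in recent lxml releases the cleaner lives in the separate
# lxml_html_clean package).
from lxml.html.clean import Cleaner

cleaner = Cleaner(scripts=True, javascript=True)
dirty = '<div><script>alert(1)</script><p>kept</p></div>'
print(cleaner.clean_html(dirty))  # <div><p>kept</p></div>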
def render(self):
    """Render the webstats snippet, wrapping it in a div."""
    ptool = getToolByName(self.context, "portal_properties")
    snippet = safe_unicode(ptool.site_properties.webstats_js)
    # Wrap the snippet in a div
    div = html_builder.DIV({'id': 'plone-analytics'})  # create the div
    tags = html_fromstring(snippet)  # parse the snippet from HTML into lxml elements
    div.extend(tags)  # insert the parsed tags into the div
    snippet = safe_unicode(html_tostring(div))  # serialize the new tree back to a string
    return snippet
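# Standalone sketch of the wrapping technique above, without the Plone
# dependencies: parse a markup snippet and nest its elements inside a new
# <div id="plone-analytics"> via lxml's builder. The two <span> elements are a
# made-up stand-in for a real tracking snippet.
from lxml.html import builder as html_builder
from lxml.html import fromstring as html_fromstring
from lxml.html import tostring as html_tostring

snippet = '<span>one</span><span>two</span>'
div = html_builder.DIV({'id': 'plone-analytics'})
div.extend(html_fromstring(snippet))  # iterating the parsed wrapper yields its child elements
print(html_tostring(div, encoding='unicode'))
# <div id="plone-analytics"><span>one</span><span>two</span></div>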
def _parsing_logistic_description(env, data):
    if data:
        return html_fromstring(data).text_content()
    else:
        return None
def _handle_description(env, data):
    if data:
        return html_fromstring(data).text_content()
    else:
        return None
def __init__(self, html, ident=None):
    self._ident = ident
    self._etree = clean_html(html_fromstring(html))