コード例 #1
0
ファイル: app.py プロジェクト: PabloRosales/im-core-python
    def _get_current_url(self, root_only=False, strip_querystring=False,
                         host_only=False, **kwargs):
        """Rebuild the request URL, optionally merging extra query args.

        Keyword arguments are merged over the existing query-string
        parameters (only the first value of repeated keys is kept).
        Without kwargs, ``root_only``/``host_only`` results are returned
        exactly as werkzeug produced them.
        """

        current_url = get_current_url(self.environ,
            root_only, strip_querystring, host_only)

        # Nothing to merge: return werkzeug's answer untouched.
        if (root_only or host_only) and not kwargs:
            return current_url

        qs = {}
        querystring = ''

        if not strip_querystring:
            # parse_qs yields lists; flatten to the first value per key
            qs = urlparse.parse_qs(''.join(current_url.split('?')[1:]))
            for arg in qs:
                qs[arg] = qs[arg][0]

        if kwargs:

            # caller-supplied arguments override parsed ones
            for arg in kwargs:
                qs[arg] = kwargs[arg]

            querystring = '?' + urllib.urlencode(qs)

        url = '%s' % get_current_url(self.environ, strip_querystring=True)

        # ensure a trailing slash before re-appending the query string
        if url[-1] != '/':
            url += '/'

        url += querystring

        return url
コード例 #2
0
ファイル: app.py プロジェクト: interactuamovil/im-core-python
    def _get_current_url(self,
                         root_only=False,
                         strip_querystring=False,
                         host_only=False,
                         **kwargs):
        """Reconstruct the request URL, merging kwargs into the query string.

        Repeated query parameters keep only their first value; kwargs
        override parsed parameters.  With ``root_only``/``host_only`` and
        no kwargs, werkzeug's result is returned unchanged.
        """

        current_url = get_current_url(self.environ, root_only,
                                      strip_querystring, host_only)

        # Nothing to merge -- hand back werkzeug's answer as-is.
        if not kwargs and (root_only or host_only):
            return current_url

        params = {}
        if not strip_querystring:
            raw_qs = ''.join(current_url.split('?')[1:])
            # keep only the first value of each repeated parameter
            params = dict((key, values[0])
                          for key, values in urlparse.parse_qs(raw_qs).items())

        query_suffix = ''
        if kwargs:
            params.update(kwargs)
            query_suffix = '?' + urllib.urlencode(params)

        base = '%s' % get_current_url(self.environ, strip_querystring=True)
        if not base.endswith('/'):
            base += '/'

        return base + query_suffix
コード例 #3
0
ファイル: test_wsgi.py プロジェクト: TheVinhLuong102/werkzeug
def test_get_current_url_invalid_utf8():
    environ = create_environ()
    # QUERY_STRING is injected after environ creation, so the \xcf byte
    # never went through the wsgi encoding dance and is invalid utf-8
    environ["QUERY_STRING"] = "foo=bar&baz=blah&meh=\xcf"
    url = wsgi.get_current_url(environ)
    # the invalid byte must survive as a percent-encoded escape
    assert url == "http://localhost/?foo=bar&baz=blah&meh=%CF"
コード例 #4
0
ファイル: test_wsgi.py プロジェクト: pallets/werkzeug
def test_get_current_url_invalid_utf8():
    environ = create_environ()
    # the query string is set after the wsgi dance, so \xcf stays invalid
    environ["QUERY_STRING"] = "foo=bar&baz=blah&meh=\xcf"
    url = wsgi.get_current_url(environ)
    # invalid bytes remain percent-encoded in the reconstructed URL
    strict_eq(url, u"http://localhost/?foo=bar&baz=blah&meh=%CF")
コード例 #5
0
ファイル: openid.py プロジェクト: vonHabsi/isso
    def finalize(self, environ, request):
        """Complete the OpenID flow for the session named by ``state``.

        Stores the authorization ``code``, rebuilds ``redirect_uri`` to
        match the value registered at login time, performs the token and
        userinfo requests, marks the session authorized, and returns the
        HTML login-completion page.  Returns BadRequest on any failure.
        """
        cur_url = get_current_url(environ)
        session_id = request.args.get('state', '')
        session = self.isso.db.openid_sessions.get(session_id)
        if session is None:
            return BadRequest("Session expired or invalid")
        session['code'] = request.args.get('code', '')
        # Fix: the original assigned the full current URL here and then
        # immediately overwrote it (dead store).  Keep only the truncated
        # form, which must match the redirect_uri registered during login
        # (everything up to and including "/finalize").
        session['redirect_uri'] = cur_url[:cur_url.rfind("/finalize"
                                                         )] + "/finalize"

        if not self.token_request(session):
            return BadRequest("OpenID Token Request failed")
        if not self.userinfo_request(session):
            return BadRequest("OpenID UserInfo Request failed")

        self.isso.db.openid_sessions.authorize(session_id)

        html = self.FINAL_LOGIN_RESPONSE % (
            session['id'], session['identifier'], session['userinfo'].get(
                'name', ""),
            session['userinfo'].get('email', ""), session['userinfo'].get(
                'picture', ""), session['userinfo'].get('profile', False)
            or session['userinfo'].get('website', ""), local("origin"))
        return Response(html, 200, content_type="text/html")
コード例 #6
0
ファイル: test.py プロジェクト: MHordecki/hextrainer
 def extract_wsgi(self, environ, headers):
     """Feed the response's Set-Cookie headers into the cookie jar."""
     fake_request = U2Request(get_current_url(environ))
     fake_response = _TestCookieResponse(headers)
     self.extract_cookies(fake_response, fake_request)
コード例 #7
0
ファイル: rules.py プロジェクト: sdaexpertise/facturation
 def execute(self, environ, start_response):
     """Apply this rule: issue a redirect when one is configured."""
     url = get_current_url(environ)
     if not self._redirect:
         return None
     groups = self._match_url(url).groups()
     original = urlparse(url)
     target = urlparse(self._redirect.format(*groups))
     # scheme+netloc come from the target, path/query/fragment from the
     # original request URL
     response = redirect(urlunparse(target[:2] + original[2:]))
     return response(environ, start_response)
コード例 #8
0
 def extract_wsgi(self, environ, headers):
     """Pull the server's Set-Cookie headers into the cookie jar."""
     jar_response = _TestCookieResponse(headers)
     jar_request = U2Request(get_current_url(environ))
     self.extract_cookies(jar_response, jar_request)
コード例 #9
0
ファイル: middleware.py プロジェクト: vvangelovski/sentry
 def handle_exception(self, exc_info, environ):
     """Report an unhandled exception to Sentry; return its event id."""
     http_data = {
         'method': environ.get('REQUEST_METHOD'),
         'url': get_current_url(environ, strip_querystring=True),
         'querystring': environ.get('QUERY_STRING'),
     }
     return capture('Exception', exc_info=exc_info, http=http_data)
コード例 #10
0
 def profile(self, environ, start_response):
     """
     Profile the request. Exceptions encountered during the profile are
     logged before being propagated for further handling.
     """
     method = environ.get('REQUEST_METHOD', 'GET')
     url = get_current_url(environ)
     logging.debug("Profiling call for '%s %s'", method, url)
     try:
         # NOTE(review): `res` is not returned within this excerpt -- the
         # remainder of this method may lie outside the visible chunk.
         res = self.run_profile(self.app, (environ, start_response))
     except Exception, e:  # Python 2 syntax; `e` is unused but kept as-is
         logging.exception("Exception while profiling '%s %s'", method, url)
         raise
コード例 #11
0
ファイル: middleware.py プロジェクト: gandalfar/sentry
 def handle_exception(self, exc_info, environ):
     """Send exception details to Sentry and return the event id."""
     http_interface = {
         "method": environ.get("REQUEST_METHOD"),
         "url": get_current_url(environ, strip_querystring=True),
         "querystring": environ.get("QUERY_STRING"),
     }
     event_id = capture(
         "Exception",
         exc_info=exc_info,
         data={"sentry.interfaces.Http": http_interface},
     )
     return event_id
コード例 #12
0
 def login(self, env, req):
     """Authenticate the admin; set session cookies and redirect on success."""
     data = req.form
     password = self.isso.conf.get("general", "admin_password")
     # guard clause: wrong or empty password -> back to the login form
     if not data['password'] or data['password'] != password:
         return render_template('login.html')
     response = redirect(get_current_url(env, host_only=True) + '/admin')
     cookie = functools.partial(
         dump_cookie,
         value=self.isso.sign({"logged": True}),
         expires=datetime.now() + timedelta(1))
     response.headers.add("Set-Cookie", cookie("admin-session"))
     response.headers.add("X-Set-Cookie", cookie("isso-admin-session"))
     return response
コード例 #13
0
ファイル: comments.py プロジェクト: panta82/isso
 def login(self, env, req):
     """Check the admin password; on success rewrite /login -> /admin."""
     data = req.form
     password = self.isso.conf.get("general", "admin_password")
     if data['password'] and data['password'] == password:
         admin_url = re.sub(r'/login$', '/admin',
                            get_current_url(env, strip_querystring=True))
         response = redirect(admin_url)
         make_cookie = functools.partial(
             dump_cookie,
             value=self.isso.sign({"logged": True}),
             expires=datetime.now() + timedelta(1))
         response.headers.add("Set-Cookie", make_cookie("admin-session"))
         response.headers.add("X-Set-Cookie", make_cookie("isso-admin-session"))
         return response
     return render_template('login.html')
コード例 #14
0
 def login(self, env, req):
     """Log the admin in, or re-render the login page on failure."""
     data = req.form
     expected = self.isso.conf.get("general", "admin_password")
     # guard clause: missing or mismatched password shows the form again
     if not (data['password'] and data['password'] == expected):
         isso_host_script = (self.isso.conf.get("server", "public-endpoint")
                             or local.host)
         return render_template('login.html',
                                isso_host_script=isso_host_script)
     target = re.sub(r'/login$', '/admin',
                     get_current_url(env, strip_querystring=True))
     response = redirect(target)
     bake = functools.partial(dump_cookie,
                              value=self.isso.sign({"logged": True}),
                              expires=datetime.now() + timedelta(1))
     response.headers.add("Set-Cookie", bake("admin-session"))
     response.headers.add("X-Set-Cookie", bake("isso-admin-session"))
     return response
コード例 #15
0
ファイル: comments.py プロジェクト: posativ/isso
 def login(self, env, req):
     """Admin login endpoint; disabled entirely unless admin.enabled is set."""
     if not self.isso.conf.getboolean("admin", "enabled"):
         return render_template('disabled.html')
     data = req.form
     expected = self.isso.conf.get("admin", "password")
     authenticated = bool(data['password']) and data['password'] == expected
     if authenticated:
         location = re.sub(r'/login$', '/admin',
                           get_current_url(env, strip_querystring=True))
         response = redirect(location)
         cookie = functools.partial(dump_cookie,
                                    value=self.isso.sign({"logged": True}),
                                    expires=datetime.now() + timedelta(1))
         response.headers.add("Set-Cookie", cookie("admin-session"))
         response.headers.add("X-Set-Cookie", cookie("isso-admin-session"))
         return response
     isso_host_script = (self.isso.conf.get("server", "public-endpoint")
                         or local.host)
     return render_template('login.html', isso_host_script=isso_host_script)
コード例 #16
0
    def get_wsgi_headers(self, environ):
        """Compute the headers to emit for this WSGI response.

        Makes ``Location`` absolute against the request root, IRI-encodes
        unicode ``Location``/``Content-Location`` values, forces
        ``Content-Length: 0`` for bodiless statuses (1xx, 204), strips
        entity headers for 304, and fills in ``Content-Length`` for
        sequence responses when it can be computed.
        """
        headers = Headers(self.headers)
        location = headers.get('location')
        if location is not None:
            # unicode Location values must be IRI-encoded before joining
            if isinstance(location, unicode):
                location = iri_to_uri(location)
            headers['Location'] = urlparse.urljoin(get_current_url(environ, root_only=True), location)
        content_location = headers.get('content-location')
        if content_location is not None and isinstance(content_location, unicode):
            headers['Content-Location'] = iri_to_uri(content_location)
        if 100 <= self.status_code < 200 or self.status_code == 204:
            # these statuses never carry a body
            headers['Content-Length'] = '0'
        elif self.status_code == 304:
            remove_entity_headers(headers)
        if self.is_sequence and 'content-length' not in self.headers:
            try:
                content_length = sum((len(str(x)) for x in self.response))
            except UnicodeError:
                # non-ascii unicode chunks: leave Content-Length unset
                pass
            else:
                headers['Content-Length'] = str(content_length)

        return headers
コード例 #17
0
    def get_wsgi_headers(self, environ):
        """Return a copy of the response headers adjusted for WSGI output."""
        headers = Headers(self.headers)

        # Absolute-ize the Location header against the request root;
        # unicode values are IRI-encoded first.
        location = headers.get('location')
        if location is not None:
            if isinstance(location, unicode):
                location = iri_to_uri(location)
            root = get_current_url(environ, root_only=True)
            headers['Location'] = urlparse.urljoin(root, location)

        content_location = headers.get('content-location')
        if content_location is not None and isinstance(content_location, unicode):
            headers['Content-Location'] = iri_to_uri(content_location)

        if 100 <= self.status_code < 200 or self.status_code == 204:
            # 1xx and 204 responses never carry a body
            headers['Content-Length'] = '0'
        elif self.status_code == 304:
            remove_entity_headers(headers)

        if self.is_sequence and 'content-length' not in self.headers:
            try:
                content_length = sum(len(str(item)) for item in self.response)
            except UnicodeError:
                # non-ascii unicode chunks: leave Content-Length unset
                pass
            else:
                headers['Content-Length'] = str(content_length)

        return headers
コード例 #18
0
ファイル: openid.py プロジェクト: vonHabsi/isso
    def login(self, environ, request):
        """Start the OpenID flow: discover, register, then redirect to auth."""
        cur_url = get_current_url(environ)
        session = {
            'identifier': request.args.get('isso-openid-identifier', ''),
            # redirect back to the sibling /finalize endpoint
            'redirect_uri': cur_url[:cur_url.rfind("/login")] + "/finalize",
        }
        if not self.discovery(session):
            return BadRequest("OpenID Provider Discovery failed")
        if not self.dyn_register(session):
            return BadRequest("OpenID Dynamic Client Registration failed")

        session['id'] = rand_string(32)
        self.isso.db.openid_sessions.purge(self.SESSION_LIFETIME)
        self.isso.db.openid_sessions.add(session)

        query = urlencode({
            'response_type': "code",
            'scope': "openid profile email",
            'client_id': session['client_id'],
            'redirect_uri': session['redirect_uri'],
            'state': session['id'],
        })
        return redirect("%s/oauth/auth?%s" % (session['issuer'], query),
                        code=303)
コード例 #19
0
 def uri(self):
     """Full URL of the request currently being handled."""
     wsgi_environ = self._handle.environ
     return get_current_url(wsgi_environ)
コード例 #20
0
 def demo(self, env, req):
     """Redirect the client to the demo page's index.html."""
     base = get_current_url(env, strip_querystring=True)
     return redirect(base + '/index.html')
コード例 #21
0
ファイル: rules.py プロジェクト: sdaexpertise/facturation
 def match(self, environ):
     """Return True when this rule's pattern matches the request URL."""
     current = get_current_url(environ)
     return bool(self._match_url(current))
コード例 #22
0
ファイル: test.py プロジェクト: connoryang/1v1dec
 def extract_wsgi(self, environ, headers):
     """Store the response's Set-Cookie headers in the cookie jar."""
     response = _TestCookieResponse(headers)
     self.extract_cookies(response, U2Request(get_current_url(environ)))
コード例 #23
0
def test_get_current_url_unicode():
    environ = create_environ()
    environ['QUERY_STRING'] = 'foo=bar&baz=blah&meh=\xcf'
    result = wsgi.get_current_url(environ)
    # the undecodable byte comes back as U+FFFD (replacement character)
    strict_eq(result, u'http://localhost/?foo=bar&baz=blah&meh=\ufffd')
コード例 #24
0
 def base_url(self):
     """Current request URL without the query string."""
     stripped = get_current_url(self.environ, strip_querystring=True)
     return stripped
コード例 #25
0
 def host_url(self):
     """URL limited to the host part of the current request."""
     url = get_current_url(self.environ, host_only=True)
     return url
コード例 #26
0
ファイル: comments.py プロジェクト: posativ/isso
 def demo(self, env, req):
     """Send the visitor to the embedded demo page."""
     target = get_current_url(env, strip_querystring=True) + '/index.html'
     return redirect(target)
コード例 #27
0
ファイル: test_wsgi.py プロジェクト: TheVinhLuong102/werkzeug
def test_get_current_url_unicode():
    environ = create_environ(query_string="foo=bar&baz=blah&meh=\xcf")
    result = wsgi.get_current_url(environ)
    # the codepoint passed through create_environ survives as-is
    assert result == "http://localhost/?foo=bar&baz=blah&meh=\xcf"
コード例 #28
0
ファイル: had.py プロジェクト: karlmoubarak/had-py-1
  def on_article(self, request, typography=typography, fix_extlinks_a=fix_extlinks_a, page_title=None, section_title=None, wk_nav_main=nav_main(), wk_nav_sections=nav_sections()):
    """Render a single wiki article fetched from the MediaWiki API.

    Fetches the parsed page body, extracts Event metadata (date, time,
    venue, people/orgs) when present, rewrites relative links and image
    sources to absolute URLs, rebuilds MediaWiki galleries for the
    flickity slideshow, applies typography fixes, and renders the
    ``article.html`` template.

    NOTE(review): the ``nav_main()`` / ``nav_sections()`` defaults are
    evaluated once at import time, so the navigation they provide is
    frozen for the process lifetime — confirm that is intended.
    """
    base_url = 'https://wiki.hackersanddesigners.nl/'
    api_call =  'api.php?'

    # fetch page-content
    page_options = {'action': 'parse', 'page': page_title, 'format': 'json', 'formatversion': '2', 'disableeditsection': 'true'}
    response_content = requests.get(base_url + api_call, params=page_options)
    wk_data = response_content.json()

    wk_title = wk_data['parse']['title']
    wk_bodytext = wk_data['parse']['text']

    try:
      # --- if it has [Category:Event]
      # fetch page-metadata for Event
      page_meta_options = {'action': 'browsebysubject', 'subject': page_title, 'format': 'json', 'formatversion': '2'}
      response_meta = requests.get(base_url + api_call, params=page_meta_options)
      wkdata_meta = response_meta.json()

      def extract_metadata(query):
        # Return the 'item' values with the SMW hash suffix stripped and
        # underscores turned back into spaces.
        item_list = []
        for item in query:
          print(item)
          # NOTE(review): `str` shadows the builtin inside this loop.
          str = item['item']
          # strip out weird hash at the end 
          # (see why https://www.semantic-mediawiki.org/wiki/Ask_API#BrowseBySubject)
          item = re.sub(r'#\d##', '', str).replace('_', ' ')
          item_list.append(item)
        return item_list

      # Defaults for non-Event pages; a KeyError below leaves them None.
      wk_date = None
      wk_time = None
      wk_venue = None
      wk_peopleorgs = None

      for item in wkdata_meta['query']['data']:
        # --- Date
        if 'OnDate' in item['property']:
          wk_date = extract_metadata(item['dataitem'])
        # --- Time
        if 'Time' in item['property']:
          wk_time = extract_metadata(item['dataitem'])
        # --- Venue
        if 'Venue' in item['property']:
          wk_venue = extract_metadata(item['dataitem'])
        # --- PeopleOrgs
        if 'PeopleOrganisations' in item['property']:
          wk_peopleorgs = extract_metadata(item['dataitem'])

    # --- if it has not, set Event's metadata to `None`
    except KeyError:
      print('No Event metadata')

    # fix rel-links to be abs-ones
    soup_bodytext = BeautifulSoup(wk_bodytext, 'html.parser')

    envy = request.environ
    p_url = get_current_url(envy)
    # drop the last path segment; p_url[0] is the parent URL
    p_url = p_url.rsplit('/', 1)

    fix_extlinks_a(soup_bodytext, url=p_url[0] + '/')

    # --- images
    for img in soup_bodytext.find_all('img', src=re.compile(r'/images/.*')):
      src_rel_link = img.get('src')
      srcset_rel_link = img.get('srcset')
      if src_rel_link:
        split = re.split(r'[/]\s*', src_rel_link)
        if 'thumb' in split:
          # rewrite /images/thumb/... to the original image path
          del split[2]
          del split[-1]
          split = '/'.join(split)
          out_link = urljoin(base_url, split)
          img['src'] = out_link
        else:
          out_link = urljoin(base_url, src_rel_link)
          img['src'] = out_link
      if (srcset_rel_link):
        # NOTE(review): this splits src_rel_link rather than
        # srcset_rel_link — presumably the src path decides whether the
        # srcset is made of thumbs; confirm this is intentional.
        split = re.split(r'[/]\s*', src_rel_link)
        if 'thumb' in split:
          del img['srcset']
        else:
          srcset_list = re.split(r'[,]\s*', srcset_rel_link)
          srcset_lu = srcset_list
          # srcset_lu aliases srcset_list, so the in-place rewrite below
          # is visible through both names
          srcset_list[:] = [urljoin(base_url, srcset_i) for srcset_i in srcset_list]
          srcset_s = ', '.join(srcset_lu)
          img['srcset'] = srcset_s

    # --- flickity slideshow
    for gallery_item in soup_bodytext.find_all('li', class_='gallerybox'):
      # img div wrapper (from <li> to <div>)
      gallery_item.name = 'div'
      del gallery_item['style']
      gallery_item['class'] = 'gallery-item'

      # delete extra <div>s before and after img div wrapper
      gallery_item_div = gallery_item.find('div', class_='thumb')
      gallery_pp = gallery_item_div.parent
      gallery_pp.unwrap()
      child = gallery_item_div.div
      child.unwrap()
      gallery_item_div.unwrap()

      # set img caption
      gallery_item_caption = gallery_item.find('div', class_='gallerytext')
      # NOTE(review): bs4 tags expose `.contents`, not `.content`; this
      # attribute access resolves to a child <content> tag (normally
      # None), so the else-branch likely always runs — verify.
      if gallery_item_caption.content:
        gallery_item_caption.name = 'figcaption'
        gallery_item_caption['class'] = 'pd-t--1 mg-auto w--copy ft-sans t-a--c'
      else:
        gallery_item_caption.unwrap()

      # get parent <ul>
      gallerybox = gallery_item.find_parent('ul')
      gallerybox['class'] = 'gallery'

    # --- set class to flickity.js
    for gallery in soup_bodytext.find_all('ul', class_='gallery'):
      gallery.name = 'div'
      gallery['class'] = 'gallery flex-c w--copy mg-v--3'

    # --- typography
    typography(soup_bodytext)

    wk_bodytext = soup_bodytext

    # build template
    return self.render_template('article.html',
                                nav_main=wk_nav_main,
                                nav_sections=wk_nav_sections,
                                title=wk_title,
                                date=wk_date,
                                time=wk_time,
                                venue=wk_venue,
                                peopleorgs=wk_peopleorgs,
                                bodytext=wk_bodytext
                                )
コード例 #29
0
ファイル: had.py プロジェクト: karlmoubarak/had-py-1
  def on_section(self, request, fix_extlinks_a=fix_extlinks_a, typography=typography, section_title=None, page_title=None, wk_nav_main=nav_main(), wk_nav_sections=nav_sections()):
    """Render a section listing page from the MediaWiki API.

    Fetches the section intro (a ``Concept:`` page), then queries the
    semantic wiki for the section's items.  The `Activities` section is
    split into upcoming and past events (sorted by date); other sections
    produce one flat item list.  Each item carries a title, a date (when
    available) and a cover image scraped from the item's page.

    NOTE(review): ``wk_intro`` is only assigned inside the
    ``if wkdata_head['parse']['text']`` branch, but is referenced
    unconditionally in the render call at the bottom — an empty intro
    page would raise NameError.  The ``nav_main()`` / ``nav_sections()``
    defaults are evaluated once at import time.
    """
    base_url = 'https://wiki.hackersanddesigners.nl/'
    api_call =  'api.php?'

    # fetch page-content
    page_head_options = {'action': 'parse', 'page': 'Concept:' + section_title, 'format': 'json', 'formatversion': '2'}
    response_head = requests.get(base_url + api_call, params=page_head_options)
    wkdata_head = response_head.json()

    wk_title = wkdata_head['parse']['title']

    if wkdata_head['parse']['text']:
      wk_intro = wkdata_head['parse']['text']
      soup_wk_intro = BeautifulSoup(wk_intro, 'html.parser')
      typography(soup_wk_intro)

      # fix rel-links to be abs-ones
      envy = request.environ
      p_url = get_current_url(envy)
      fix_extlinks_a(soup_wk_intro, url=p_url + '/p/')

      p_intro = soup_wk_intro.find('p')
      if p_intro.string:
        wk_intro = p_intro
      else:
        wk_intro = None

    # --------------------------
    today = datetime.date.today()
    today = today.strftime('%Y/%m/%d')

    # recursively fetch all pages using `askargs`
    # NOTE(review): this inner `request` parameter (a dict of API args)
    # shadows the method's `request` (the HTTP request object).
    def query(request):
      request['action'] = 'askargs'
      request['format'] = 'json'
      request['formatversion'] = '2'
      lastContinue = ''
      while True:
        # clone original request
        req = request.copy()
        # modify it with the values returned in the 'query-continue-offset' section of the last result
        parameters = req['parameters']
        continue_offset = [parameters, '|offset=', str(lastContinue)]
        continue_offset = ''.join(continue_offset)

        parameters = {'parameters': continue_offset}
        req.update(parameters)

        # call API
        result = requests.get(base_url + api_call, params=req).json()
        if 'error' in result:
          # NOTE(review): `Error` is not defined in this excerpt --
          # presumably imported or defined elsewhere in the file; verify.
          raise Error(result['error'])
        if 'warnings' in result:
          print(result['warnings'])
        if 'query' in result:
          yield result['query']
        if 'query-continue-offset' not in result:
          break
        lastContinue = result['query-continue-offset']

    # make section_items list by fetching item's title and img (if any)
    # ---- Activities
    if 'Activities' in section_title:
      wk_section_items = None

      # --- upcoming items
      wk_section_upitems = []
      for result in query({'conditions': 'Concept:' + section_title + '|OnDate::>' + today, 'printouts': 'NameOfEvent|OnDate|Venue|Time', 'parameters': 'sort=OnDate|order=asc'}):
        try:
          for item in result['results'].items():
            title = item[1]['fulltext']
            wk_section_upitems.append(title)
            date = item[1]['printouts']['OnDate'][0]['fulltext']
            wk_section_upitems.append(date)

            # fetch section item's content
            item_introtext_options = {'action': 'parse', 'page': title, 'format': 'json', 'formatversion': '2', 'disableeditsection': 'true'}
            response_introtext_item = requests.get(base_url + api_call , params=item_introtext_options)
            wkdata_introtext_item = response_introtext_item.json()

            wkdata_text_item = wkdata_introtext_item['parse']['text']

            # get section item's img
            soup_wk_introtext = BeautifulSoup(wkdata_text_item, 'html.parser')
            if soup_wk_introtext.img:
              cover_img = soup_wk_introtext.img
              cover_img['class'] = 'mg-t--1 shadow'

              # setup <noscript> tag for original images
              # in case of no js browser-enabled
              noscript = soup_wk_introtext.new_tag('noscript')
              noscript.append(cover_img)
              ns_cover_img = noscript

              src_rel_link = cover_img.get('src')
              if src_rel_link:
                src_c = re.split(r'[/]\s*', src_rel_link)
                src_c = '/'.join(src_c)
                out_link = urljoin(base_url, src_rel_link)
                cover_img['src'] = out_link

              srcset_rel_link = cover_img.get('srcset')
              if srcset_rel_link:
                del cover_img['srcset']

              # duplicate img tag and 
              # replace `src` w/ `data-src`
              # NOTE(review): importing inside the loop; harmless but
              # conventionally this belongs at module top.
              import copy
              dcover_img = copy.copy(cover_img)
              dcover_img['data-src'] = dcover_img['src']
              dcover_img['class'] += ' cover-img d-n'
              del dcover_img['src']

              dsrc_rel_link = dcover_img.get('data-src')
              if dsrc_rel_link:
                src_c = re.split(r'[/]\s*', dsrc_rel_link)
                src_c = '/'.join(src_c)
                out_link = urljoin(base_url, dsrc_rel_link)
                dcover_img['data-src'] = out_link

              dsrcset_rel_link = dcover_img.get('srcset')
              if dsrcset_rel_link:
                del dcover_img['srcset']

            else:
              cover_img = None
              ns_cover_img = None
              dcover_img = None

            # add `cover_img` & `dcover_img` to `wk_section_items`
            wk_section_upitems.append(ns_cover_img)
            wk_section_upitems.append(dcover_img)

        except AttributeError:
          print('No upcoming event')

      # ---- * * *
      # group the flat list into (title, date, noscript_img, data_img)
      wk_section_upitems = list(zip(*[iter(wk_section_upitems)]*4))
      wk_section_upitems = sorted(wk_section_upitems, key=lambda x: x[1])

      # ---- past items
      wk_section_pastitems = []

      for result in query({'conditions': 'Concept:' + section_title + '|OnDate::<' + today, 'printouts': 'NameOfEvent|OnDate|Venue|Time', 'parameters': 'sort=OnDate|order=desc'}):

        for item in result['results'].items():
          title = item[1]['fulltext']
          wk_section_pastitems.append(title)
          date = item[1]['printouts']['OnDate'][0]['fulltext']
          wk_section_pastitems.append(date)

          # fetch section item's content
          item_introtext_options = {'action': 'parse', 'page': title, 'format': 'json', 'formatversion': '2', 'disableeditsection': 'true'}
          response_introtext_item = requests.get(base_url + api_call , params=item_introtext_options)
          wkdata_introtext_item = response_introtext_item.json()

          wkdata_text_item = wkdata_introtext_item['parse']['text']

          # get section item's img
          soup_wk_introtext = BeautifulSoup(wkdata_text_item, 'html.parser')
          if soup_wk_introtext.img:
            cover_img = soup_wk_introtext.img
            cover_img['class'] = 'mg-t--1 shadow'

            # setup <noscript> tag for original images
            # in case of no js browser-enabled
            noscript = soup_wk_introtext.new_tag('noscript')
            noscript.append(cover_img)
            ns_cover_img = noscript

            src_rel_link = cover_img.get('src')
            if src_rel_link:
              src_c = re.split(r'[/]\s*', src_rel_link)
              src_c = '/'.join(src_c)
              out_link = urljoin(base_url, src_rel_link)
              cover_img['src'] = out_link

            srcset_rel_link = cover_img.get('srcset')
            if srcset_rel_link:
              del cover_img['srcset']

            # duplicate img tag and 
            # replace `src` w/ `data-src`
            import copy
            dcover_img = copy.copy(cover_img)
            dcover_img['data-src'] = dcover_img['src']
            dcover_img['class'] += ' cover-img d-n'
            del dcover_img['src']

            dsrc_rel_link = dcover_img.get('data-src')
            if dsrc_rel_link:
              src_c = re.split(r'[/]\s*', dsrc_rel_link)
              src_c = '/'.join(src_c)
              out_link = urljoin(base_url, dsrc_rel_link)
              dcover_img['data-src'] = out_link

            dsrcset_rel_link = dcover_img.get('srcset')
            if dsrcset_rel_link:
              del dcover_img['srcset']

          else:
            cover_img = None
            ns_cover_img = None
            dcover_img = None

          # add `cover_img` & `dcover_img` to `wk_section_items`
          wk_section_pastitems.append(ns_cover_img)
          wk_section_pastitems.append(dcover_img)

      # ---- * * *
      wk_section_pastitems = list(zip(*[iter(wk_section_pastitems)]*4))
      wk_section_pastitems = sorted(wk_section_pastitems, key=lambda x: x[1], reverse=True)

    # --------------
    # other sections
    else:
      wk_section_upitems = None
      wk_section_pastitems = None
      wk_section_items = []
      for result in query({'conditions': 'Concept:' + section_title, 'printouts': 'Modification date|NameOfEvent|OnDate|Venue|Time', 'parameters': 'sort=Modification date|OnDate|order=desc'}):

        for item in result['results'].items():
          title = item[1]['fulltext']
          wk_section_items.append(title)
          if len(item) > 1 and len(item[1]['printouts']['OnDate']) > 0:
            date = item[1]['printouts']['OnDate'][0]['fulltext']
            wk_section_items.append(date)
          else:
            date = None
            wk_section_items.append(date)

          # fetch section item's content
          item_introtext_options = {'action': 'parse', 'page': title, 'format': 'json', 'formatversion': '2', 'disableeditsection': 'true'}
          response_introtext_item = requests.get(base_url + api_call , params=item_introtext_options)
          wkdata_introtext_item = response_introtext_item.json()

          wkdata_text_item = wkdata_introtext_item['parse']['text']
          # get section item's img
          soup_wk_introtext = BeautifulSoup(wkdata_text_item, 'html.parser')
          if soup_wk_introtext.img:
            cover_img = soup_wk_introtext.img
            cover_img['class'] = 'mg-t--1 shadow'

            src_rel_link = cover_img.get('src')
            srcset_rel_link = cover_img.get('srcset')
            if src_rel_link:
              out_link = urljoin(base_url, src_rel_link)
              cover_img['src'] = out_link
            if srcset_rel_link:
              srcset_list = re.split(r'[,]\s*', srcset_rel_link)
              srcset_lu = srcset_list
              srcset_list[:] = [urljoin(base_url, srcset_i) for srcset_i in srcset_list]
              srcset_s = ', '.join(srcset_lu)
              cover_img['srcset'] = srcset_s
          else:
            cover_img = None

          # add `cover_img` to `wk_section_items`
          wk_section_items.append(cover_img)

      # ---- * * *
      # group the flat list into (title, date, cover_img) tuples
      wk_section_items = list(zip(*[iter(wk_section_items)]*3))
      try:
        wk_section_items = sorted(wk_section_items, key=lambda x: x[1])
      except TypeError:
        # dates may be None for dateless items; fall back to title sort
        wk_section_items = sorted(wk_section_items, key=lambda x: x[0])

    # build template
    return self.render_template('section_list.html',
                                nav_main=wk_nav_main,
                                nav_sections=wk_nav_sections,
                                title=wk_title,
                                intro=wk_intro,
                                section_upitems=wk_section_upitems,
                                section_pastitems=wk_section_pastitems, 
                                section_items=wk_section_items
                                )
コード例 #30
0
ファイル: wsgi.py プロジェクト: tom2jack/pj-redis
 def test_get_current_url_unicode(self):
     environ = create_environ()
     environ['QUERY_STRING'] = 'foo=bar&baz=blah&meh=\xcf'
     result = wsgi.get_current_url(environ)
     # the raw byte is percent-encoded in the reconstructed URL
     self.assertEqual('http://localhost/?foo=bar&baz=blah&meh=%CF', result)
コード例 #31
0
ファイル: http.py プロジェクト: Pholey/python-armet
 def uri(self):
     """Absolute URL of the request being handled."""
     wsgi_environ = self._handle.environ
     return get_current_url(wsgi_environ)
コード例 #32
0
 def url_root(self):
     """Root URL of the application for this request."""
     root_only = True
     return get_current_url(self.environ, root_only)
コード例 #33
0
ファイル: wsgi.py プロジェクト: TheWaWaR/werkzeug
 def test_get_current_url_unicode(self):
     environ = create_environ()
     environ['QUERY_STRING'] = 'foo=bar&baz=blah&meh=\xcf'
     reconstructed = wsgi.get_current_url(environ)
     expected = u'http://localhost/?foo=bar&baz=blah&meh=\ufffd'
     self.assert_strict_equal(reconstructed, expected)
コード例 #34
0
ファイル: test.py プロジェクト: connoryang/dec-eve-serenity
 def extract_wsgi(self, environ, headers):
     """Absorb Set-Cookie response headers into the cookie jar."""
     url_request = U2Request(get_current_url(environ))
     self.extract_cookies(_TestCookieResponse(headers), url_request)
コード例 #35
0
 def url(self):
     """Full URL of the current request."""
     full_url = get_current_url(self.environ)
     return full_url
コード例 #36
0
 def url(self):
     """Reconstructed URL of the active request."""
     environ = self.environ
     return get_current_url(environ)
コード例 #37
0
 def demo(self, env, req):
     """Redirect visitors to the demo page."""
     return redirect('%s/index.html' % get_current_url(env))
コード例 #38
0
 def base_url(self):
     """Request URL with any query string removed."""
     without_qs = get_current_url(self.environ, strip_querystring=True)
     return without_qs
コード例 #39
0
ファイル: test_wsgi.py プロジェクト: pallets/werkzeug
def test_get_current_url_unicode():
    """A unicode query string survives URL reconstruction unchanged."""
    environ = create_environ(query_string=u"foo=bar&baz=blah&meh=\xcf")
    url = wsgi.get_current_url(environ)
    strict_eq(url, u"http://localhost/?foo=bar&baz=blah&meh=\xcf")
コード例 #40
0
 def url_root(self):
     """Return the root URL of the application (no path info or query)."""
     return get_current_url(self.environ, root_only=True)
コード例 #41
0
    def salary_package(self, contract_id=None, **kw):
        """Render the salary package configurator for the given contract.

        The whole simulation runs inside a database savepoint that is rolled
        back before returning, so nothing the visitor tweaks here is ever
        persisted.

        :param contract_id: id of the ``hr.contract`` record to simulate.
        :param kw: query-string parameters — routing values (``job_id``,
            ``applicant_id``, ``employee_contract_id``, ``contract_type``,
            ``job_title``, ``token``) plus numeric contract-field overrides.
        :return: the rendered ``hr_contract_salary.salary_package`` page, an
            ``http_routing.http_error`` page, or raises ``NotFound``.
        """
        # Used to flatten the response after the rollback.
        # Otherwise assets are generated and rollbacked before the page loading.
        # Leading to crashes (assets not found) when loading the page.
        response = False
        request.env.cr.execute('SAVEPOINT salary')

        contract = request.env['hr.contract'].sudo().browse(contract_id)
        if not contract.exists():
            return request.render(
                'http_routing.http_error', {
                    'status_code':
                    'Oops',
                    'status_message':
                    'This contract has been updated, please request an updated link..'
                })

        # Access control for non-managers: either a valid, unexpired applicant
        # token, or the contract must belong to the logged-in user.
        if not request.env.user.has_group(
                'hr_contract.group_hr_contract_manager'):
            if kw.get('applicant_id'):
                applicant = request.env['hr.applicant'].sudo().browse(
                    int(kw.get('applicant_id')))
                if not kw.get('token') or \
                        not applicant.access_token or \
                        not consteq(applicant.access_token, kw.get('token')) or \
                        applicant.access_token_end_date < fields.Date.today():
                    return request.render(
                        'http_routing.http_error', {
                            'status_code':
                            'Oops',
                            'status_message':
                            'This link is invalid. Please contact the HR Responsible to get a new one...'
                        })
            if contract.employee_id and not contract.employee_id.user_id and not kw.get(
                    'applicant_id'):
                return request.render(
                    'http_routing.http_error', {
                        'status_code':
                        'Oops',
                        'status_message':
                        'The employee is not linked to an existing user, please contact the administrator..'
                    })
            if contract.employee_id and contract.employee_id.user_id != request.env.user:
                raise NotFound()

        if kw.get('employee_contract_id'):
            employee_contract = request.env['hr.contract'].sudo().browse(
                int(kw.get('employee_contract_id')))
            if not request.env.user.has_group('hr_contract.group_hr_contract_manager') and employee_contract.employee_id \
                    and employee_contract.employee_id.user_id != request.env.user:
                raise NotFound()

        # Simulation on a contract without an employee: create a throwaway
        # (inactive) employee and private address, discarded by the rollback.
        if not contract.employee_id:
            be_country = request.env["res.country"].search([("code", "=", "BE")
                                                            ])
            contract.employee_id = request.env['hr.employee'].sudo().create({
                'name':
                '',
                'active':
                False,
                'country_id':
                be_country.id,
                'certificate':
                False,  # To force encoding it
            })
            contract.employee_id.address_home_id = request.env[
                'res.partner'].sudo().create({
                    'name': 'Simulation',
                    'type': 'private',
                    'country_id': be_country.id,
                    'active': False,
                })

        if 'applicant_id' in kw:
            contract = contract.with_context(is_applicant=True)
        values = self._get_salary_package_values(contract)

        redirect_to_job = False
        applicant_id = False
        contract_type = False
        employee_contract_id = False
        job_title = False

        final_yearly_costs = contract.final_yearly_costs

        # Apply query-string overrides: routing parameters are captured into
        # locals; any other known contract field is written onto the
        # (rolled-back) contract when its value parses as a float.
        for field_name, value in kw.items():
            old_value = contract
            if field_name == 'job_id':
                redirect_to_job = value
            elif field_name == 'applicant_id':
                applicant_id = value
            elif field_name == 'employee_contract_id':
                employee_contract_id = value
            elif field_name == 'contract_type':
                contract_type = value
            elif field_name == 'job_title':
                job_title = value
            elif field_name in old_value:
                old_value = old_value[field_name]
            else:
                old_value = ""

            if isinstance(old_value, models.BaseModel):
                old_value = ""
            elif old_value:
                try:
                    value = float(value)
                except (TypeError, ValueError):
                    # Non-numeric overrides are ignored. Was a bare
                    # ``except:``, which also swallowed KeyboardInterrupt
                    # and SystemExit; float() only raises these two.
                    continue
                if field_name == "final_yearly_costs":
                    final_yearly_costs = value
                else:
                    setattr(contract, field_name, value)

        # Recompute the gross wage from the (possibly overridden) total
        # employer cost.
        new_gross = contract.sudo()._get_gross_from_employer_costs(
            final_yearly_costs)
        contract.wage = new_gross

        values.update({
            'need_personal_information':
            not redirect_to_job,
            'submit':
            not redirect_to_job,
            'redirect_to_job':
            redirect_to_job,
            'applicant_id':
            applicant_id,
            'employee_contract_id':
            employee_contract_id,
            'contract_type':
            contract_type,
            'job_title':
            job_title,
            'default_mobile':
            request.env['ir.default'].sudo().get('hr.contract', 'mobile'),
            'original_link':
            get_current_url(request.httprequest.environ),
            'token':
            kw.get('token')
        })

        # Render first, then roll everything back (see note at the top).
        response = request.render("hr_contract_salary.salary_package", values)
        response.flatten()
        request.env['hr.contract'].sudo().flush()
        request.env.cr.precommit.clear()
        request.env.cr.execute('ROLLBACK TO SAVEPOINT salary')
        return response
コード例 #42
0
 def host_url(self):
     """Scheme and host of the current request, without path or query."""
     host = get_current_url(self.environ, host_only=True)
     return host
コード例 #43
0
    def salary_package(self, contract_id=None, **kw):
        """Render the salary package configurator for the given contract.

        The whole simulation runs inside a database savepoint that is rolled
        back before returning, so nothing the visitor tweaks here is ever
        persisted.

        :param contract_id: id of the ``hr.contract`` record to simulate.
        :param kw: query-string parameters — routing values (``job_id``,
            ``applicant_id``, ``employee_contract_id``, ``customer_relation``,
            ``new_car``, ``contract_type``, ``job_title``, ``freeze``,
            ``car_id``, ``new_car_model_id``) plus numeric field overrides.
        :return: the rendered ``hr_contract_salary.salary_package`` page, a
            ``website.http_error`` page, or the ``website.404`` page.
        """
        # Used to flatten the response after the rollback.
        # Otherwise assets are generated and rollbacked before the page loading.
        # Leading to crashes (assets not found) when loading the page.
        response = False
        request.env.cr.execute('SAVEPOINT salary')

        contract = request.env['hr.contract'].sudo().browse(contract_id)
        if not contract.exists():
            return request.render(
                'website.http_error', {
                    'status_code':
                    'Oops',
                    'status_message':
                    'This contract has been updated, please request an updated link..'
                })

        # Access control for non-managers: either a valid, unexpired applicant
        # token, or the contract must belong to the logged-in user.
        if not request.env.user.has_group(
                'hr_contract.group_hr_contract_manager'):
            if kw.get('applicant_id'):
                applicant = request.env['hr.applicant'].sudo().browse(
                    int(kw.get('applicant_id')))
                if not kw.get('token') or \
                        not applicant.access_token or \
                        not consteq(applicant.access_token, kw.get('token')) or \
                        applicant.access_token_end_date < fields.Date.today():
                    return request.render(
                        'website.http_error', {
                            'status_code':
                            'Oops',
                            'status_message':
                            'This link is invalid. Please contact the HR Responsible to get a new one...'
                        })
            if contract.employee_id and not contract.employee_id.user_id and not kw.get(
                    'applicant_id'):
                return request.render(
                    'website.http_error', {
                        'status_code':
                        'Oops',
                        'status_message':
                        'The employee is not linked to an existing user, please contact the administrator..'
                    })
            if contract.employee_id and contract.employee_id.user_id != request.env.user:
                return request.render('website.404')

        if kw.get('employee_contract_id'):
            employee_contract = request.env['hr.contract'].sudo().browse(
                int(kw.get('employee_contract_id')))
            if not request.env.user.has_group('hr_contract.group_hr_contract_manager') and employee_contract.employee_id \
                    and employee_contract.employee_id.user_id != request.env.user:
                return request.render('website.404')

        contract.sudo().configure_access_token()
        # Simulation on a contract without an employee: create a throwaway
        # (inactive) employee and private address, discarded by the rollback.
        if not contract.employee_id:
            contract.employee_id = request.env['hr.employee'].sudo().create({
                'name':
                'Enter your name',
                'active':
                False,
                'country_id':
                request.env.ref('base.be').id,
            })
            contract.employee_id.address_home_id = request.env[
                'res.partner'].sudo().create({
                    'name':
                    'Simulation',
                    'type':
                    'private',
                    'country_id':
                    request.env.ref('base.be').id,
                    'active':
                    False,
                })

        values = self.get_salary_package_values(contract)

        redirect_to_job = False
        applicant_id = False
        customer_relation = False
        new_car = False
        contract_type = False
        employee_contract_id = False
        job_title = False
        freeze = False

        final_yearly_costs = contract.final_yearly_costs

        # Apply query-string overrides: car selections mutate the contract and
        # extend the selectable car/model lists, routing parameters are
        # captured into locals, and any other known contract field is written
        # onto the (rolled-back) contract when its value parses as a float.
        for field_name, value in kw.items():
            old_value = contract
            if field_name == 'car_id':
                contract.car_id = int(value)
                contract.new_car = False
                if int(value) not in values['available_cars'].ids:
                    values['available_cars'] |= request.env[
                        'fleet.vehicle'].sudo().browse(int(value))
                    values['available_cars'] = values['available_cars'].sorted(
                        key=lambda car: car.total_depreciated_cost)
            elif field_name == 'new_car_model_id':
                contract.new_car_model_id = int(value)
                contract.new_car = True
                if int(value) not in values['can_be_requested_models'].ids:
                    values['can_be_requested_models'] |= request.env[
                        'fleet.vehicle.model'].sudo().browse(int(value))
                    values['can_be_requested_models'] = values[
                        'can_be_requested_models'].sorted(
                            key=lambda model: model.
                            default_total_depreciated_cost)
            elif field_name == 'job_id':
                redirect_to_job = value
            elif field_name == 'applicant_id':
                applicant_id = value
            elif field_name == 'employee_contract_id':
                employee_contract_id = value
            elif field_name == 'customer_relation':
                customer_relation = value
            elif field_name == 'new_car':
                new_car = value
            elif field_name == 'contract_type':
                contract_type = value
            elif field_name == 'job_title':
                job_title = value
            elif field_name == 'freeze':
                freeze = value
            elif field_name == 'debug':
                pass
            elif field_name in old_value:
                old_value = old_value[field_name]
            else:
                old_value = ""

            if isinstance(old_value, models.BaseModel):
                old_value = ""
            elif old_value:
                try:
                    value = float(value)
                except (TypeError, ValueError):
                    # Skip non-numeric overrides instead of crashing the page
                    # with an unhandled ValueError on malformed query
                    # parameters (matches the sibling implementation).
                    continue
                if field_name in [
                        "final_yearly_costs", "monthly_yearly_costs"
                ]:
                    final_yearly_costs = (field_name == "monthly_yearly_costs"
                                          ) and value * 12.0 or value
                else:
                    setattr(contract, field_name, value)

        # Recompute the gross wage from the (possibly overridden) total
        # employer cost.
        new_gross = contract.sudo()._get_gross_from_employer_costs(
            final_yearly_costs)
        contract.wage = new_gross

        values.update({
            'need_personal_information':
            not redirect_to_job,
            'submit':
            not redirect_to_job,
            'simulation':
            False,
            'redirect_to_job':
            redirect_to_job,
            'applicant_id':
            applicant_id,
            'employee_contract_id':
            employee_contract_id,
            'customer_relation':
            customer_relation,
            'new_car':
            new_car,
            'contract_type':
            contract_type,
            'job_title':
            job_title,
            'freeze':
            freeze,
            'default_mobile':
            request.env['ir.default'].sudo().get('hr.contract', 'mobile'),
            'original_link':
            get_current_url(request.httprequest.environ)
        })

        values.update(self._get_documents_src(contract.employee_id))
        # Render first, then roll everything back (see note at the top).
        response = request.render("hr_contract_salary.salary_package", values)
        response.flatten()
        request.env['hr.contract'].flush()
        request.env.cr.execute('ROLLBACK TO SAVEPOINT salary')
        return response
コード例 #44
0
ファイル: test_wsgi.py プロジェクト: auready/werkzeug
def test_get_current_url_unicode():
    env = create_environ()
    env["QUERY_STRING"] = "foo=bar&baz=blah&meh=\xcf"
    rv = wsgi.get_current_url(env)
    strict_eq(rv, u"http://localhost/?foo=bar&baz=blah&meh=\ufffd")