示例#1
0
文件: ir_http.py 项目: zhkx1994/odoo
    def _serve_page(cls):
        """Serve the ``website.page`` matching the requested URL, if any.

        Collects all candidate pages for the current website, keeps only
        the most specific one per URL, hides non-visible pages from
        non-publishers, and renders the winner with a mimetype guessed
        from the URL extension.  Returns False when nothing matches.
        """
        requested_path = request.httprequest.path
        lookup_domain = [('url', '=', requested_path)] + request.website.website_domain()

        # Search without a published filter: _is_most_specific_page must
        # see the full candidate set before visibility is applied.
        candidates = request.env['website.page'].sudo().search(
            lookup_domain, order='website_id')
        candidates = candidates.filtered(candidates._is_most_specific_page)

        if not request.website.is_publisher():
            candidates = candidates.filtered('is_visible')

        if not candidates:
            return False

        page = candidates[0]
        _, extension = os.path.splitext(requested_path)
        return request.render(
            page.get_view_identifier(),
            {'deletable': True, 'main_object': page},
            mimetype=_guess_mimetype(extension))
示例#2
0
文件: ir_http.py 项目: GSLabIt/odoo
    def _serve_page(cls):
        """Render the ``website.page`` bound to the current request path.

        When no page matches and the URL carries a trailing slash, emit a
        301 redirect to the slash-less URL (same canonicalization as
        ``_postprocess_args()``).  Returns False when nothing is servable.
        """
        requested_path = request.httprequest.path
        lookup_domain = [('url', '=', requested_path)] + request.website.website_domain()

        # 'website_id asc' puts website-specific pages ahead of generic ones.
        page = request.env['website.page'].sudo().search(
            lookup_domain, order='website_id asc', limit=1)

        # No match: redirect without the trailing slash, preserving the
        # language prefix and the query string.
        if not page and requested_path != "/" and requested_path.endswith("/"):
            target = request.httprequest.path[:-1]
            if request.lang != cls._get_default_lang():
                target = '/' + request.lang.url_code + target
            if request.httprequest.query_string:
                target += '?' + request.httprequest.query_string.decode('utf-8')
            return request.redirect(target, code=301)

        if not page:
            return False
        if not (request.website.is_publisher() or page.is_visible):
            return False

        _, extension = os.path.splitext(requested_path)
        return request.render(
            page.view_id.id,
            {'deletable': True, 'main_object': page},
            mimetype=_guess_mimetype(extension))
示例#3
0
    def pagenew(self,
                path="",
                noredirect=False,
                add_menu=False,
                template=False,
                **kwargs):
        """Create a new website page at *path* and redirect to an editor.

        For supported non-HTML mimetypes a matching default template
        (``website.default_<ext>``) is used when none is supplied, and the
        redirect targets the backend form view instead of the frontend
        editor.  With ``noredirect`` set, the new page URL is returned as
        a plain-text response instead of redirecting.
        """
        # For supported mimetypes, detect the "special" non-HTML case.
        _, ext = os.path.splitext(path)
        ext_special_case = ext and ext in _guess_mimetype() and ext != '.html'

        if not template and ext_special_case:
            default_templ = 'website.default_%s' % ext.lstrip('.')
            if request.env.ref(default_templ, False):
                template = default_templ

        # Conditional expression instead of the fragile `and/or` trick
        # (`X and Y or Z` breaks whenever Y can be falsy).
        template = dict(template=template) if template else {}
        page = request.env['website'].new_page(path,
                                               add_menu=add_menu,
                                               **template)
        url = page['url']
        if noredirect:
            return werkzeug.wrappers.Response(url, mimetype='text/plain')

        if ext_special_case:  # redirect non-html pages to backend to edit
            return werkzeug.utils.redirect('/web#id=' +
                                           str(page.get('view_id')) +
                                           '&view_type=form&model=ir.ui.view')
        return werkzeug.utils.redirect(url + "?enable_editor=1")
示例#4
0
    def _serve_page(cls):
        """Serve the ``website.page`` matching the request path, using the
        page-level response cache when possible.

        Returns a rendered (possibly cached) response, a 301 redirect when
        the only difference is a trailing slash, or False when no page can
        be served.
        """
        req_page = request.httprequest.path
        page_domain = [('url', '=', req_page)] + request.website.website_domain()

        published_domain = page_domain
        # specific page first ('website_id asc' sorts website-bound pages
        # before generic ones)
        page = request.env['website.page'].sudo().search(published_domain, order='website_id asc', limit=1)

        # redirect without trailing /
        if not page and req_page != "/" and req_page.endswith("/"):
            # mimic `_postprocess_args()` redirect: keep language prefix
            # and query string, drop only the trailing slash
            path = request.httprequest.path[:-1]
            if request.lang != cls._get_default_lang():
                path = '/' + request.lang.url_code + path
            if request.httprequest.query_string:
                path += '?' + request.httprequest.query_string.decode('utf-8')
            return request.redirect(path, code=301)

        if page:
            # prefetch all menus (it will prefetch website.page too)
            request.website.menu_id

        if page and (request.website.is_publisher() or page.is_visible):
            need_to_cache = False
            cache_key = page._get_cache_key(request)
            # Cache only anonymous GETs of cacheable pages, honouring every
            # explicit bypass (nocache param, debug session, nocache expr).
            if (
                page.cache_time  # cache > 0
                and request.httprequest.method == "GET"
                and request.env.user._is_public()    # only cache for unlogged user
                and 'nocache' not in request.params  # allow bypass cache / debug
                and not request.session.debug
                and len(cache_key) and cache_key[-1] is not None  # nocache via expr
            ):
                need_to_cache = True
                try:
                    r = page._get_cache_response(cache_key)
                    # Serve from cache only while the entry is fresh.
                    if r['time'] + page.cache_time > time.time():
                        response = odoo.http.Response(r['content'], mimetype=r['contenttype'])
                        # Markers consumed downstream to identify cached renders.
                        response._cached_template = r['template']
                        response._cached_page = page
                        return response
                except KeyError:
                    # No cached entry yet: fall through and render.
                    pass

            _, ext = os.path.splitext(req_page)
            response = request.render(page.view_id.id, {
                'deletable': True,
                'main_object': page,
            }, mimetype=_guess_mimetype(ext))

            if need_to_cache and response.status_code == 200:
                # Force rendering now so the final payload can be stored.
                r = response.render()
                page._set_cache_response(cache_key, {
                    'content': r,
                    'contenttype': response.headers['Content-Type'],
                    'time': time.time(),
                    'template': getattr(response, 'qcontext', {}).get('response_template')
                })
            return response
        return False
示例#5
0
    def _serve_page(cls):
        """Serve the ``website.page`` whose URL matches the request path.

        Non-publishers only see visible pages.  Returns False when no
        matching, viewable page exists.
        """
        current_path = request.httprequest.path
        search_domain = [('url', '=', current_path)] + request.website.website_domain()

        # Website-specific pages sort first thanks to the order clause.
        page = request.env['website.page'].sudo().search(
            search_domain, order='website_id asc', limit=1)
        if not page:
            return False
        if not (request.website.is_publisher() or page.is_visible):
            return False

        _, extension = os.path.splitext(current_path)
        return request.render(
            page.get_view_identifier(),
            {'deletable': True, 'main_object': page},
            mimetype=_guess_mimetype(extension))
示例#6
0
    def _serve_page(cls):
        """Render the ``website.page`` bound to the requested URL.

        Non-publishers are restricted to pages flagged ``is_visible``.
        Returns False when no matching page exists.
        """
        req_page = request.httprequest.path

        domain = [('url', '=', req_page), '|', ('website_ids', 'in', request.website.id), ('website_ids', '=', False)]

        # Bug fix: ``is_publisher`` is a method — the original truth-tested
        # the bound method object (always truthy), so the visibility filter
        # was never added and hidden pages leaked to every visitor.
        if not request.website.is_publisher():
            domain += [('is_visible', '=', True)]

        mypage = request.env['website.page'].search(domain, limit=1)
        _, ext = os.path.splitext(req_page)
        if mypage:
            return request.render(mypage.view_id.id, {
                'deletable': True,
                'main_object': mypage,
            }, mimetype=_guess_mimetype(ext))
        return False
示例#7
0
文件: ir_http.py 项目: 1806933/odoo
    def _serve_page(cls):
        """Render the ``website.page`` bound to the requested URL.

        Non-publishers are restricted to pages flagged ``is_visible``.
        Returns False when no matching page exists.
        """
        req_page = request.httprequest.path

        domain = [('url', '=', req_page), '|', ('website_ids', 'in', request.website.id), ('website_ids', '=', False)]

        # Bug fix: ``is_publisher`` is a method — the original truth-tested
        # the bound method object (always truthy), so the visibility filter
        # was never added and hidden pages leaked to every visitor.
        if not request.website.is_publisher():
            domain += [('is_visible', '=', True)]

        mypage = request.env['website.page'].search(domain, limit=1)
        _, ext = os.path.splitext(req_page)
        if mypage:
            return request.render(mypage.view_id.id, {
                'deletable': True,
                'main_object': mypage,
            }, mimetype=_guess_mimetype(ext))
        return False
示例#8
0
    def _serve_page(cls):
        """Serve the first ``website.page`` matching the requested URL.

        Non-publishers only see visible pages.  Returns False when no
        matching page remains after filtering.
        """
        current_path = request.httprequest.path

        lookup = [('url', '=', current_path), '|', ('website_ids', 'in', request.website.id), ('website_ids', '=', False)]
        matches = request.env['website.page'].search(lookup)

        if not request.website.is_publisher():
            matches = matches.filtered('is_visible')

        if not matches:
            return False

        page = matches[0]
        _, extension = os.path.splitext(current_path)
        return request.render(page.get_view_identifier(), {
            'deletable': True,
            'main_object': page,
        }, mimetype=_guess_mimetype(extension))
示例#9
0
文件: main.py 项目: Gorrice/odoo
    def pagenew(self, path="", noredirect=False, add_menu=False, template=False, **kwargs):
        """Create a new website page at *path* and redirect to an editor.

        For supported non-HTML mimetypes a matching default template
        (``website.default_<ext>``) is used when none is supplied, and the
        redirect targets the backend form view instead of the frontend
        editor.  With ``noredirect`` set, the new page URL is returned as
        a plain-text response instead of redirecting.
        """
        # For supported mimetypes, detect the "special" non-HTML case.
        _, ext = os.path.splitext(path)
        ext_special_case = ext and ext in _guess_mimetype() and ext != '.html'

        if not template and ext_special_case:
            default_templ = 'website.default_%s' % ext.lstrip('.')
            if request.env.ref(default_templ, False):
                template = default_templ

        # Conditional expression instead of the fragile `and/or` trick
        # (`X and Y or Z` breaks whenever Y can be falsy).
        template = dict(template=template) if template else {}
        page = request.env['website'].new_page(path, add_menu=add_menu, **template)
        url = page['url']
        if noredirect:
            return werkzeug.wrappers.Response(url, mimetype='text/plain')

        if ext_special_case:  # redirect non-html pages to backend to edit
            return werkzeug.utils.redirect('/web#id=' + str(page.get('view_id')) + '&view_type=form&model=ir.ui.view')
        return werkzeug.utils.redirect(url + "?enable_editor=1")
示例#10
0
文件: ir_http.py 项目: Vauxoo/odoo
    def _serve_page(cls):
        """Look up and render the most specific published page for the
        current request path; returns False when nothing matches."""
        path = request.httprequest.path
        lookup_domain = [('url', '=', path)] + request.website.website_domain()

        # Bypass the published filter here: _is_most_specific_page needs
        # the complete candidate set; visibility is applied afterwards.
        Page = request.env['website.page'].sudo()
        matches = Page.search(lookup_domain, order='website_id')
        matches = matches.filtered(matches._is_most_specific_page)

        if not request.website.is_publisher():
            matches = matches.filtered('is_visible')

        _, extension = os.path.splitext(path)
        for match in matches:
            # First record wins — same as indexing with [0].
            return request.render(match.get_view_identifier(), {
                'deletable': True,
                'main_object': match,
            }, mimetype=_guess_mimetype(extension))
        return False
示例#11
0
文件: website.py 项目: leavems/odoo-1
 def guess_mimetype(self):
     # Thin wrapper exposing the module-level `_guess_mimetype()` result
     # (called with no argument — presumably the full extension→mimetype
     # mapping; confirm against `_guess_mimetype`'s definition).
     return _guess_mimetype()
示例#12
0
文件: website.py 项目: CRITEAN/Odoo
 def guess_mimetype(self):
     # Thin wrapper exposing the module-level `_guess_mimetype()` result
     # (called with no argument — presumably the full extension→mimetype
     # mapping; confirm against `_guess_mimetype`'s definition).
     return _guess_mimetype()
示例#13
0
    def _serve_page(cls):
        """Serve the SGS homepage with aggregated site content.

        Performs the standard ``website.page`` lookup, then — when a page
        matches — renders the ``sgs.home`` template with blog posts,
        upcoming events, partner logos, photo galleries and league
        results.  Most datasets are cached in Redis (~10 minute TTL) as
        pickled blobs.  Returns the rendered response, or False when no
        page matches.
        """
        # NOTE(review): Redis host name is hard-coded (looks like a
        # docker-compose service name) — consider making it configurable.
        red = redis.Redis("sgs-docker_redis_1")
        req_page = request.httprequest.path
        page_domain = [('url', '=', req_page)] + \
            request.website.website_domain()
        published_domain = page_domain
        # need to bypass website_published, to apply is_most_specific
        # filter later if not publisher
        pages = request.env['website.page'].sudo().search(
            published_domain, order='website_id')
        pages = pages.filtered("is_published")

        if not request.website.is_publisher():
            pages = pages.filtered('is_visible')

        mypage = pages[0] if pages else False
        _, ext = os.path.splitext(req_page)
        if mypage:
            # OVERRIDE TO ADD LOCATION POINTES
            # Sudo'd model handles used to collect the homepage datasets.
            blog = request.env['blog.post'].sudo()
            event_obj = request.env['sgs.event'].sudo()
            partner_obj = request.env['sgs.partner'].sudo()
            res_partner_obj =request.env['res.partner'].sudo()
            # league_obj = request.env['sgs.league'].sudo()
            line_obj = request.env['league.fixture'].sudo()
            fixture_obj = request.env['league.fixture'].sudo()
            photo_obj = request.env['sgs.photo'].sudo()
            attachment = request.env['ir.attachment'].sudo()
            blog_id = http.request.env.ref("sgs.news_sgs").id
            # --- Upcoming public events (Redis key 'res_events') ---
            try:

                res_events = []
                if( type( red.get('res_events_timestamp')) == bytes ):
                    prev_time = float( red.get('res_events_timestamp'))
                else:
                    prev_time = 0.0
                # NOTE(review): this freshness test uses '>' — it RECOMPUTES
                # while the cached timestamp is still fresh and reads the
                # pickled cache once it is older than 600s, the opposite of
                # every later section ('<').  Confirm which is intended.
                if( prev_time > time.time() - 600 ):
                    events_list = event_obj.search([('event_type', '=', 'public'), ('date', '>=', date.today())])
                    for event in events_list:
                        image_id = attachment.sudo().search([
                            ('res_model', '=', "sgs.event"),
                            ('res_field', '=', "cover"),
                            ('res_id', '=', event.id)
                        ])
                        res_events.append({
                            'name': event.name,
                            'date': event.date,
                            'description': event.description,
                            'id': event.id,
                            'image_url': ("/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id)) if image_id else '#',
                        })
                    red.mset({'res_events': pickle.dumps( res_events ), 'res_events_timestamp': time.time()})
                else:
                    # SECURITY NOTE: pickle.loads on data read back from
                    # Redis — safe only while Redis is fully trusted.
                    res_events = pickle.loads( red.get('res_events'))

            except Exception as e:
                # NOTE(review): `traceback.format_exc` is not called
                # (missing parentheses), so this prints the function's
                # repr rather than the actual traceback text.
                print( f"{e} {traceback.format_exc}" )


          
            # --- Partner logo URLs (Redis key 'partners_list') ---
            partners_list = []
            try : 
                if( type( red.get('partners_list_timestamp')) == bytes ):
                    prev_time = float( red.get('partners_list_timestamp'))
                else:
                    prev_time = 0.0
            except Exception as e:
                prev_time = 0.0

            if( prev_time < time.time() - 600 ):
                for partner in partner_obj.search([]):
                    image_id = attachment.sudo().search([
                        ('res_model', '=', "sgs.partner"),
                        ('res_field', '=', "cover"),
                        ('res_id', '=', partner.id)
                    ])
                    if image_id:
                        partners_list.append(
                            "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
                red.mset({'partners_list': pickle.dumps( partners_list ), 'partners_list_timestamp': time.time()})
            else:
                partners_list = pickle.loads( red.get('partners_list'))

            # --- Event photo gallery (Redis key 'events_gallery') ---
            events_gallery = []
            try : 
                if( type( red.get('events_gallery_timestamp')) == bytes ):
                    prev_time = float( red.get('events_gallery_timestamp'))
                else:
                    prev_time = 0.0
            except Exception as e:
                prev_time = 0.0
            if( prev_time < time.time() - 600 ):
                for e in photo_obj.search([('section', '=', 'event')]):
                    image_id = attachment.sudo().search([
                        ('res_model', '=', "sgs.photo"),
                        ('res_field', '=', "image"),
                        ('res_id', '=', e.id)
                    ])
                    if image_id:
                        events_gallery.append(
                            "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
                red.mset({'events_gallery': pickle.dumps( events_gallery ), 'events_gallery_timestamp': time.time()})
            else:
                events_gallery = pickle.loads( red.get('events_gallery'))

            # --- Tournament photo gallery (Redis key 'tournament_gallery') ---
            tournament_gallery = []
            try : 
                if( type( red.get('tournament_gallery_timestamp')) == bytes ):
                    prev_time = float( red.get('tournament_gallery_timestamp'))
                else:
                    prev_time = 0.0
            except Exception as e:
                prev_time = 0.0
            if( prev_time < time.time() - 600 ):
                for e in photo_obj.search([('section', '=', 'tournament')]):
                    image_id = attachment.sudo().search([
                        ('res_model', '=', "sgs.photo"),
                        ('res_field', '=', "image"),
                        ('res_id', '=', e.id)
                    ])
                    if image_id:
                        tournament_gallery.append(
                            "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
                red.mset({'tournament_gallery': pickle.dumps( tournament_gallery ), 'tournament_gallery_timestamp': time.time()})
            else:
                tournament_gallery = pickle.loads( red.get('tournament_gallery'))

            # --- Academy gallery (not cached: recomputed on every request) ---
            academy_gallery = []
            for e in photo_obj.search([('section', '=', 'academy')]):
                image_id = attachment.sudo().search([
                    ('res_model', '=', "sgs.photo"),
                    ('res_field', '=', "image"),
                    ('res_id', '=', e.id)
                ])
                if image_id:
                    academy_gallery.append(
                        "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
            # league_gallery = []
            # for e in photo_obj.search([('section', '=', 'league')]):
            #     image_id = attachment.sudo().search([
            #         ('res_model', '=', "sgs.photo"),
            #         ('res_field', '=', "image"),
            #         ('res_id', '=', e.id)
            #     ])
            #     if image_id:
            #         league_gallery.append(
            #             "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
            # --- Combined gallery across all sections (not cached) ---
            all_gallery = []
            for e in photo_obj.search([('section', '!=', '')]):
                image_id = attachment.sudo().search([
                    ('res_model', '=', "sgs.photo"),
                    ('res_field', '=', "image"),
                    ('res_id', '=', e.id)
                ])
                if image_id:
                    all_gallery.append(
                        "/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id))
            # --- Results of fixtures closed in the last 7 days ---
            league_res = []
            today = datetime.today()
            fixtures_list = line_obj.search([('date', '>',today - timedelta(days=7)), ('state', '=', 'close')])


            for fixture in fixtures_list:
                fixture_res = []
                # Top 5 entries of this fixture ordered by rank.
                ts = fixture.players.search([('id', 'in', fixture.players.ids)], order='rank', limit=5)
                _logger.info(ts)
                for player in ts:
                    fixture_res.append({
                        'id':player.name.id,
                        'rank':player.rank,
                        'name': player.name.name,
                        'net': player.net,
                    })
                # Winner (first ranked entry) name / image, if any.
                wn_partner = False
                wn = False
                wi = False
                if fixture_res:
                    wn = fixture_res[0]['name']
                    wn_partner = res_partner_obj.sudo().search([
                        ('id', '=', fixture_res[0]['id'])
                    ])




                    wi = '/image/get/' + str(fixture_res[0]['id'])
                    image_id = attachment.sudo().search([
                        ('res_model', '=', "res.partner"),
                        ('res_field', '=', "image"),
                        ('res_id', '=', fixture_res[0]['id'])
                    ])
                    if image_id:
                        wi ="/web/binary/image?model=ir.attachment&field=datas&id=" + str(image_id.id)
                league_res.append({
                    'fixture': fixture.name,
                    'winner': {'name':wn,'image':wi},
                    'league': fixture.league_id.name,
                    'fixture_results': fixture_res,
                    'wn_partner': wn_partner.image,

                })

            # --- All leagues (Redis key 'leagues') ---
            try : 
                if( type( red.get('leagues_timestamp')) == bytes ):
                    prev_time = float( red.get('leagues_timestamp'))
                else:
                    prev_time = 0.0
            except Exception as e:
                prev_time = 0.0

            if( prev_time < time.time() - 600 ):
                red.set('leagues_timestamp', time.time())
                leagues = cls.get_all_leagues(cls)
                red.mset({'leagues': pickle.dumps( leagues ), 'leagues_timestamp': time.time()})
            else:
                leagues = pickle.loads( red.get('leagues'))

            # --- Per-league tournament result boxes (Redis cached) ---
            try : 
                if( type( red.get('league_results_box_timestamp')) == bytes ):
                    prev_time = float( red.get('league_results_box_timestamp'))
                else:
                    prev_time = 0.0
            except Exception as e:
                prev_time = 0.0

            if( prev_time < time.time() - 600 ):
                red.set('league_results_box_timestamp', time.time())
                running_leagues = cls.get_latest_running_leagues(cls, leagues)
                with open('/tmp/python-logs.log', 'a') as f:
                    f.write(f"running_leagues: {running_leagues} \n")

                # Now looping the running league and collecting their first tournament results in a list
                league_result_boxes = []
                for running_league in running_leagues:
                    league_results_box = cls.get_league_tournament_results(cls, running_league['event'])
                    league_results_box['sgs_leagueID'] = running_league['event']['id']
                    league_result_boxes.append(league_results_box)

                red.mset({
                    'league_result_boxes': pickle.dumps( league_result_boxes ),
                    'league_results_box_timestamp': time.time(),
                    'running_leagues': pickle.dumps(running_leagues)
                })
            else:
                league_result_boxes = pickle.loads( red.get('league_result_boxes'))
                running_leagues = pickle.loads(red.get('running_leagues'))

            return request.render('sgs.home', {
                # 'path': req_page[1:],

                'deletable': True,
                'main_object': mypage,
                'blog_posts': blog.search([('blog_id', '=', blog_id), ('website_published', '=', True)], limit=4),
                'partners': partners_list,
                'events': res_events,
                'events_gallery': events_gallery,
                'tournament_gallery': tournament_gallery,
                'academy_gallery': academy_gallery,
                'all_gallery': all_gallery,
                'league_results': league_res,
                'partner_presentation': request.env.ref('sgs.partner_book_init').id,
                'banner_images': request.env.ref('sgs.banner_images_init'),
                'test': "Hello World",
                'league_result_boxes': league_result_boxes,
                'leagues': running_leagues

            }, mimetype=_guess_mimetype(ext))
        return False
示例#14
0
文件: ir_http.py 项目: dzywenji/odoo
        page = request.env['website.page'].sudo().search(published_domain, order='website_id asc', limit=1)
<<<<<<< HEAD
        if page and (request.website.is_publisher() or page.is_visible):
            _, ext = os.path.splitext(req_page)
            return request.render(page.get_view_identifier(), {
=======
        if page:
            # prefetch all menus (it will prefetch website.page too)
            request.website.menu_id
        if page and (request.website.is_publisher() or page.is_visible):
            _, ext = os.path.splitext(req_page)
            return request.render(page.view_id.id, {
>>>>>>> f0a66d05e70e432d35dc68c9fb1e1cc6e51b40b8
                'deletable': True,
                'main_object': page,
            }, mimetype=_guess_mimetype(ext))
        return False

    @classmethod
    def _serve_redirect(cls):
        """Return the ``website.rewrite`` rule (301/302) whose source URL
        matches the requested path on the current website, or an empty
        recordset when none applies."""
        current_path = request.httprequest.path
        search_domain = [
            ('redirect_type', 'in', ('301', '302')),
            ('url_from', '=', current_path),
        ] + request.website.website_domain()
        Rewrite = request.env['website.rewrite'].sudo()
        return Rewrite.search(search_domain, limit=1)

    @classmethod
    def _serve_fallback(cls, exception):
        # serve attachment before
示例#15
0
    def _serve_page(cls):
        """Serve the ``website.page`` matching the request path, with an
        in-process LRU cache of rendered pages for anonymous GET requests.

        Returns a rendered (or cached) response, or False when no page
        can be served.
        """
        req_page = request.httprequest.path
        page_domain = [('url', '=', req_page)
                       ] + request.website.website_domain()

        published_domain = page_domain
        # specific page first
        page = request.env['website.page'].sudo().search(
            published_domain, order='website_id asc', limit=1)

        if page:
            # prefetch all menus (it will prefetch website.page too)
            request.website.menu_id

        if page and (request.website.is_publisher() or page.is_visible):
            need_to_cache = False
            cache_key = page._get_cache_key(request)
            # Cache only anonymous GETs of cacheable pages, honouring the
            # explicit bypasses (nocache param, nocache expression).
            if (page.cache_time  # cache > 0
                    and request.httprequest.method == "GET"
                    and request.env.user._is_public()  # only cache for unlogged user
                    and 'nocache' not in request.params  # allow bypass cache / debug
                    and len(cache_key)
                    and cache_key[-1] is not None  # nocache via expr
                ):
                need_to_cache = True
                if not hasattr(cls, '_cached_pages'):
                    cls.clear_cached_pages()  # init LRU

                # use cached version
                try:
                    r = cls._cached_pages[cache_key]
                    if r['time'] + page.cache_time > time.time():
                        response = werkzeug.Response(r['content'],
                                                     mimetype=r['contenttype'])
                        # Markers consumed downstream to identify cached renders.
                        response._cached_template = r['template']
                        response._cached_page = page
                        return response
                    else:
                        # Bug fix: the original evaluated
                        # `cls._cached_pages[cache_key]` as a bare expression,
                        # so expired entries were never evicted (the lookup
                        # even refreshed their LRU position).  Delete the
                        # stale entry before re-rendering.
                        del cls._cached_pages[cache_key]
                except KeyError:
                    pass

            _, ext = os.path.splitext(req_page)
            response = request.render(page.view_id.id, {
                'deletable': True,
                'main_object': page,
            },
                                      mimetype=_guess_mimetype(ext))

            if need_to_cache and response.status_code == 200:
                # Force rendering now so the final payload can be stored.
                r = response.render()
                cls._cached_pages[cache_key] = {
                    'content': r,
                    'contenttype': response.headers['Content-Type'],
                    'time': time.time(),
                    'template': getattr(response, 'qcontext', {}).get('response_template'),
                }

            return response
        return False