    def init(self):
        # Work out which sankakucomplex.com subdomain (chan / idol / www) the URL points at;
        # the bare domain yields an empty string, which falls back to 'www'.
        type = self.url.split('sankakucomplex.com')[0].split('//')[-1].strip('.').split('.')[-1]
        if type == '':
            type = 'www'
        if type not in ['chan', 'idol', 'www']:
            raise Exception('Not supported subdomain')
        self.type_sankaku = type
        # Strip the search-form parameter and normalize the URL before opening a session.
        self.url = self.url.replace('&commit=Search', '')
        self.url = clean_url(self.url)
        self.session = Session()

        if self.type_sankaku != 'www':
            # Non-www subdomains go through login() before any requests are made.
            login(type, self.session, self.cw)

        if self.type_sankaku == 'www':
            # The www subdomain is fetched and parsed up front.
            html = downloader.read_html(self.url, session=self.session)
            self.soup = Soup(html)
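
A minimal sketch of how the subdomain expression above behaves, with illustrative URLs (not taken from the original code):

def parse_subdomain(url):
    # 'https://chan.sankakucomplex.com/...' -> 'chan'
    # 'https://sankakucomplex.com/...'      -> ''  (falls back to 'www')
    sub = url.split('sankakucomplex.com')[0].split('//')[-1].strip('.').split('.')[-1]
    return sub or 'www'

assert parse_subdomain('https://chan.sankakucomplex.com/?tags=example') == 'chan'
assert parse_subdomain('https://sankakucomplex.com/?tags=example') == 'www'
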
Example #2
    def init(self):
        # Drop the 'sankaku_' prefix if present; single posts are not handled.
        self.url = self.url.replace('sankaku_', '')
        if '/post/' in self.url:
            return self.Invalid('Single post is not supported')

        if 'sankakucomplex.com' in self.url:
            # A full URL was given: force https and read the subdomain out of it.
            self.url = self.url.replace('http://', 'https://')
            type = self.url.split('sankakucomplex.com')[0].split(
                '//')[-1].strip('.').split('.')[-1]
            if type == '':
                type = 'www'
            if type not in ['chan', 'idol', 'www']:
                raise Exception('Not supported subdomain')
        else:
            # Otherwise treat the input as a tag search: normalize whitespace to '+',
            # percent-encode, and map the [chan]/[idol]/[www] prefix onto a subdomain.
            url = self.url
            url = url.replace(' ', '+')
            while '++' in url:
                url = url.replace('++', '+')
            url = urllib.quote(url)  # Python 2 urllib; urllib.parse.quote in Python 3
            url = url.replace('%2B', '+')
            url = url.replace('%20', '+')
            if url.startswith('[chan]'):
                type = 'chan'
                url = url.replace('[chan]', '', 1).strip()
            elif url.startswith('[idol]'):
                type = 'idol'
                url = url.replace('[idol]', '', 1).strip()
            elif url.startswith('[www]'):
                type = 'www'
                url = url.replace('[www]', '', 1).strip()
            else:
                raise Exception('Not supported subdomain')
            self.url = u'https://{}.sankakucomplex.com/?tags={}'.format(
                type, url)
        self.type_sankaku = type
        self.url = self.url.replace('&commit=Search', '')
        self.url = clean_url(self.url)
        self.session = Session()

        if self.type_sankaku != 'www':
            login(type, self.session, self.customWidget)

        if self.type_sankaku == 'www':
            html = downloader.read_html(self.url, session=self.session)
            self.soup = Soup(html)
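
For the tag-shortcut branch above, a Python 3 sketch of the same normalization (the helper name and sample input are illustrative; urllib.parse.quote stands in for the Python 2 urllib.quote, and the [chan]/[idol]/[www] prefix is stripped here before percent-encoding):

from urllib.parse import quote

def build_search_url(text):
    # '[idol] blue sky' -> 'https://idol.sankakucomplex.com/?tags=blue+sky'
    for sub in ('chan', 'idol', 'www'):
        tag = '[{}]'.format(sub)
        if text.startswith(tag):
            tags = text.replace(tag, '', 1).strip()
            break
    else:
        raise Exception('Not supported subdomain')
    tags = tags.replace(' ', '+')
    while '++' in tags:
        tags = tags.replace('++', '+')
    tags = quote(tags).replace('%2B', '+').replace('%20', '+')
    return 'https://{}.sankakucomplex.com/?tags={}'.format(sub, tags)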