Example #1
    def do_search(self, metadata):
        # type: (osdlyrics.metadata.Metadata) -> List[SearchResult]
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        url = NETEASE_HOST + NETEASE_SEARCH_URL
        urlkey = '+'.join(keys).replace(' ', '+')
        params = 's=%s&type=1' % urlkey

        status, content = http_download(url=url,
                                        method='POST',
                                        params=params.encode('utf-8'),
                                        proxy=get_proxy_settings(
                                            self.config_proxy))

        if status < 200 or status >= 400:
            raise http.client.HTTPException(status, '')

        def map_func(song):
            if song['artists']:
                artist_name = song['artists'][0]['name']
            else:
                artist_name = ''
            url = NETEASE_HOST + NETEASE_LYRIC_URL + '?id=' + \
                str(song['id']) + '&lv=-1&kv=-1&tv=-1'
            return SearchResult(title=song['name'],
                                artist=artist_name,
                                album=song['album']['name'],
                                sourceid=self.id,
                                downloadinfo=url)

        parsed = json.loads(content.decode('utf-8'))
        result = list(map(map_func, parsed['result']['songs']))

        # If there are more than 10 songs we do a second request.
        song_count = parsed['result']['songCount']
        if song_count > 10:
            params = params + '&offset=10'
            status, content = http_download(url=url,
                                            method='POST',
                                            params=params.encode('utf-8'),
                                            proxy=get_proxy_settings(
                                                self.config_proxy))
            if status < 200 or status >= 400:
                raise http.client.HTTPException(status, '')
            parsed = json.loads(content.decode('utf-8'))
            result = result + list(map(map_func, parsed['result']['songs']))
        return result
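
All of the `do_search`/`do_download` examples on this page are methods of an osdlyrics lyric-source plugin. As a point of reference, a minimal scaffold they could hang off of might look like the sketch below; the base-class name, constructor signature, and import paths are assumptions based on how these snippets use `self.id`, `self.config_proxy`, `SearchResult`, and `http_download`, not something shown in the examples themselves.

# Hypothetical plugin scaffold (a sketch, not the project's actual file):
# BaseLyricSourcePlugin, SearchResult, http_download and get_proxy_settings
# are assumed to come from the osdlyrics helper modules these examples use.
import http.client

from osdlyrics.lyricsource import BaseLyricSourcePlugin, SearchResult
from osdlyrics.utils import get_proxy_settings, http_download


class ExampleSource(BaseLyricSourcePlugin):
    def __init__(self):
        # Constructor signature assumed; `id` is what the examples expose as self.id.
        super().__init__(id='example', name='Example Source')

    def do_search(self, metadata):
        # Return a list of SearchResult objects built from the remote
        # service's response, as the examples above do.
        return []

    def do_download(self, downloadinfo):
        # `downloadinfo` is whatever do_search stored in the SearchResult.
        status, content = http_download(url=downloadinfo,
                                        proxy=get_proxy_settings(self.config_proxy))
        if status < 200 or status >= 400:
            raise http.client.HTTPException(status, '')
        return content
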
Example #2
    def do_search(self, metadata):
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        urlkey = ensure_utf8('+'.join(keys)).replace(' ', '+')
        url = NETEASE_HOST + NETEASE_SEARCH_URL
        params = 's=%s&type=1' % urlkey

        status, content = http_download(url=url,
                                        method='POST',
                                        params=params,
                                        proxy=get_proxy_settings(self.config_proxy))

        if status < 200 or status >= 400:
            raise httplib.HTTPException(status, '')

        def map_func(song):
            if len(song['artists']) > 0:
                artist_name = song['artists'][0]['name']
            else:
                artist_name = ''
            url = NETEASE_HOST + NETEASE_LYRIC_URL + '?id=' + str(song['id']) + '&lv=-1&kv=-1&tv=-1'
            return SearchResult(title=song['name'],
                                artist=artist_name,
                                album=song['album']['name'],
                                sourceid=self.id,
                                downloadinfo=url)

        parsed = json.loads(content)
        result = list(map(map_func, parsed['result']['songs']))

        return result
Example #3
 def do_search(self, metadata):
     keys = []
     if metadata.title:
         keys.append(metadata.title)
     if metadata.artist:
         keys.append(metadata.artist)
     urlkey = ensure_utf8('+'.join(keys)).replace(' ', '+')
     url = XIAMI_HOST + XIAMI_SEARCH_URL
     status, content = http_download(url=url,
                                     params={'key': urlkey},
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, '')
     match = XIAMI_SEARCH_PATTERN.findall(content)
     result = []
     if match:
         for title_elem, id, artist_elem, album_elem in match:
             title = TITLE_ATTR_PATTERN.search(title_elem).group(1)
             artist = TITLE_ATTR_PATTERN.search(artist_elem).group(1)
             album = TITLE_ATTR_PATTERN.search(album_elem).group(1)
             url = self.get_url(id)
             if url is not None:
                 result.append(
                     SearchResult(title=title,
                                  artist=artist,
                                  album=album,
                                  sourceid=self.id,
                                  downloadinfo=url))
     return result
Example #4
 def do_search(self, metadata):
     keys = []
     if metadata.title:
         keys.append(metadata.title)
     if metadata.artist:
         keys.append(metadata.artist)
     urlkey = ensure_utf8('+'.join(keys)).replace(' ', '+')
     url = XIAMI_HOST + XIAMI_SEARCH_URL
     status, content = http_download(url=url,
                                     params={'key': urlkey},
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, '')
     match = XIAMI_SEARCH_PATTERN.findall(content)
     result = []
     if match:
         for title_elem, id, artist_elem, album_elem in match:
             title = TITLE_ATTR_PATTERN.search(title_elem).group(1)
             artist = TITLE_ATTR_PATTERN.search(artist_elem).group(1)
             album = TITLE_ATTR_PATTERN.search(album_elem).group(1)
             url = self.get_url(id)
             if url is not None:
                 result.append(SearchResult(title=title,
                                            artist=artist,
                                            album=album,
                                            sourceid=self.id,
                                            downloadinfo=url))
     return result
Example #5
    def do_search(self, metadata):
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        urlkey = (' '.join(keys))
        params = {'keyword': urlkey,
                  'field': 'all'}
        try:
            status, content = http_download(
                url=HOST + '/',
                params=params,
                proxy=get_proxy_settings(config=self.config_proxy))
        except pycurl.error as e:
            logging.error('Download failed. %s', e.args[1])
            return []

        if status < 200 or status >= 400:
            raise httplib.HTTPException(status)
        match = RESULT_PATTERN.findall(content)
        result = []
        if match:
            for artist, album, title, url in match:
                title = title.replace('<span class="highlighter">', '').replace('</span>', '')
                artist = artist.replace('<span class="highlighter">', '').replace('</span>', '')
                album = album.replace('<span class="highlighter">', '').replace('</span>', '')
                url = DOWNLOAD_URL_PREFIX + url
                result.append(SearchResult(title=title,
                                           artist=artist,
                                           album=album,
                                           sourceid=self.id,
                                           downloadinfo=url))
        return result
Example #6
 def do_download(self, downloadinfo):
     # type: (Any) -> bytes
     status, content = http_download(url=HOST + downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
     return content
Example #7
    def do_search(self, metadata):
        # type: (osdlyrics.metadata.Metadata) -> List[SearchResult]
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        url = NETEASE_HOST + NETEASE_SEARCH_URL
        urlkey = '+'.join(keys).replace(' ', '+')
        params = 's=%s&type=1' % urlkey

        status, content = http_download(url=url,
                                        method='POST',
                                        params=params.encode('utf-8'),
                                        proxy=get_proxy_settings(self.config_proxy))

        if status < 200 or status >= 400:
            raise http.client.HTTPException(status, '')

        def map_func(song):
            if song['artists']:
                artist_name = song['artists'][0]['name']
            else:
                artist_name = ''
            url = NETEASE_HOST + NETEASE_LYRIC_URL + '?id=' + str(song['id']) + '&lv=-1&kv=-1&tv=-1'
            return SearchResult(title=song['name'],
                                artist=artist_name,
                                album=song['album']['name'],
                                sourceid=self.id,
                                downloadinfo=url)

        parsed = json.loads(content.decode('utf-8'))
        result = list(map(map_func, parsed['result']['songs']))

        return result
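
Because `http_download` returns a plain `(status, content)` tuple, a search method like the one above can be exercised offline by patching it with `unittest.mock`. The module name `netease`, the class name `NeteaseSource`, and the use of a bare namespace in place of a real plugin instance are all assumptions made for this sketch.

# Offline exercise of do_search (a sketch under the assumptions stated above).
import json
from types import SimpleNamespace
from unittest import mock

import netease  # hypothetical module containing the do_search shown above

# Fake NetEase response shaped after the keys the example reads.
fake_body = json.dumps({
    'result': {
        'songCount': 1,
        'songs': [{'id': 1, 'name': 'Title',
                   'artists': [{'name': 'Artist'}],
                   'album': {'name': 'Album'}}],
    }
}).encode('utf-8')

with mock.patch.object(netease, 'http_download', return_value=(200, fake_body)), \
     mock.patch.object(netease, 'get_proxy_settings', return_value=None):
    # A bare namespace stands in for the plugin instance, since do_search
    # only touches self.id and self.config_proxy.
    fake_self = SimpleNamespace(id='netease', config_proxy=None)
    results = netease.NeteaseSource.do_search(
        fake_self, SimpleNamespace(title='Title', artist='Artist'))
    assert len(results) == 1
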
Example #8
    def do_search(self, metadata):
        # type: (osdlyrics.metadata.Metadata) -> List[SearchResult]
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        urlkey = (' '.join(keys))
        params = {'keyword': urlkey,
                  'field': 'all'}
        try:
            status, content = http_download(
                url=HOST + '/',
                params=params,
                proxy=get_proxy_settings(config=self.config_proxy))
        except pycurl.error as e:
            logging.error('Download failed. %s', e.args[1])
            return []

        if status < 200 or status >= 400:
            raise http.client.HTTPException(status)
        match = RESULT_PATTERN.findall(content)
        result = []
        if match:
            for artist, album, title, url in match:
                title = title.replace('<span class="highlighter">', '').replace('</span>', '')
                artist = artist.replace('<span class="highlighter">', '').replace('</span>', '')
                album = album.replace('<span class="highlighter">', '').replace('</span>', '')
                url = DOWNLOAD_URL_PREFIX + url
                result.append(SearchResult(title=title,
                                           artist=artist,
                                           album=album,
                                           sourceid=self.id,
                                           downloadinfo=url))
        return result
Example #9
 def do_download(self, downloadinfo):
     if not isinstance(downloadinfo, str) and not isinstance(downloadinfo, unicode):
         raise TypeError("Expect the downloadinfo as a string of url, but got type ", type(downloadinfo))
     status, content = http_download(url=HOST + downloadinfo, proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, "")
     return content
Example #10
    def do_search(self, metadata):
        keys = []
        if metadata.title:
            keys.append(metadata.title)
        if metadata.artist:
            keys.append(metadata.artist)
        urlkey = " ".join(keys)
        params = {"keyword": urlkey, "field": "all"}
        try:
            status, content = http_download(
                url=HOST + "/", params=params, proxy=get_proxy_settings(config=self.config_proxy)
            )
        except pycurl.error as e:
            logging.error("Download failed. %s", e.args[1])
            return []

        if status < 200 or status >= 400:
            raise httplib.HTTPException(status)
        match = RESULT_PATTERN.findall(content)
        result = []
        if match:
            for artist, album, title, url in match:
                title = title.replace('<span class="highlighter">', "").replace("</span>", "")
                artist = artist.replace('<span class="highlighter">', "").replace("</span>", "")
                album = album.replace('<span class="highlighter">', "").replace("</span>", "")
                url = DOWNLOAD_URL_PREFIX + url
                result.append(SearchResult(title=title, artist=artist, album=album, sourceid=self.id, downloadinfo=url))
        return result
Example #11
 def do_download(self, downloadinfo):
     # type: (Any) -> bytes
     # `downloadinfo` is what you set in SearchResult
     status, content = http_download(url=downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
     return content
Example #12
 def do_download(self, downloadinfo):
     # type: (Any) -> bytes
     status, content = http_download(url=HOST + downloadinfo,
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
     return content
Example #13
    def real_search(self, title='', artist='', page=0):
        query = VIEWLYRICS_QUERY_FORM
        query = query.replace('%title', title)
        query = query.replace('%artist', artist)
        query = ensure_utf8(
            query.replace('%etc', ' client=\"MiniLyrics\" RequestPage=\'%d\'' %
                          page))  # Needs real RequestPage

        queryhash = hashlib.md5()
        queryhash.update(query)
        queryhash.update(VIEWLYRICS_KEY)

        masterquery = '\2\0\4\0\0\0' + queryhash.digest() + query

        url = VIEWLYRICS_HOST + VIEWLYRICS_SEARCH_URL
        status, content = http_download(url=url,
                                        method='POST',
                                        params=masterquery,
                                        proxy=get_proxy_settings(
                                            self.config_proxy))

        if status < 200 or status >= 400:
            raise httplib.HTTPException(status, '')

        contentbytes = map(ord, content)
        codekey = contentbytes[1]
        deccontent = ''
        for char in contentbytes[22:]:
            deccontent += chr(char ^ codekey)

        result = []
        pagesleft = 0
        tagreturn = parseString(deccontent).getElementsByTagName('return')[0]
        if tagreturn:
            pagesleftstr = self.alternative_gettagattribute(
                tagreturn.attributes.items(),
                'PageCount')  #tagreturn.attributes['PageCount'].value
            if pagesleftstr == '':
                pagesleft = 0
            else:
                pagesleft = int(pagesleftstr)
            tagsfileinfo = tagreturn.getElementsByTagName('fileinfo')
            if tagsfileinfo:
                for onefileinfo in tagsfileinfo:
                    if onefileinfo.hasAttribute('link'):
                        title = onefileinfo.getAttribute('title')
                        artist = onefileinfo.getAttribute('artist')
                        album = onefileinfo.getAttribute('album')
                        url = VIEWLYRICS_BASE_LRC_URL + onefileinfo.getAttribute(
                            'link')
                        result.append(
                            SearchResult(title=title,
                                         artist=artist,
                                         album=album,
                                         sourceid=self.id,
                                         downloadinfo=url))
        return result, (pagesleft - page)
Example #14
 def do_download(self, downloadinfo):
     # type: (Any) -> bytes
     # `downloadinfo` is what you set in SearchResult
     status, content = http_download(url=downloadinfo,
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
     return content
Example #15
 def do_download(self, downloadinfo):
     if not isinstance(downloadinfo, str) and \
             not isinstance(downloadinfo, unicode):
         raise TypeError('Expect the downloadinfo as a string of url, but got type ',
                         type(downloadinfo))
     status, content = http_download(url=HOST+downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, '')
     return content
Example #16
 def get_songid(self, id):
     status, content = http_download(url=XIAMI_HOST + XIAMI_SONG_URL + str(id),
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         return None
     match = XIAMI_ID_PATTERN.search(content)
     if not match:
         return None
     songid = match.group(1).strip()
     return songid
Example #17
 def get_songid(self, id):
     status, content = http_download(url=XIAMI_HOST + XIAMI_SONG_URL + str(id),
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         return None
     match = XIAMI_ID_PATTERN.search(content)
     if not match:
         return None
     songid = match.group(1).strip()
     return songid
Example #18
 def do_download(self, downloadinfo):
     if not isinstance(downloadinfo, str) and not isinstance(downloadinfo, unicode):
         raise TypeError("Expect the downloadinfo as a string of url, but got type ", type(downloadinfo))
     parts = urlparse.urlparse(downloadinfo)
     status, content = http_download(downloadinfo, proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status)
     if content:
         content = HTMLParser.HTMLParser().unescape(content.decode("utf-8"))
     return content.encode("utf-8")
Example #19
 def do_download(self, downloadinfo):
     # type: (Any) -> bytes
     # parts = urlparse.urlparse(downloadinfo)
     status, content = http_download(downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status)
     if content:
         content = html.parser.HTMLParser().unescape(content.decode('utf-8'))
     return content.encode('utf-8')
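
Both download handlers above unescape HTML entities via `HTMLParser().unescape()`, which was deprecated in Python 3.4 and removed in 3.9; the module-level `html.unescape()` is the drop-in replacement, for example:

# html.unescape() replaces the removed HTMLParser().unescape() method.
import html

raw = 'Don&#39;t Stop Me Now &amp; more'
print(html.unescape(raw))  # -> Don't Stop Me Now & more
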
Example #20
 def do_search(self, metadata):
     # Preparing keywords for search.
     keys = []
     if metadata.artist:
         keys.append('"' + metadata.artist + '"')
     if metadata.title:
         keys.append('"' + metadata.title + '"')
     # Joining search terms.
     urlkey = '+'.join(keys).replace(' ', '+')
     # Building the URL.
     url = MEGALOBIZ_HOST + MEGALOBIZ_SEARCH_PATH
     # Request the HTTP page, storing its status and content.
     status, content = http_download(url=url,
                                     params={'qry': urlkey},
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     # Checking against HTTP response codes.
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
      # Filter all occurrences of links belonging to search results.
     matches = MEGALOBIZ_SEARCH_RESULT_PATTERN.findall(
         content.decode('utf8'))
     # Populate osdlyrics' search results for the user to choose from.
     result = []
     if matches:
         for match in matches:
             # The first three groups in a match are
             # lyric_id, full_name (artist + song title) and lyric path.
             # lyric_id = match[0]
             full_name = match[1]
             path = match[2]
             # Decompose optimistically the full_name into artist and song title
             name_parts = MEGALOBIZ_TITLE_ARTIST_SPLIT_PATTERN.findall(
                 full_name)
              if isinstance(name_parts, list) and len(name_parts) > 0:
                  name_parts = name_parts[0]
                  title = name_parts[1] == ' by ' and name_parts[0] or name_parts[2]
                  artist = name_parts[1] == ' by ' and name_parts[2] or name_parts[0]
             # In case we can't split, use the full string as title.
             else:
                 title = full_name
                 artist = ''
             title = html.unescape(title)
             artist = html.unescape(artist)
             lyric_url = MEGALOBIZ_HOST + path
             if path is not None:
                 result.append(
                     SearchResult(title=title,
                                  artist=artist,
                                  sourceid=self.id,
                                  downloadinfo=lyric_url))
     return result
Example #21
    def do_download(self, downloadinfo):
        # type: (Any) -> bytes
        status, content = http_download(url=downloadinfo,
                                        proxy=get_proxy_settings(self.config_proxy))
        if status < 200 or status >= 400:
            raise http.client.HTTPException(status)

        parsed = json.loads(content.decode('utf-8'))
        if 'nolyric' in parsed:
            raise ValueError('This item has no lyrics.')
        lyric = parsed['lrc']['lyric']
        return lyric.encode('utf-8')
Example #22
 def do_download(self, downloadinfo):
     # return a string
     # downloadinfo is what you set in SearchResult
     if not isinstance(downloadinfo, str) and \
             not isinstance(downloadinfo, unicode):
         raise TypeError('Expect the downloadinfo as a string of url, but got type ',
                         type(downloadinfo))
     status, content = http_download(url=downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, '')
     return content
Example #23
 def do_download(self, downloadinfo):
     if not isinstance(downloadinfo, str) and \
             not isinstance(downloadinfo, unicode):
         raise TypeError('Expect the downloadinfo as a string of url, but got type ',
                         type(downloadinfo))
     # parts = urlparse.urlparse(downloadinfo)
     status, content = http_download(downloadinfo,
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status)
     if content:
         content = HTMLParser.HTMLParser().unescape(content.decode('utf-8'))
     return content.encode('utf-8')
Example #24
 def get_url(self, id):
     status, content = http_download(url=XIAMI_HOST + XIAMI_LRC_URL + str(id),
                                     proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         return None
     match = XIAMI_URL_PATTERN.search(content)
     if not match:
         return None
     url = match.group(1).strip()
     if url.lower().endswith('.lrc'):
         return url
     else:
         return None
Example #25
 def get_url(self, id):
     status, content = http_download(
         url=XIAMI_HOST + XIAMI_LRC_URL + str(id),
         proxy=get_proxy_settings(self.config_proxy))
     if status < 200 or status >= 400:
         return None
     match = XIAMI_URL_PATTERN.search(content)
     if not match:
         return None
     url = match.group(1).strip()
     if url.lower().endswith('.lrc'):
         return url
     else:
         return None
Example #26
    def do_download(self, downloadinfo):
        if not isinstance(downloadinfo, str) and \
                not isinstance(downloadinfo, unicode):
            raise TypeError('Expect the downloadinfo as a string of url, but got type ',
                            type(downloadinfo))

        status, content = http_download(url=downloadinfo,
                                        proxy=get_proxy_settings(self.config_proxy))
        if status < 200 or status >= 400:
            raise httplib.HTTPException(status)

        parsed = json.loads(content)
        lyric = parsed['lrc']['lyric']
        return lyric
Example #27
 def real_search(self, title='', artist='', page=0):
     query = VIEWLYRICS_QUERY_FORM
     query = query.replace('%title', title)
     query = query.replace('%artist', artist)
     query = ensure_utf8(query.replace('%etc', ' client=\"MiniLyrics\" RequestPage=\'%d\'' % page))  # Needs real RequestPage
     
     queryhash = hashlib.md5()
     queryhash.update(query)
     queryhash.update(VIEWLYRICS_KEY)
     
     masterquery = '\2\0\4\0\0\0' + queryhash.digest() + query
     
     url = VIEWLYRICS_HOST + VIEWLYRICS_SEARCH_URL
     status, content = http_download(url=url,
                                     method='POST',
                                     params=masterquery,
                                     proxy=get_proxy_settings(self.config_proxy))
     
     if status < 200 or status >= 400:
         raise httplib.HTTPException(status, '')
     
     contentbytes = map(ord, content)
     codekey = contentbytes[1]
     deccontent = ''
     for char in contentbytes[22:]:
         deccontent += unichr(char ^ codekey)
     
     result = []
     pagesleft = 0
     tagreturn = parseString(deccontent).getElementsByTagName('return')[0]
     if tagreturn:
             pagesleftstr = self.alternative_gettagattribute(tagreturn.attributes.items(), 'PageCount') #tagreturn.attributes['PageCount'].value
             if pagesleftstr == '':
                 pagesleft = 0
             else:
                 pagesleft = int(pagesleftstr)
             tagsfileinfo = tagreturn.getElementsByTagName('fileinfo')
             if tagsfileinfo:
                 for onefileinfo in tagsfileinfo:
                     if onefileinfo.hasAttribute('link'):
                         title = onefileinfo.getAttribute('title')
                         artist = onefileinfo.getAttribute('artist')
                         album = onefileinfo.getAttribute('album')
                         url = VIEWLYRICS_BASE_LRC_URL + onefileinfo.getAttribute('link')
                         result.append(SearchResult(title=title,
                                                    artist=artist,
                                                    album=album,
                                                    sourceid=self.id,
                                                    downloadinfo=url))
     return result, (pagesleft - page)
Example #28
 def do_download(self, downloadinfo):
     status, content = http_download(downloadinfo,
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     # Checking against HTTP response codes.
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status)
     # Checking the presence of a HTTP payload.
     if content:
         content = S4S_LRC_PATTERN.search(content.decode('utf8')).group(2)
         # Replacing html breaks with new lines.
         content = re.sub('<br />', "\n", content)
         # Debrand the lyrics.
         content = re.sub(BRAND_PATTERN, " ", content, 0, re.MULTILINE)
     return content.encode('utf-8')
Example #29
    def real_search(self, title='', artist='', page=0):
        query = VIEWLYRICS_QUERY_FORM
        query = query.replace('%title', title)
        query = query.replace('%artist', artist)
        query = query.replace('%etc',
                              ' client="MiniLyrics" RequestPage=\'%d\'' %
                              page)  # Needs real RequestPage
        query = query.encode('utf-8')

        queryhash = hashlib.md5()
        queryhash.update(query)
        queryhash.update(VIEWLYRICS_KEY)

        masterquery = b'\2\0\4\0\0\0' + queryhash.digest() + query

        url = VIEWLYRICS_HOST + VIEWLYRICS_SEARCH_URL
        status, content = http_download(url=url,
                                        method='POST',
                                        params=masterquery,
                                        proxy=get_proxy_settings(
                                            self.config_proxy))

        if status < 200 or status >= 400:
            raise http.client.HTTPException(status, '')

        contentbytes = bytearray(content)
        codekey = contentbytes[1]
        deccontent = bytes(map(codekey.__xor__, contentbytes[22:]))

        root = xet.fromstring(deccontent)  # tagName == 'return'
        pagesleft = int(
            next(
                (v
                 for k, v in root.items() if k.lower() == 'PageCount'.lower()),
                0))
        result = [
            SearchResult(
                title=entry.get('title', ''),
                artist=entry.get('artist', ''),
                album=entry.get('album', ''),
                sourceid=self.id,
                downloadinfo=VIEWLYRICS_BASE_LRC_URL + entry.get('link'),
            ) for entry in root.findall('fileinfo[@link]')
        ]
        return result, (pagesleft - page)
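
The decoding step shared by the `real_search` variants (byte 1 of the response carries a single-byte XOR key, and the obfuscated payload starts at offset 22) can be illustrated in isolation. The header layout below mirrors what the code above assumes; the sample payload itself is fabricated for the demonstration.

# Stand-alone illustration of the XOR de-obfuscation used above; sample bytes are made up.
key = 0x5A
header = bytes([0x02, key]) + bytes(20)                 # 22-byte header, key in byte 1
payload = bytes(b ^ key for b in b'<return PageCount="1"/>')
response = header + payload

contentbytes = bytearray(response)
codekey = contentbytes[1]
deccontent = bytes(map(codekey.__xor__, contentbytes[22:]))
print(deccontent.decode('utf-8'))                       # -> <return PageCount="1"/>
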
Example #30
 def do_download(self, downloadinfo):
     ''' Download handler for this data source.
     '''
     status, content = http_download(downloadinfo,
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     # Checking against HTTP response codes.
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status)
     # Checking the presence of a HTTP payload.
     if content:
         content = MEGALOBIZ_LRC_PATTERN.search(
             content.decode('utf-8')).group(1)
         # Replacing html breaks with new lines.
         content = re.sub('<br>', "\n", content)
         # Replace html entities present in the lyric content.
         content = html.unescape(content)
     return content.encode('utf-8')
Example #31
    def do_download(self, downloadinfo):
        # type: (Any) -> bytes
        status, content = http_download(url=downloadinfo,
                                        proxy=get_proxy_settings(self.config_proxy))
        if status < 200 or status >= 400:
            raise http.client.HTTPException(status)

        parsed = json.loads(content.decode('utf-8'))
        # Avoid processing results with no lyrics.
        if 'nolyric' in parsed or 'uncollected' in parsed:
            raise ValueError('This item has no lyrics.')

        if self.attempt_use_translation:
            lyric = parsed['tlyric']['lyric']
            if not lyric:
                lyric = parsed['lrc']['lyric']
        else:
            lyric = parsed['lrc']['lyric']

        return lyric.encode('utf-8')
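
Judging only from the keys this handler reads, the lyric payload is a JSON object with optional `nolyric`/`uncollected` markers, an `lrc.lyric` field, and a `tlyric.lyric` translation. The fabricated dictionary below shows how the translation preference falls back to the original lyric when the translation is empty:

# Payload shape inferred from the keys read above; the values are invented.
parsed = {
    'lrc': {'lyric': '[00:01.00]original line'},
    'tlyric': {'lyric': ''},   # empty translation -> fall back to the original
}

attempt_use_translation = True
if attempt_use_translation:
    lyric = parsed['tlyric']['lyric'] or parsed['lrc']['lyric']
else:
    lyric = parsed['lrc']['lyric']
print(lyric)  # -> [00:01.00]original line
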
Example #32
    def real_search(self, title='', artist='', page=0):
        query = VIEWLYRICS_QUERY_FORM
        query = query.replace('%title', title)
        query = query.replace('%artist', artist)
        query = query.replace('%etc', ' client="MiniLyrics" RequestPage=\'%d\'' % page)  # Needs real RequestPage
        query = query.encode('utf-8')

        queryhash = hashlib.md5()
        queryhash.update(query)
        queryhash.update(VIEWLYRICS_KEY)

        masterquery = b'\2\0\4\0\0\0' + queryhash.digest() + query

        url = VIEWLYRICS_HOST + VIEWLYRICS_SEARCH_URL
        status, content = http_download(url=url,
                                        method='POST',
                                        params=masterquery,
                                        proxy=get_proxy_settings(self.config_proxy))

        if status < 200 or status >= 400:
            raise http.client.HTTPException(status, '')

        contentbytes = bytearray(content)
        codekey = contentbytes[1]
        deccontent = bytes(map(codekey.__xor__, contentbytes[22:]))

        root = xet.fromstring(deccontent)  # tagName == 'return'
        pagesleft = int(next((v for k, v in root.items() if k.lower() == 'PageCount'.lower()), 0))
        result = [
            SearchResult(
                title=entry.get('title', ''),
                artist=entry.get('artist', ''),
                album=entry.get('album', ''),
                sourceid=self.id,
                downloadinfo=VIEWLYRICS_BASE_LRC_URL + entry.get('link'),
            )
            for entry in root.findall('fileinfo[@link]')
        ]
        return result, (pagesleft - page)
Example #33
 def do_search(self, metadata):
     # Preparing keywords for search.
     keys = []
     if metadata.artist:
         keys.append(metadata.artist)
     if metadata.title:
         keys.append(metadata.title)
     # Joining search terms.
     urlkey = '+'.join(keys).replace(' ', '+')
     # Building the URL.
     url = S4S_HOST + S4S_SEARCH_PATH
     # Request the HTTP page, storing its status and content.
     status, content = http_download(url=url,
                                     params={'q': urlkey},
                                     proxy=get_proxy_settings(
                                         self.config_proxy))
     # Checking against HTTP response codes.
     if status < 200 or status >= 400:
         raise http.client.HTTPException(status, '')
      # Filter all occurrences of links belonging to search results.
     matches = S4S_SEARCH_RESULT_PATTERN.findall(content.decode('utf8'))
     # Populate osdlyrics' search results for the user to choose from.
     result = []
     if matches:
         for match in matches:
             # Decompose the HTML matches into title, artist and url.
             title = unquote(TITLE_PATTERN.search(match).group(1))
             artist = unquote(ARTIST_PATTERN.search(match).group(1))
             # Build a download URL for this match.
             url = S4S_HOST + S4S_SUBTITLE_PATH + match + "&type=lrc"
             # Add the match metadata for the window with search results.
             if url is not None:
                 result.append(
                     SearchResult(title=title,
                                  artist=artist,
                                  sourceid=self.id,
                                  downloadinfo=url))
     return result
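
Most of the search methods on this page build their query string with `'+'.join(keys).replace(' ', '+')`. For reference, the standard-library `urllib.parse.quote_plus` produces the same '+'-separated form while also percent-escaping characters such as '&' that the hand-rolled join leaves untouched:

from urllib.parse import quote_plus

keys = ['Yellow Submarine', 'The Beatles']   # example metadata, made up
urlkey = '+'.join(keys).replace(' ', '+')
print(urlkey)                                # Yellow+Submarine+The+Beatles
print(quote_plus(' '.join(keys)))            # Yellow+Submarine+The+Beatles
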