Code example #1
File: listview.py Project: snail1994/PyEhViewer
 def open_url(self, sender):
     text = clipboard.get()
     try:
         verify_url(text)
     except:
         input = ''
     else:
         input = text
     url = console.input_alert('直接打开url', '', input)  # dialog title: "open URL directly"
     try:
         verify_url(url)
     except:
         console.hud_alert('URL错误', 'error')  # "URL error"
     else:
         galleryview(url)
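
Every snippet on this page uses verify_url the same way: it raises on anything that is not a gallery URL, and otherwise returns the "{gid}_{token}" string that doubles as the download folder name. A minimal sketch of such a helper, assuming e-hentai/exhentai gallery URLs of the form https://exhentai.org/g/<gid>/<token>/ (the actual implementation in PyEhViewer may differ):

import re

# Hypothetical re-implementation inferred from the call sites on this page;
# PyEhViewer's real verify_url may accept other URL forms or raise a different error.
GALLERY_URL_RE = re.compile(
    r'https?://(?:e-hentai|exhentai)\.org/g/(?P<gid>\d+)/(?P<token>[0-9a-f]+)/?')

def verify_url(url):
    """Return the '{gid}_{token}' folder name for a gallery URL, or raise ValueError."""
    m = GALLERY_URL_RE.match(url or '')
    if not m:
        raise ValueError('not a valid gallery URL: {!r}'.format(url))
    return '{}_{}'.format(m.group('gid'), m.group('token'))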
Code example #2
File: galleryview.py Project: snail1994/PyEhViewer
 def _search_old_version(self):
     parent_url = self.info.get('parent_url')
     if not parent_url:
         return 
     # verify_url() maps a gallery URL to its "{gid}_{token}" download folder name
     filename = verify_url(parent_url)
     clause = """SELECT DISTINCT gid||'_'||token
         FROM downloads
         WHERE gid = ?
         LIMIT 1
         """
     args = (filename[: filename.find('_')],)
     result = search(clause, args=args)
     if result:
         return filename
     else:
         # fall back to the newest older gallery from the same uploader with the same English title
         clause = """SELECT DISTINCT gid||'_'||token
         FROM downloads
         WHERE uploader=?
         AND english_title=?
         AND gid < ?
         ORDER BY gid DESC
         LIMIT 1
         """
         args = (
             self.info.get('uploader'),
             self.info.get('english_title'),
             self.info.get('gid')
             )
         result = search(clause, args=args)
         if result:
             return list(result[0].values())[0]
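
The search helper used above is not shown on this page. From its call sites (a SQL clause plus an args tuple, rows accessed here with .values() and by position in examples #12 and #14 below) it is presumably a thin wrapper around sqlite3 with parameter binding. A minimal sketch under that assumption, with a hypothetical DOWNLOADS_DB path; the project's own helper may return row objects rather than plain dicts:

import sqlite3

DOWNLOADS_DB = 'downloads.db'  # hypothetical path; PyEhViewer keeps its database elsewhere

def search(clause, args=()):
    # run a parameterized SELECT and return the rows as dicts keyed by column name
    with sqlite3.connect(DOWNLOADS_DB) as conn:
        conn.row_factory = sqlite3.Row
        rows = conn.execute(clause, args).fetchall()
    return [{key: row[key] for key in row.keys()} for row in rows]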
Code example #3
def update_infos():
    n = 1
    # `l` is assumed to be the list of downloaded gallery folder names under IMAGEPATH (defined elsewhere)
    for i in l:
        # print(n)
        p = os.path.join(IMAGEPATH, i, 'manga_infos.json')
        infos = json.loads(open(p).read())
        foldername = verify_url(infos['url'])
        # skip (and report) folders whose name no longer matches the URL stored in their manga_infos.json
        if foldername != i:
            print(i)
            continue
        parser = exhentaiparser.ExhentaiParser(
            cookies_dict=json.loads(open(COOKIES_FILE).read()))
        image_path = os.path.abspath(parser.storage_path)
        dl_path = os.path.join(image_path, foldername)
        thumbnails_dl_path = os.path.join(dl_path, 'thumbnails')
        if not os.path.exists(thumbnails_dl_path):
            os.mkdir(thumbnails_dl_path)
        manga_infos_file = os.path.join(image_path, foldername, 'manga_infos.json')
        # re-fetch and re-save the gallery info if it was saved by an older version
        if infos['version'] != VERSION:
            try:
                infos = parser.get_gallery_infos_mpv(infos['url'])
                parser.save_mangainfo(infos, dl_path)
            except:
                print('fail:' + i)
        n += 1
Code example #4
File: galleryview.py Project: snail1994/PyEhViewer
 def will_close(self):
     """What this does:
     - store the gallery in the db once it has been read
     - update manga_infos.json
     """
     # the folder holds more than just manga_infos.json and thumbnails/, i.e. at least one page was saved
     if len(list(Path(IMAGEPATH).joinpath(verify_url(self.info['url'])).iterdir())) > 2:
         insert_info(self.info)
     glv.PARSER.save_mangainfo(self.info, self.dl_path)
Code example #5
File: galleryview.py Project: dbdoer/PyEhViewer
 def _get_info(self, url, refresh=False):
     foldername = verify_url(url)
     dl_path = os.path.join(IMAGEPATH, foldername)
     manga_infos_file = os.path.join(dl_path, 'manga_infos.json')
     # reuse the cached manga_infos.json unless a refresh is explicitly requested
     if not refresh and os.path.exists(manga_infos_file):
         info = json.loads(open(manga_infos_file).read())
     else:
         info = glv.PARSER.get_gallery_infos_mpv(url)
         glv.PARSER.save_mangainfo(info, dl_path)
     return dl_path, info
Code example #6
File: mpv.py Project: dbdoer/PyEhViewer
 def xdid_load(self, url, page=0):
     self.url = url
     # fetch the gallery info and build the download thread list
     self.dlpath = os.path.join(IMAGEPATH, verify_url(self.url))
     manga_infos_file = os.path.join(self.dlpath, 'manga_infos.json')
     self.infos = json.loads(open(manga_infos_file).read())
     self.thread_list = glv.PARSER.start_download_mpv(self.infos['pics'], self.dlpath, start=False)
     # other bookkeeping
     self.page = page  # current page index (0-based), the central state variable
     self.length = int(self.infos['length'])  # total number of pages
     self['text_total_page'].text = str(self.length)
     self.thread_list.sort(key=self._sort_func)  # sort so that downloading starts from the current page
     self.refresh()
Code example #7
def fix_infos():
    # rewrite every saved manga_infos.json so that its url points at exhentai instead of e-hentai
    parser = exhentaiparser.ExhentaiParser(
        cookies_dict=json.loads(open(COOKIE_FILE).read()))
    n = 1
    for i in l:
        # print(n)
        p = os.path.join(IMAGEPATH, i, 'manga_infos.json')
        infos = json.loads(open(p).read())
        foldername = verify_url(infos['url'])
        image_path = os.path.abspath(parser.storage_path)
        dl_path = os.path.join(image_path, foldername)
        infos['url'] = infos['url'].replace('e-hentai', 'exhentai')
        parser.save_mangainfo(infos, dl_path)
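
Both standalone maintenance scripts above (update_infos and fix_infos) rely on module-level names defined elsewhere: the folder list l and constants such as IMAGEPATH and the cookies file. A plausible setup, assuming the download folders sit directly below IMAGEPATH; every name and path here is illustrative only:

import os

# Hypothetical setup for the two scripts above; the originals define IMAGEPATH,
# COOKIES_FILE/COOKIE_FILE and l elsewhere, possibly with different values.
IMAGEPATH = os.path.expanduser('~/Documents/PyEhViewer/images')
COOKIES_FILE = os.path.expanduser('~/Documents/PyEhViewer/cookies.json')
l = sorted(
    name for name in os.listdir(IMAGEPATH)
    if os.path.isdir(os.path.join(IMAGEPATH, name)))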
Code example #8
File: listview.py Project: snail1994/PyEhViewer
 def delete_download(self, sender):
     # remember the scroll offset so the reading position can be restored after the refresh
     content_offset = self['scrollview'].content_offset
     foldername = verify_url(sender.superview.url)
     folderpath = os.path.join(IMAGEPATH, foldername)
     title = sender.superview['label_title'].text
     t = console.alert('确认删除?', title, 'Yes')  # "confirm deletion?"
     if t == 1:
         shutil.rmtree(folderpath)
         gid = foldername[:foldername.find('_')]
         delete_by_gid(gid)
         self.refresh()
         self['scrollview'].content_offset = content_offset
         console.hud_alert('已删除')  # "deleted"
Code example #9
File: galleryview.py Project: AtumGame/PyEhViewer
 def confirm_rate_gallery(self, sender):
     rating = sender.superview['label1'].text
     try:
         glv.PARSER.rate_gallery(rating, self.info['apikey'],
                                 self.info['apiuid'], self.info['gid'],
                                 self.info['token'])
     except:
         console.hud_alert('失败', 'error')  # "failed"
     else:
         self.info['is_personal_rating'] = True
         self.info['display_rating'] = rating
         self['gallery_info_view'].refresh()
         glv.PARSER.save_mangainfo(
             self.info,
             os.path.join(glv.PARSER.storage_path,
                          verify_url(self.info['url'])))
         sender.superview.close()
Code example #10
File: galleryview.py Project: AtumGame/PyEhViewer
 def confirm_favorite(self, sender):
     if len(sender.superview['textview1'].text.encode('utf-8')) > 200:
         console.hud_alert('Favorite Notes超字数限制', 'error')  # "Favorite Notes exceeds the length limit"
     else:
         t = sender.superview.new_favcat_selected
         try:
             glv.PARSER.add_fav(self.info['url'],
                                favcat=t,
                                favnote=sender.superview['textview1'].text,
                                old_is_favorited=self.old_is_favorited)
         except:
             console.hud_alert('失败', 'error')  # "failed"
         else:
             if t != 'favdel':  # anything other than 'favdel' adds/updates the favorite; 'favdel' removes it
                 self.info['favcat'] = t
                 self.info['favcat_title'] = sender.superview[
                     'tableview1'].data_source.items[sender.superview[
                         'tableview1'].selected_row[1]]['title']
                 self['gallery_info_view'][
                     'label_favorite_title'].background_color = get_color_from_favcat(
                         self.info['favcat'])
                 self['gallery_info_view'][
                     'label_favorite_title'].text = self.info[
                         'favcat_title']
                 self['gallery_info_view'][
                     'label_favorite_title'].text_color = 'white'
             else:
                 self.info['favcat'] = None
                 self.info['favcat_title'] = None
                 self['gallery_info_view'][
                     'label_favorite_title'].background_color = 'white'
                 self['gallery_info_view'][
                     'label_favorite_title'].text = '未收藏'
                 self['gallery_info_view'][
                     'label_favorite_title'].text_color = 'black'
             glv.PARSER.save_mangainfo(
                 self.info,
                 os.path.join(glv.PARSER.storage_path,
                              verify_url(self.info['url'])))
             sender.superview.close()
Code example #11
File: galleryview.py Project: dbdoer/PyEhViewer
 def will_close(self):
     """What this does:
     - store the gallery in the db once it has been read
     """
     if len(list(Path(IMAGEPATH).joinpath(verify_url(self.info['url'])).iterdir())) > 2:
         insert_info(self.info)
Code example #12
File: galleryview.py Project: dbdoer/PyEhViewer
 def try_import_old_version(self, sender):
     def escape(keyword):
         # escape quotes and special characters before the values are interpolated into the SQL string below
         keyword = keyword.replace("/", "//")
         keyword = keyword.replace("'", "''")
         keyword = keyword.replace("[", "/[")
         keyword = keyword.replace("]", "/]")
         keyword = keyword.replace("%", "/%")
         keyword = keyword.replace("&", "/&")
         keyword = keyword.replace("_", "/_")
         keyword = keyword.replace("(", "/(")
         keyword = keyword.replace(")", "/)")
         return keyword
     parent_url = self.info.get('parent_url')
     foldername = verify_url(parent_url)
     if os.path.exists(os.path.join(IMAGEPATH, foldername)):
         old_dl_path, old_info = self._get_info(parent_url)
     else:
         clause = """SELECT DISTINCT gid||'_'||token
         FROM downloads
         WHERE uploader='{}'
         AND english_title='{}'
         AND gid < {}
         ORDER BY gid DESC
         LIMIT 1
         """
         clause = clause.format(
             escape(self.info.get('uploader')),
             escape(self.info.get('english_title')),
             self.info.get('gid')
             )
         print(clause)
         t = [i[0] for i in search(clause)]
         if t:
             old_dl_path = os.path.join(IMAGEPATH, t[0])
             manga_infos_file = os.path.join(old_dl_path, 'manga_infos.json')
             old_info = json.loads(open(manga_infos_file).read())
         else:
             console.hud_alert('未找到旧版本', 'error')  # "old version not found"
             return
     self.thread_list.clear()
     # map img_id -> file extension for every page already present in the old gallery folder
     imgid_extname_dict = dict([
         os.path.splitext(i)
         for i in os.listdir(old_dl_path)
         if i not in ['manga_infos.json', 'thumbnails']
         ])
     old_pics = dict([
         (i['key'], (i['img_id'], imgid_extname_dict[i['img_id']]))
         for i in old_info['pics']
         if i['img_id'] in imgid_extname_dict
         ])
     new_pics = dict([
         (i['key'], i['img_id'])
         for i in self.info['pics']
         ])
     # move pages (and their thumbnails) that exist in both versions from the old folder to the new one
     for key in set(old_pics.keys()) & set(new_pics.keys()):
         old_path = os.path.join(old_dl_path, old_pics[key][0] + old_pics[key][1])
         new_path = os.path.join(self.dl_path, new_pics[key] + old_pics[key][1])
         if os.path.exists(old_path) and not os.path.exists(new_path):
             shutil.move(old_path, new_path)
         old_thumbnail_path = os.path.join(old_dl_path, 'thumbnails', old_pics[key][0] + '.jpg')
         new_thumbnail_path = os.path.join(self.dl_path, 'thumbnails', new_pics[key] + '.jpg')
         if os.path.exists(old_thumbnail_path) and not os.path.exists(new_thumbnail_path):
             shutil.move(old_thumbnail_path, new_thumbnail_path)
     # finally drop the old gallery from the db and remove its folder
     delete_by_gid(old_info['gid'])
     shutil.rmtree(old_dl_path)
     self.thread_list = glv.PARSER.start_download_thumbnails(self.info['pics'], os.path.join(self.dl_path, 'thumbnails'), start=False)
Code example #13
File: galleryview.py Project: dbdoer/PyEhViewer
 def confirm_rate_gallery(self, sender):
     rating = sender.superview['label1'].text
     try:
         glv.PARSER.rate_gallery(rating, self.info['apikey'], self.info['apiuid'], self.info['gid'], self.info['token'])
     except:
         console.hud_alert('失败', 'error')  # "failed"
     else:
         self.info['is_personal_rating'] = True
         self.info['display_rating'] = rating
         self['gallery_info_view']['imageview_rank'].image = ui.Image.named('gui/fivestars_blue.png')
         rating = self.info['display_rating']
         x, y, w, h = self['gallery_info_view']['imageview_rank'].frame
         # the mask covers the right-hand part of the five-star image, leaving rating/5 of its width visible
         self['gallery_info_view']['imageview_mask'].frame = (x + w*float(rating)/5, y, w-w*float(rating)/5, h)
         glv.PARSER.save_mangainfo(self.info, os.path.join(glv.PARSER.storage_path, verify_url(self.info['url'])))
         sender.superview.close()
Code example #14
File: galleryview.py Project: AtumGame/PyEhViewer
 def try_import_old_version(self, sender):
     sender.hidden = True
     parent_url = self.info.get('parent_url')
     foldername = verify_url(parent_url)
     clause = """SELECT DISTINCT gid||'_'||token
         FROM downloads
         WHERE gid = ?
         LIMIT 1
         """
     args = (foldername[:foldername.find('_')], )
     t = [i[0] for i in search(clause, args=args)]
     if t:
         old_dl_path, old_info = self._get_info(parent_url)
     else:
         clause = """SELECT DISTINCT gid||'_'||token
         FROM downloads
         WHERE uploader=?
         AND english_title=?
         AND gid < ?
         ORDER BY gid DESC
         LIMIT 1
         """
         args = (self.info.get('uploader'), self.info.get('english_title'),
                 self.info.get('gid'))
         t = [i[0] for i in search(clause, args=args)]
         if t:
             old_dl_path = os.path.join(IMAGEPATH, t[0])
             manga_infos_file = os.path.join(old_dl_path,
                                             'manga_infos.json')
             old_info = json.loads(open(manga_infos_file).read())
         else:
             console.hud_alert('未找到旧版本', 'error')  # "old version not found"
             return
     self.thread_list.clear()
     imgid_extname_dict = dict([
         os.path.splitext(i) for i in os.listdir(old_dl_path)
         if i not in ['manga_infos.json', 'thumbnails']
     ])
     old_pics = dict([
         (i['key'], (i['img_id'], imgid_extname_dict[i['img_id']]))
         for i in old_info['pics'] if i['img_id'] in imgid_extname_dict
     ])
     new_pics = dict([(i['key'], i['img_id']) for i in self.info['pics']])
     for key in set(old_pics.keys()) & set(new_pics.keys()):
         old_path = os.path.join(old_dl_path,
                                 old_pics[key][0] + old_pics[key][1])
         new_path = os.path.join(self.dl_path,
                                 new_pics[key] + old_pics[key][1])
         if os.path.exists(old_path) and not os.path.exists(new_path):
             shutil.move(old_path, new_path)
         old_thumbnail_path = os.path.join(old_dl_path, 'thumbnails',
                                           old_pics[key][0] + '.jpg')
         new_thumbnail_path = os.path.join(self.dl_path, 'thumbnails',
                                           new_pics[key] + '.jpg')
         if (os.path.exists(old_thumbnail_path)
                 and not os.path.exists(new_thumbnail_path)):
             shutil.move(old_thumbnail_path, new_thumbnail_path)
     delete_by_gid(old_info['gid'])
     shutil.rmtree(old_dl_path)
     self.thread_list = glv.PARSER.start_download_thumbnails(
         self.info['pics'],
         os.path.join(self.dl_path, 'thumbnails'),
         start=False)