def ffmpeg_listener(**arg):
    #logger.debug(arg)
    import ffmpeg
    refresh_type = None
    if arg['type'] == 'status_change':
        if arg['status'] == ffmpeg.Status.DOWNLOADING:
            pass
        elif arg['status'] == ffmpeg.Status.COMPLETED:
            pass
        elif arg['status'] == ffmpeg.Status.READY:
            pass
    elif arg['type'] == 'last':
        LogicQueue.current_ffmpeg_count -= 1
    elif arg['type'] == 'log':
        pass
    elif arg['type'] == 'normal':
        pass
    if refresh_type is not None:
        pass
    entity = QueueEntity.get_entity_by_entity_id(arg['plugin_id'])
    if entity is None:
        return
    entity.ffmpeg_arg = arg
    entity.ffmpeg_status = int(arg['status'])
    entity.ffmpeg_status_kor = str(arg['status'])
    entity.ffmpeg_percent = arg['data']['percent']
    import plugin
    arg['status'] = str(arg['status'])
    plugin.socketio_callback('status', arg)
def ffmpeg_listener(**arg):
    #logger.debug(arg)
    import ffmpeg
    refresh_type = None
    if arg['type'] == 'status_change':
        if arg['status'] == ffmpeg.Status.DOWNLOADING:
            episode = db.session.query(ModelAni24).filter_by(episodecode=arg['plugin_id']).with_for_update().first()
            if episode:
                episode.ffmpeg_status = int(arg['status'])
                episode.duration = arg['data']['duration']
                db.session.commit()
        elif arg['status'] == ffmpeg.Status.COMPLETED:
            pass
        elif arg['status'] == ffmpeg.Status.READY:
            pass
    elif arg['type'] == 'last':
        LogicQueue.current_ffmpeg_count -= 1
        episode = db.session.query(ModelAni24).filter_by(episodecode=arg['plugin_id']).with_for_update().first()
        # etc_abort codes: 1 = error/wrong url/wrong directory/exception, 2 = time over,
        # 3 = force stop, 4 = HTTP 403
        if arg['status'] == ffmpeg.Status.WRONG_URL or arg['status'] == ffmpeg.Status.WRONG_DIRECTORY or arg['status'] == ffmpeg.Status.ERROR or arg['status'] == ffmpeg.Status.EXCEPTION:
            episode.etc_abort = 1
        elif arg['status'] == ffmpeg.Status.USER_STOP:
            episode.user_abort = True
            logger.debug('Status.USER_STOP received..')
        if arg['status'] == ffmpeg.Status.COMPLETED:
            episode.completed = True
            episode.end_time = datetime.now()
            episode.download_time = (episode.end_time - episode.start_time).seconds
            episode.filesize = arg['data']['filesize']
            episode.filesize_str = arg['data']['filesize_str']
            episode.download_speed = arg['data']['download_speed']
            logger.debug('Status.COMPLETED received..')
        elif arg['status'] == ffmpeg.Status.TIME_OVER:
            episode.etc_abort = 2
        elif arg['status'] == ffmpeg.Status.PF_STOP:
            episode.pf = int(arg['data']['current_pf_count'])
            episode.pf_abort = 1
        elif arg['status'] == ffmpeg.Status.FORCE_STOP:
            episode.etc_abort = 3
        elif arg['status'] == ffmpeg.Status.HTTP_FORBIDDEN:
            episode.etc_abort = 4
        db.session.commit()
        logger.debug('LAST commit %s', arg['status'])
    elif arg['type'] == 'log':
        pass
    elif arg['type'] == 'normal':
        pass
    if refresh_type is not None:
        pass
    entity = QueueEntity.get_entity_by_entity_id(arg['plugin_id'])
    if entity is None:
        return
    entity.ffmpeg_arg = arg
    entity.ffmpeg_status = int(arg['status'])
    entity.ffmpeg_status_kor = str(arg['status'])
    entity.ffmpeg_percent = arg['data']['percent']
    import plugin
    arg['status'] = str(arg['status'])
    plugin.socketio_callback('status', arg)
def download(entity):
    import plugin
    from .logic_queue import QueueEntityEpisode
    LogicNormal.stop_flag = False
    try:
        if entity.auto:
            flag = LogicNormal.is_exist_download_list(entity.title)
        else:
            flag = True
        if flag:
            if entity.wr_id is None:
                LogicNormal.make_episode_list_from_manga_id(entity)
                plugin.socketio_callback('queue_one', entity.as_dict(), encoding=False)
            else:
                entity.add(entity.wr_id)
                plugin.socketio_callback('queue_one', entity.as_dict(), encoding=False)
            for e in entity.episodes:
                m = ModelManamoaItem.get(e.wr_id)
                if m is None:
                    #if LogicNormal.episode_download(e):
                    if LogicNormal.episode_download_old(e):
                        ModelManamoaItem.save(e)
                else:
                    e.title = m.title
                    e.status = '이미 받음'  # already downloaded
                plugin.socketio_callback('episode', e.as_dict(), encoding=False)
                if LogicNormal.stop_flag:
                    break
            entity.status = '완료'  # completed
            plugin.socketio_callback('queue_one', entity.as_dict(), encoding=False)
        else:
            entity.status = '제외'  # excluded
            plugin.socketio_callback('queue_one', entity.as_dict(), encoding=False)
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
    return None
def ffmpeg_listener(**arg):
    #logger.debug(arg)
    import ffmpeg
    refresh_type = None
    if arg['type'] == 'status_change':
        if arg['status'] == ffmpeg.Status.DOWNLOADING:
            pass
        elif arg['status'] == ffmpeg.Status.COMPLETED:
            pass
        elif arg['status'] == ffmpeg.Status.READY:
            pass
    elif arg['type'] == 'last':
        LogicProgram.current_ffmpeg_count -= 1
    elif arg['type'] == 'log':
        pass
    elif arg['type'] == 'normal':
        pass
    if refresh_type is not None:
        pass
    entity = WavveProgramEntity.get_entity(arg['plugin_id'])
    if entity is None:
        return
    entity.ffmpeg_arg = arg
    entity.ffmpeg_status = int(arg['status'])
    entity.ffmpeg_status_kor = str(arg['status'])
    entity.ffmpeg_percent = arg['data']['percent']
    ### edit by lapis
    # persist completion once ffmpeg reports the finished state
    if entity.ffmpeg_status == 7 or \
            entity.ffmpeg_percent == 100 or \
            entity.ffmpeg_status_kor in ['완료']:
        entity.completed = True
        entity.completed_time = datetime.now().strftime('%m-%d %H:%M:%S')
        ModelWavveProgram.update(entity)
    ###
    import plugin
    arg['status'] = str(arg['status'])
    plugin.socketio_callback('status', arg)
def listener(*args, **kwargs):
    logger.debug(args)
    import plugin
    logger.debug(kwargs)
    if kwargs['type'] in ['recent_episode', 'all_download']:
        Logic.current_list = kwargs
        plugin.socketio_callback('on_connect', kwargs)
    elif kwargs['type'] == 'completed':
        plugin.socketio_callback('running_status', kwargs)
    elif kwargs['type'] == 'episode':
        if kwargs['status'] == 'ready':
            Logic.current_epi = None
            for entity in Logic.current_list['list']:
                #logger.debug(entity['url'])
                if entity['url'] == kwargs['url']:
                    Logic.current_epi = entity
                    break
        elif kwargs['status'] in ['episode_read_fail', 'downloaded']:
            Logic.current_epi['status'] = kwargs['status']
        elif kwargs['status'] == 'downloading':
            Logic.current_epi['epi_count'] = kwargs['epi_count']
            Logic.current_epi['epi_current'] = kwargs['epi_current']
            logger.debug('AAA %s %s', kwargs['epi_current'], Logic.current_epi['epi_current'])
            Logic.current_epi['percent'] = (kwargs['epi_current'] * 100 / (kwargs['epi_count'] - 1))
            logger.debug('== %s', Logic.current_epi)
        plugin.socketio_callback('status', Logic.current_epi)
    try:
        pass
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
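# Note on the percent calculation in the 'downloading' branch above: the operands are ints,
# so under Python 2 (which this plugin targets, given its ur'' literals elsewhere) the
# division truncates. Assuming epi_current is 0-based, so the last image has index
# epi_count - 1, a hypothetical 37-image episode at index 10 gives:
#   10 * 100 / (37 - 1) == 27   # truncated from 27.77..; reaches 100 only at the last index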
def add_queue(episode_id, episode_idx, episode_title, toon_title, toon_nickname):
    try:
        entity = ModelItem.init(episode_id, episode_idx, episode_title, toon_title, toon_nickname)
        if entity is not None:
            # drop any stale entry with the same id before re-queueing
            for idx, e in enumerate(LogicQueue.entity_list):
                if e['id'] == entity['id']:
                    del LogicQueue.entity_list[idx]
                    #return
            if entity['status'] <= 10:
                entity['str_status'] = u'대기'  # waiting
                LogicQueue.entity_list.append(entity)
                LogicQueue.download_queue.put(entity)
            else:
                if os.path.exists(entity['filename']):
                    entity['str_status'] = '파일 있음'  # file already exists
                LogicQueue.entity_list.append(entity)
            import plugin
            plugin.socketio_callback('queue_one', entity, encoding=False)
        return entity
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
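# A minimal usage sketch for add_queue above. All argument values are hypothetical; the
# returned entity dict (or None after an exception) is the same object broadcast to the
# web UI through the 'queue_one' socketio event.
entity = LogicQueue.add_queue('12345', 10, u'Episode 10', u'Example Toon', 'example_toon')
if entity is not None and entity['str_status'] == u'대기':
    # the entity was appended to LogicQueue.entity_list and placed on
    # LogicQueue.download_queue for the download worker (not shown here)
    pass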
def entity_update(cmd, entity):
    import plugin
    plugin.socketio_callback(cmd, entity, encoding=False)
def entity_update(entity):
    import plugin
    plugin.socketio_callback('queue_one', entity, encoding=False)
def episode_download_old(queue_entity_episode):
    import plugin
    wr_id = queue_entity_episode.wr_id
    logger.debug('Episode Download wr_id:%s', wr_id)
    try:
        url = '%s/bbs/board.php?bo_table=manga&wr_id=%s' % (ModelSetting.get('sitecheck'), wr_id)
        page_source2 = LogicNormal.pageparser(url)
        soup = BeautifulSoup(page_source2, 'html.parser')
        mangaid = soup.find('a', 'btn btn-color btn-sm')['href'].replace('/bbs/page.php?hid=manga_detail&manga_id=', '')
        queue_entity_episode.manga_id = mangaid
        mangascore = soup.find('span', 'count').text.replace('인기 : ', '')
        title = soup.title.text
        queue_entity_episode.title = LogicNormal.titlereplace(title)
        #match = re.compile(ur'(?P<main>.*?)((단행본.*?)?|특별편)?(\s(?P<sub>(\d|\-|\.)*?(화|권)))?(\s\(완결\))?\s?$').match(title)
        #match = re.compile(ur'(?P<main>.*?)((단행본.*?)?|특별편)?(\s(?P<sub>(\d|\-|\.)*?(화|권)))?(\-)?(전|후|중)?(\s\(완결\))?\s?$').match(title)
        match = re.compile(ur'(?P<main>.*?)((단행본.*?)?|특별편)?(\s(?P<sub>(\d|\-|\.)*?(화|권)))?(\-)?(전|후|중)?(\s?\d+(\-\d+)?화)?(\s\(완결\))?\s?$').match(title)
        if match:
            queue_entity_episode.maintitle = match.group('main').strip()
        else:
            match2 = re.compile(ur'(?P<main>.*?)\s시즌').match(title)
            if match2:
                queue_entity_episode.maintitle = match2.group('main').strip()
            else:
                queue_entity_episode.maintitle = title
                logger.debug('not match')
        queue_entity_episode.maintitle = LogicNormal.titlereplace(queue_entity_episode.maintitle)
        if ModelSetting.get('use_title_folder') == 'True':
            download_path = os.path.join(ModelSetting.get('dfolder'), queue_entity_episode.maintitle, queue_entity_episode.title)
        else:
            download_path = os.path.join(ModelSetting.get('dfolder'), queue_entity_episode.title)
        if page_source2 is None:
            logger.error('Source is None')
            queue_entity_episode.status = '실패'  # failed
            return False
        # pull the two inline image lists out of the page source
        page_count = page_source2.find('var img_list = [')
        page_count2 = page_source2.find(']', page_count)
        mangajpglist = page_source2[page_count + 16:page_count2].replace('\\', '').replace('"', '').split(',')
        page_count22 = page_source2.find('var img_list1 = [')
        page_count222 = page_source2.find(']', page_count22)
        mangajpglist2 = page_source2[page_count22 + 17:page_count222].replace('\\', '').replace('"', '').split(',')  # 'var img_list1 = [' is 17 characters
        tmp1 = page_source2.find('var view_cnt =')
        tmp1 = page_source2.find('=', tmp1) + 1
        tmp2 = page_source2.find(';', tmp1)
        view_cnt = int(page_source2[tmp1:tmp2].strip())
        wr_id = int(url.split('wr_id=')[1].split('&')[0])
        logger.debug('view_cnt :%s, wr_id:%s', view_cnt, wr_id)
        if view_cnt == 0:
            decoder = None
        else:
            from .decoder import Decoder
            decoder = Decoder(view_cnt, wr_id)
        queue_entity_episode.status = '다운로드중'  # downloading
        queue_entity_episode.total_image_count = len(mangajpglist)
        if not os.path.exists(download_path):
            os.makedirs(download_path)
        tmp = os.path.join(download_path, str(1).zfill(5) + '.jpg')
        logger.debug(mangajpglist[0])
        # probe which host prefix answers before downloading the whole list
        status_code = requests.get(mangajpglist[0]).status_code
        replace_rule = None
        if status_code == 200:
            replace_rule = ''
        else:
            status_code = requests.get(mangajpglist[0].replace('https://', 'https://s3.')).status_code
            if status_code == 200:
                replace_rule = 's3.'
            else:
                status_code = requests.get(mangajpglist[0].replace('https://', 'https://img.')).status_code
                if status_code == 200:
                    replace_rule = 'img.'
        logger.debug('Replace_rule : %s', replace_rule)
        if replace_rule is not None:
            for idx, tt in enumerate(mangajpglist):
                image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.jpg')
                LogicNormal.image_download(tt.replace('https://', 'https://%s' % replace_rule), image_filepath, decoder)
                queue_entity_episode.current_image_index = idx
                plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
        else:
            if mangajpglist[0].find('img.') != -1:
                for idx, tt in enumerate(mangajpglist):
                    image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.jpg')
                    downresult = LogicNormal.image_download(tt.replace('img.', 's3.'), image_filepath, decoder)
                    if downresult != 200 and mangajpglist2[0].find('google') != -1:
                        # fall back to the Google Drive mirror list for the whole episode
                        for idx, tt in enumerate(mangajpglist2):
                            image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.jpg')
                            gdd.download_file_from_google_drive(file_id=tt.replace('https://drive.google.com/uc?export=view&id=', ''), dest_path=image_filepath)
                            queue_entity_episode.current_image_index = idx
                            plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
                        break
                    queue_entity_episode.current_image_index = idx
                    plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
            else:
                for idx, tt in enumerate(mangajpglist):
                    image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.jpg')
                    downresult = LogicNormal.image_download(tt.replace('s3.', 'img.'), image_filepath, decoder)
                    if downresult != 200 and mangajpglist2[0].find('google') != -1:
                        for idx, tt in enumerate(mangajpglist2):
                            image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.jpg')
                            gdd.download_file_from_google_drive(file_id=tt.replace('https://drive.google.com/uc?export=view&id=', ''), dest_path=image_filepath)
                            queue_entity_episode.current_image_index = idx
                            plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
                        break
                    queue_entity_episode.current_image_index = idx
                    plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
        if ModelSetting.get('zip') == 'True':
            LogicNormal.makezip(download_path)
        queue_entity_episode.status = '완료'  # completed
        plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
        return True
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
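# -*- coding: utf-8 -*-
# A standalone sketch of the title-parsing regex shared by episode_download_old and
# episode_download (Python 2, matching the ur'' literals above). The sample title is
# hypothetical; 'main' becomes the maintitle/folder name and 'sub' carries the episode or
# volume number with its 화/권 suffix when present.
import re

pattern = re.compile(ur'(?P<main>.*?)((단행본.*?)?|특별편)?(\s(?P<sub>(\d|\-|\.)*?(화|권)))?(\-)?(전|후|중)?(\s?\d+(\-\d+)?화)?(\s\(완결\))?\s?$')
match = pattern.match(u'원피스 953화')   # hypothetical "<series> <number>화" title
if match:
    print(match.group('main'))  # 원피스  -> used as maintitle
    print(match.group('sub'))   # 953화  -> episode-number portion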
def episode_download(queue_entity_episode):
    import plugin
    wr_id = queue_entity_episode.wr_id
    logger.debug('Episode Download wr_id:%s', wr_id)
    try:
        from selenium.webdriver.support.ui import WebDriverWait
        from system import SystemLogicSelenium
        if LogicNormal.driver is None:
            LogicNormal.driver = SystemLogicSelenium.create_driver()
        driver = LogicNormal.driver
        url = '%s/bbs/board.php?bo_table=manga&wr_id=%s' % (ModelSetting.get('sitecheck'), wr_id)
        driver.get(url)
        # remove the floating element that would otherwise overlap the page images
        fix_tag = WebDriverWait(driver, 30).until(lambda driver: driver.find_element_by_xpath('//*[@id="thema_wrapper"]/div[3]/div/div/div[1]/div[2]/div[3]/div'))
        SystemLogicSelenium.remove_element(driver, fix_tag)
        tag = WebDriverWait(driver, 30).until(lambda driver: driver.find_element_by_xpath('//*[@id="thema_wrapper"]/div[3]/div/div/div[1]/div[2]/div[1]/div/div[1]/a[2]'))
        queue_entity_episode.manga_id = tag.get_attribute('href').split('=')[-1]
        title = driver.title
        queue_entity_episode.title = LogicNormal.titlereplace(title)
        match = re.compile(ur'(?P<main>.*?)((단행본.*?)?|특별편)?(\s(?P<sub>(\d|\-|\.)*?(화|권)))?(\-)?(전|후|중)?(\s?\d+(\-\d+)?화)?(\s\(완결\))?\s?$').match(title)
        if match:
            queue_entity_episode.maintitle = match.group('main').strip()
        else:
            match2 = re.compile(ur'(?P<main>.*?)\s시즌').match(title)
            if match2:
                queue_entity_episode.maintitle = match2.group('main').strip()
            else:
                queue_entity_episode.maintitle = title
                logger.debug('not match')
        queue_entity_episode.maintitle = LogicNormal.titlereplace(queue_entity_episode.maintitle)
        if ModelSetting.get('use_title_folder') == 'True':
            download_path = os.path.join(ModelSetting.get('dfolder'), queue_entity_episode.maintitle, queue_entity_episode.title)
        else:
            download_path = os.path.join(ModelSetting.get('dfolder'), queue_entity_episode.title)
        logger.debug(title)
        logger.debug(queue_entity_episode.maintitle)
        image_tags = WebDriverWait(driver, 30).until(lambda driver: driver.find_elements_by_xpath('//*[@id="thema_wrapper"]/div[3]/div/div/div[1]/div[2]/div[5]/div/img'))
        queue_entity_episode.total_image_count = len(image_tags)
        if not os.path.exists(download_path):
            os.makedirs(download_path)
        queue_entity_episode.status = '캡처중'  # capturing
        plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
        full = SystemLogicSelenium.full_screenshot(driver)
        if full is None:
            queue_entity_episode.status = '실패'  # failed
            plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
        else:
            queue_entity_episode.status = '파일 생성중'  # writing files
            for idx, tag in enumerate(image_tags):
                image_filepath = os.path.join(download_path, str(idx + 1).zfill(5) + '.png')
                left = tag.location['x']
                top = tag.location['y']
                right = tag.location['x'] + tag.size['width']
                bottom = top + tag.size['height']
                logger.debug('%s %s %s %s %s', idx, left, top, right, bottom)
                im = full.crop((left, top, right, bottom))  # defines crop points
                im.save(image_filepath)
                queue_entity_episode.current_image_index = idx
                plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
            if ModelSetting.get('zip') == 'True':
                LogicNormal.makezip(download_path)
            queue_entity_episode.status = '완료'  # completed
            plugin.socketio_callback('episode', queue_entity_episode.as_dict(), encoding=False)
            return True
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def find_gallery(condition, condition_negative, search=False, scheduler=False):
    try:
        # condition: { key1:[val1], key2:[val2] }
        # key:
        #   type, id, l, n, a [], t [], p [], g [], c []
        # galleries0.json is the latest
        logger.debug("[gallery-dl] manual search positive condition: %s", str(condition))
        logger.debug("[gallery-dl] manual search negative condition: %s", str(condition_negative))
        ret = []
        last_num = int(ModelSetting.get('hitomi_last_num'))
        for num in range(0, last_num):
            item = "galleries" + str(num) + ".json"
            with open(os.path.join(LogicHitomi.basepath, item)) as galleries:
                json_item = json.loads(galleries.read())
                for _, gallery in enumerate(json_item):
                    if (LogicHitomi.stop == True) and (scheduler == False):
                        galleries.close()
                        return ret
                    try:
                        if LogicHitomi.is_satisfied(gallery, condition, condition_negative):
                            gallery['thumbnail'] = LogicHitomi.thumbnail_url(gallery['id'])
                            gallery['url'] = LogicHitomi.baseurl + str(gallery['id']) + '.html'
                            logger.debug('[gallery-dl] found item: %s', gallery['id'])
                            if search == True:
                                from .plugin import send_search_one
                                send_search_one(gallery)
                            if scheduler == True:
                                from logic_queue import LogicQueue
                                import plugin
                                LogicQueue.add_queue(gallery['url'])
                                plugin.socketio_callback('queue_one', gallery, encoding=False)
                            ret.append(gallery)
                    except Exception as e:
                        # no such key for this item
                        import traceback
                        logger.error('[gallery-dl] Exception:%s', e)
                        logger.error(traceback.format_exc())
                galleries.close()
        return ret
    except Exception as e:
        logger.error('[gallery-dl] Exception:%s', e)
        logger.error(traceback.format_exc())
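# A minimal usage sketch for find_gallery above. The condition dicts follow the
# "{ key: [values] }" format described in the comment at the top of the function; the keys
# shown ('type', 'l', 't') come from that comment, while the concrete values and the
# negative condition are hypothetical.
condition = {'type': ['artistcg'], 'l': ['korean'], 't': ['full color']}
condition_negative = {'t': ['sample_excluded_tag']}
found = LogicHitomi.find_gallery(condition, condition_negative, search=True)
# each matching gallery dict comes back with 'thumbnail' and 'url' already filled in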