def bangumi_calendar(self, force_update=False, save=True, cover=None):
    if force_update and not test_connection():
        force_update = False
        print_warning('Network is unreachable')

    if force_update:
        print_info('Fetching bangumi info ...')
        Bangumi.delete_all()
        weekly_list = self.fetch(save=save)
    else:
        weekly_list = Bangumi.get_updating_bangumi()

    if not weekly_list:
        print_warning('Warning: no bangumi schedule, fetching ...')
        weekly_list = self.fetch(save=save)

    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_to_path(bangumi['cover'])

                if not glob.glob(file_path):
                    cover_to_be_download.append(bangumi['cover'])

        if cover_to_be_download:
            print_info('Updating cover ...')
            download_cover(cover_to_be_download)

    return weekly_list
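# Hedged usage sketch (caller and variable names are assumptions, not from the original
# source): ``cover`` is extended in place -- urls passed by the caller are downloaded
# together with any bangumi covers missing from disk, so an empty list means
# "only fetch the covers that are not cached yet".
def _refresh_covers(data_source) -> None:
    script_covers = []  # e.g. extra cover urls gathered from user scripts
    data_source.bangumi_calendar(force_update=False, save=True, cover=script_covers)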
def cal(
    force_update: bool = False,
    save: bool = False,
    cover: Optional[List[str]] = None,
) -> Dict[str, List[Dict[str, Any]]]:
    logger.debug("cal force_update: %r save: %r", force_update, save)

    weekly_list = Bangumi.get_updating_bangumi()
    if not weekly_list:
        print_warning("Warning: no bangumi schedule, fetching ...")
        force_update = True

    if force_update:
        print_info("Fetching bangumi info ...")
        website.fetch(save=save)
        weekly_list = Bangumi.get_updating_bangumi()

    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_url_to_path(bangumi["cover"])
                if not (os.path.exists(file_path) and bool(imghdr.what(file_path))):
                    cover_to_be_download.append(bangumi["cover"])

        if cover_to_be_download:
            print_info("Updating cover ...")
            download_cover(cover_to_be_download)

    runner = ScriptRunner()
    patch_list = runner.get_models_dict()
    for i in patch_list:
        weekly_list[i["update_time"].lower()].append(i)
    logger.debug(weekly_list)

    # for web api, return all subtitle group info
    r = weekly_list  # type: Dict[str, List[Dict[str, Any]]]
    for day, value in weekly_list.items():
        for index, bangumi in enumerate(value):
            bangumi["cover"] = normalize_path(bangumi["cover"])
            subtitle_group = list(
                map(
                    lambda x: {"name": x["name"], "id": x["id"]},
                    Subtitle.get_subtitle_by_id(bangumi["subtitle_group"].split(", ")),
                )
            )
            r[day][index]["subtitle_group"] = subtitle_group
    logger.debug(r)
    return r
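# Hedged usage sketch (not part of the original module): assuming ``cal`` above is
# importable, the keys of the returned mapping are lowercased weekday names and every
# entry already carries a normalized cover path plus expanded subtitle group dicts.
def _print_weekly_schedule() -> None:
    weekly_list = cal(force_update=False, save=False)
    for weekday, daily_bangumi in weekly_list.items():
        for bangumi in daily_bangumi:
            groups = ", ".join(group["name"] for group in bangumi["subtitle_group"])
            print(weekday, bangumi["cover"], groups)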
def bangumi_calendar(self, force_update=False, save=True, cover=None):
    """
    :param force_update:
    :type force_update: bool

    :param save: set true to enable saving bangumi data to the database
    :type save: bool

    :param cover: list of cover urls (of scripts) to download
    :type cover: list[str]
    """
    if force_update and not test_connection():
        force_update = False
        print_warning('Network is unreachable')

    if force_update:
        print_info('Fetching bangumi info ...')
        weekly_list = self.fetch(save=save)
    else:
        weekly_list = Bangumi.get_updating_bangumi()

    if not weekly_list:
        print_warning('Warning: no bangumi schedule, fetching ...')
        weekly_list = self.fetch(save=save)

    if cover is not None:
        # download cover to local
        cover_to_be_download = cover
        for daily_bangumi in weekly_list.values():
            for bangumi in daily_bangumi:
                _, file_path = convert_cover_url_to_path(bangumi['cover'])
                if not (os.path.exists(file_path) and imghdr.what(file_path)):
                    cover_to_be_download.append(bangumi['cover'])

        if cover_to_be_download:
            print_info('Updating cover ...')
            download_cover(cover_to_be_download)

    return weekly_list
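# Hedged sketch (hypothetical helper, not from the original source): the cache check
# above only treats a cover as present when the file exists on disk *and* ``imghdr``
# recognises it as an image, so truncated or zero-byte downloads get re-fetched.
import imghdr
import os


def _cover_is_cached(file_path: str) -> bool:
    return os.path.exists(file_path) and imghdr.what(file_path) is not None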