# ScraperUtils, Batoto, and the save/log helpers used below are project-local
# modules; the bare imports here assume they live at the top level of the project.
import ScraperUtils
import Batoto


def run():
    # Walk every tracked comic, resume from its saved position, and pull any
    # new pages; a failure on one comic is logged and does not stop the rest.
    for comic in getComics():
        try:
            save = loadSaveData(comic)
            soup = ScraperUtils.getSoup(save['currentLink'])
            if Batoto.hasUpdates(soup):
                grab_updates(comic, save)
                writeSaveData(save, comic)
        except Exception as e:
            logError(comic, e)
            continue


def grab_updates(comic, save):
    # Follow the "next page" links, saving each page image and advancing the
    # saved position, until the page reports no further updates.
    soup = ScraperUtils.getSoup(save['currentLink'])
    while Batoto.hasUpdates(soup):
        ScraperUtils.saveImage(Batoto.getImage(soup), comic, save['currentPage'])
        save['currentLink'] = Batoto.getNextPage(soup)
        save['currentPage'] += 1
        soup = ScraperUtils.getSoup(save['currentLink'])
    # Save the last page, which has no further update after it.
    ScraperUtils.saveImage(Batoto.getImage(soup), comic, save['currentPage'])
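For context, the save data used above is a small per-comic record tracking the last link and page number fetched. A minimal sketch of what loadSaveData/writeSaveData could look like, assuming one JSON file per comic; the 'saves' directory and file naming are assumptions for illustration, not part of the original project:

import json
import os

SAVE_DIR = 'saves'  # assumed location of the per-comic save files


def loadSaveData(comic):
    # Read the per-comic JSON save file; it holds the fields used above,
    # 'currentLink' and 'currentPage'.
    path = os.path.join(SAVE_DIR, comic + '.json')
    with open(path) as f:
        return json.load(f)


def writeSaveData(save, comic):
    # Persist the updated position so the next run resumes where this one stopped.
    path = os.path.join(SAVE_DIR, comic + '.json')
    with open(path, 'w') as f:
        json.dump(save, f)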
Example #3
async def GetAvailability(arrivalDate: str, departureDate: str, continuousCampsites: bool,
                          recAreaName: str, facilityName: str):
    # Thin async wrapper that forwards the query parameters to the project's
    # Utils.FetchCampgroundAvailability helper (defined elsewhere in the project).
    return Utils.FetchCampgroundAvailability(arrivalDate=arrivalDate,
                                             departureDate=departureDate,
                                             continuousCampsites=continuousCampsites,
                                             recAreaName=recAreaName,
                                             facilityName=facilityName)
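A quick usage sketch, assuming GetAvailability is importable and that Utils.FetchCampgroundAvailability does the actual lookup; the dates, area, and facility names below are made up for illustration:

import asyncio


async def main():
    # Example call with hypothetical parameters; real values depend on the
    # recreation area and campground being queried.
    result = await GetAvailability(
        arrivalDate='2024-07-04',
        departureDate='2024-07-06',
        continuousCampsites=True,
        recAreaName='Yosemite National Park',
        facilityName='Upper Pines',
    )
    print(result)


if __name__ == '__main__':
    asyncio.run(main())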