def test_ls_files_and_dirs(tmp_path):
    """Test ls_files_and_dirs."""
    directory = tmp_path / 'sub'
    mkdir(directory)
    filename = tmp_path / TXT_FILENAME
    filename.write_text(TXTSTR)
    assert len(ls(tmp_path)) == 2
import os

import constants
import crawler


def downloadBooks(books, bookfldr):
    print("Books to be downloaded:")
    print(books)
    # Ensure the folder path ends with a separator. Note that
    # `is not ("/" or os.path.sep)` evaluated to an identity check
    # against "/" alone, so the suffix test was broken on Windows.
    if not bookfldr.endswith(("/", os.path.sep)):
        bookfldr += os.path.sep
    bookfldr = bookfldr.replace("/", os.path.sep)
    for b in books:
        constants.mkdir(bookfldr + b + os.path.sep + "contents")
        crawler.crawl(b, bookfldr)
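# Minimal usage sketch: the book key and target folder below are
# hypothetical and must match an entry in the JSON config at
# constants.configPath. downloadBooks creates
# downloads/example-book/contents and hands each key to crawler.crawl.
if __name__ == "__main__":
    downloadBooks(["example-book"], "downloads")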
import json

import constants


def writeMetadata(book, onlyFirstTitle=False):
    bookData = json.loads(open(constants.configPath, 'rb').read())
    try:
        url = bookData[book]['urlBase'].strip()  # base URL
        # Table-of-contents URL; fall back to the base URL if the
        # config entry has no 'tocUrl'.
        try:
            tocurl = bookData[book]['tocUrl'].strip()
        except KeyError:
            tocurl = url
        links = getLinks(url, tocurl, bookData[book]['ext'])
        path = constants.getMetaPath(book)
        constants.mkdir(path)
        print(links)
        # 'useFullPathAsFilename' is optional; default to False.
        try:
            chapters = generateMetadata(
                links,
                url,
                bookData[book]['useFullPathAsFilename'],
                bool(onlyFirstTitle),
            )
        except KeyError:
            chapters = generateMetadata(links, url, False, bool(onlyFirstTitle))
        with open(path + "chapterData.json", "w") as chaptersFull:
            chaptersFull.write(json.dumps({"chapters": chapters}, indent="\t"))
        blacklist = []
        try:
            blacklist = json.loads(
                open(constants.getMetaPath(book) + "blacklist.json", 'rb').read())
        except IOError:
            pass
        else:
            print("loaded Blacklist")
        with open(path + "chaptersGen.txt", "w") as chaptersWrite:
            # Iterate over a copy so blacklisted chapters can be removed
            # from the list while looping.
            for c in chapters[:]:
                if c['url'] in [e['url'] for e in blacklist]:
                    chapters.remove(c)
                    print(c, " blacklisted")
                else:
                    chaptersWrite.write(c['url'])
                    chaptersWrite.write("\r\n")
                    c.pop('url')
                    # Unwrap single-element title lists
                    # (`is 1` compared identity; use equality).
                    if len(c['title']) == 1:
                        c['title'] = c['title'][0]
        with open(path + "chaptersGen.json", "w") as chaptersWriteJSON:
            chaptersWriteJSON.write(json.dumps({"chapters": chapters}, indent="\t"))
        print("Metadata generated")
    except KeyError:
        print(book + " skipped.")
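# A sketch of one config entry consistent with the keys writeMetadata
# reads (urlBase, tocUrl, ext, useFullPathAsFilename). The book key,
# URLs, and extension here are illustrative assumptions; tocUrl and
# useFullPathAsFilename are optional, and the code falls back to
# urlBase and False respectively when they are missing.
EXAMPLE_CONFIG = {
    "example-book": {
        "urlBase": "https://example.com/book/",
        "tocUrl": "https://example.com/book/contents",
        "ext": ".html",
        "useFullPathAsFilename": False,
    },
}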
def test_contains_dir(tmp_path):
    """Test contains_dir."""
    directory = tmp_path / DIRSTR
    mkdir(directory)  # noqa: WPS204
    assert contains(directory) == 'argument is dir'
def test_rm_dir(tmp_path):
    """Test rm_dir."""
    directory = tmp_path / DIRSTR
    mkdir(directory)
    assert rm(directory) == 'argument is dir'
def test_since_empty_dir(tmp_path):
    """Test since_empty_dir."""
    directory = tmp_path / DIRSTR
    mkdir(directory)
    assert since(M_NUMBER, directory) == 'dir is empty'
def test_ls_only_dirs(tmp_path):
    """Test ls_only_dirs."""
    directory = tmp_path / 'another_dir'
    mkdir(directory)
    assert len(ls(tmp_path)) == 1