def renameFile(allVideoJson, thisvid, filename, contentLength):
    """Rename a downloaded video file to '<likes-prefix><name>.mp4' and record it.

    Side effects (all of them, in order):
      * renames `filename` on disk to a hearts-prefixed name under `gb.path`;
      * on a name collision, appends and advances the shared duplicate counter
        stored at allVideoJson["anchor"]["countForDupName"];
      * marks the video as downloaded and persists the whole JSON via saveload;
      * prepends `contentLength` to gb.LengthListPreDownload;
      * counts videos with >= 1,000,000 hearts in gb.number and calls
        sys.exit() once 50 such videos have been downloaded;
      * finally scrolls the app UI to the next video via Appium.

    # NOTE(review): FileExistsError from os.rename is Windows-only behavior;
    # on POSIX os.rename silently overwrites — presumably this tool targets
    # Windows (see removeIllegalCharForWinOS). TODO confirm.
    """
    name = removeIllegalCharForWinOS(allVideoJson[thisvid]["videoName"])
    likes = allVideoJson[thisvid]["hearts"]
    newfilename = gb.path + prepareNameFromLikes(likes) + name + '.mp4'
    try:
        os.rename(filename, newfilename)
    except FileExistsError:
        #use a nameCounter to avoid duplicate videos if two videos have similar likes
        #and no description or have same #topic
        nameCounter = allVideoJson["anchor"]["countForDupName"]
        newfilename = gb.path + prepareNameFromLikes(likes) + name + str(
            nameCounter) + '.mp4'
        #rename file that starts with number of hearts allows user easily
        #filter high hearts videos and watch
        os.rename(filename, newfilename)
        allVideoJson["anchor"]["countForDupName"] = nameCounter + 1
    allVideoJson[thisvid][
        "download"] = 1  #this to prevent duplicate downloading
    saveload.saveALLFile(allVideoJson)
    gb.LengthListPreDownload.insert(0, contentLength)  #add to the first position for a faster search
    # Count "qualified" videos: hearts >= 1,000,000.
    if likes > 1000000 - 1:
        gb.number = gb.number + 1
        if gb.number >= 50:
            sys.exit()  #when software download 50 qualified videos it will quit
    appiumForProject.action.scroll()
def saveNewJSONToLocalJSONFile(currentVideo, saveAllDict, vid, cha, likes, allVideoJson):
    """Store a newly seen video's metadata under key `vid` and persist it.

    Builds a record from the raw `currentVideo` payload, registers its topic
    via recordTopic, merges the record into `allVideoJson`, and writes the
    whole JSON back to disk through fileSaveAndLoad.

    Mutates both `saveAllDict` and `allVideoJson` in place; returns None.
    """
    # channelID and musicID are captured now for future features — e.g.
    # routing a favorite channel's videos into their own folder.
    saveAllDict[vid] = {
        "videoName": currentVideo["desc"],
        "hearts": likes,
        "videoID": currentVideo["aweme_id"],
        "channelID": currentVideo["author_user_id"],
        "channelName": currentVideo["author"]["nickname"],
        "musicID": currentVideo["music"]["id_str"],
        "download": 0,  # not downloaded yet
    }
    recordTopic(cha, currentVideo, saveAllDict, vid)
    allVideoJson.update(saveAllDict)
    fileSaveAndLoad.saveALLFile(allVideoJson)
def updateExistingVideo(allVideoJson, vid, likes):
    """Refresh the cached heart count for an already-known video and persist it.

    Sets allVideoJson[vid]["hearts"] = likes and saves the whole JSON via
    fileSaveAndLoad. If the entry is missing or malformed, prints a warning
    and skips the save. Returns None.
    """
    try:
        # Only the dict access can legitimately fail here; keep the try minimal.
        allVideoJson[vid].update({"hearts": likes})
    except (KeyError, TypeError, AttributeError):
        # vid absent, or the stored entry is not a dict — this is the
        # "bad digg_count data" case the original message describes.
        # (Previously `except Exception` also swallowed save I/O errors,
        # misreporting them as bad data; those now propagate.)
        print('something wrong with digg_count data of current Json')
    else:
        fileSaveAndLoad.saveALLFile(allVideoJson)