def __init__(self) -> None:
    """Opens every Twitter statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "twitterStats/"
    # One .erinalog file per tracked Twitter metric
    for statName in ("askingHit", "directMessagingHit", "responsePolarity",
                     "responses", "streamHit"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def anilist_search_caching(query):
    '''
    Caches the first search result from the given query (from AniList Search API).

    Returns an AnilistCache wrapper on success, or a CachingError describing
    the failure stage (API call, conversion, or file write).

    Project Erina
    © Anime no Sekai - 2020
    '''
    try:
        log("ErinaCaches", f'Caching {str(query)} from AniList Search API...')
        # Step 1: hit the AniList search API
        try:
            apiResponse = anilist.anilist_api_search(query)
        except:
            return CachingError("ANILIST_SEARCH_API_RESPONSE", f"An error occured while retrieving AniList Search API Data ({str(query)})")
        # AniList reports failures inside the JSON payload rather than via HTTP errors
        if "errors" in apiResponse:
            if apiResponse["errors"][0]["status"] == 404:
                return CachingError("ANILIST_NOT_FOUND", str(query) + " has not been found")
            else:
                return CachingError("ANILIST_SERVER_ERROR", f"An error occured with the AniList API: {apiResponse['errors'][0]['message']}")
        # Step 2: convert the raw JSON into Erina's cache format
        try:
            cache = anilist.anilist_json_to_cache(apiResponse)
        except:
            return CachingError("ERINA_CONVERSION", f"An error occured while converting AniList's Search API Data to a caching format ({str(query)})")
        # Step 3: persist the cache to disk (non-blocking write)
        try:
            TextFile(anilist_cache_path + cache['filename'], blocking=False).write(cache["content"])
        except:
            return CachingError("FILE_WRITE", f"An error occured while writing out the cache data to a file ({str(query)})")
        return anilist_parser.AnilistCache(cache["content"])
    except:
        # Catch-all so a caching failure never propagates to the caller
        return CachingError("UNKNOWN_ERROR", f"An unknown error occured while caching AniList Search API Data ({str(query)})")
def verifyPassword(password):
    """Checks the given password against the stored SHA-512 hash."""
    digest = hashlib.sha512(str(password).encode("utf-8")).hexdigest()
    stored = TextFile(erina_dir + "/ErinaServer/Erina/auth/password.erina").read()

    def normalize(value):
        # Ignore stray spaces/newlines in both the stored and computed hash
        return value.replace(" ", "").replace("\n", "")

    return normalize(stored) == normalize(digest)
def restartErinaServer(num, info):
    """
    SIGUSR1 signal handler --> Restarts Erina
    """
    # Best-effort: close every file descriptor the process holds so the
    # re-exec'd interpreter does not inherit stale handles
    try:
        for handler in psutil.Process(os.getpid()).open_files():
            os.close(handler.fd)
    except:
        pass
    # Imported here (not at module top) to read the stream state at restart time
    from ErinaTwitter.utils.Stream import lastDM
    from ErinaTwitter.utils.Stream import sinceID
    # Persist the Twitter stream cursors so the restarted process resumes where it left off
    TextFile(erina_dir + "/ErinaTwitter/lastDM.erina").write(str(lastDM))
    TextFile(erina_dir + "/ErinaTwitter/lastStatusID.erina").write(str(sinceID))
    # "0" marks the server as not running (read back on next launch)
    TextFile(erina_dir + "/launch.erina").write("0")
    ErinaWSGIServer.stop()
    ErinaWSGIServer.close()
    # Switch logging to blocking mode so the final message is flushed before exec
    logFile.blocking = True
    log("Erina", "Restarting...")
    # Replace the current process image with a fresh interpreter running this script
    os.execl(sys.executable, sys.executable, __file__, *sys.argv[1:])
def shutdownErinaServer(num, info):
    """
    SIGTERM, SIGQUIT, SIGINT signals handler --> Shutdowns Erina
    """
    # Best-effort: close every file descriptor the process holds open
    try:
        for handler in psutil.Process(os.getpid()).open_files():
            os.close(handler.fd)
    except:
        pass
    # "0" marks the server as not running (read back on next launch)
    TextFile(erina_dir + "/launch.erina").write("0")
    ErinaWSGIServer.stop()
    ErinaWSGIServer.close()
    # Switch logging to blocking mode so the goodbye message is flushed
    logFile.blocking = True
    log("Erina", "Goodbye!")
def search_anime_by_anilist_id(anilist_id):
    """
    Let you search through Erina's database and other database (Manami Projects'
    anime_offline_database and AniList API) with the AniList ID of the Anime!

    © Anime no Sekai - 2020
    Project Erina
    """
    cachePath = erina_dir + "/ErinaCaches/AniList_Cache/" + str(anilist_id) + ".erina"
    # No cache on disk --> fetch and cache from the AniList API
    if not isfile(cachePath):
        return anilist_caching(anilist_id)
    cachedResult = AnilistCache(TextFile(cachePath).read())
    # Refresh the cache when it is older than the configured expiration
    if time() - cachedResult.cache_timestamp.timestamp > CachesConfig.anilist_expiration:
        return anilist_caching(anilist_id)
    return cachedResult
def erina_caching(image_hash, database_path, similarity, anilist_id):
    '''
    Caches an ErinaDatabase path according to the image_hash.

    Project Erina
    © Anime no Sekai - 2020
    '''
    try:
        log("ErinaCaches", f'Caching {str(image_hash)} Erina Database data...')
        # Build the cache content from the database entry
        try:
            cacheContent = erina.erina_from_data(str(image_hash), database_path, similarity, anilist_id)
        except:
            return CachingError("ERINA_CONVERSION", f"An error occured while converting Erina Database Data to a caching format ({str(database_path)})")
        # Persist it to disk under the image hash (non-blocking write)
        try:
            TextFile(erina_cache_path + str(image_hash) + '.erina', blocking=False).write(cacheContent)
        except:
            return CachingError("FILE_WRITE", f"An error occured while writing out the cache data to a file {str(database_path)}")
        return erina_parser.ErinaCache(cacheContent)
    except:
        return CachingError("UNKNOWN", "An unknown error occured while caching Erina Database Data")
def __init__(self, key) -> None:
    """Loads an API key's auth file: name, rate limit and recorded stats."""
    self.key = str(key)
    self.authFile = TextFile(erina_dir + "/ErinaServer/Erina/auth/apiAuth/" + self.key + ".erina")
    self.name = None
    self.rate_limit = None
    self.stats = []
    readingStats = False  # flips to True once the stats separator is reached
    for rawLine in self.authFile:
        content = rawLine.replace("\n", "")
        if content.startswith("Name:"):
            self.name = content[6:]  # skip "Name: " (label + space)
        elif content.startswith("Rate Limit:"):
            self.rate_limit = convert_to_float(content[12:])  # skip "Rate Limit: "
        elif content == "----STATS----":
            readingStats = True
        elif readingStats:
            # Everything after the separator is one stat entry per line
            self.stats.append(convert_to_int(content))
def searchAnime(query):
    """ Searches an anime by its title """
    query = str(query)
    normalizedQuery = query.lower().replace(" ", '')
    anilistID, similarity = cosine_similarity.search(normalizedQuery)
    # Title match not confident enough --> fall back to an AniList text search
    if not similarity > 0.95:
        return anilist_search_caching(query)
    cacheFile = anilistCachesPath + str(anilistID) + ".erina"
    if not isfile(cacheFile):
        return anilist_caching(anilistID)
    cachedResult = AnilistCache(TextFile(cacheFile).read())
    # Refresh expired caches from the AniList API
    if time() - cachedResult.cache_timestamp.timestamp > CachesConfig.anilist_expiration:
        return anilist_caching(anilistID)
    return cachedResult
def ErinaServer_Endpoint_Auth_verify():
    # Flask endpoint: verifies the auth token supplied with the request and
    # answers with a JSON body describing success or the exact failure reason.
    try:
        # Verification is meaningless while no admin password has been set
        if TextFile(erina_dir + "/ErinaServer/Erina/auth/password.erina").read().replace(" ", "") == "":
            return makeResponse({
                "success": False,
                "error": "NOT_SET_PASSWORD",
                "message": "Password Is Not Set"
            }, 400, request.args)
        tokenVerification = authManagement.verifyToken(request.values)
        if not tokenVerification.success:
            # Distinguish the failure cause so clients can react appropriately
            responseBody = None
            if tokenVerification.expired:
                responseBody = {
                    "success": False,
                    "error": "EXPIRED_TOKEN",
                    "message": str(tokenVerification)
                }
            elif tokenVerification.no_token:
                responseBody = {
                    "success": False,
                    "error": "NOT_PROVIDED_TOKEN",
                    "message": str(tokenVerification)
                }
            else:
                responseBody = {
                    "success": False,
                    "error": "WRONG_TOKEN",
                    "message": str(tokenVerification)
                }
            return makeResponse(responseBody, 401, request.values)
        else:
            return makeResponse({"success": True}, 200, request.values)
    except:
        # Unexpected failure: report the exception type with a 500
        return makeResponse({
            "success": False,
            "error": str(exc_info()[0])
        }, 500, request.values)
def tracemoe_caching(image_hash):
    '''
    Caches the given Trace.moe API response.

    Returns a TraceMOECache wrapper on success, or a CachingError describing
    the failure stage (API call, conversion, or file write).

    Project Erina
    © Anime no Sekai - 2020
    '''
    try:
        log("ErinaCaches", f'Caching {str(image_hash)} trace.moe data...')
        try:
            # FIX: was `if image_hash.has_url is not None:` which is always true
            # when has_url is a boolean (saucenao_caching tests it as a bool);
            # now matching on the actual flag value.
            if image_hash.has_url:
                # URL search (GET), with the API token when one is configured
                if str(config.Caches.keys.tracemoe).replace(" ", "") in ["None", ""]:
                    requestResponse = json.loads(requests.get('https://trace.moe/api/search?url=' + image_hash.url).text)
                else:
                    requestResponse = json.loads(requests.get('https://trace.moe/api/search?url=' + image_hash.url + '&token=' + str(config.Caches.keys.tracemoe)).text)
            else:
                # Base64 upload (POST).
                # FIX: json.loads was given the Response object itself, which
                # raised and made every POST search fail; decode .text instead.
                if str(config.Caches.keys.tracemoe).replace(" ", "") in ["None", ""]:
                    requestResponse = json.loads(requests.post('https://trace.moe/api/search', json={'image': image_hash.base64}).text)
                else:
                    requestResponse = json.loads(requests.post('https://trace.moe/api/search?token=' + str(config.Caches.keys.tracemoe), json={'image': image_hash.base64}).text)
        except:
            return CachingError("TRACEMOE_API_RESPONSE", "An error occured while retrieving information from the trace.moe API")
        StatsAppend(ExternalStats.tracemoeAPICalls)
        # Convert the API JSON to Erina's cache format
        try:
            cache = tracemoe.erina_from_json(requestResponse)
        except:
            print(exc_info()[0])
            print(exc_info()[1])
            print(traceback.print_exception(*exc_info()))
            return CachingError("ERINA_CONVERSION", f"An error occured while converting trace.moe API Data to a caching format ({str(image_hash)})")
        # Persist the cache to disk (non-blocking write)
        try:
            TextFile(tracemoe_cache_path + str(image_hash) + '.erina', blocking=False).write(cache)
        except:
            return CachingError("FILE_WRITE", f"An error occured while writing out the cache data to a file ({str(image_hash)})")
        return tracemoe_parser.TraceMOECache(cache)
    except:
        return CachingError("UNKNOWN_ERROR", f"An unknown error occured while caching trace.moe API Data ({str(image_hash)})")
def saucenao_caching(image_hash):
    '''
    Caches the result from the given url.

    Project Erina
    © Anime no Sekai - 2020
    '''
    try:
        log("ErinaCaches", f"Caching {str(image_hash)} SauceNAO data...")
        # Use the configured API key when one is set, otherwise anonymous access
        if str(config.Caches.keys.saucenao).replace(" ", "") not in ["None", ""]:
            saucenao_api = SauceNao(api_key=config.Caches.keys.saucenao, numres=1)
        else:
            saucenao_api = SauceNao(numres=1)
        # Search by URL when available, otherwise upload the image data itself
        if image_hash.has_url:
            try:
                api_results = saucenao_api.from_url(image_hash.url)[0]
            except:
                return CachingError("SAUCENAO_API_RESPONSE", "An error occured while retrieving SauceNAO API Data")
        else:
            try:
                api_results = saucenao_api.from_file(image_hash.ImageIO)[0]
            except:
                return CachingError("SAUCENAO_API_RESPONSE", "An error occured while retrieving SauceNAO API Data")
        StatsAppend(ExternalStats.saucenaoAPICalls)
        # Convert the API result to Erina's cache format
        try:
            cache = saucenao.erina_from_api(api_results)
        except:
            traceback.print_exc()
            return CachingError("ERINA_CONVERSION", "An error occured while converting SauceNAO API Data to a caching format")
        # Persist the cache to disk (non-blocking write)
        try:
            TextFile(saucenao_cache_path + str(image_hash) + '.erina', blocking=False).write(cache)
        except:
            return CachingError("FILE_WRITE", "An error occured while writing out the cache data to a file")
        return saucenao_parser.SauceNAOCache(cache)
    except:
        return CachingError("UNKNOWN", "An unknown error occured while caching SauceNAO API Data")
def search_anime_in_tracemoe_cache():
    """Returns the cached trace.moe result for image_hash, or None if absent."""
    cacheFile = tracemoe_cache_path + str(image_hash) + '.erina'
    if not os.path.isfile(cacheFile):
        return None
    return parser.tracemoe_parser.TraceMOECache(TextFile(cacheFile).read())
def search_anime_in_erina_cache():
    """Returns the cached Erina Database result for image_hash, or None if absent."""
    cacheFile = f"{str(erina_dir)}/{str(image_hash)}.erina"
    if not os.path.isfile(cacheFile):
        return None
    return parser.erina_parser.ErinaCache(TextFile(cacheFile).read())
def __init__(self) -> None:
    """Opens every LINE statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "lineStats/"
    # One .erinalog file per tracked LINE metric
    for statName in ("descriptionHit", "imageSearchHit", "infoHit", "storedImages"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def __init__(self) -> None:
    """Opens every search statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "searchStats/"
    # One .erinalog file per tracked search metric
    for statName in ("searchCount", "anilistIDSearchCount",
                     "imageSearchCount", "titleSearchCount"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def setPassword(password):
    """Hashes the given password with SHA-512 and stores the hex digest."""
    digest = hashlib.sha512(str(password).encode("utf-8")).hexdigest()
    TextFile(erina_dir + "/ErinaServer/Erina/auth/password.erina").write(digest)
return "Error" def createRandomID(length): idResult = '' for _ in range(length): choice = random.randint(0, 1) if choice == 0: idResult += str(random.randint(0, 9)) else: idResult += str(random.choice(string.ascii_letters)) return idResult if currentSalt is None: salts = TextFile(erina_dir + "/ErinaServer/Erina/auth/salt.erina").readlines() currentSalt = createRandomID(8) while currentSalt in salts: currentSalt = createRandomID(8) TextFile(erina_dir + "/ErinaServer/Erina/auth/salt.erina").append(currentSalt + "\n") lastTokenFile = TextFile(erina_dir + "/ErinaServer/Erina/auth/lastToken.erina") if lastTokenFile.read().replace(" ", "").replace("\n", "") != "": currentToken = lastTokenFile.read().replace(" ", "").replace("\n", "") lastTokenFile.write("") def createToken(lengthWithoutSalt): global currentToken global expiredTokens
def on_status(self, tweet, force=False):
    """
    Tweet Receiving

    Handles every incoming stream status: records feedback sentiment on
    replies to Erina, then answers image-source requests depending on
    whether monitoring mode is enabled. Always persists the last seen
    status ID so the stream can resume after a restart.
    """
    global sinceID
    StatsAppend(TwitterStats.streamHit)
    if TwitterConfig.ignore_rt and Twitter.isRetweet(tweet):
        return
    # Replies to Erina are treated as feedback: record their polarity
    try:
        if Twitter.isReplyingToErina(tweet):  # If replying, analyze if it is a positive or a negative feedback
            responseSentiment = sentiment(tweet.text)[0]
            StatsAppend(TwitterStats.responsePolarity, responseSentiment)
            latestResponses.append({
                "timestamp": time(),
                "user": tweet.user.screen_name,
                "text": tweet.text,
                "sentiment": responseSentiment,
                "url": "https://twitter.com/twitter/statuses/" + str(tweet.id),
            })
    except:
        traceback.print_exc()
    if isinstance(TwitterConfig.monitoring.accounts, (list, tuple)) and len(TwitterConfig.monitoring.accounts) > 0:
        if TwitterConfig.monitoring.check_replies and Twitter.isReplyingToErina(tweet):
            # Monitor Mode ON, Check Replies to Monitored ON
            log("ErinaTwitter", "New monitoring hit from @" + str(tweet.user.screen_name))
            StatsAppend(TwitterStats.askingHit, str(tweet.user.screen_name))
            imageURL = Twitter.findImage(tweet)
            if imageURL is None:
                imageURL = Twitter.findParentImage(tweet)
            if imageURL is not None:
                searchResult = imageSearch(imageURL)
                tweetResponse = makeTweet(searchResult)
                if tweetResponse is not None:
                    StatsAppend(TwitterStats.responses)
                    ErinaTwitter.tweet(tweetResponse, replyID=tweet.id)
        elif tweet.user.screen_name in TwitterConfig.monitoring.accounts:
            # Monitor Mode ON, Check Replies to Monitored OFF
            log("ErinaTwitter", "New monitoring hit")
            # FIX: was `StatsAppend(TwitterStats.askingHitstr(...))` — a missing
            # comma fused the attribute and the str() call into a nonexistent
            # `askingHitstr` attribute, raising AttributeError on every hit.
            StatsAppend(TwitterStats.askingHit, str(tweet.user.screen_name))
            imageURL = Twitter.findImage(tweet)
            if imageURL is not None:
                searchResult = imageSearch(imageURL)
                tweetResponse = makeTweet(searchResult)
                if tweetResponse is not None:
                    StatsAppend(TwitterStats.responses)
                    ErinaTwitter.tweet(tweetResponse, replyID=tweet.id)
    else:
        # Monitor Mode OFF, Public Account
        imageURL = Twitter.findImage(tweet)
        if imageURL is None:
            imageURL = Twitter.findParentImage(tweet)
        # FIX: was `imageURL is not None and isAskingForSauce(tweet) or force`,
        # which (by operator precedence) entered the branch on force=True even
        # with no image and passed None to imageSearch(); an image is required.
        if imageURL is not None and (Twitter.isAskingForSauce(tweet) or force):
            log("ErinaTwitter", "New asking hit from @" + str(tweet.user.screen_name))
            StatsAppend(TwitterStats.askingHit, str(tweet.user.screen_name))
            searchResult = imageSearch(imageURL)
            tweetResponse = makeTweet(searchResult)
            if tweetResponse is not None:
                StatsAppend(TwitterStats.responses)
                responseImageURL = None
                # Attach a preview thumbnail for trace.moe results (never for NSFW)
                if isinstance(searchResult.detectionResult, TraceMOECache):
                    if TwitterConfig.image_preview:
                        if not searchResult.detectionResult.hentai:
                            responseImageURL = f"https://trace.moe/thumbnail.php?anilist_id={str(searchResult.detectionResult.anilist_id)}&file={str(searchResult.detectionResult.filename)}&t={str(searchResult.detectionResult.timing.at)}&token={str(searchResult.detectionResult.tokenthumb)}"
                ErinaTwitter.tweet(tweetResponse, replyID=tweet.id, imageURL=responseImageURL)
            elif Twitter.isMention(tweet):
                # FIX: typo "coudln't" in the user-facing message
                ErinaTwitter.tweet("Sorry, I searched everywhere but couldn't find it...", replyID=tweet.id)
    # Persist the last processed status ID so the stream resumes correctly
    TextFile(erina_dir + "/ErinaTwitter/lastStatusID.erina").write(str(tweet.id))
    sinceID = tweet.id
    return
© Anime no Sekai - 2020 """ from sys import platform from os import system from time import time, sleep from safeIO import TextFile from Erina import config from Erina.env_information import erina_dir from ErinaServer.WebSockets import ErinaSockets from ErinaServer.Erina.auth import authManagement import json logFile = TextFile(erina_dir + "/Erina/logs/logs.erinalog", blocking=False) errorsFile = TextFile(erina_dir + "/Erina/logs/errors.erinalog", blocking=False) def log(api, message, error=False): """ Logs something """ message = str(message) if config.Erina.console_log: if not error: print(f"[{api}] {message}") else: print("[Error] [" + api + "] " + message) timestamp = int(time())
def __init__(self) -> None:
    """Opens every external-API statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "externalStats/"
    # One .erinalog file per external service call counter
    for statName in ("anilistAPICalls", "iqdbCalls",
                     "saucenaoAPICalls", "tracemoeAPICalls"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def __init__(self) -> None:
    """Opens every database statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "dbStats/"
    # One .erinalog file per database lookup counter
    for statName in ("erinaDatabaseLookups", "manamiDBTitleVectorLookups"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def iqdb_caching(image_hash):
    """
    Searches and caches IQDB for anime/manga related images.

    Erina Project - 2020
    © Anime no Sekai
    """
    try:
        log("ErinaCaches", 'Searching for IQDB Data...')
        ### Query IQDB: by URL when available, otherwise upload the file
        try:
            if image_hash.has_url:
                IQDBresponse = requests.get(f'https://iqdb.org/?url={image_hash.url}')
                StatsAppend(ExternalStats.iqdbCalls, "New Call")
            else:
                IQDBresponse = requests.post('https://iqdb.org/', files={'file': ('image_to_search', image_hash.ImageIO)})
                StatsAppend(ExternalStats.iqdbCalls, "New Call")
        except:
            return CachingError("IQDB_RESPONSE", "An error occured while retrieving IQDB Data")
        ### If the image format is not supported by IQDB
        if 'Not an image or image format not supported' in IQDBresponse.text:
            return CachingError("IQDB_FORMAT_NOT_SUPPORTED", "The given image's format is not supported by IQDB")
        ###### IQDB SCRAPING
        try:
            iqdb = BeautifulSoup(IQDBresponse.text, 'html.parser')
            ##### Search for the IQDB result
            try:
                tables = iqdb.find_all('table')
                search_result = tables[1].findChildren("th")[0].get_text()
            except:
                return CachingError("IQDB_CLIENT_ERROR", f"An error occured while searching for the results: {exc_info()[0]}")
            ##### Verify if the result is relevant or not
            iqdb_tags = []
            if search_result == 'No relevant matches':
                return CachingError("IQDB_NO_RELEVANT_MATCH", "No relevant matches was found with IQDB", no_log=True)
            ### Getting the tags from IQDB
            try:
                alt_string = tables[1].findChildren("img")[0]['alt']
                iqdb_tags = alt_string.split('Tags: ')[1].split(' ')
            except:
                iqdb_tags = []
            #### Getting the Database URL from IQDB
            try:
                url = tables[1].find_all('td', attrs={'class': 'image'})[0].findChildren('a')[0]['href']
                url = 'https://' + url.split('//')[1]
            except:
                url = 'No URL'
            #### Getting the result image size
            try:
                size = tables[1].find_all('tr')[3].get_text().split(' [')[0]
            except:
                size = 'Unknown'
            #### Getting the image rating (if it is NSFW or not)
            is_safe = tables[1].find_all('tr')[3].get_text().split()[1].replace('[', '').replace(']', '').replace(' ', '') == 'Safe'
            #### Getting the similarity
            try:
                similarity = tables[1].find_all('tr')[4].get_text().replace('% similarity', '')
            except:
                similarity = '0'
            #### Identify the source database from the result URL
            database = "Unknown"
            if url.find('gelbooru.') != -1:
                database = 'Gelbooru'
            elif url.find('danbooru.') != -1:
                database = 'Danbooru'
            elif url.find('zerochan.') != -1:
                database = 'Zerochan'
            elif url.find('konachan.') != -1:
                database = 'Konachan'
            elif url.find('yande.re') != -1:
                database = 'Yande.re'
            elif url.find('anime-pictures.') != -1:
                database = 'Anime-Pictures'
            elif url.find('e-shuushuu') != -1:
                database = 'E-Shuushuu'
            #### Fetch the result page to scrape its <title>
            title = "Unknown"
            try:
                databaseWebsiteData = requests.get(url).text
                # FIX: databaseWebsiteData is already the response text (a str);
                # the original passed `databaseWebsiteData.text` to BeautifulSoup,
                # which raised AttributeError and forced the fallback title on
                # every single lookup.
                databaseWebsite = BeautifulSoup(databaseWebsiteData, 'html.parser')
                title = databaseWebsite.find("title").get_text()
            except:
                title = "Unknown"  # FIX: was the typo "Unkown"
        except:
            return CachingError("IQDB_PARSING", "An error occured while parsing the data from IQDB")
        try:
            #### Adding the results to the main result variable
            newCacheFile = TextFile(erina_dir + "/ErinaCaches/IQDB_Cache/" + str(image_hash) + ".erina")
            newCacheFile.append(" --- IQDB CACHE --- \n")
            newCacheFile.append('\n')
            newCacheFile.append('IQDB Tags: ' + ":::".join(iqdb_tags) + "\n")
            newCacheFile.append('URL: ' + str(url) + "\n")
            newCacheFile.append('Title: ' + str(title) + "\n")
            newCacheFile.append('Size: ' + str(size) + "\n")
            newCacheFile.append('isSafe: ' + str(is_safe) + "\n")
            newCacheFile.append('Similarity: ' + str(similarity) + "\n")
            newCacheFile.append('Database: ' + str(database) + "\n")
            return iqdb_parser.IQDBCache(newCacheFile.read())
        except:
            return CachingError("FILE_WRITE", "An error occured while writing out the cache data to a file")
    except:
        return CachingError("UNKNOWN", "An unknown error occured while caching IQDB Data")
def decorated(*args, **kwargs):
    """Appends a timestamp to the named user-defined stat, then calls through."""
    statFile = TextFile(erina_dir + "/Erina/stats/userdefinedStats/"
                        + str(name).replace("/", "_") + ".erinalog")
    statFile.append(str(int(time())) + "\n")
    return function(*args, **kwargs)
def search_anime_in_saucenao_cache():
    """Returns the cached SauceNAO result for image_hash, or None if absent."""
    cacheFile = saucenao_cache_path + str(image_hash) + '.erina'
    if not os.path.isfile(cacheFile):
        return None
    return parser.saucenao_parser.SauceNAOCache(TextFile(cacheFile).read())
def __init__(self) -> None:
    """Opens every hashing statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "erinahashStats/"
    # One .erinalog file per hashing metric
    for statName in ("createdBase64String", "createdHashes"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))
def search_anime_in_iqdb_cache():
    """Returns the cached IQDB result for image_hash, or None if absent."""
    cacheFile = iqdb_cache_path + str(image_hash) + '.erina'
    if not os.path.isfile(cacheFile):
        return None
    return parser.iqdb_parser.IQDBCache(TextFile(cacheFile).read())
@author: Anime no Sekai Erina Project - 2020 """ import re import json import datetime import requests from safeIO import JSONFile, TextFile from Erina.erina_log import log from Erina.env_information import erina_dir from ErinaDB.ManamiDB.manami_db_data import Database manami_database_path = erina_dir + "/ErinaDB/ManamiDB/" currentReleaseFile = TextFile(manami_database_path + 'current_release.txt') def convert_to_int(element): element = str(element).split('.')[0] element = re.sub("[^0-9]", "", str(element)) if element != '': return int(element) else: return 0 def verify_manami_adb(force=False): """ Checks for a new version of the database on GitHub """ ## Checking if new week
from Erina.config import Twitter as TwitterConfig from Erina.config import Erina as ErinaConfig from Erina.Errors import TwitterError, isAnError from ErinaTwitter.utils import Twitter from ErinaTwitter.erina_twitterbot import ErinaTwitter, latestResponses from ErinaTwitter.utils.Parser import makeTweet, makeImageResponse from ErinaSearch.erinasearch import imageSearch from Erina.env_information import erina_dir from Erina.erina_stats import StatsAppend from Erina.erina_stats import twitter as TwitterStats from Erina.utils import convert_to_int sinceID = TextFile(erina_dir + "/ErinaTwitter/lastStatusID.erina").read().replace( "\n", "") lastDM = convert_to_int( TextFile(erina_dir + "/ErinaTwitter/lastDM.erina").read().replace( "\n", "")) class Listener(tweepy.StreamListener): """ Tweet Listener Class (Twitter Stream Handler) Erina Project\n © Anime no Sekai - 2020 """ def on_connect(self): """
def __init__(self) -> None:
    """Opens every core Erina statistics log file (non-blocking handles)."""
    self.path = statsFilePath + "erinaStats/"
    # One .erinalog file per core metric
    for statName in ("cacheFilesCount", "erinaParsingCount", "errorsCount"):
        setattr(self, statName,
                TextFile(self.path + statName + ".erinalog", blocking=False))