# Example #1
def export_cache(destination=working_dir):
    """
    Export the cache file so it can be imported later (after an update for example).

    © Anime no Sekai - 2020
    """
    # Read the current cache once, then dump it under the export filename.
    cache_content = read_file(translation_caches_path + translation_caches_name)
    write_file('pyGoogleTranslateCacheExport.animenosekai_caches', cache_content, destination=destination)
# Example #2
def import_cache(cache_file_location):
    """
    Import a .animenosekai_caches translation cache file.

    Appends a timestamped ``[IMPORTED ...]`` marker to the current cache file,
    then appends every line from the imported file that is not already present
    in the current cache (exact line-level deduplication).

    © Anime no Sekai - 2020
    """
    # Build a set once so the per-line membership test below is O(1)
    # instead of scanning a list for every imported line.
    current_cache_lines = set(read_file(translation_caches_path + translation_caches_name).split('\n'))
    new_cache_file_content = read_file(cache_file_location).split('\n')
    # Mark where this import batch starts in the cache file.
    write_file(translation_caches_name, f'\n\n[IMPORTED {today()} {current_time()}]\n', translation_caches_path, append=True)
    for line in new_cache_file_content:
        if line not in current_cache_lines:
            write_file(translation_caches_name, line, translation_caches_path, append=True)
def translate(text, destination_language, source_language="auto", cache=False, debug=False):
    """
    Translates the given text into the chosen language by scraping Google Translate with Selenium.

    Returns a string with the text translated.\n
    Returns "An error occured while translating: translation not found." if the translation was not found in the webpage. This might come from a mistyped language code.
    """
    # Flow: cache lookup -> driver sanity checks -> up to three scraping
    # attempts. When debug=True every step is appended to logs.txt.
    from .internal.domain import gt_domain
    # last_translation holds the previous result; it is compared against the
    # page text to detect when Google has not finished streaming a new one.
    global last_translation
    if debug:
        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Starting Translation...\n', append=True)
    if debug:
        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Searching Caches...\n', append=True)
    # A cache hit short-circuits: no browser interaction at all.
    cache_result = search_translation_cache(source_language=source_language, destination_language=destination_language, source=text)
    if not cache_result is None:
        if debug:
            line_number = cache_result['line_number']
            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Translation found in Caches (line {line_number})\n', append=True)
        return cache_result['result']
    else:
        # A Selenium driver must have been configured via browser() beforehand.
        if driver is None:
            if debug:
                write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - No driver selected\n', append=True)
            raise BrowserError("Browser is not set yet.\n Please set it with browser()")
        if not connected:
            if debug:
                write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Driver disconnected, last driver: {driver_name}\n', append=True)
            raise BrowserError(f'You disconnected the last browser in use ({driver_name}).\n Please reconnect one with browser()')
        else:
            if debug:
                write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - First attempt url is: https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}\n', append=True)
            # Attempt 1: the whole request is encoded in the URL fragment.
            driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
            try:
                if debug:
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                result = driver.find_element_by_class_name("tlid-translation")
                # If the element still shows the previous translation (or the
                # previous translation plus "..."), the page has not finished
                # loading the new one — refresh once and re-read the element.
                if result.text == last_translation or result.text == str(last_translation + '...'):
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Translation not finished detected... Refreshing page before new attempt...\n', append=True)
                    driver.refresh()
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                    result = driver.find_element_by_class_name("tlid-translation")
                if debug:
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Setting last_translation global variable to new translation...\n', append=True)
                last_translation = str(result.text)
                if cache:
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
                    add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
                if debug:
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Returning value... {result.text}\n', append=True)
                return str(result.text)
            except NoSuchElementException:
                # Attempt 2 (element missing): reload the same URL and retry
                # with the same not-finished detection as attempt 1.
                if debug:
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Element not found on page...\n', append=True)
                try:
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] New attempt...\n', append=True)
                    driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                    result = driver.find_element_by_class_name("tlid-translation")
                    if result.text == last_translation or result.text == str(last_translation + '...'):
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
                        driver.refresh()
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                        result = driver.find_element_by_class_name("tlid-translation")
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Setting last_translation global variable to new translation...\n', append=True)
                    last_translation = str(result.text)
                    if cache:
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
                        add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Returning value... {result.text}\n', append=True)
                    return str(result.text)
                except NoSuchElementException:
                    # Attempt 3 (last): note this flow differs from the first
                    # two — it refreshes unconditionally BEFORE looking up the
                    # element and skips the last_translation staleness check.
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Element not found on page...\n', append=True)
                    try:
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] New attempt...\n', append=True)
                        driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
                        driver.refresh()
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                        result = driver.find_element_by_class_name("tlid-translation")
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Setting last_translation global variable to new translation...\n', append=True)
                        last_translation = str(result.text)
                        if cache:
                            if debug:
                                write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
                            add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Returning value... {result.text}\n', append=True)
                        return str(result.text)
                    except NoSuchElementException:
                        # Out of retries: signal "not found" via the sentinel
                        # string documented in the docstring (no exception).
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Element not found, aborting...\n', append=True)
                        return "An error occured while translating: translation not found."
                    except Exception as e:
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Unknown error\n', append=True)
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
                        return "An error occured while translating: unknown error."
                except Exception as e:
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Unknown error\n', append=True)
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
                    return "An error occured while translating: unknown error."
            except Exception as e:
                # Unknown error on attempt 1: log it, then run one more full
                # attempt (logged as "[Attempt 2]"); any failure here — even a
                # missing element — falls through to the generic handler below.
                if debug:
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Unknown error\n', append=True)
                    write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
                try:
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] New attempt...\n', append=True)
                    driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                    result = driver.find_element_by_class_name("tlid-translation")
                    if result.text == last_translation or result.text == str(last_translation + '...'):
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
                        driver.refresh()
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
                        result = driver.find_element_by_class_name("tlid-translation")
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Setting last_translation global variable to new translation...\n', append=True)
                    last_translation = str(result.text)
                    if cache:
                        if debug:
                            write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
                        add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Returning value... {result.text}\n', append=True)
                    return str(result.text)
                except Exception as e:
                    if debug:
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Unknown error\n', append=True)
                        write_file('logs.txt', today() + ' ' + current_time() + f'   text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
                    return "An error occured while translating: unknown error."
# Example #4
def search_iqdb(image_hash, image_url='', file_io=None):
    """
    Searches and caches IQDB for anime/manga related images.

    Erina Project - 2020\n
    © Anime no Sekai
    """

    erina_log.logcaches(f'Searching for IQDB Data...', 'iqdb', str(image_hash))
    StatsAppend(ExternalStats.iqdbCalls, "New Call")
    results = {}

    ### If a file is given, send the file to iqdb.
    if file_io is not None:
        response = requests.post('https://iqdb.org/',
                                 files={'file': ('image_to_search', file_io)})
    else:
        if image_url == '':
            erina_log.logerror('[ErinaCaches] [IQDB] No file or URL provided')
            return {'error': 'no file or url provided'}
        else:
            response = request(f'https://iqdb.org/?url={image_url}')

    ### If the image format is not supported by IQDB
    if 'Not an image or image format not supported' in response.text:
        print('Format not supported.')
        erina_log.logerror('[ErinaCaches] [IQDB] Format not supported')
        return {'error': 'format not supported'}

###### IQDB SCRAPING
    iqdb = BeautifulSoup(response.text, 'html.parser')

    ##### Search for the IQDB result
    try:
        tables = iqdb.find_all('table')
        search_result = tables[1].findChildren("th")[0].get_text()
    except Exception as e:
        erina_log.logerror(
            f'[ErinaCaches] [IQDB] Client Error, Error details: {str(e)}')
        return {'error': 'client error', 'error_details': e}

##### Verify if the result is relevant or not
    iqdb_tags = []
    if search_result == 'No relevant matches':
        erina_log.logerror('[ErinaCaches] [IQDB] No relevant matches found')
        return {'error': 'not found'}
    else:
        try:
            ### Getting the tags from IQDB
            alt_string = tables[1].findChildren("img")[0]['alt']
            iqdb_tags = alt_string.split('Tags: ')[1].split(' ')
        except:
            iqdb_tags = []

    #### Getting the Database URL from IQDB
    try:
        url = tables[1].find_all('td',
                                 attrs={'class': 'image'
                                        })[0].findChildren('a')[0]['href']
        url = 'https://' + url.split('//')[1]
    except:
        url = ''

    #### Getting the result image size
    try:
        size = tables[1].find_all('tr')[3].get_text().split(' [')[0]
    except:
        size = ''

    #### Getting the image rating (if it is NSFW or not)
    if tables[1].find_all('tr')[3].get_text().split()[1].replace(
            '[', '').replace(']', '').replace(' ', '') == 'Safe':
        is_safe = True
    else:
        is_safe = False

    #### Getting the similarity
    try:
        similarity = tables[1].find_all('tr')[4].get_text().replace(
            '% similarity', '')
    except:
        similarity = ''

    #### Adding the results to the main result variable
    results['iqdb_tags'] = iqdb_tags
    results['url'] = url
    results['size'] = size
    results['is_safe'] = is_safe
    results['similarity'] = similarity

    ############ FUNCTION DEFINITION FOR RESULTS SCRAPING
    if url.find('gelbooru.') != -1:
        results['database'] = 'gelbooru'
        results['gelbooru_results'] = search_gelbooru(url)

    elif url.find('danbooru.') != -1:
        results['database'] = 'danbooru'
        results['danbooru_results'] = search_danbooru(url)

    elif url.find('zerochan.') != -1:
        results['database'] = 'zerochan'
        results['zerochan_results'] = search_zerochan(url)

    elif url.find('konachan.') != -1:
        results['database'] = 'konachan'
        results['konachan_results'] = search_konachan(url)

    elif url.find('yande.re') != -1:
        results['database'] = 'yandere'
        results['yandere_results'] = search_yandere(url)

    elif url.find('anime-pictures.') != -1:
        results['database'] = 'anime_pictures'
        results['anime_pictures_results'] = search_animepictures(url)

    elif url.find('e-shuushuu') != -1:
        results['database'] = 'e_shuushuu'
        results['e_shuushuu_results'] = search_eshuushuu(url)

#################### CACHING ##########

    new_cache_content = []
    new_cache_content.append('   --- IQDB CACHE ---   ')
    new_cache_content.append('')

    new_cache_content.append('IQDB Tags: ' +
                             create_erina_list(results['iqdb_tags']))
    new_cache_content.append('URL: ' + results['url'])
    new_cache_content.append('Size: ' + results['size'])
    new_cache_content.append('isSafe: ' + str(results['is_safe']))
    new_cache_content.append('Similarity: ' + results['similarity'])
    new_cache_content.append('Database: ' + results['database'])
    new_cache_content.append('')

    if results['database'] == 'gelbooru':

        new_cache_content.append(
            'Gelbooru Characters: ' +
            create_erina_list(results['gelbooru_results']['characters']))
        new_cache_content.append(
            'Gelbooru Copyrights: ' +
            create_erina_list(results['gelbooru_results']['copyrights']))
        new_cache_content.append(
            'Gelbooru Metadatas: ' +
            create_erina_list(results['gelbooru_results']['metadatas']))
        new_cache_content.append(
            'Gelbooru Tags: ' +
            create_erina_list(results['gelbooru_results']['tags']))

        new_cache_content.append('Gelbooru ID: ' +
                                 results['gelbooru_results']['id'])
        new_cache_content.append('Gelbooru Size: ' +
                                 results['gelbooru_results']['size'])
        new_cache_content.append('Gelbooru Source: ' +
                                 results['gelbooru_results']['source'])
        new_cache_content.append('Gelbooru Rating: ' +
                                 results['gelbooru_results']['rating'])
        new_cache_content.append('Gelbooru Date: ' +
                                 results['gelbooru_results']['date'])
        new_cache_content.append('Gelbooru Uploader: ' +
                                 results['gelbooru_results']['uploader'])
        new_cache_content.append('Gelbooru Score: ' +
                                 results['gelbooru_results']['score'])

    elif results['database'] == 'danbooru':

        new_cache_content.append(
            'Danbooru Artists: ' +
            create_erina_list(results['danbooru_results']['artists']))
        new_cache_content.append(
            'Danbooru Characters: ' +
            create_erina_list(results['danbooru_results']['characters']))
        new_cache_content.append(
            'Danbooru Copyrights: ' +
            create_erina_list(results['danbooru_results']['copyrights']))
        new_cache_content.append(
            'Danbooru Metadatas: ' +
            create_erina_list(results['danbooru_results']['metadatas']))
        new_cache_content.append(
            'Danbooru Tags: ' +
            create_erina_list(results['danbooru_results']['tags']))

        new_cache_content.append('Danbooru ID: ' +
                                 results['danbooru_results']['id'])
        new_cache_content.append('Danbooru Uploader: ' +
                                 results['danbooru_results']['uploader'])
        new_cache_content.append('Danbooru Date: ' +
                                 results['danbooru_results']['date'])
        new_cache_content.append('Danbooru Content Size: ' +
                                 results['danbooru_results']['content_size'])
        new_cache_content.append('Danbooru Format: ' +
                                 results['danbooru_results']['format'])
        new_cache_content.append('Danbooru Size: ' +
                                 results['danbooru_results']['size'])
        new_cache_content.append('Danbooru Source: ' +
                                 results['danbooru_results']['source'])
        new_cache_content.append('Danbooru Rating: ' +
                                 results['danbooru_results']['rating'])
        new_cache_content.append('Danbooru Score: ' +
                                 results['danbooru_results']['score'])
        new_cache_content.append('Danbooru Favorites: ' +
                                 results['danbooru_results']['favorites'])
        new_cache_content.append('Danbooru Status: ' +
                                 results['danbooru_results']['status'])

    elif results['database'] == 'zerochan':
        new_cache_content.append('Zerochan ID: ' +
                                 results['zerochan_results']['id'])
        new_cache_content.append('Zerochan Uploader: ' +
                                 results['zerochan_results']['uploader'])
        new_cache_content.append('Zerochan Content URL: ' +
                                 results['zerochan_results']['content_url'])
        new_cache_content.append('Zerochan Thumbnail: ' +
                                 results['zerochan_results']['thumbnail'])
        new_cache_content.append('Zerochan Format: ' +
                                 results['zerochan_results']['format'])
        new_cache_content.append('Zerochan Post Date: ' +
                                 results['zerochan_results']['post_date'])
        new_cache_content.append('Zerochan Name: ' +
                                 results['zerochan_results']['name'])
        new_cache_content.append('Zerochan Width: ' +
                                 results['zerochan_results']['width'])
        new_cache_content.append('Zerochan Height: ' +
                                 results['zerochan_results']['height'])
        new_cache_content.append('Zerochan Content Size: ' +
                                 results['zerochan_results']['content_size'])
        new_cache_content.append('Zerochan Mangaka: ' +
                                 results['zerochan_results']['mangaka'])
        new_cache_content.append('Zerochan Series: ' +
                                 results['zerochan_results']['series'])
        new_cache_content.append('Zerochan Character: ' +
                                 results['zerochan_results']['character'])
        new_cache_content.append('Zerochan Source: ' +
                                 results['zerochan_results']['source'])

    elif results['database'] == 'konachan':

        new_cache_content.append(
            'Konachan Copyrights: ' +
            create_erina_list(results['konachan_results']['copyrights']))
        new_cache_content.append(
            'Konachan Styles: ' +
            create_erina_list(results['konachan_results']['styles']))
        new_cache_content.append(
            'Konachan Artists: ' +
            create_erina_list(results['konachan_results']['artists']))
        new_cache_content.append(
            'Konachan Characters: ' +
            create_erina_list(results['konachan_results']['characters']))
        new_cache_content.append(
            'Konachan Tags: ' +
            create_erina_list(results['konachan_results']['tags']))
        new_cache_content.append(
            'Konachan Favorited By: ' +
            create_erina_list(results['konachan_results']['favorited_by']))

        new_cache_content.append('Konachan ID: ' +
                                 results['konachan_results']['id'])
        new_cache_content.append('Konachan Size: ' +
                                 results['konachan_results']['size'])
        new_cache_content.append('Konachan Source: ' +
                                 results['konachan_results']['source'])
        new_cache_content.append('Konachan Rating: ' +
                                 results['konachan_results']['rating'])
        new_cache_content.append('Konachan Date: ' +
                                 results['konachan_results']['date'])
        new_cache_content.append('Konachan Uploader: ' +
                                 results['konachan_results']['uploader'])
        new_cache_content.append('Konachan Score: ' +
                                 results['konachan_results']['score'])

    elif results['database'] == 'yandere':

        new_cache_content.append(
            'Yandere Copyrights: ' +
            create_erina_list(results['yandere_results']['copyrights']))
        new_cache_content.append(
            'Yandere Styles: ' +
            create_erina_list(results['yandere_results']['styles']))
        new_cache_content.append(
            'Yandere Artists: ' +
            create_erina_list(results['yandere_results']['artists']))
        new_cache_content.append(
            'Yandere Characters: ' +
            create_erina_list(results['yandere_results']['characters']))
        new_cache_content.append(
            'Yandere Tags: ' +
            create_erina_list(results['yandere_results']['tags']))
        new_cache_content.append(
            'Yandere Favorited By: ' +
            create_erina_list(results['yandere_results']['favorited_by']))

        new_cache_content.append('Yandere ID: ' +
                                 results['yandere_results']['id'])
        new_cache_content.append('Yandere Size: ' +
                                 results['yandere_results']['size'])
        new_cache_content.append('Yandere Source: ' +
                                 results['yandere_results']['source'])
        new_cache_content.append('Yandere Rating: ' +
                                 results['yandere_results']['rating'])
        new_cache_content.append('Yandere Date: ' +
                                 results['yandere_results']['date'])
        new_cache_content.append('Yandere Uploader: ' +
                                 results['yandere_results']['uploader'])
        new_cache_content.append('Yandere Score: ' +
                                 results['yandere_results']['score'])

    elif results['database'] == 'anime_pictures':

        new_cache_content.append('Anime-Pictures ID: ' +
                                 results['anime_pictures_results']['id'])
        new_cache_content.append('Anime-Pictures Uploader: ' +
                                 results['anime_pictures_results']['uploader'])
        new_cache_content.append(
            'Anime-Pictures Last Editing User: '******'anime_pictures_results']['last_editing_user'])
        new_cache_content.append(
            'Anime-Pictures Post Date: ' +
            results['anime_pictures_results']['post_date'])
        new_cache_content.append(
            'Anime-Pictures Published Date: ' +
            results['anime_pictures_results']['published_date'])
        new_cache_content.append(
            'Anime-Pictures Download Count: ' +
            results['anime_pictures_results']['download_count'])
        new_cache_content.append('Anime-Pictures Size: ' +
                                 results['anime_pictures_results']['size'])
        new_cache_content.append(
            'Anime-Pictures Aspect Ratio: ' +
            results['anime_pictures_results']['aspect_ratio'])
        new_cache_content.append(
            'Anime-Pictures Content Size: ' +
            results['anime_pictures_results']['content_size'])
        new_cache_content.append(
            'Anime-Pictures Artefacts Degree: ' +
            results['anime_pictures_results']['artefacts_degree'])
        new_cache_content.append(
            'Anime-Pictures Smooth Degree: ' +
            results['anime_pictures_results']['smoothness_degree'])
        new_cache_content.append(
            'Anime-Pictures Complexity: ' +
            results['anime_pictures_results']['complexity'])
        new_cache_content.append(
            'Anime-Pictures Copyright: ' +
            results['anime_pictures_results']['copyright'])
        new_cache_content.append('Anime-Pictures Artist: ' +
                                 results['anime_pictures_results']['artist'])
        new_cache_content.append(
            'Anime-Pictures Average Color: ' + create_erina_list(
                results['anime_pictures_results']['average_color']))
        new_cache_content.append(
            'Anime-Pictures References: ' +
            create_erina_list(results['anime_pictures_results']['references']))
        new_cache_content.append(
            'Anime-Pictures Objects: ' +
            create_erina_list(results['anime_pictures_results']['objects']))
        new_cache_content.append(
            'Anime-Pictures Similar Images: ' + create_erina_list(
                results['anime_pictures_results']['similar_images_id']))
        new_cache_content.append(
            'Anime-Pictures Artist Links: ' + create_erina_list(
                results['anime_pictures_results']['artist_links']))

    elif results['database'] == 'e_shuushuu':

        new_cache_content.append('E-Shuushuu Posting Uploader: ' +
                                 results['e_shuushuu_results']['uploader'])
        new_cache_content.append('E-Shuushuu Posting Post Date: ' +
                                 results['e_shuushuu_results']['post_date'])
        new_cache_content.append('E-Shuushuu Posting Filename: ' +
                                 results['e_shuushuu_results']['filename'])
        new_cache_content.append(
            'E-Shuushuu Posting Original Filename: ' +
            results['e_shuushuu_results']['original_filename'])
        new_cache_content.append('E-Shuushuu Posting Content Size: ' +
                                 results['e_shuushuu_results']['content_size'])
        new_cache_content.append('E-Shuushuu Posting Size: ' +
                                 results['e_shuushuu_results']['size'])
        new_cache_content.append('E-Shuushuu Posting Favorites: ' +
                                 results['e_shuushuu_results']['favorites'])
        new_cache_content.append('E-Shuushuu Posting Image Rating: ' +
                                 results['e_shuushuu_results']['image_rating'])

        new_cache_content.append(
            'E-Shuushuu Tags: ' +
            create_erina_list(results['e_shuushuu_results']['tags']))
        new_cache_content.append(
            'E-Shuushuu Sources: ' +
            create_erina_list(results['e_shuushuu_results']['sources']))
        new_cache_content.append(
            'E-Shuushuu Characters: ' +
            create_erina_list(results['e_shuushuu_results']['characters']))
        new_cache_content.append(
            'E-Shuushuu Artists: ' +
            create_erina_list(results['e_shuushuu_results']['artists']))

    new_cache_content.append('')
    new_cache_content.append('Cache Timestamp: ' +
                             str(datetime.timestamp(datetime.today())))
    new_cache_content.append('Cache Timestamp (formatted): ' + today() +
                             ' at ' + current_time())

    new_cache_destination = env_information.erina_dir + '/ErinaCaches/IQDB_Cache/'
    new_cache_filename = str(image_hash) + '.erina'
    erina_log.logcaches(f'Caching IQDB and {results["database"]} data...')
    write_file(file_title=new_cache_filename,
               text=new_cache_content,
               destination=new_cache_destination)
    return results
            def analyze_episode(episode):
                """
                Internal Function that is ran by multiple thread to analyze each frame of an anime episode and create a file with its hash.
                Erina Project
                © Anime no Sekai - 2020
                """
                # NOTE(review): `anime`, `season`, `anilist_id`,
                # `erina_add_animes_path`, `erina_database_path` and
                # `erina_aniep` come from the enclosing scope (not visible
                # in this chunk) — confirm they are set before threads start.

                global fps_dict
                global frames_stats_dict
                global queue

                queue += 1
                episode_number = erina_aniep.episode_from_filename(episode, anime)

                frames_stats_dict[episode_number] = {'frame': 0, 'total_frames': 0, 'status': 'Starting'}

                video = cv2.VideoCapture(erina_add_animes_path + anime + '/' + episode)

                frame_count = video.get(cv2.CAP_PROP_FRAME_COUNT)
                framerate = video.get(cv2.CAP_PROP_FPS)
                # NOTE(review): raises ZeroDivisionError when OpenCV reports
                # a 0 FPS stream — presumably inputs are valid video files.
                duration = frame_count / framerate

                success, image = video.read()

                count = 0
                fps = 0
                start_loop_fps = 0
                end_loop_fps = 0
                start_loop_time = process_time()
                from_dict = {}          # hash -> second the hash was first seen at
                first_frame_dict = {}   # hash -> frame index the hash was first seen at

                new_database_path = erina_database_path + anime + '/' + episode_number
                if not filecenter.isdir(new_database_path):
                    filecenter.make_dir(new_database_path)

                # Frames are processed every other iteration: on "skip"
                # iterations the frame is only grabbed (not decoded), which
                # halves the decoding work while still advancing the stream.
                skip = False
                while success:
                    if not skip:
                        hashing_image = Image.fromarray(image)
                        # Hoisted single str() conversion of the ImageHash,
                        # used for dict keys, file content and the filename.
                        image_hash = str(imagehash.average_hash(hashing_image))

                        second = count / framerate

                        # Explicit membership tests replace the original bare
                        # `except:` clauses, which silently swallowed every
                        # exception (including KeyboardInterrupt).
                        if image_hash in from_dict:
                            from_second = from_dict[image_hash]
                            at_second = (float(from_second) + second) / 2
                        else:
                            from_dict[image_hash] = second
                            from_second = second
                            at_second = second

                        # First frame index at which this hash was observed.
                        first_frame = first_frame_dict.setdefault(image_hash, count)

                        database_content = []
                        database_content.append('AniList ID: ' + anilist_id)
                        database_content.append('Anime: ' + anime)
                        database_content.append('Season: ' + season)
                        database_content.append('Episode: ' + episode_number)
                        database_content.append('First Frame: ' + str(first_frame))
                        database_content.append('Last Frame: ' + str(count))
                        database_content.append('From: ' + str(from_second))
                        database_content.append('To: ' + str(second))
                        database_content.append('At: ' + str(at_second))
                        database_content.append('Hash: ' + image_hash)
                        database_content.append('Hashing Algorithm: Average Hash (aHash)')
                        database_content.append('Filename: ' + episode)
                        database_content.append('Episode Framerate: ' + str(framerate))
                        database_content.append('Episode Duration: ' + str(duration))
                        database_content.append('Episode Frame Count: ' + str(frame_count))
                        database_content.append('')
                        database_content.append('Analyze Date: ' + str(datetime.timestamp(datetime.today())))

                        lifeeasy.write_file(image_hash + '.erina', database_content, new_database_path)

                    frames_stats_dict[episode_number] = {'frame': count, 'total_frames': frame_count, 'status': 'Active'}

                    count += 1

                    # Rough frames-per-second meter, refreshed once per second.
                    end_loop_fps += 1
                    end_loop_time = process_time()
                    if end_loop_time - start_loop_time > 1:
                        fps = int(end_loop_fps - start_loop_fps)
                        fps_dict[episode_number] = fps
                        start_loop_time = process_time()
                        start_loop_fps = end_loop_fps

                    if skip:
                        skip = False
                        success, image = video.read()
                    else:
                        skip = True
                        success = video.grab()
                queue -= 1
                frames_stats_dict[episode_number] = {'frame': count, 'total_frames': frame_count, 'status': 'Done'}
                fps_dict.pop(episode_number, None)
Example #6
0
async def _translate(text,
                     destination_language,
                     source_language="auto",
                     cache=False,
                     debug=False):
    """
    Internal Function to translate

    Drives the shared pyppeteer ``page`` pointed at Google Translate,
    retrying up to three times before giving up.

    Returns the translated text as a string, a cached result when one
    exists, or an ``"An error occured while translating: ..."`` message
    when every attempt fails.
    """
    from .domain import gt_domain
    global last_translation

    def _log(message):
        """Append a timestamped debug line to logs.txt (no-op unless debug)."""
        if debug:
            write_file(
                'logs.txt',
                today() + ' ' + current_time() +
                f'   text={text}|sl={source_language}|dl={destination_language} - '
                + message + '\n',
                append=True)

    def _store(result, prefix=''):
        """Remember the result, optionally cache it, and return it as str."""
        global last_translation
        _log(prefix + 'Setting last_translation global variable to new translation...')
        last_translation = str(result)
        if cache:
            _log('Adding result to cache...')
            add_translation_cache(
                source_language=source_language,
                destination_language=destination_language,
                source=text,
                result=str(result))
        _log(prefix + f'Returning value... {result}')
        return str(result)

    result_js = "document.getElementsByClassName(\"tlid-translation\")[0].innerText"

    async def _read_result(prefix=''):
        """Extract the translation, reloading once if the page still shows the previous one."""
        _log(prefix + 'Getting DOM Element by Class Name (tlid-translation)')
        result = await page.evaluate(result_js)
        # Google keeps the previous translation on screen while the new one
        # loads: a stale (or "..."-suffixed) value means we read too early.
        if result == last_translation or result == str(last_translation + '...'):
            _log(prefix + 'Translation not finished detected... Refreshing page before new attempt...')
            await page.reload()
            _log(prefix + 'Getting DOM Element by Class Name (tlid-translation)')
            result = await page.evaluate(result_js)
        return result

    _log('Starting Translation...')
    _log('Searching Caches...')
    cache_result = search_translation_cache(
        source_language=source_language,
        destination_language=destination_language,
        source=text)
    if cache_result is not None:
        if debug:
            line_number = cache_result['line_number']
            _log(f'Translation found in Caches (line {line_number})')
        return cache_result['result']

    request_url = (
        f"https://{gt_domain}/?hl=en#view=home&op=translate"
        f"&sl={verify_language_code(source_language)}"
        f"&tl={verify_language_code(destination_language)}&text={str(text)}")
    _log(f'First attempt url is: {request_url}')
    await page.goto(request_url)
    try:
        return _store(await _read_result())
    except ElementHandleError:
        _log('Element not found on page...')
        try:
            _log('[Attempt 2] New attempt...')
            await page.goto(request_url)
            return _store(await _read_result('[Attempt 2] '), '[Attempt 2] ')
        except ElementHandleError:
            _log('[Attempt 2] Element not found on page...')
            try:
                _log('[Attempt 3] New attempt...')
                await page.goto(request_url)
                # The final attempt always reloads once before reading.
                _log('[Attempt 3] Translation not finished detected... Refreshing page before new attempt...')
                await page.reload()
                _log('[Attempt 3] Getting DOM Element by Class Name (tlid-translation)')
                result = await page.evaluate(result_js)
                return _store(result, '[Attempt 3] ')
            except ElementHandleError:
                _log('[Attempt 3] Element not found, aborting...')
                return "An error occured while translating: translation not found."
            except Exception as e:
                _log('[Attempt 3] Unknown error')
                _log(f'Error details: {str(e)}')
                return "An error occured while translating: unknown error."
        except Exception as e:
            _log('[Attempt 2] Unknown error')
            _log(f'Error details: {str(e)}')
            return "An error occured while translating: unknown error."
    except Exception as e:
        _log('Unknown error')
        _log(f'Error details: {str(e)}')
        try:
            _log('[Attempt 2] New attempt...')
            await page.goto(request_url)
            return _store(await _read_result('[Attempt 2] '), '[Attempt 2] ')
        except Exception as e:
            _log('Unknown error')
            _log(f'Error details: {str(e)}')
            return "An error occured while translating: unknown error."
Example #7
0
def add_translation_cache(source_language, destination_language, source, result):
    """
    Adds a translation to the cache file.

    Each cache entry is a single appended line of the form
    ``[sl=..]|[dl=..]|[source=..]|[result=..]``.
    """
    sl_code = verify_language_code(source_language)
    dl_code = verify_language_code(destination_language)
    entry = '[sl={}]|[dl={}]|[source={}]|[result={}]\n'.format(sl_code, dl_code, source, result)
    write_file(translation_caches_name, entry, translation_caches_path, append=True)
def setup(customurl=False, force_upgrade=False):
    global package_name
    global upgrade
    global custom_setup

    upgrade = force_upgrade

    setup = []

    # AUTHOR
    author = input('Who is the author? ')
    print('')

    def naming_package():
        """
        Ask for a package name and check its availability on PyPI.

        Returns 1 when the availability check itself fails; recurses
        until an acceptable name is given otherwise.
        """
        global package_name
        global upgrade
        # NAME
        package_name = input("What's the name of your package? ")
        print('')

        lifeeasy.display_action('Verification', delay=0.1)
        name_verification = lifeeasy.request(
            'https://pypi.org/project/' + package_name + '/', 'get')
        if name_verification.status_code == 404:
            print('The name is available!')
            return
        if name_verification.status_code != 200:
            print('An error occured with the name verification...')
            return 1

        # The name exists on PyPI — fetch its metadata to see who owns it.
        request = lifeeasy.request(
            'https://pypi.org/pypi/' + package_name + '/json', 'get')
        request_json = json.loads(request.text)
        if request_json['info']['author'] != author:
            print('This name is already taken!')
            print('Please try giving another name to the package...')
            print('')
            naming_package()
            return

        print('upload mode: upgrade')
        print('Do you want to change some metadatas or keep them?')
        user_choice = input(
            'press [enter] to continue with current metadatas or type [continue] to continue modifying the metadatas... '
        )
        # Accept "continue" plus every single-character-deletion typo of it.
        accepted = {'continue', 'ontinue', 'cntinue', 'coninue',
                    'contnue', 'contiue', 'contine', 'continu'}
        upgrade = user_choice.lower() not in accepted

    if upgrade == False:
        if naming_package() == 1:
            return 1
    else:
        # NAME
        package_name = input("What's the name of your package? ")
        print('')

    # VERSION
    version = input("What's the version of " + package_name + '? ')
    print('')

    if upgrade == False:
        # DESCRIPTION
        print('Write a little summary/description of ' + package_name)
        desc = input('> ')
        print('')

        # EMAIL
        email = input('What is his ' + author + "'s email? ")
        print('')

        # LICENSE
        print('Warning: the license name is case-sensitive!')
        package_license = input('What is the license for ' + package_name +
                                ' ? ')
        package_license_classifier = 'License :: OSI Approved :: ' + package_license + ' License'
        print('')

        request = lifeeasy.request(
            'https://github.com/' + author + '/' + package_name, 'get')
        if request.status_code == 404:
            # GITHUB REPO
            print("What is the GitHub repository for this package?")
            url = input('> ')
            print('')
        else:
            url = 'https://github.com/' + author + '/' + package_name

        # ARCHIVE
        if url[-1] == '/':
            download_url_try = url + 'archive/' + version + '.tar.gz'
        else:
            download_url_try = url + '/archive/' + version + '.tar.gz'
        request = lifeeasy.request(method='get', url=download_url_try)
        if request.status_code == 200:
            download_url = download_url_try
        else:
            if url[-1] == '/':
                download_url_try = url + 'archive/v' + version + '.tar.gz'
            else:
                download_url_try = url + '/archive/v' + version + '.tar.gz'
            request = lifeeasy.request(method='get', url=download_url_try)
            if request.status_code == 200:
                download_url = download_url_try
            else:
                github_release = input(
                    "What is the name of the GitHub release? ")
                print('')
                if url[-1] == '/':
                    download_url_try = url + 'archive/' + github_release + '.tar.gz'
                else:
                    download_url_try = url + '/archive/' + github_release + '.tar.gz'
                request = lifeeasy.request(method='get', url=download_url_try)
                if request.status_code == 200:
                    download_url = download_url_try
                else:

                    def ask_for_github_release():
                        """Prompt for a release archive URL until one answers with HTTP 200."""
                        global download_url
                        print(
                            'What is the URL of your GitHub release? (it ends with .tar.gz)'
                        )
                        candidate = input('> ')
                        print('')
                        response = lifeeasy.request(method='get', url=candidate)
                        if response.status_code != 200:
                            print(
                                "It seems that you mistyped the URL or that the repository is private..."
                            )
                            lifeeasy.sleep(2)
                            print(
                                "Please put your GitHub repository visibility in public and retry..."
                            )
                            print('')
                            lifeeasy.sleep(2)
                            ask_for_github_release()
                            return
                        download_url = candidate

                    ask_for_github_release()

        # CUSTOM URL
        # Replaces the GitHub-derived homepage with a user-supplied one.
        if customurl == True:
            print('What is the URL of the website for this package?')
            url = input('> ')
            print('')

        # KEYWORDS
        # NOTE(review): unlike the dependencies below, keywords are not
        # stripped of spaces, so "a, b" yields [' b'] entries — confirm
        # whether that is intentional.
        print('Enter a comma-separated list of keywords for your package')
        keywords = input('> ')
        keywords = keywords.split(',')
        print('')

        # DEPENDENCIES
        print('Enter a comma-separated list of dependencies for your package')
        dependencies = input('> ')
        dependencies = dependencies.replace(' ', '')
        dependencies = dependencies.split(',')
        print('')

        # PYTHON VERSIONS
        print(
            'Enter a comma-separated list of supported Python version numbers for this package'
        )
        print('(i.e 3,3.4,3.5,3.6,3.7,3.8)')
        python_versions = input('> ')
        print('')
        python_versions = python_versions.replace(' ', '')
        python_versions = python_versions.split(',')

        # One "Programming Language :: Python :: X.Y" trove classifier per
        # supported version.
        versions_classifiers = []
        for python_version in python_versions:
            versions_classifiers.append('Programming Language :: Python :: ' +
                                        python_version)

        # Default development status; development_status() below may override.
        dev_status = 'Development Status :: 4 - Beta'
        def development_status():
            """Interactively pick a PyPI development-status trove classifier.

            Sets the global ``dev_status`` to the classifier matching the
            user's answer (alpha/beta/stable, case-insensitive), or sets
            ``customclassifiers_bool`` when the user presses [enter] to
            defer to custom classifiers. Re-prompts on unrecognized input
            using a loop rather than the original self-recursion (which
            could overflow the stack on repeated mistypes).
            """
            global dev_status
            global customclassifiers_bool
            # Accepted answers mapped to their trove classifiers.
            statuses = {
                'alpha': 'Development Status :: 3 - Alpha',
                'beta': 'Development Status :: 4 - Beta',
                'stable': 'Development Status :: 5 - Production/Stable',
            }
            while True:
                lifeeasy.clear()
                print('Choose a development status from the following')
                print('')
                print('Alpha')
                print('Beta')
                print('Stable')
                print('')
                print(
                    'Or press [enter] to add a custom one when adding classifiers...'
                )
                print('')
                print('')
                dev_status_try = input('> ')

                if dev_status_try == '':
                    customclassifiers_bool = True
                    return
                if dev_status_try.lower() in statuses:
                    dev_status = statuses[dev_status_try.lower()]
                    return
                print("Sorry but I couldn't recognize the status.")
                lifeeasy.sleep(1)
                print('Please try again...')
                lifeeasy.sleep(1)

        development_status()

        # CUSTOM CLASSIFIERS
        # Collect fully-written trove classifiers one at a time; an empty
        # answer ends the loop. The original wrote the whole prompt block
        # out twice (once before the while, once inside it); a single
        # while/break loop keeps the two copies from drifting apart while
        # producing the identical clear/print/input sequence.
        custom_classifiers = []
        if customclassifiers_bool == True:
            while True:
                lifeeasy.clear()
                print("What are the custom classifiers that you want to add?")
                print('')
                print("You need to enter your classifiers one-by-one")
                print("You need to write the full classifier")
                print(
                    "When you are done press [enter] again without entering anything."
                )
                print('')
                print('')
                user_choice = input('> ')
                if user_choice == '':
                    break
                custom_classifiers.append(user_choice)
    else:
        # UPGRADE PATH: reuse the metadata already published on PyPI
        # instead of asking the user again.
        package_infos = lifeeasy.request(
            'https://pypi.org/pypi/' + package_name + '/json', 'get')
        package_infos = json.loads(package_infos.text)
        package_license = package_infos['info']['license']
        desc = package_infos['info']['summary']
        author = package_infos['info']['author']
        email = package_infos['info']['author_email']
        url = package_infos['info']['home_page']

        def ask_for_github_release():
            """Prompt until the user supplies a reachable .tar.gz release URL.

            Stores the first URL answering HTTP 200 in the global
            ``download_url``. Retries with a loop instead of the original
            self-recursion (which could raise RecursionError after many
            failed attempts).
            """
            global download_url
            while True:
                print(
                    'What is the URL of your GitHub release? (it ends with .tar.gz)'
                )
                download_url_try = input('> ')
                print('')
                if lifeeasy.request_statuscode(method='get',
                                               url=download_url_try) == 200:
                    download_url = download_url_try
                    return
                print(
                    "It seems that you mistyped the URL or that the repository is private..."
                )
                lifeeasy.sleep(2)
                print(
                    "Please put your GitHub repository visibility in public and retry..."
                )
                print('')
                lifeeasy.sleep(2)

        # BUG FIX: the original called ask_for_github_release() first and
        # then unconditionally overwrote download_url with the (frequently
        # empty) value recorded on PyPI, silently discarding the user's
        # answer. Prefer the PyPI value and only prompt when it is missing.
        download_url = package_infos['info']['download_url']
        if not download_url:
            ask_for_github_release()
        # NOTE(review): PyPI may return None for 'keywords' or
        # 'requires_dist'; .split(',') here (and str() further below) would
        # then fail or emit the literal "None" — confirm upstream guarantees.
        keywords_string = package_infos['info']['keywords']
        keywords = keywords_string.split(',')
        dependencies = package_infos['info']['requires_dist']
        classifiers = package_infos['info']['classifiers']

    # CUSTOM SETUP
    # Optional free-form text appended verbatim into the generated setup()
    # call; an empty answer disables the feature.
    if custom_setup == True:
        print('Add your custom setup sections (comma-separated)')
        setup_customized = input('> ')
        if len(setup_customized) == 0:
            custom_setup = False

    lifeeasy.clear()
    print('Building your setup file')
    # The short random sleeps below are purely cosmetic pacing for the
    # progress messages; they do no work.
    lifeeasy.sleep(random.uniform(0.126, 0.31))

    print('adding imports')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('from setuptools import setup')

    # README
    # Look for a markdown readme in the working directory; when found, the
    # generated setup.py will read it at build time into readme_description.
    if filecenter.exists(lifeeasy.working_dir() + '/README.md'):
        print('adding the package readme')
        lifeeasy.sleep(random.uniform(0.126, 0.31))
        setup.append('from os import path')
        setup.append(
            "with open(path.join(path.abspath(path.dirname(__file__)), 'README.md'), encoding='utf-8') as f:"
        )
        setup.append("    readme_description = f.read()")

        long_description_type = 'text/markdown'

    elif filecenter.exists(lifeeasy.working_dir() + '/readme.md'):
        print('adding the package readme')
        lifeeasy.sleep(random.uniform(0.126, 0.31))
        setup.append('from os import path')
        setup.append('')
        setup.append(
            "with open(path.join(path.abspath(path.dirname(__file__)), 'readme.md'), encoding='utf-8') as f:"
        )
        setup.append("    readme_description = f.read()")
        setup.append('')
        setup.append('')

        long_description_type = 'text/markdown'

    else:
        # No readme found; the empty string doubles as the "no readme" flag.
        long_description_type = ''

    # Need to add more readme type

    print('creating the setup class')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('setup(')

    # Each section below appends one keyword argument of the generated
    # setup() call as a line of source text.
    # NOTE(review): values are interpolated into double-quoted strings
    # without escaping — a double quote inside e.g. the description would
    # produce a syntactically invalid setup.py; repr() would be safer.
    print('adding the package name')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('name = "' + package_name + '",')

    print('adding the packages name')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('packages = ["' + package_name + '"],')

    print('adding the package version')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('version = "' + version + '",')

    print('adding the package license')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('license = "' + package_license + '",')

    print('adding the package description')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('description = "' + desc + '",')

    print('adding the package author')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('author = "' + author + '",')

    print('adding the package email')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('author_email = "' + email + '",')

    print('adding the package url')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('url = "' + url + '",')

    print('adding the package download url')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('download_url = "' + download_url + '",')

    print('adding the package keywords')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('keywords = ' + str(keywords) + ',')

    print('adding the package dependencies')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('install_requires = ' + str(dependencies) + ',')

    print('creating the package classifiers')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    # Fresh packages build the classifier list from the interactive answers
    # above; upgrades keep the classifiers fetched from PyPI.
    if upgrade == False:
        classifiers = []
        classifiers.append(dev_status)
        classifiers.append(package_license_classifier)
        classifiers.extend(versions_classifiers)
        classifiers.extend(custom_classifiers)

    print('adding the package classifiers')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append('classifiers = ' + str(classifiers) + ',')

    # Only emit the long_description lines when a README was actually found
    # above; otherwise the generated setup.py would reference the undefined
    # name readme_description and crash with a NameError when executed.
    # (long_description_type is '' exactly when no README exists.)
    if long_description_type:
        print('adding the package readme')
        lifeeasy.sleep(random.uniform(0.126, 0.31))
        setup.append('long_description = readme_description,')

        print('adding the package readme type')
        lifeeasy.sleep(random.uniform(0.126, 0.31))
        setup.append('long_description_content_type = "' + long_description_type +
                     '",')

    setup.append('include_package_data=True,')

    if custom_setup == True:
        # Verbatim user-supplied extra lines inside the setup() call.
        print('adding your custom setup sections')
        lifeeasy.sleep(random.uniform(0.126, 0.31))
        setup.append(setup_customized)

    print('finishing...')
    lifeeasy.sleep(random.uniform(0.126, 0.31))
    setup.append(')')

    # Write the accumulated lines out as setup.py in the working directory.
    print('creating the file...')
    lifeeasy.write_file('setup.py', setup)
    lifeeasy.sleep(random.uniform(1.5, 2.3))
    return 0