def is_nude(fileName):
    """Return True if the image at *fileName* contains nudity, else False."""
    return Nudity().has(fileName)
Example #2
0
def check_pornographic_content(image_path):
    """Check the image at *image_path* for pornographic content.

    Prints the verdict and score, then returns ``[is_offensive, score]``
    where *is_offensive* is the detector's boolean verdict and *score* is
    its 0.0-1.0 rating (snapped down to exactly 0.0 when negligible).
    """
    detector = Nudity()
    is_offensive = detector.has(image_path)
    score = detector.score(image_path)
    # Treat vanishingly small scores as a clean 0.0 for readability.
    if score < 0.0001:
        score = 0.0
    print(is_offensive)
    print(score)
    return [is_offensive, score]
Example #3
0
def longLoading(loop, message, file):
    """Scan *file* for nudity, report the result by editing *message*, then
    delete the file.

    Parameters:
        loop: event loop used to schedule the (presumably async) message edit
              — TODO confirm message.edit returns a coroutine.
        message: chat message object exposing ``edit(text)``.
        file: path of the downloaded image, or None if nothing was saved.

    Fixes over the original: the file-removal cleanup was duplicated in both
    the success and the exception paths — it now lives in a single ``finally``
    clause; ``!= None`` replaced with the idiomatic ``is not None``.
    """
    try:
        nudity = Nudity()
        if nudity.has(file):
            loop.create_task(message.edit("nude detected.."))
        else:
            loop.create_task(message.edit("no nude detected.."))
    except Exception as e:
        # Surface the failure to the user rather than dying silently.
        loop.create_task(message.edit(str(e)))
    finally:
        # Always clean up the temporary download, success or failure.
        if file is not None:
            os.remove(file)
Example #4
0
HOST = 'localhost'  # Standard loopback interface address (localhost)
PORT = 5678  # Port to listen on (non-privileged ports are > 1023)

# Redirect all output to a log file; this process runs headless.
sys.stdout = sys.stderr = open('/var/log/wevip', 'a')
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # Disables tensorflow spamflow

from nudity import Nudity

# Fix over the original: build the detector ONCE. Nudity() loads its model,
# which was previously repeated for every single request inside the loop.
nudity = Nudity()

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind((HOST, PORT))
    s.listen()
    while True:
        conn, addr = s.accept()
        with conn:
            print('Connected by', addr)
            while True:
                # Protocol: client sends an image path; we answer b'1'/b'0'.
                data = conn.recv(1024).strip()
                if not data:
                    break
                print("Received", data)
                image = data.decode()
                if nudity.has(image):
                    print("P**n")
                    conn.sendall(b'1')
                else:
                    print("Clear")
                    conn.sendall(b'0')
        sys.stdout.flush()
Example #5
0
# Purge leftover screenshots from a previous run: any entry whose name
# mentions 'mamad' or 'nude' gets deleted.
for entry in cur_dir:
    if 'mamad' in entry or 'nude' in entry:
        os.remove(entry)

def sayy(text):
    """Speak *text* aloud via the pyttsx3 TTS engine (blocks until done)."""
    tts = pyttsx3.init()
    tts.say(text)
    tts.runAndWait()

# Continuously screenshot the desktop, flag frames containing nudity, and
# archive the flagged ones under a sequential name; clean frames are deleted.
#
# Fixes over the original: comparisons against True/False replaced with a
# plain if/else on the boolean; the bare `except:` narrowed to the errors
# shutil.move can actually raise; the inconsistent 5-space indentation
# normalized; the unreachable trailing os.remove after the infinite loop
# removed.
nudity = Nudity()
threshold = 0
while True:
    img = ImageGrab.grab()
    img.save('images_trash/mamad.png')
    has_nude = nudity.has('images_trash/mamad.png')
    threshold = threshold + 1
    if has_nude:
        os.rename('images_trash/mamad.png', 'nude%s.png' % threshold)
        try:
            shutil.move('C:/Users/Lenovo/Desktop/nodyab/nude%s.png' % threshold,
                        'C:/Users/Lenovo/Desktop/nuudes')
        except (shutil.Error, OSError):
            # Destination name collision: retry under a bumped name.
            thresholdnew = threshold + 1
            shutil.move('C:/Users/Lenovo/Desktop/nodyab/nude%s.png' % threshold,
                        'C:/Users/Lenovo/Desktop/nuudes/nuude%s.png' % thresholdnew)
        print('shit')
    else:
        os.remove('images_trash/mamad.png')
Example #6
0
def nudity_filter(file):
    """Score *file* for obscenity.

    Returns a ``(score, message)`` tuple when the detector flags the image;
    returns None (implicitly) otherwise.
    """
    detector = Nudity()
    flagged = detector.has(file) == True
    if not flagged:
        return None
    return (detector.score(file), 'image is above obscenity threshold')
Example #7
0
#!/usr/bin/env python
import imp
import os
import sys
sys.path.insert(
    0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
from nudity import Nudity

if __name__ == '__main__':
    # Smoke-test the detector against two labelled sample directories:
    # samples/none must come back clean, samples/nude must be flagged.
    #
    # Fixes over the original: the two near-identical loops are merged by
    # pairing each directory with its expected verdict; the Yoda-style
    # `False != ...` / `False == ...` comparisons are replaced with a direct
    # equality check; paths are built with os.path.join instead of string
    # concatenation.
    nudity = Nudity()
    base_path = os.path.dirname(os.path.abspath(__file__))
    for folder, expected in (("none", False), ("nude", True)):
        sample_dir = os.path.join(base_path, "samples", folder)
        for sample in os.listdir(sample_dir):
            file_name = os.path.join(sample_dir, sample)
            if nudity.has(file_name) == expected:
                print("Success: " + file_name)
            else:
                print("Error: " + file_name)