def __get_news(self, url, limit) -> Union[List[dict], list]:
    """Fetch and normalize RSS items from *url*.

    :param url: RSS feed url to parse
    :param limit: maximum number of items to return; ``None`` means all,
        a non-positive value yields an empty list (original semantics kept)
    :return: list of dicts with ``title``/``link``/``desc``/``published``
        keys, or ``[]`` on feed failure
    """
    try:
        fp = feedparser.parse(url)
    except Exception:
        # feedparser swallows most network errors itself; this guards the
        # rare cases (e.g. name-resolution failures) that do raise.
        logger.info(f'Temporary failure in name resolution {url}')
        return []
    all_feed = fp["items"]
    # Preserve original behavior: limit <= 0 returned nothing; a raw slice
    # with a negative limit would instead drop items from the end.
    if limit is not None and limit <= 0:
        return []
    news = []
    # all_feed[:None] is the whole list, so slicing covers both cases.
    for feed in all_feed[:limit]:
        pub_date = dt.strptime(feed['published'], '%a, %d %b %Y %H:%M:%S %z')
        news.append({
            'title': feed['title'],
            'link': feed['link'],
            'desc': feed['description'],
            'published': pub_date.strftime('%d.%m.%Y %H:%M'),
        })
    return news
def get_picture(self, tags, rating):
    """ Get picture with tag and rating
    :param tags: tags of picture
    :param rating: picture rating
    :return: picture url and hash or empty strings
    :rtype: (str, str)
    """
    logger.info("get_picture()")
    url = self._url % (tags, rating)
    logger.debug("Get url: " + str(url))
    try:
        # timeout added: a bare requests.get() can block forever on a
        # stalled server and hang the caller.
        picture_object = requests.get(url, timeout=30).json()
    except ConnectionError:
        logger.info("Return empty strings: connection error")
        return "", ""
    except JSONDecodeError:
        logger.info("Picture not found")
        return "", ""
    except requests.exceptions.RequestException:
        # Covers Timeout and any other request failure the two handlers
        # above miss, so the added timeout cannot leak an exception out.
        logger.info("Return empty strings: connection error")
        return "", ""
    logger.debug("Get picture: " + str(picture_object))
    try:
        logger.info("Try to return picture link and hash")
        # Empty result list or missing keys falls through to the handler.
        return picture_object[0]['file_url'], picture_object[0]['md5']
    except Exception:
        logger.info("Return empty strings: picture not found")
        return "", ""
def get_news_page(self, link) -> Union[requests.Response, None]:
    """Download a news page.

    :param link: page url to fetch
    :return: the ``requests.Response`` on success, ``None`` on any request
        error (the original annotation said ``requests.Request``, but
        ``requests.get`` returns a ``Response``)
    """
    try:
        # timeout keeps a stalled server from hanging the caller forever;
        # Timeout is a RequestException subclass, so it is caught below.
        return requests.get(link, timeout=30)
    except requests.exceptions.RequestException:
        logger.info(f'Connection error by {link}.')
        return None
def vt_run(self, scan_type, QRY):
    """Query VirusTotal and log per-engine verdicts for *QRY*.

    :param scan_type: API collection ('files', 'urls', 'domains', 'ip_addresses')
    :param QRY: the hash/url/domain/ip being looked up
    """
    url = f"{self.base_url}/{scan_type}/{QRY}"
    # dumps/loads round-trip kept: it normalizes whatever vt_connect returns.
    data = json.dumps(self.vt_connect(url))
    json_resp = json.loads(data)
    if json_resp:
        good = 0
        bad = 0
        try:
            results = json_resp["data"]["attributes"]
        except (KeyError, TypeError):
            # Dict subscripting raises KeyError (or TypeError on a non-dict),
            # never AttributeError -- the original clause could not fire, so
            # a missing "data" key crashed instead of being skipped.
            pass
        else:
            # .get() avoids a KeyError for lookups that carry no filename
            # (urls/domains/ip_addresses).
            if results.get("meaningful_name"):
                # Original passed the name as a second positional arg, which
                # the logger silently dropped; interpolate it instead.
                logger.info(f"Filename: {results['meaningful_name']}")
            for engine, result in results["last_analysis_results"].items():
                if result["category"] == "malicious":
                    bad += 1
                    logger.error(
                        f"\u2718 {engine}: {result['category'].upper()}")
                else:
                    good += 1
            if bad == 0:
                logger.success(
                    f"\u2714 {good} engines deemed '{QRY}' as harmless\n"
                )  # nopep8
            else:
                logger.info(f"{bad} engines deemed '{QRY}' as malicious\n")
def send_mail(self):
    '''Send the contact-form notification email.

    Builds a subject/body from the validated form data (``cleaned_data``)
    and emails it to the site's working address.  Errors are logged and
    swallowed, never raised to the caller.
    '''
    data = self.cleaned_data
    try:
        subject = F"[MillworkPioneers Website] Contact Form Request, by {data['name']}"
        # Optional fields use .get() with a placeholder so a missing value
        # cannot raise KeyError while building the body.
        message = F"""Somebody send an contact form request from website and below is the info:
Name: {data['name']}
Email: {data['email']}
Phone: {data.get('phone', 'N/A')}
Message: {data.get('message', 'No message')}
"""
        # Sender and recipient are the same configured working mailbox.
        from_email = settings.WORKING_EMAIL
        to_email = settings.WORKING_EMAIL
        send_mail(subject, message, from_email, [to_email], fail_silently=False)
    except Exception as e:
        logger.error("[Send email ERROR]: {}, type:{}".format(e, type(e)))
    else:
        logger.info("Success send email from :{}, email:{}".format(
            data['name'], data['email']))
def map_maxmind(QRY):
    """Geolocate *QRY* with the local GeoLite2 database and save a folium map.

    :param QRY: ip address to place on the map
    '''Side effects: writes the map to ``ip_map_file`` when the address is
    found; logs a warning/notice otherwise.'''
    """
    try:
        # Reader holds an open file handle on the .mmdb database; the
        # original never closed it.  The context manager guarantees close.
        with geoip2.database.Reader(gl_file) as geo_reader:
            ip_map = Map([40, -5], tiles='OpenStreetMap', zoom_start=3)
            response = geo_reader.city(QRY)
            if response.location:
                lat = response.location.latitude
                lon = response.location.longitude
                popup = Popup(QRY)
                Marker([lat, lon], popup=popup).add_to(ip_map)
                ip_map.save(ip_map_file)
    except geoip2.errors.AddressNotFoundError:
        logger.warning(f"[-] Address {QRY} is not in the geoip database.")
    except FileNotFoundError:
        # Raised when the GeoLite2 db file is missing on disk.
        logger.info(f"\n[*] Please download the GeoLite2-City database file: ")
        print(" --> https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz")
        time.sleep(2)
def send_mail(self, attach_file):
    '''Custom send mail for career form.

    Builds the application email from ``cleaned_data``, optionally attaches
    the uploaded file, and sends it to the working mailbox.  Errors are
    logged and swallowed, never raised to the caller.

    :param attach_file: optional uploaded file to attach to the email
    '''
    data = self.cleaned_data
    try:
        email = data["email"]
        subject = F"[MillworkPioneers Website] Career application form, email:{data['email']}, name: {data['name']}"
        # Optional fields use .get() with placeholders so a missing value
        # cannot raise KeyError while building the body.
        message = F"""Somebody send an application from website and below is the info:
Name: {data['name']}
Email: {data['email']}
Phone: {data.get('phone', 'N/A')}
Installation: {data['installation']}
Union or UBC number: {data.get('union_ubc_number', 'N/A')}
Office: {data['office']}
Finish carpenter: {data['finish_carpenter']}
Years of experience: {data.get('years_of_experience', 'N/A')}
Message: {data.get('message', 'No message')}
"""
        from_email = settings.WORKING_EMAIL
        to_email = settings.WORKING_EMAIL
        if settings.DEBUG:
            logger.info(F"EMAIL MSG: {message}")
        sent_email = EmailMultiAlternatives(subject, message, from_email,
                                            [to_email])
        if attach_file:
            sent_email.attach(attach_file.name, attach_file.read(),
                              attach_file.content_type)
        sent_email.send()
        # FIX: removed a leftover `import code; code.interact(local=locals())`
        # debugging hook that dropped every production request into an
        # interactive console after sending.  Also dropped the unused
        # `name` local the original assigned but never read.
    except Exception as e:
        logger.error("[Send email ERROR]: {}, type:{}".format(e, type(e)))
    else:
        logger.info("Success receive email:{}".format(email))
def news_list(db: Session = Depends(utils.get_db), limit: int = Query(None, gt=0, description='Количество новостей')):
    """Return the stored news, first grabbing and persisting any feed
    items that are not in the database yet.

    :param db: database session (injected)
    :param limit: optional cap on the number of news items
    """
    feed = Grabber()
    for item in feed.news(limit):
        link = item.get('link')
        if get_news_by_link(db, link) is not None:
            continue  # already persisted -- nothing to do
        try:
            payload = feed.grub(link)
            published = dt.strptime(item.get('published'), '%d.%m.%Y %H:%M')
            create_news(db, NewsCreate(**payload, pub_date=published))
        except Exception as e:
            # A single bad item must not abort the whole listing.
            logger.info(
                f'Не возможно обработать новость: {item}. Error {e}')
    return get_all_news(db, limit)
def send_mail(self):
    '''Send the subscription-request notification email.

    Reads ``ask_email`` from the validated form data and notifies the
    working mailbox.  Errors are logged and swallowed, never raised.
    '''
    try:
        email = self.cleaned_data["ask_email"]
        subject = "[MillworkPioneers Website] ASK EMAIL: {}".format(email)
        # FIX: corrected "suscription" -> "subscription" in the body text.
        message = "Somebody ask for subscription with his/her email: {}".format(
            email)
        from_email = settings.WORKING_EMAIL
        to_email = settings.WORKING_EMAIL
        send_mail(subject, message, from_email, [to_email], fail_silently=False)
    except Exception as e:
        logger.error("[Send email ERROR]: {}, type:{}".format(e, type(e)))
    else:
        logger.info("Success receive email:{}".format(email))
def vt_run(self, scan_type, QRY):
    """Query VirusTotal and log per-engine verdicts for *QRY*.

    :param scan_type: API collection ('files', 'urls', 'domains', 'ip_addresses')
    :param QRY: the hash/url/domain/ip being looked up
    """
    url = f"{self.base_url}/{scan_type}/{QRY}"
    # dumps/loads round-trip kept: it normalizes whatever vt_connect returns.
    data = json.dumps(self.vt_connect(url))
    json_resp = json.loads(data)
    if json_resp:
        good = 0
        bad = 0
        results = json_resp['data']['attributes']
        # .get() replaces the original bare `except:` (which swallowed every
        # exception, including KeyboardInterrupt): only file scans carry a
        # meaningful_name, other collections simply skip the log line.
        if results.get('meaningful_name'):
            # Original passed the name as a second positional arg, which the
            # logger silently dropped; interpolate it instead.
            logger.info(f"Filename: {results['meaningful_name']}")
        for engine, result in results['last_analysis_results'].items():
            if result['category'] == 'malicious':
                bad += 1
                logger.error(f"\u2718 {engine}: {result['category'].upper()}")
            else:
                good += 1
        if bad == 0:
            logger.success(f"\u2714 {good} engines deemed '{QRY}' as harmless\n")
        else:
            logger.info(f"{bad} engines deemed '{QRY}' as malicious\n")
def send_mail(self, attach_file):
    '''Send the quote-request notification email.

    Builds the quote email from ``cleaned_data``, optionally attaches the
    uploaded file, and sends it to the working mailbox.  Errors are logged
    and swallowed, never raised to the caller.

    :param attach_file: optional uploaded file to attach to the email
    '''
    data = self.cleaned_data
    try:
        email = self.cleaned_data["email"]
        subject = "[MillworkPioneers Website] ASK QUOTE FORM: {}".format(
            email)
        # Optional fields use .get() with placeholders; required date fields
        # are formatted like "Monday, 01. January 2024".
        message = F"""Somebody send a quote form from website and below is the info:
Name: {data['name']}
Email: {data['email']}
Phone: {data.get('phone', 'N/A')}
Company: {data['company']}
Project: {data.get('project', 'N/A')}
Bid Due: {data['bid_due'].strftime("%A, %d. %B %Y")}
Start Date: {data['start_date'].strftime("%A, %d. %B %Y")}
Prevailing Wage: {data.get('prevailing_wage', 'N/A')}
Description: {data.get('description', 'No message')}
"""
        from_email = settings.WORKING_EMAIL
        to_email = settings.WORKING_EMAIL
        sent_email = EmailMultiAlternatives(subject, message, from_email,
                                            [to_email])
        # Attachment is optional; forward the uploaded file's own name/type.
        if attach_file:
            sent_email.attach(attach_file.name, attach_file.read(),
                              attach_file.content_type)
        sent_email.send()
    except Exception as e:
        logger.error("[Send email ERROR]: {}, type:{}".format(e, type(e)))
    else:
        logger.info("Success receive email:{}".format(email))
def main():
    """CLI entry point: parse arguments, run reputation/geolocation checks
    for a single query, and print/log the results.

    NOTE(review): the banner/epilog strings below were reconstructed from a
    whitespace-flattened source; verify layout against the original file.
    """
    banner = '''
   ________              __      ____
  / ____/ /_  ___  _____/ /__   / __ \___  ____
 / /   / __ \/ _ \/ ___/ //_/  / /_/ / _ \/ __ \\
/ /___/ / / /  __/ /__/ ,<    / _, _/  __/ /_/ /
\____/_/ /_/\___/\___/_/|_|  /_/ |_|\___/ .___/
                                       /_/
'''
    print(Fore.CYAN + banner + Style.RESET_ALL)
    print("Check IP and Domain Reputation")

    # ---[ Argument parsing ]------------------------------------------
    parser = argparse.ArgumentParser(
        description='Check IP or Domain Reputation',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''
    Geolocation Options
    --------------------
    freegeoip [freegeoip.live]  - free/opensource geolocation service
    maxmind   [dev.maxmind.com] - uses a geolite db file for geolocation

    * NOTE: Use of the VirusTotal option requires an API key.
      The service is "free" to use, however you must register
      for an account to receive an API key.''')
    # Reorder groups so "required arguments" prints before the optionals.
    optional = parser._action_groups.pop()
    required = parser.add_argument_group('required arguments')
    required.add_argument('-q', metavar='query',
                          help='query ip address or domain')
    optional.add_argument('--log', action='store_true',
                          help='log results to file')
    optional.add_argument('--vt', action='store_true',
                          help='check virustotal')
    # The three geolocation modes are mutually exclusive.
    group = optional.add_mutually_exclusive_group()
    group.add_argument('--fr', action='store_true',
                       help='use freegeoip for geolocation')
    group.add_argument('--mm', action='store_true',
                       help='use maxmind (geolite) for geolocation')
    group.add_argument('--mx', nargs='+', metavar='FILE',
                       help='geolocate multiple ip addresses or domains')
    parser._action_groups.append(optional)
    args = parser.parse_args()
    QRY = args.q

    # No arguments at all: show usage and exit.
    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    # Initialize utilities
    workers = Workers(QRY)

    print("\n" + Fore.GREEN + "[+] Running checks..." + Style.RESET_ALL)

    # ---[ Optional file logging ]-------------------------------------
    if args.log:
        if not os.path.exists('logfile'):
            os.mkdir('logfile')
        dt_stamp = datetime.now().strftime("%Y-%m-%d_%H%M%S")
        file_log = logging.FileHandler(f"logfile/logfile_{dt_stamp}.txt")
        file_log.setFormatter(
            logging.Formatter("[%(asctime)s %(levelname)s] %(message)s",
                              datefmt="%m/%d/%Y %I:%M:%S"))
        logger.addHandler(file_log)

    # ---[ Geolocation modes ]-----------------------------------------
    if args.fr:
        map_free_geo(QRY)
    if args.mm:
        geo_query_map(QRY)
    if args.mx:
        # Batch mode: geolocate every address in the given file, then exit.
        print(
            colored.stylize("\n--[ Processing Geolocation Map ]--",
                            colored.attr("bold")))
        multi_map(input_file=args.mx[0])
        print(colored.stylize("\n--[ GeoIP Map File ]--",
                              colored.attr("bold")))
        try:
            multi_map_file = Path('multi_map.html').resolve(strict=True)
        except FileNotFoundError:
            logger.info(
                "[-] Geolocation map file was not created or does not exist.")
        else:
            logger.info(f"[>] Geolocation map file saved to: {multi_map_file}")
        # NOTE(review): exits with status 1 even on success -- confirm intent.
        sys.exit(1)

    # ---[ VirusTotal mode ]-------------------------------------------
    if args.vt:
        print(
            colored.stylize("\n--[ VirusTotal Detections ]--",
                            colored.attr("bold")))
        # Prompt for (and persist) an API key if settings.yml lacks one.
        if not config['VIRUS-TOTAL']['api_key']:
            logger.warning(
                "Please add VirusTotal API key to the 'settings.yml' file, or add it below"
            )
            user_vt_key = input("Enter key: ")
            config['VIRUS-TOTAL']['api_key'] = user_vt_key
            with open('settings.yml', 'w') as output:
                yaml.dump(config, output)
        api_key = config['VIRUS-TOTAL']['api_key']
        virustotal = VirusTotalChk(api_key)
        # Pick the VT collection that matches the query's shape.
        if DOMAIN.findall(QRY):
            virustotal.vt_run('domains', QRY)
        elif IP.findall(QRY):
            virustotal.vt_run('ip_addresses', QRY)
        elif URL.findall(QRY):
            virustotal.vt_run('urls', QRY)
        else:
            virustotal.vt_run('files', QRY)
        print(
            colored.stylize("\n--[ Team Cymru Detection ]--",
                            colored.attr("bold")))
        workers.tc_query(qry=QRY)
        sys.exit("\n")

    # ---[ Blacklist checks by query type ]----------------------------
    if DOMAIN.findall(QRY) and not EMAIL.findall(QRY):
        print(
            colored.stylize("\n--[ Querying Domain Blacklists ]--",
                            colored.attr("bold")))
        workers.spamhaus_dbl_worker()
        workers.blacklist_dbl_worker()
        print(
            colored.stylize(f"\n--[ WHOIS for {QRY} ]--",
                            colored.attr("bold")))
        workers.whois_query(QRY)
    elif IP.findall(QRY):
        # Check if cloudflare ip
        print(
            colored.stylize("\n--[ Using Cloudflare? ]--",
                            colored.attr("bold")))
        if workers.cflare_results(QRY):
            logger.info("Cloudflare IP: Yes")
        else:
            logger.info("Cloudflare IP: No")
        print(
            colored.stylize("\n--[ Querying DNSBL Lists ]--",
                            colored.attr("bold")))
        workers.dnsbl_mapper()
        workers.spamhaus_ipbl_worker()
        print(
            colored.stylize("\n--[ Querying IP Blacklists ]--",
                            colored.attr("bold")))
        workers.blacklist_ipbl_worker()
    elif NET.findall(QRY):
        print(
            colored.stylize("\n--[ Querying NetBlock Blacklists ]--",
                            colored.attr("bold")))
        workers.blacklist_netblock_worker()
    else:
        print(Fore.YELLOW +
              "[!] Please enter a valid query -- Domain or IP address" +
              Style.RESET_ALL)
        print("=" * 60, "\n")
        parser.print_help()
        parser.exit()

    # ---[ Results output ]-------------------------------
    print(colored.stylize("\n--[ Results ]--", colored.attr("bold")))
    TOTALS = workers.DNSBL_MATCHES + workers.BL_MATCHES
    BL_TOTALS = workers.BL_MATCHES
    if TOTALS == 0:
        logger.info(f"[-] {QRY} is not listed in any Blacklists")
    else:
        color_QRY = Fore.YELLOW + QRY + Style.BRIGHT + Style.RESET_ALL
        color_DNSBL_MATCHES = Fore.WHITE + Back.RED + str(
            workers.DNSBL_MATCHES) + Style.BRIGHT + Style.RESET_ALL
        color_BL_TOTALS = Fore.WHITE + Back.RED + str(
            BL_TOTALS) + Style.BRIGHT + Style.RESET_ALL
        print(
            f"[>] {color_QRY} is listed in {color_DNSBL_MATCHES} DNSBL lists and {color_BL_TOTALS} Blacklists\n"
        )

    # ---[ Geo Map output ]-------------------------------
    if args.fr or args.mm:
        print(colored.stylize("\n--[ GeoIP Map File ]--",
                              colored.attr("bold")))
        time_format = "%d %B %Y %H:%M:%S"
        try:
            ip_map_file = prog_root.joinpath('geomap/ip_map.html').resolve(
                strict=True)
        except FileNotFoundError:
            logger.warning(
                "[-] Geolocation map file was not created/does not exist.\n")
        else:
            ip_map_timestamp = datetime.fromtimestamp(
                os.path.getctime(ip_map_file))
            logger.info(
                f"[>] Geolocation map file created: {ip_map_file} [{ip_map_timestamp.strftime(time_format)}]\n"
            )
import torch.utils.tensorboard as tensorboard from core.dataset import MBSpaceDataBase from core.dataset import MBSpaceDataset, CachedSubset from core.model import MBSpaceNAC from core.controller import MBSpaceController from core.engine.nac import train, evaluate from core.engine.policy_learning import train_controller from core.config import args from core.utils import logger, set_reproducible from core.utils import device if __name__ == "__main__": logger.info(args) set_reproducible(args.seed) writer = tensorboard.SummaryWriter(args.output) logger.info(f"Loading database from {args.data}...") nac = MBSpaceNAC(20 * 3 * 2, 512).to(device=device) max_epochs = args.nac_epochs if args.space == "mobilespace": database = MBSpaceDataBase.from_file(args.data) dataset = MBSpaceDataset(database=database, seed=args.seed) trainset = CachedSubset(dataset, list(range(args.trainset_size))) valset = CachedSubset( dataset, list(
def main():
    """CLI entry point (refactored variant): parse arguments, run
    reputation/geolocation checks for a single query, and log the results.

    NOTE(review): the banner/epilog strings below were reconstructed from a
    whitespace-flattened source; verify layout against the original file.
    """
    banner = r"""
   ________              __      ____
  / ____/ /_  ___  _____/ /__   / __ \___  ____
 / /   / __ \/ _ \/ ___/ //_/  / /_/ / _ \/ __ \
/ /___/ / / /  __/ /__/ ,<    / _, _/  __/ /_/ /
\____/_/ /_/\___/\___/_/|_|  /_/ |_|\___/ .___/
                                       /_/
"""
    print(f"{Fore.CYAN}{banner}{Style.RESET_ALL}")
    print("Check IP and Domain Reputation")

    # ---[ Argument parsing ]------------------------------------------
    parser = argparse.ArgumentParser(
        description="Check IP or Domain Reputation",
        formatter_class=argparse.RawTextHelpFormatter,
        epilog="""
    Options
    --------------------
    freegeoip [freegeoip.live]  - free/opensource geolocation service
    virustotal [virustotal.com] - online multi-antivirus scan engine

    * NOTE: Use of the VirusTotal option requires an API key.
      The service is "free" to use, however you must register
      for an account to receive an API key.""",
    )
    # Reorder groups so "required arguments" prints before the optionals.
    optional = parser._action_groups.pop()
    required = parser.add_argument_group("required arguments")
    required.add_argument("query", help="query ip address or domain")
    optional.add_argument("--log", action="store_true",
                          help="log results to file")
    optional.add_argument("--vt", action="store_true",
                          help="check virustotal")
    # The geolocation modes are mutually exclusive.
    group = optional.add_mutually_exclusive_group()
    group.add_argument("--fg", action="store_true",
                       help="use freegeoip for geolocation")
    group.add_argument("--mx", nargs="+", metavar="FILE",
                       help="geolocate multiple ip addresses or domains")
    parser._action_groups.append(optional)
    args = parser.parse_args()
    QRY = args.query

    # No arguments at all: show usage and exit.
    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    # Initialize utilities
    workers = Workers(QRY)

    print(f"\n{Fore.GREEN}[+] Running checks...{Style.RESET_ALL}")

    # ---[ Optional file logging ]-------------------------------------
    if args.log:
        if not os.path.exists("logfile"):
            os.mkdir("logfile")
        dt_stamp = datetime.now().strftime("%Y-%m-%d_%H%M%S")
        file_log = logging.FileHandler(f"logfile/logfile_{dt_stamp}.txt")
        file_log.setFormatter(
            logging.Formatter("[%(asctime)s %(levelname)s] %(message)s",
                              datefmt="%m/%d/%Y %I:%M:%S"))
        logger.addHandler(file_log)

    # ---[ Geolocation modes ]-----------------------------------------
    if args.fg:
        map_free_geo(QRY)
    if args.mx:
        # Batch mode: geolocate every address in the given file, then exit.
        print(
            colored.stylize("\n--[ Processing Geolocation Map ]--",
                            colored.attr("bold")))
        multi_map(input_file=args.mx[0])
        print(colored.stylize("\n--[ GeoIP Map File ]--",
                              colored.attr("bold")))
        try:
            multi_map_file = Path("multi_map.html").resolve(strict=True)
        except FileNotFoundError:
            logger.info(
                "[-] Geolocation map file was not created or does not exist.")
        else:
            logger.info(f"> Geolocation map file saved to: {multi_map_file}")
        # NOTE(review): exits with status 1 even on success -- confirm intent.
        sys.exit(1)

    # ---[ VirusTotal mode ]-------------------------------------------
    if args.vt:
        print(
            colored.stylize("\n--[ VirusTotal Detections ]--",
                            colored.attr("bold")))
        # Prompt for (and persist) an API key if settings.yml lacks one.
        if not config["VIRUS-TOTAL"]["api_key"]:
            logger.warning(
                "Please add VirusTotal API key to the 'settings.yml' file, or add it below"
            )
            user_vt_key = input("Enter key: ")
            config["VIRUS-TOTAL"]["api_key"] = user_vt_key
            with open("settings.yml", "w") as output:
                yaml.dump(config, output)
        api_key = config["VIRUS-TOTAL"]["api_key"]
        virustotal = VirusTotalChk(api_key)
        # Pick the VT collection that matches the query's shape.
        if DOMAIN.findall(QRY):
            virustotal.vt_run("domains", QRY)
        elif IP.findall(QRY):
            virustotal.vt_run("ip_addresses", QRY)
        elif URL.findall(QRY):
            virustotal.vt_run("urls", QRY)
        else:
            virustotal.vt_run("files", QRY)
        print(
            colored.stylize("\n--[ Team Cymru Detection ]--",
                            colored.attr("bold")))
        workers.tc_query(qry=QRY)
        sys.exit("\n")

    # ---[ Blacklist checks by query type ]----------------------------
    if DOMAIN.findall(QRY) and not EMAIL.findall(QRY):
        print(
            colored.stylize("\n--[ Querying Domain Blacklists ]--",
                            colored.attr("bold")))
        workers.spamhaus_dbl_worker()
        workers.blacklist_dbl_worker()
        print(
            colored.stylize(f"\n--[ WHOIS for {QRY} ]--",
                            colored.attr("bold")))
        workers.whois_query(QRY)
    elif IP.findall(QRY):
        # Check if cloudflare ip
        print(
            colored.stylize("\n--[ Using Cloudflare? ]--",
                            colored.attr("bold")))
        if workers.cflare_results(QRY):
            logger.info("Cloudflare IP: Yes")
        else:
            logger.info("Cloudflare IP: No")
        print(
            colored.stylize("\n--[ Querying DNSBL Lists ]--",
                            colored.attr("bold")))
        workers.dnsbl_mapper()
        workers.spamhaus_ipbl_worker()
        print(
            colored.stylize("\n--[ Querying IP Blacklists ]--",
                            colored.attr("bold")))
        workers.blacklist_ipbl_worker()
    elif NET.findall(QRY):
        print(
            colored.stylize("\n--[ Querying NetBlock Blacklists ]--",
                            colored.attr("bold")))
        workers.blacklist_netblock_worker()
    else:
        print(
            f"{Fore.YELLOW}[!] Please enter a valid query -- Domain or IP address{Style.RESET_ALL}"
        )
        print("=" * 60, "\n")
        parser.print_help()
        parser.exit()

    # ---[ Results output ]-------------------------------
    print(colored.stylize("\n--[ Results ]--", colored.attr("bold")))
    TOTALS = workers.DNSBL_MATCHES + workers.BL_MATCHES
    BL_TOTALS = workers.BL_MATCHES
    if TOTALS == 0:
        logger.info(f"[-] {QRY} is not listed in any Blacklists\n")
    else:
        _QRY = Fore.YELLOW + QRY + Style.BRIGHT + Style.RESET_ALL
        _DNSBL_MATCHES = f"{Fore.WHITE}{Back.RED}{str(workers.DNSBL_MATCHES)}{Style.BRIGHT}{Style.RESET_ALL}"
        _BL_TOTALS = f"{Fore.WHITE}{Back.RED}{str(BL_TOTALS)}{Style.BRIGHT}{Style.RESET_ALL}"
        logger.info(
            f"> {_QRY} is listed in {_DNSBL_MATCHES} DNSBL lists and {_BL_TOTALS} Blacklists\n"
        )

    # ---[ Geo Map output ]-------------------------------
    if args.fg or args.mx:
        print(colored.stylize("--[ GeoIP Map File ]--", colored.attr("bold")))
        time_format = "%d %B %Y %H:%M:%S"
        try:
            ip_map_file = prog_root.joinpath("geomap/ip_map.html").resolve(
                strict=True)
        except FileNotFoundError:
            logger.warning(
                "[-] Geolocation map file was not created/does not exist.\n")
        else:
            ip_map_timestamp = datetime.fromtimestamp(
                os.path.getctime(ip_map_file))
            logger.info(
                f"> Geolocation map file created: {ip_map_file} [{ip_map_timestamp.strftime(time_format)}]\n"
            )
def get_picture(self, tags, rating, from_user):
    """Fetch a picture matching *tags* and *rating*, then download it.

    Tries Konachan first, then Yandere, downloading the first hit.

    :param tags: picture tags (list is joined with '+')
    :param rating: picture rating
    :param from_user: type of chat message from
    :raise: PictureNotFoundException if picture not found
    :raise: TagsNotFoundException if request picture without tags
    :raise: EcchiDeniedException if request ecchi in non private chat
    :raise: HentaiDeniedException if request hentai on non private chat
    :rtype: None
    """
    logger.info("PictureGrabber get_picture()")
    # Non-safe ratings are only permitted in private chats.
    if not from_user and rating != "safe":
        if rating == "questionable":
            logger.info(
                "Request ecchi in non private chat. Denied. Current rating: '"
                + str(rating) + "'")
            raise EcchiDeniedException
        logger.info(
            "Request hentai on non private chat. Denied. Current rating: '"
            + str(rating) + "'")
        raise HentaiDeniedException
    logger.debug("Join tags list by '+'")
    if isinstance(tags, list):
        if not tags:
            logger.info(
                "Not found tags in request. Raise TagsNotFoundException")
            raise TagsNotFoundException
        tags = '+'.join(tags)
        logger.info("Tags in request: '" + str(tags) + "'")
    # First source: Konachan.
    url, picture_hash = self._kon.get_picture(tags, rating)
    logger.debug("Get url '" + str(url) + "' and hash '" +
                 str(picture_hash) + "'")
    if picture_hash != "":
        logger.info("Found picture in Konachan. Returning")
        self._download_picture(url)
        return
    logger.info("Not found picture in Konachan. Continue")
    # Fallback source: Yandere.
    url, picture_hash = self._ya.get_picture(tags, rating)
    logger.debug("Get url '" + str(url) + "' and hash '" +
                 str(picture_hash) + "'")
    if picture_hash != "":
        logger.info("Found picture in Yandere. Returning")
        self._download_picture(url)
        return
    logger.info(
        "Not found picture in Yandere. Raise PictureNotFoundException"
    )
    raise PictureNotFoundException()