def test_failed_inner_checkPuppy():
    """check_url must report failure when the outer page loads fine (200)
    but an inner resource it references answers with a server error (500).
    """
    foo_content = '<source src="http://test.com" >'
    with requests_mock.Mocker() as m:
        # Outer page responds OK and embeds a reference to the inner URL.
        m.get('http://200.7.6.134', status_code=200, text=foo_content)
        # The referenced inner resource fails.
        m.get('http://test.com', status_code=500)
        ok, _resp = check.check_url('http://200.7.6.134')
        # `assert not ok` instead of `ok == False` (flake8 E712).
        assert not ok
def check_puppy(bot, job):
    """Periodic job: poll PUPPY_ADDRESS and, on a status transition,
    notify every subscribed chat whether the site went up or down.

    Args:
        bot: telegram bot instance used to send messages.
        job: scheduler job handle (unused, required by the job-queue API).

    Side effects: mutates the module-global ``last_status_ok`` so only
    transitions (up->down, down->up) trigger notifications.
    """
    global last_status_ok
    current_status_ok, msg = check.check_url(PUPPY_ADDRESS)
    # Compare booleans by value: `is not` only happened to work because
    # CPython interns True/False; `!=` is the correct operator.
    if current_status_ok != last_status_ok:
        for chat_id in data['subs_ids']:
            if not current_status_ok:
                bot.send_message(chat_id = chat_id, text = "{} is down! error: {}".format(PUPPY_ADDRESS,msg))
            else:
                bot.send_message(chat_id = chat_id, text = "{} is up!".format(PUPPY_ADDRESS))
        last_status_ok = current_status_ok
    logging.info("Status check done!")
args = parser.parse_args()

# Optional self-check of the environment before doing any real work.
if args.check:
    from check import check_install
    check_install()

# default
url, file, burp = '', '', ''
# NOTE(review): `filter` shadows the builtin; presumably read later in the
# file, so left unrenamed here.
filter = False
# default use number of cpu-core as processes
num = cpu_count()
# default coroutine = 200
if args.url:
    # Single-URL mode: validate the URL and force a single process.
    from check import check_url
    url = args.url
    num = 1
    check_url(url)
if args.file:
    file = args.file
if args.burp:
    burp = args.burp
if args.filter:
    filter = args.filter
browser = ''
if args.browser:
    browser = args.browser
    # default 2 if use browser
    num = 2
    # Single-URL mode still uses one process even with a browser.
    if args.url:
        num = 1
# Explicit --process overrides every default above.
if args.process:
    num = args.process
def test_failed_checkPuppy():
    """check_url must report failure when the target URL itself answers
    with a client-error status (401)."""
    with requests_mock.Mocker() as m:
        m.get('http://200.7.6.134', status_code=401)
        ok, _resp = check.check_url('http://200.7.6.134')
        # `assert not ok` instead of `ok == False` (flake8 E712).
        assert not ok
def analyze():
    """Flask view: run the SEO analysis for the URL stored in the session,
    persist the (augmented) results as a timestamped JSON file, and render
    the results page.

    Returns:
        Rendered 'analyze.html' on success, 'error.html' when check_url()
        produced no results (e.g. the URL was missing or unreachable).
    """
    url = session.get('url', None)
    results = check_url(url)
    # Guard clause: bail out early instead of nesting the whole body.
    if not results:
        return render_template('error.html', title='Error')

    # Classification buckets produced by the analyzer.
    seo_optimized = results['Most probably optimized']
    seo_not_optimized = results['Most probably not optimized']
    seo_likely_optimized = results['Probably optimized']
    seo_likely_not = results['Probably not optimized']
    seo_unsure = results['Uncertain']
    # Which source list (ads / company / known / news / ...) matched.
    source_ads = results['classes']['source ads']
    source_company = results['classes']['source company']
    source_known = results['classes']['source known']
    source_news = results['classes']['source news']
    source_not_optimized = results['classes']['source not optimized']
    source_shop = results['classes']['source shop']
    # Individual on-page signals.
    description = results['description']
    site_title = results['title']
    identical = results['identical title']
    https = results['https']
    speed = results['speed']
    micros = results['micros']
    nofollow = results['nofollow']
    robots = results['robots']
    viewport = results['viewport']
    canonicals = results['canonicals']
    seo_plugins = results['plugins']['plugins']
    analytics_tools = results['plugins']['analytics']

    # Derive a filesystem-safe name from the URL's host: one translate()
    # pass replaces the previous chain of nine .replace() calls; the same
    # characters ( / ' " * . : ? , ; ) are stripped.
    main = '{0.netloc}/'.format(urlsplit(url))
    save_main = main.translate(str.maketrans('', '', "/'\"*.:?,;"))

    # `content` aliases `results`, so the Description text is also visible
    # to the template via `results` (preserves the original behavior).
    content = results
    content.update({'Description': 'Most probably optimized: The webpage is most probably optimized when an SEO tool either was found in the HTML code, it is on the list of news services, on the list of customers of SEO agencies, or is on the list with websites with ads, or has at least one microdata.\nProbably optimized: The webpage is not most probably optimized and meets one of the following criteria: (1) It is on the list of shops or business websites, (2) it uses Analytics Tools or advertisement, (3) it uses https, (4) it has SEO indicators in its ro-bots.txt, (5) the website has a sitemap, (6) a viewport is de-fined, (7) it has at least one nofollow link or canonical link, (8) its loading time is less than 3 seconds.\nMost probably not optimized: The main domain is on the list of non-optimized websites.\nProbably not optimized: The webpage is probably not optimized when it is not most probably optimized, it is not classified as not optimized, and it meets at least one of the following criteria: (1) the description tag is empty, (2) the ti-tle tag is empty or identical on subpages, (3) it has no Open Graph tags.'})

    results_folder = "app/results/"
    dt = datetime.now()  # current date and time
    now = dt.strftime("%m_%d_%Y_%H_%M_%S")
    # Renamed from `file` to avoid shadowing the builtin.
    out_base = results_folder + now + "_" + save_main
    json_file = out_base + ".json"
    # NOTE(review): assigned but not used below — presumably consumed by a
    # download route elsewhere; kept for parity with the original.
    json_file_download = now + "_" + save_main + ".json"
    with open(json_file, 'w+') as outfile:
        json.dump(content, outfile)

    return render_template('analyze.html', title='Results', url=url,
                           results=results,
                           optimized=seo_optimized,
                           not_optimized=seo_not_optimized,
                           m_optimized=seo_likely_optimized,
                           m_not_optimized=seo_likely_not,
                           uncertain=seo_unsure,
                           source_ads = source_ads,
                           source_company = source_company,
                           source_known = source_known,
                           source_news = source_news,
                           source_not_optimized = source_not_optimized,
                           source_shop = source_shop,
                           description=description,
                           site_title = site_title,
                           identical_title = identical,
                           speed=speed, https=https, robots=robots,
                           viewport=viewport, micros = micros,
                           nofollow = nofollow, canonicals=canonicals,
                           seo_plugins=seo_plugins,
                           analytics_tools=analytics_tools)