def handle_erratic(opt_input_uri, opt_crop_uri, cnt, detected_car):
    img_path = opt_input_uri.replace("*", str(cnt))
    crop_path = opt_crop_uri.replace("*", str(cnt))
    crop_img(img_path, crop_path, detected_car)
    plate, conf = run_alpr(crop_path)
    swerve.reset()
    reporter.report(img_path, 69, plate, conf)
def home():
    if request.method == 'POST':
        upload_file = request.files['file-upload']
        upload_file.save(os.path.join('uploads', upload_file.filename))
        report(upload_file)
        return render_template('index.html', message="Success!")
    return render_template('index.html', message="Upload")
def main():
    print_header("START", CONFIG, level=0)
    data = load(CONFIG)
    print(data.head())
    data.to_csv("./outputs/data.csv", index=False)
    describe(data, CONFIG)
    test(data, CONFIG)
    forecast(data, CONFIG)
    predict(data, CONFIG)
    report(data, CONFIG)
    print_header("DONE", CONFIG, level=0)
def run_app_crawler(apk_file, log_folder, port, device_id, device_version, country):
    """
    Args:
        country: if None, no country selection is needed
    """
    pkg_info = get_pkg_info_from_apk_file(apk_file)
    pkg_name = pkg_info['name']
    uninstall(device_id, pkg_name)
    install(device_id, device_version, apk_file)
    if country is not None:
        choose_country(port, device_version, pkg_name, country)
    os.system('bash ./run_case.sh {} {} {}'.format(log_folder, port, pkg_name))
    report(log_folder)
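# Hypothetical invocation of run_app_crawler above; every literal here (APK
# path, port, device id, Android version, country code) is an illustrative
# placeholder, not a value taken from this repo.
run_app_crawler(
    apk_file='./apks/example.apk',
    log_folder='./logs',
    port=4723,
    device_id='emulator-5554',
    device_version='10',
    country='US',  # pass None to skip country selection
)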
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("hibp_keyfile", type=str, help="HIBP API Key File")
    parser.add_argument("URL", type=str, help="Target URL")
    parser.add_argument("depth", type=str, help="Cewl Spidering Depth (2 or 3 recommended)")
    parser.add_argument("output_file", type=str, help="Output File")
    parser.add_argument("timeout", type=str, help="Webscraper Timeout")
    args = parser.parse_args()

    os.system('clear')
    print("\n----------------------------------------------")
    print("Hookshot Breach Comparison Tool")
    print("v1.1 - Stable")
    print("----------")

    # Run URL scraper
    account_dict = webwork.webscraper(args.URL, args.depth, args.timeout)

    # Run HIBP routine
    main_dict, blank_list = hibp.hibp_checker(args.hibp_keyfile, account_dict)

    # Re-run on blank/empty URLs
    deep_dict = {}
    fail_list = []

    # Check if we have blank/missing URLs
    if len(blank_list) > 0:
        # Re-scrape the URLs with more depth and more time for the command to complete
        account_dict = webwork.webscraper(blank_list, args.depth, (int(args.timeout) * 10))
        # Send the new accounts against HIBP
        deep_dict, fail_list = hibp.hibp_checker(args.hibp_keyfile, account_dict)
        # Update the main dict to be analyzed
        main_dict = main_dict | deep_dict

    # Run analysis
    analysis_dict = reporter.analyze(main_dict, blank_list)

    # Produce report
    reporter.report(analysis_dict, args.output_file)

    # Close
    print("\n----------------------------------------------")
    print("PROGRAM COMPLETE")
    print("------------------")
    return
def main(): """Main program. Parse arguments, set up event loop, run crawler, print report. """ args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(level=levels[min(args.level, len(levels) - 1)]) if args.iocp: from asyncio.windows_events import ProactorEventLoop loop = ProactorEventLoop() asyncio.set_event_loop(loop) elif args.select: loop = asyncio.SelectorEventLoop() asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() roots = {fix_url(root) for root in args.roots} c = crawler.Crawler( roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, ) try: loop.run_until_complete(c.crawl()) # Crawler gonna crawl. except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: reporter.report(c) c.close() # next two lines are required for actual aiohttp resource cleanup loop.stop() loop.run_forever() loop.close()
def main(): """Main program. Parse arguments, set up event loop, run crawler, print report. """ args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(level=levels[min(args.level, len(levels)-1)]) if args.iocp: from asyncio.windows_events import ProactorEventLoop loop = ProactorEventLoop() asyncio.set_event_loop(loop) elif args.select: loop = asyncio.SelectorEventLoop() asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() roots = {fix_url(root) for root in args.roots} c = crawler.Crawler(roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, ) try: loop.run_until_complete(c.crawl()) # Crawler gonna crawl. except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: reporter.report(c) c.close() # next two lines are required for actual aiohttp resource cleanup loop.stop() loop.run_forever() loop.close()
def report(batch, remote, debug, dependency=[]):
    params = cache.get("batch/%s/params" % batch, remote)
    logging.info("running reporter instance")
    if remote:
        k = cloud.call(reporter.report, batch, params, remote, debug,
                       _label="%s/report" % batch, _depends_on=dependency,
                       _type='c1', _max_runtime=30)
        logging.info("k %s" % k)
        return k
    else:
        result = reporter.report(batch, params, remote, debug)
        return result
def common(app_id):
    """
    Checks route and signature.
    Sends payload to modules.
    """
    if app_id not in REPOS:
        LOGGER.info("Unknown request: /%s", app_id)
        abort(404)
    app_config = REPOS.get(app_id)

    check_result = check_signature(app_config.get("MAIN"))
    if check_result == CertificateError.INVALID:
        LOGGER.warning("Request with invalid signature: /%s", app_id)
        return jsonify({"msg": "Invalid signature."}), 403
    elif check_result == CertificateError.NO_HEADER:
        LOGGER.warning("Request with no signature: /%s", app_id)
        return jsonify({"msg": "No signature."}), 403
    elif check_result != 0:
        LOGGER.error("Unknown error while checking signature")
        return jsonify({"msg": "Unknown signature error."}), 403

    event = request.headers.get("X-GitHub-Event")
    report(
        app_id,
        event,
        json.loads(request.data),
        app_config.get('MAIN')
    )
    # if "DEPLOY" in app_config and event == "push":
    #     deploy(json.loads(request.data), app_config.get("DEPLOY"))
    return "", 200
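# A hedged sketch of what the check_signature helper above could look like;
# this project's real implementation is not shown here, and treating
# app_config.get("MAIN") as the shared webhook secret is an assumption.
# GitHub signs the raw request body with HMAC-SHA256 and sends it in the
# X-Hub-Signature-256 header as "sha256=<hexdigest>".
import hashlib
import hmac

def check_signature_sketch(secret):
    header = request.headers.get("X-Hub-Signature-256")
    if header is None:
        return CertificateError.NO_HEADER
    expected = "sha256=" + hmac.new(
        secret.encode(), request.data, hashlib.sha256).hexdigest()
    if not hmac.compare_digest(expected, header):
        return CertificateError.INVALID
    return 0  # 0 means "signature OK", matching the caller's check_result != 0 test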
    if run_result.code != 0:
        return ChallengeError('Error in running the code', run_result)

    validate_result = challenge_execution.run_step('validate', 'scripts/validate.sh', timeout=10)
    challenge_execution.run_step('cleanup', 'scripts/cleanup.sh')
    if validate_result.code != 0:
        return ChallengeError('Error in validating the result', validate_result)

    return ChallengeResult(build_result, run_result, validate_result)


if __name__ == '__main__':
    with open('participants.yml') as participants_stream:
        participants = yaml.load(participants_stream, Loader=yaml.FullLoader)
    with open('challenges.yml') as challenges_stream:
        challenges = yaml.load(challenges_stream, Loader=yaml.FullLoader)

    run_id = int(time.time())
    for challenge in challenges:
        challenge_name = challenge['name']
        reporter.start_round(run_id, challenge_name)
        for p in participants:
            repository = p['repository']
            nickname = p['nickname']
            ce = ChallengeExecution(challenge_name, p['repository'])
            result = run_challenge(ce)
            reporter.report(nickname, challenge_name, run_id, result)
            print(result)
        reporter.finish_round(run_id, challenge_name)
    if run_result.code != 0:
        return ChallengeError('Error in running the code', run_result)

    validate_result = challenge_execution.run_step('bash', 'validate', 'scripts/validate.sh', timeout=10)
    if validate_result.code != 0:
        return ChallengeError('Error in validating the result', validate_result)
    challenge_execution.run_step('bash', 'cleanup', 'scripts/cleanup.sh')

    return ChallengeResult(build_result, run_result, validate_result)


if __name__ == '__main__':
    with open('participants.yml') as participants_stream:
        participants = yaml.load(participants_stream, Loader=yaml.FullLoader)
    with open('challenges.yml') as challenges_stream:
        challenges = yaml.load(challenges_stream, Loader=yaml.FullLoader)

    for challenge_dict in challenges:
        challenge = Challenge.from_dict(challenge_dict)
        round_id = int(time.time())
        reporter.start_round(round_id, challenge.name)
        for p in participants:
            repository = p['repository']
            nickname = p['nickname']
            ce = ChallengeExecution(challenge, p['repository'])
            result = run_challenge(ce)
            reporter.report(nickname, challenge.name, round_id, result)
            print(result)
        reporter.finish_round(round_id, challenge.name)
def info(message, sendReport=False):
    logAndWrite(getTimeStamp(), " | INFO | ", message)
    if sendReport:
        reporter.report('info', message)
def report(type, msg):
    reporter.report(type, msg, 'Backup Manager')
def create_toggles(cls, start_date=None, end_date=None):
    if not end_date:
        end_date = datetime.datetime.now(tz=tz.tzstr('UTC+03:30'))
    if not start_date:
        start_date = end_date - datetime.timedelta(days=30)
    if isinstance(start_date, str):
        start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    if isinstance(end_date, str):
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d')
    response = requests.get(
        TOGGLE_API_URL,
        params={
            'start_date': start_date.strftime('%Y-%m-%dT%H:%M:%S+03:30'),
            'end_date': end_date.strftime('%Y-%m-%dT%H:%M:%S+03:30')
        },
        auth=TOGGLE_AUTH)
    times_json = response.json()
    entries = []
    for time in times_json:
        entry_id = time.get('id')
        description = time.get('description')
        activity = TOGGLE_ACTIVITY_TAGS.get('CODE')
        tags = time.get('tags')
        remote = False
        if tags:
            if 'PM' in tags:
                continue
            elif 'No - PM' in tags:
                report('Skipping entry ("NO - PM" tag): ' + str(entry_id) +
                       ' ' + description, Color.WARNING)
                continue
            if TOGGLE_REMOTE_TAG in tags:
                remote = True
                tags.remove(TOGGLE_REMOTE_TAG)
            if len(tags) > 1:
                report('More than one tag provided: ' + str(tags),
                       Color.WARNING)
                report('Skipping entry ' + str(tags) + ': ' + str(entry_id) +
                       ' ' + description, Color.WARNING)
                continue
            elif len(tags) == 1:
                activity = TOGGLE_ACTIVITY_TAGS.get(tags[0])
                if not activity:
                    report('Undefined tag ' + tags[0], Color.WARNING)
                    report('Skipping entry (' + tags[0] + '): ' +
                           str(entry_id) + ' ' + description, Color.WARNING)
                    continue
        issue_description = re.search(
            r'#(?P<issue>\d+) *- *(?P<description>.*)', description)
        if issue_description:
            issue = issue_description.group('issue')
            description = issue_description.group('description')
        else:
            report('Skipping entry (issue id not found): ' + str(entry_id) +
                   ' ' + description, Color.WARNING)
            continue
        start = parser.parse(
            time.get('start')).astimezone(tz=tz.tzstr('UTC+03:30'))
        date = start.date().strftime('%Y-%m-%d')
        start = start.time().strftime('%H:%M')
        stop_time = time.get('stop')
        if not stop_time:
            report('Skipping entry (already running): ' + str(entry_id) +
                   ' ' + description, Color.WARNING)
            continue
        end = parser.parse(stop_time).astimezone(tz=tz.tzstr('UTC+03:30'))
        end = end.time().strftime('%H:%M')
        duration = int(time.get('duration'))
        m, s = divmod(duration, 60)
        h, m = divmod(m, 60)
        duration = "%d:%02d" % (h, m)
        entries.append(
            Toggle(entry_id, issue, duration, activity=activity, date=date,
                   start=start, end=end, description=description,
                   remote=remote))
    return entries
def summary(inputs, outputs):
    report(ECLIP, QC, READS, SAMPLES, TYPE)
def make(self, args):
    report("make!!")
def crit(message, sendReport=False):
    logAndWrite(getTimeStamp(), " | CRIT | ", message)
    if sendReport:
        reporter.report('crit', message)
def clean(self, args):
    report("clean!!")
def menu():
    choice = 0
    while choice == 0:
        choice = innerMenu()
        line()
        if choice == 1:
            workCollList = workColl.find({})
            for data in workCollList:
                print(data['_id'])
            print("been there done that")
        elif choice == 2:
            workedCollList = workedColl.find({})
            for data in workedCollList:
                print(data['_id'])
        elif choice == 3:
            wikiCollList = wikiColl.find({})
            for data in wikiCollList:
                print(data['_id'])
        elif choice == 4:
            searchCollList = searchColl.find({})
            for data in searchCollList:
                print(data['_id'])
        elif choice == 5:
            searchCollList = searchColl.find({})
            for data in searchCollList:
                print(data['_id'])
        elif choice == 6:
            webCollList = webColl.find({})
            for data in webCollList:
                print(data['tag'], " ")
        elif choice == 7:
            print("continue to wipe database (1/0) ")
            choice = int(input())
            if choice == 1:
                workColl.drop()
                workedColl.drop()
                wikiColl.drop()
                searchColl.drop()
                imgColl.drop()
                webColl.drop()
                print("your whole database is wiped")
            else:
                return True
        elif choice == 8:
            feed.menu()
        elif choice == 9:
            reporter.report()
        elif choice == 10:
            newPass1 = getpass.getpass(prompt='New Password: ', stream=None)
            newPass2 = getpass.getpass(prompt='Repeat New Password: ', stream=None)
            if newPass1 == newPass2:
                f = open('admin.txt', 'w+')
                f.write(newPass1)
                f.close()
                print("Password changed successfully")
            else:
                print("Both passwords don't match")
        else:
            return False
        line()
    return True
def submit_entries(start_date=None, end_date=None):
    with open(JSON_TEMPLATE_PATH) as template_file:
        json_template = json.loads(template_file.read())
    report('Fetching time entries from toggl.com', Color.INFO)
    toggles = Toggle.create_toggles(start_date, end_date)
    report('Submitting ' + str(len(toggles)) + ' entries to Arsh pm.',
           Color.INFO)
    for toggle in toggles:
        report(str(toggle), Color.HEADER, ' ')
        report('- Preparing...', end=' ')
        data = render_json(toggle, json_template)
        response = requests.post(API_URL, data=json.dumps(data),
                                 headers=JSON_HEADERS)
        if response.status_code != 201:
            report('Submission failed. Reason: %s' % response.reason,
                   Color.FAILURE)
            continue
        report('Submitted', Color.SUCCESS, ' ')
        toggle.add_tag()
        report('Tagged.', Color.SUCCESS)
def test_report_write_to_file(self):
    with tempfile.NamedTemporaryFile() as fh:
        reporter.report(fh.name)
        self.assertTrue(os.path.exists(fh.name))
        self.assertTrue(os.stat(fh.name).st_size > 0)
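# A minimal sketch (an assumption, not this project's actual reporter module)
# of a report(path) implementation the test above would accept: it only needs
# to leave a non-empty file at the given path.
def report(path):
    with open(path, 'w') as out:
        out.write('report: 0 findings\n')  # any non-empty payload passes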
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY_INV)
thresh2 = cv2.bitwise_not(thresh)

rows = 15
cols = 15

contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, 1)  # 1 == cv2.CHAIN_APPROX_NONE

# find the quadrilateral contour with the maximum area
max_area = -1
for cnt in contours:
    approx = cv2.approxPolyDP(cnt, 0.02 * cv2.arcLength(cnt, True), True)
    if len(approx) == 4:
        if cv2.contourArea(cnt) > max_area:
            max_area = cv2.contourArea(cnt)
            max_cnt = cnt
            max_approx = approx

# cut out the crossword region (resizing to rows*10 x cols*10 is disabled)
x, y, w, h = cv2.boundingRect(max_cnt)
cross_rect = thresh2[y:y + h, x:x + w]
# no resizing
# cross_rect = cv2.resize(cross_rect, (rows * 10, cols * 10))

values = RP.report(cross_rect, rows, cols, w, h)
GEN.generate(cross_rect, rows, cols, w, h, values, taskName)
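# Portability note (an assumption about the caller's OpenCV version, not part
# of the snippet above): cv2.findContours returns (contours, hierarchy) in
# OpenCV 2.x and 4.x but (image, contours, hierarchy) in OpenCV 3.x. A
# version-agnostic unpacking sketch:
found = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
contours, hierarchy = found if len(found) == 2 else found[1:]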
def measure():
    bssid = request.form.get('bssid')
    for i in range(4):
        reporter.report(bssid)
        sleep(1)
    return "ok"
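# Hypothetical client for the handler above, assuming it is mounted at
# POST /measure on a local Flask dev server (the route path and host are
# assumptions; only the 'bssid' form field comes from the snippet).
import requests

resp = requests.post('http://127.0.0.1:5000/measure',
                     data={'bssid': 'aa:bb:cc:dd:ee:ff'})
assert resp.text == "ok"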
def warn(message, sendReport=False):
    logAndWrite(getTimeStamp(), " | WARN | ", message)
    if sendReport:
        reporter.report('warn', message)