def __init__(self, tp_url_real, context_path, context_file_path, base_iri,
             base_dir, info_dir, dataset_home, tmp_dir, triplestore_url=None):
    self.tp_url = triplestore_url
    self.base_iri = base_iri
    self.base_dir = base_dir
    self.info_dir = info_dir
    self.context_path = context_path
    self.dataset_home = URIRef(dataset_home)
    self.tmp_dir = tmp_dir
    self.tp_res = URIRef(tp_url_real)
    self.repok = Reporter(prefix="[DatasetHandler: INFO] ")
    self.reperr = Reporter(prefix="[DatasetHandler: ERROR] ")
    self.st = Storer(context_map={context_path: context_file_path},
                     repok=self.repok, reperr=self.reperr)
    self.st.set_preface_query(
        u"DELETE { ?res <%s> ?date } WHERE { ?res a <%s> ; <%s> ?date }" %
        (str(DatasetHandler.modified), str(DatasetHandler.dataset),
         str(DatasetHandler.modified)))
def __init__(self, stored_file, reference_dir, error_dir, stopper,
             headers={
                 "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; "
                               "rv:33.0) Gecko/20100101 Firefox/33.0"
             },
             sec_to_wait=10, max_iteration=6, timeout=30):
    self.headers = headers
    self.sec_to_wait = sec_to_wait
    self.max_iteration = max_iteration
    self.timeout = timeout
    self.stopper = stopper
    self.name = "BEE " + self.__class__.__name__
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.repok.new_article()
    self.reper = Reporter(prefix="[%s - ERROR] " % self.name)
    self.reper.new_article()
    self.rs = BibliographicReferenceStorer(stored_file, reference_dir, error_dir)
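# Note: the headers={...} default above is evaluated once at function
# definition, so the same dict is shared by every instance. A minimal sketch
# of the safer idiom, assuming nothing else about the class:

DEFAULT_HEADERS = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; "
                  "rv:33.0) Gecko/20100101 Firefox/33.0"
}

def __init__(self, stored_file, reference_dir, error_dir, stopper,
             headers=None, sec_to_wait=10, max_iteration=6, timeout=30):
    # Copy the default so callers can never mutate the shared dict.
    self.headers = dict(DEFAULT_HEADERS) if headers is None else headers
    # ... remaining attributes as in the original constructor ...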
def __init__(self, input_dir, output_dir=None, tmp_dir=None):
    self.input_dir = input_dir
    self.output_dir = output_dir
    self.tmp_dir = tmp_dir
    self.storer = Storer()
    self.name = self.__class__.__name__
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.repok.new_article()
    self.reper = Reporter(prefix="[%s - ERROR] " % self.name)
    self.reper.new_article()
def __init__(self, conf_file, sec_to_wait=10, max_iteration=6, timeout=30):
    with open(conf_file) as f:
        conf_json = json.load(f)
        self.headers = {
            "Authorization": "Bearer %s" % conf_json["access_token"],
            "Content-Type": "application/json"
        }
    self.id = "ORCID"
    self.name = "SPACIN " + self.__class__.__name__
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.reper = Reporter(prefix="[%s - ERROR] " % self.name)
    self.__last_query_done = None
    self.sec_to_wait = sec_to_wait
    self.max_iteration = max_iteration
    self.timeout = timeout
def __init__(self):
    super(Nodule, self).__init__()
    self.config = CONFIG  # Take config from disk - first attempt, only need basics
    self.UID = get_uid()  # UID identifies this node on the manager and logger
    self.client = mqtt.Client(client_id=self.UID)
    self.channel_mgr = ChannelMgr(self.UID)  # ChannelMgr provides easy access to channel URLs
    # Reporter manages sending reports/readings over mqtt, or buffering in
    # case of connectivity issues.
    self.reporter = Reporter(self)
    # Easy-access reporting functions delegate to reporter.
    self.publish = self.reporter.publish
    self.debug = self.reporter.debug
    self.log_error = self.reporter.log_error
    # TODO update internal time if hw==espx
    self.fetch_remote_config()  # We've just woken, so try to refresh config from source of truth
    # Load config from disk now that jobs, components etc are up-to-date.
    self.config = load_config_from_disk()
    # Connect to the Mosquitto broker - used to send readings/reports,
    # receive triggers from manager.
    self.start_mqtt()
    self.gpio_set = GPIO_Set(self)  # Set up sensors/actuators/external components according to config
    self.jobs = JobList(self)  # Set up jobs/schedules according to config
    self.wake_time = datetime.now()  # We will sometimes report this/track uptime
def prompt_for_report(self):
    """Prompt the user for what kind of analysis they want to perform."""
    report = Reporter()
    print("\nChoose an Analysis Option:")
    inp = input("0: Back"
                "\n1: Display Last # Rows"
                "\n2: Show Functioning Policies"
                "\n3: Employee Policy Count"
                "\n4: Column Error Count"
                "\n5: Find Errors"
                "\n6: Find Suspicious Activity"
                "\n7: Make WordCloud"
                "\n>> ")
    if inp == "0":
        self.finished_prompt = True
    elif inp == "1":
        self.report_wrapper_count("\nLast Rows", report.log_tail)
    elif inp == "2":
        self.report_wrapper_count("\nShow Functioning Policies", report.not_null)
    elif inp == "3":
        self.report_wrapper("\nEmployee Policy Count", report.employee_policy_count())
    elif inp == "4":
        self.report_wrapper("\nColumn Error Count", report.col_errors())
    elif inp == "5":
        self.report_wrapper("\nFind Errors", report.find_errors())
    elif inp == "6":
        self.report_wrapper("\nFind Suspicious Activity", report.find_suspicious_activity())
    elif inp == "7":
        self.make_word_cloud(report.log_df)
def getUserData(user, linkTemplate, ids):
    '''
    Parameters:
        user: phone number or IMSI
        linkTemplate: "errors?phoneNumber=%s&date=%s" or "errors?imsi=%s&date=%s"
    '''
    idList = []
    for id in ids:
        idList.append(int(id))
    spec = {"_id": {"$in": idList}}
    fields = ['category', 'receive_time']
    reporter = Reporter()
    records = reporter.getDatas(spec, fields)
    data = {}
    for record in records:
        recTime = record['receive_time']
        category = record['category']
        key = recTime.strftime('%Y%m%d')
        if key not in data:
            data[key] = {
                "live": 0,
                "link": linkTemplate % (user, key),
                "error": 0
            }
        if category == 'ERROR':
            data[key]['error'] += 1
        else:
            data[key]['live'] += 1
    return data
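# A worked example of the structure getUserData() returns, assuming two
# hypothetical records received on the same day, one of category 'ERROR'
# (the phone number and date are made up):
#
#   getUserData('13800000000', 'errors?phoneNumber=%s&date=%s', ['1', '2'])
#   -> {'20240101': {'live': 1,
#                    'link': 'errors?phoneNumber=13800000000&date=20240101',
#                    'error': 1}}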
def main(): """Main driver.""" args = parse_args() args.reporter = Reporter() life_cycle = check_config(args.reporter, args.source_dir) # pre-alpha lessons should report without error if life_cycle == "pre-alpha": args.permissive = True check_source_rmd(args.reporter, args.source_dir, args.parser) args.references = {} if not using_remote_theme(args.source_dir): args.references = read_references(args.reporter, args.reference_path) docs = read_all_markdown(args.source_dir, args.parser) check_fileset(args.source_dir, args.reporter, list(docs.keys())) check_unwanted_files(args.source_dir, args.reporter) for filename in list(docs.keys()): checker = create_checker(args, filename, docs[filename]) checker.check() args.reporter.report() if args.reporter.messages and not args.permissive: exit(1)
def test_reporter():
    from reporter import Reporter
    from view.window import BMCSWindow

    po = PullOutModel(n_e_x=100, k_max=500, w_max=1.0)
    po.tline.step = 0.01
    po.geometry.L_x = 500.0
    po.loading_scenario.set(loading_type='monotonic')
    po.cross_section.set(A_f=16.67, P_b=1.0, A_m=1540.0)
    po.run()

    w = BMCSWindow(model=po)
    po.add_viz2d('load function', 'Load-time')
    po.add_viz2d('F-w', 'Load-displacement')
    po.add_viz2d('field', 'u_C', plot_fn='u_C')
    po.add_viz2d('field', 'omega', plot_fn='omega')
    po.add_viz2d('field', 'eps_C', plot_fn='eps_C')
    po.add_viz2d('field', 's', plot_fn='s')
    po.add_viz2d('field', 'sig_C', plot_fn='sig_C')
    po.add_viz2d('field', 'sf', plot_fn='sf')
    po.add_viz2d('dissipation', 'dissipation')
    po.add_viz2d('dissipation rate', 'dissipation rate')

    r = Reporter(report_items=[po, w.viz_sheet])
    r.write()
    r.show_tex()
    r.run_pdflatex()
    r.show_pdf()
def test_generate_with_svg(self):
    tempdir = mkdtemp()
    json_file = os.path.join(tempdir, 'file.json')
    html_file = os.path.join(tempdir, 'file.html')
    svg_file = os.path.join(tempdir, 'file.svg')
    results = self.make_results()
    fake_request = FakeRequest()
    args = self.make_args()
    reporter = Reporter(bundle='git', results=results, options=args,
                        bundle_yaml='bundle content')
    with patch('reporter.requests.post', autospec=True,
               return_value=fake_request) as mock_r:
        reporter.generate(html_filename=html_file, json_filename=json_file)
        mock_r.assert_called_once_with('http://svg.juju.solutions',
                                       'bundle content')
    with open(json_file) as fp:
        json_content = json.loads(fp.read())
    with open(html_file) as fp:
        html_content = fp.read()
    with open(svg_file) as fp:
        svg_content = fp.read()
    self.assertIn('charm-proof', html_content)
    self.assertEqual(json_content["bundle"]["name"], 'git')
    self.assertEqual(json_content["test_id"], '1234')
    self.assertEqual(svg_content, 'svg content')
    rmtree(tempdir)
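# A minimal sketch of the FakeRequest test double that the patched
# requests.post returns above; the attribute values are inferred from the
# assertions (reporter.generate() presumably reads the SVG body off the
# response), so treat them as assumptions:

class FakeRequest(object):
    status_code = 200
    content = 'svg content'
    text = 'svg content'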
def __init__(self, args, task_id=None, xml_filename=None):
    parser = TestCaseParser()
    self.test_case_suites = parser.parse_from_csv(args)
    self.reporter = Reporter()
    self.feedback = Feedback(task_id)
    self.test_result = {}
    self.test_summary = {}
def setup(self):
    self.reporter = Reporter(self.host, self.min_wait, self.max_wait, "pagination")
    locust.events.request_success += self.reporter.request_success
    locust.events.request_failure += self.reporter.request_failure
    locust.events.hatch_complete += self.reporter.hatch_complete
    locust.events.quitting += self.reporter.stop
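# A minimal sketch of the handler interface this wiring assumes: Reporter
# only needs callables matching Locust's pre-1.0 event signatures. The class
# and method bodies here are illustrative, not the project's actual code.

class StubReporter(object):
    def __init__(self, host, min_wait, max_wait, label):
        self.label = label  # e.g. "pagination" or "general"

    def request_success(self, request_type, name, response_time,
                        response_length, **kwargs):
        pass  # record a timing metric tagged with self.label

    def request_failure(self, request_type, name, response_time,
                        exception, **kwargs):
        pass  # record a failure counter

    def hatch_complete(self, user_count, **kwargs):
        pass  # note that all simulated users have spawned

    def stop(self, **kwargs):
        pass  # flush any buffered metrics on shutdown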
def get_report(record_id):
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": {'code': 0, 'msg': "No token provided!"}})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': {'code': 0, 'msg': 'No accessible products.'}})
    reporter = Reporter()
    data = reporter.get_report(record_id)
    # print data
    sysInfo = data.pop('sys_info')
    si = {}
    for key in sysInfo:
        rKey = key.replace(':', '.')
        si[rKey] = sysInfo[key]
    if 'android.os.Build.PRODUCT' not in si:
        return wrapResults({'error': {'code': 0, 'msg': 'Permission denial.'}})
    if si['android.os.Build.PRODUCT'] not in accessible['products']:
        return wrapResults({'error': {'code': 0, 'msg': 'Permission denial.'}})
    else:
        data['sys_info'] = si
        return wrapResults(data)
def latest():
    '''
    Get the latest n records.
    Parameters:
        count=n (max=100, default=20)
    Return:
        Headers:
            Content-Type: application/json
        Body:
            A JSON array where every element is a JSON document.
            array: [{}, {}]
            document: {"_id": id, "receive_time": time, "json_str": original}
    '''
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": "No token provided!"})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': 'No accessible products.'})
    limit = request.params.get('limit')
    reporter = Reporter()
    data = reporter.latest(limit)
    result = []
    for record in data:
        if record['product'] in accessible['products']:
            result.append(record)
    return wrapResults(result)
def main(): """Run the graph generation script.""" import os import sys from optparse import OptionParser from gmxtree import GromacsTree from reporter import Reporter parser = OptionParser() parser.add_option('-S', '--source-root', help='Source tree root directory') parser.add_option('-B', '--build-root', help='Build tree root directory') parser.add_option( '--ignore-cycles', help='Set file with module dependencies to ignore in cycles') parser.add_option('-o', '--outdir', default='.', help='Specify output directory for graphs') parser.add_option('-q', '--quiet', action='store_true', help='Do not write status messages') options, args = parser.parse_args() reporter = Reporter(quiet=True) if not options.quiet: sys.stderr.write('Scanning source tree...\n') tree = GromacsTree(options.source_root, options.build_root, reporter) if not options.quiet: sys.stderr.write('Reading source files...\n') tree.scan_files() if options.ignore_cycles: tree.load_cycle_suppression_list(options.ignore_cycles) if not options.quiet: sys.stderr.write('Reading Doxygen XML files...\n') tree.load_xml(only_files=True) if not options.quiet: sys.stderr.write('Writing graphs...\n') graphbuilder = GraphBuilder(tree) if not os.path.exists(options.outdir): os.mkdir(options.outdir) filename = os.path.join(options.outdir, 'module-deps.dot') graph = graphbuilder.create_modules_graph() with open(filename, 'w') as outfile: graph.write(outfile) # Skip some modules that are too big to make any sense skippedmodules = ('gmxlib', 'mdlib', 'gmxana', 'gmxpreprocess') for module in tree.get_modules(): if not module.get_name()[7:] in skippedmodules: filename = '{0}-deps.dot'.format(module.get_name()) filename = os.path.join(options.outdir, filename) graph = graphbuilder.create_module_file_graph(module) with open(filename, 'w') as outfile: graph.write(outfile)
def __init__(self): """Инициализация класса. Создать атрибуты self.mailer, self.hh, self.reporter. """ self.mailer = Mailer() self.hh = Hh() self.reporter = Reporter()
def generate_report(bundle, results, options, status, html_filename,
                    json_filename):
    bundle_yaml = get_bundle_yaml(status)
    reporter = Reporter(bundle=bundle, results=results, options=options,
                        bundle_yaml=bundle_yaml)
    reporter.generate(html_filename=html_filename, json_filename=json_filename)
def __init__(self):
    self.settings = Settings()
    self.logger = Logger(self.settings.logfile)
    self.reporter = Reporter(self.settings)
    self.setup = Setup(self.settings, self.logger)
    self.grader = Grader(self.settings, self.logger, self.setup)
    self.analyser = Analyser(self.settings, self.reporter, self.logger,
                             self.setup, self.grader)
def main(): """Run the checking script.""" parser = OptionParser() parser.add_option('-S', '--source-root', help='Source tree root directory') parser.add_option('-B', '--build-root', help='Build tree root directory') parser.add_option( '-l', '--log', help='Write issues into a given log file in addition to stderr') parser.add_option('--ignore', help='Set file with patterns for messages to ignore') parser.add_option( '--ignore-cycles', help='Set file with module dependencies to ignore in cycles') parser.add_option('--check-ignored', action='store_true', help='Issue notes for comments ignored by Doxygen') parser.add_option('-q', '--quiet', action='store_true', help='Do not write status messages') parser.add_option('--exitcode', action='store_true', help='Return non-zero exit code if there are warnings') options, args = parser.parse_args() reporter = Reporter(options.log) if options.ignore: reporter.load_filters(options.ignore) if not options.quiet: sys.stderr.write('Scanning source tree...\n') tree = GromacsTree(options.source_root, options.build_root, reporter) tree.load_git_attributes() tree.load_installed_file_list() if not options.quiet: sys.stderr.write('Reading source files...\n') # TODO: The checking should be possible without storing everything in memory tree.scan_files(keep_contents=True) if options.ignore_cycles: tree.load_cycle_suppression_list(options.ignore_cycles) if not options.quiet: sys.stderr.write('Reading Doxygen XML files...\n') tree.load_xml() reporter.write_pending() if not options.quiet: sys.stderr.write('Checking...\n') check_all(tree, reporter, options.check_ignored) reporter.write_pending() reporter.report_unused_filters() reporter.close_log() if options.exitcode and reporter.had_warnings(): sys.exit(1)
def setup(self):
    # Don't record metric information for stress tests at the moment -
    # Grafana is not set up to display them.
    if 'STRESS_TEST' not in env:
        self.reporter = Reporter(self.host, self.min_wait, self.max_wait, "general")
        locust.events.request_success += self.reporter.request_success
        locust.events.request_failure += self.reporter.request_failure
        locust.events.hatch_complete += self.reporter.hatch_complete
        locust.events.quitting += self.reporter.stop
def main(cmd, files, output, redis_address):
    logger = Logger(stream=(output == 'stream'))
    reporter = Reporter(**redis_address)
    perf_report = reporter.get_report(files)
    schedule = scheduler.make(perf_report, spawner.parallelism())
    perf_report = spawner.execute(cmd, schedule, logger)
    reporter.submit(perf_report)
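# A hypothetical call, assuming redis_address is a dict of keyword arguments
# understood by Reporter (the key names are illustrative):
#
#   main(cmd='pytest', files=['tests/'], output='stream',
#        redis_address={'host': 'localhost', 'port': 6379})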
def run():
    configuration = Configurator('QA')
    database_path = configuration.get_database()
    connector = Connector(database_path)
    reporter = Reporter()
    processor = Processor(configuration, connector, reporter)
    processor.execute()
def __init__(self, max_scraping_workers=3):
    self.max_scraping_workers = max_scraping_workers
    self.session = None
    self.scrapeLock = Lock()
    self.reporter = Reporter()
    self.setting_list = SharedList()
    self.soup_data = SharedList()
    self.schedule_data = SharedList()
def __init__(self):
    self.resolve_target = True
    self.force_passives = False
    self._passive_tests_ = {}
    self._active_tests_ = {}
    self._targets_ = []
    self._protos_ = ["http", "https"]
    Scanner.logger.debug("Scanner initialized.")
    self.reporter = Reporter()
    self.modules = []
def __init__(self, base_iri, context_base, info_dir, entries, agent_id=None):
    if "doi" in entries:
        self.doi = entries["doi"].lower()
    else:
        self.doi = None
    if "pmid" in entries:
        self.pmid = entries["pmid"]
    else:
        self.pmid = None
    if "pmcid" in entries:
        self.pmcid = entries["pmcid"]
    else:
        self.pmcid = None
    if "url" in entries:
        self.url = entries["url"].lower()
    else:
        self.url = None
    if "curator" in entries:
        self.curator = entries["curator"]
    else:
        self.curator = None
    if "source" in entries:
        self.source = entries["source"]
    else:
        self.source = None
    if "source_provider" in entries:
        self.source_provider = entries["source_provider"]
    else:
        self.source_provider = None
    self.entries = entries["references"]
    self.name = "SPACIN " + self.__class__.__name__
    self.g_set = GraphSet(base_iri, context_base, info_dir)
    self.id = agent_id
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.repok.new_article()
    self.reperr = Reporter(prefix="[%s - ERROR] " % self.name)
    self.reperr.new_article()
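# The repeated "if key in entries" blocks above can be collapsed without
# changing behavior; a sketch for four of the fields, assuming entries is a
# plain dict:
#
#   self.doi = entries["doi"].lower() if "doi" in entries else None
#   self.url = entries["url"].lower() if "url" in entries else None
#   self.pmid = entries.get("pmid")
#   self.pmcid = entries.get("pmcid")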
def main(): """ Main driver. """ args = parse_args() reporter = Reporter() repo_url = get_repo_url(args.repo_url) check_labels(reporter, repo_url) reporter.report()
def execute(self):
    # Display controller parameters.
    self.output_params()

    # Initialize driver.
    initializer = DriverInitializer(debug=self.debug)
    driver = initializer.get_driver()

    # Log in and refine the search.
    login_agent = LoginAgent(driver, job_title=self.job_title,
                             job_location=self.job_location)
    login_agent.login()
    refine_agent = RefineAgent(driver, distance=self.distance,
                               experience=self.experience,
                               order_by_date=self.order_by_date)
    refine_agent.refine()

    # Create PageLooper object.
    looper = PageLooper(driver, limit=self.limit, duration=self.duration,
                        date_limit=self.date_limit)
    result = looper.loop()

    # Close driver and all windows.
    if not self.debug:
        driver.quit()

    # Get data from the PageLooper object.
    input_data, summaries, links = looper.get_all()

    if self.mode == 'browse':
        # Use the oracle to process input_data.
        oracle = Oracle()
        output = oracle.query(input_data)

        # Stop the timer and record the time elapsed in minutes.
        runtime_duration = round((time.time() - self.timer) / 60)

        # Use the reporter to process the output.
        reporter = Reporter()
        reporter.report(output, summaries, links, runtime_duration,
                        self.confidence_threshold)
    else:
        # Save input_data.
        with open(self.feature_path, 'w') as output:
            for entry in input_data:
                output.write(entry)
                output.write('<<END>>\n\n')

    # Return the driver.
    return driver
def main(): """Run the script in for debugging/Doxygen XML output inspection.""" import sys from optparse import OptionParser from reporter import Reporter parser = OptionParser() parser.add_option('-R', '--root-dir', help='Doxygen XML root directory') parser.add_option('-F', '--show-file', action='append', help='Show contents of given file') parser.add_option('-d', '--show-dir', action='append', help='Show contents of given directory') parser.add_option('-g', '--show-group', action='append', help='Show contents of given group') parser.add_option('-n', '--show-namespace', action='append', help='Show contents of given namespace') parser.add_option('-c', '--show-class', action='append', help='Show contents of given class') # TODO: Add option for other types, and make them work parser.add_option('-f', '--show-function', action='append', help='Show details of given function') options, args = parser.parse_args() reporter = Reporter() sys.stderr.write('Loading index.xml...\n') docset = DocumentationSet(options.root_dir, reporter) reporter.write_pending() sys.stderr.write('Loading details...\n') docset.load_details() reporter.write_pending() sys.stderr.write('Processing...\n') docset.merge_duplicates() reporter.write_pending() objlist = [] if options.show_file: objlist.extend(docset.get_files(tuple(options.show_file))) if options.show_dir: objlist.extend(docset.get_directories(tuple(options.show_dir))) if options.show_group: objlist.extend(docset.get_groups(tuple(options.show_group))) if options.show_namespace: # TODO: Replace file names with anonymous_namespace{filename} objlist.extend(docset.get_namespaces(tuple(options.show_namespace))) if options.show_class: objlist.extend(docset.get_classes(tuple(options.show_class))) if options.show_function: objlist.extend(docset.get_functions(tuple(options.show_function))) for obj in objlist: obj.show()
def get_sales_report(account, vendor, date_type, date, path):
    reporter = Reporter()
    try:
        result = reporter.asc_get_sales_report(account, vendor, date_type, date)
        filename = result.headers['filename']
        with open(os.path.join(path, filename), 'wb') as f:
            f.write(result.content)
        click.echo('Report saved to {}'.format(os.path.join(path, filename)))
    except Exception as exc:
        click.echo(str(exc))
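# get_sales_report() reads like the body of a click command; a hedged sketch
# of how it might be wired up (the command and option names are assumptions):
#
#   @click.command()
#   @click.option('--account', required=True)
#   @click.option('--vendor', required=True)
#   @click.option('--date-type', default='Daily')
#   @click.option('--date', required=True)
#   @click.option('--path', default='.')
#   def sales_report(account, vendor, date_type, date, path):
#       get_sales_report(account, vendor, date_type, date, path)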
def test_generate_html(self):
    r = Reporter(None, None, None)
    with NamedTemporaryFile() as html_file:
        html_output = r.generate_html(json_content=self.make_json(),
                                      output_file=html_file.name,
                                      past_results=[])
        content = html_file.read()
        self.assertRegexpMatches(html_output, 'AWS')
        self.assertRegexpMatches(html_output, 'Joyent')
        self.assertRegexpMatches(content, 'AWS')
        self.assertRegexpMatches(content, 'Joyent')