def test_generate_with_svg(self):
    tempdir = mkdtemp()
    json_file = os.path.join(tempdir, 'file.json')
    html_file = os.path.join(tempdir, 'file.html')
    svg_file = os.path.join(tempdir, 'file.svg')
    results = self.make_results()
    fake_request = FakeRequest()
    args = self.make_args()
    reporter = Reporter(bundle='git', results=results, options=args,
                        bundle_yaml='bundle content')
    with patch('reporter.requests.post', autospec=True,
               return_value=fake_request) as mock_r:
        reporter.generate(html_filename=html_file, json_filename=json_file)
        mock_r.assert_called_once_with('http://svg.juju.solutions',
                                       'bundle content')
    with open(json_file) as fp:
        json_content = json.loads(fp.read())
    with open(html_file) as fp:
        html_content = fp.read()
    with open(svg_file) as fp:
        svg_content = fp.read()
    self.assertIn('charm-proof', html_content)
    self.assertEqual(json_content["bundle"]["name"], 'git')
    self.assertEqual(json_content["test_id"], '1234')
    self.assertEqual(svg_content, 'svg content')
    rmtree(tempdir)

def generate_report(bundle, results, options, status, html_filename,
                    json_filename):
    bundle_yaml = get_bundle_yaml(status)
    reporter = Reporter(bundle=bundle, results=results, options=options,
                        bundle_yaml=bundle_yaml)
    reporter.generate(html_filename=html_filename, json_filename=json_filename)

def getUserData(user, linkTemplate, ids):
    '''
    Parameters:
        user: phone number or IMSI
        linkTemplate: "errors?phoneNumber=%s&date=%s" or
                      "errors?imsi=%s&date=%s"
    '''
    idList = [int(id) for id in ids]
    spec = {"_id": {"$in": idList}}
    fields = ['category', 'receive_time']
    reporter = Reporter()
    records = reporter.getDatas(spec, fields)
    data = {}
    for record in records:
        recTime = record['receive_time']
        category = record['category']
        # Bucket records per day, keyed by 'YYYYMMDD'.
        key = recTime.strftime('%Y%m%d')
        if key not in data:
            data[key] = {"live": 0,
                         "link": linkTemplate % (user, key),
                         "error": 0}
        if category == 'ERROR':
            data[key]['error'] += 1
        else:
            data[key]['live'] += 1
    return data

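# Illustrative only: a minimal sketch of the mapping getUserData returns,
# built by the loop above. The phone number, date, and counts below are
# hypothetical placeholder values, not taken from the original code.
example_user_data = {
    '20200123': {
        'live': 3,     # count of non-ERROR records received that day
        'error': 1,    # count of ERROR records received that day
        'link': 'errors?phoneNumber=13800000000&date=20200123',
    },
}
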
def latest():
    '''
    Get the latest n records.
    Parameters:
        count=n (max=100, default=20)
    Return:
        Headers:
            Content-Type: application/json
        Body:
            A JSON array; every element in the array is a JSON document.
            array: [{},{}]
            document: {"_id": id, "receive_time": time, "json_str": original}
    '''
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": "No token provided!"})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': 'No accessible products.'})
    limit = request.params.get('limit')
    reporter = Reporter()
    data = reporter.latest(limit)
    # Only return records the token's products grant access to.
    result = [record for record in data
              if record['product'] in accessible['products']]
    return wrapResults(result)

def download():
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": "No token provided!"})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': 'No accessible products.'})
    paging = getPagingParameters()
    conds = getFilterConditions()
    conds.pop('token')
    if 'android.os.Build.PRODUCT' in conds:
        if conds['android.os.Build.PRODUCT'] not in accessible['products']:
            return wrapResults({'error': 'You have no rights to view the data of product: %s'
                                % conds['android.os.Build.PRODUCT']})
    result = viewer.errors(accessible['products'], paging, conds)
    if result is None:
        return wrapResults({'error': {'code': 0, 'msg': 'Result is empty! Change the conditions and try again!'}})
    reporter = Reporter()
    ids = result['data']
    records = reporter.get_batch_report(ids)
    result['data'] = records
    # Serve the records as an Excel attachment.
    f = viewer.error_list_excel(records)
    response.set_header('Content-Type', 'application/vnd.ms-excel')
    response.set_header("Content-Disposition", "attachment;filename=errorlist.xls")
    return f

def main(args):
    if args.v:
        print args
    genes = read_genes(args)
    if args.v:
        print "%d genes" % len(genes)
    reporter = Reporter(Host(), genes)
    try:
        out = open(args.out_fn, "w")
    except (IOError, TypeError):
        # Fall back to stdout if the output file cannot be opened
        # (or no filename was given).
        out = sys.stdout
    out.write(reporter.report())
    for k in sorted(reporter.stats.keys()):
        out.write("%-20s: %s\n" % (k, reporter.stats[k]))
    if out != sys.stdout:
        out.close()
    return 0

def get_report(record_id):
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": {'code': 0, 'msg': "No token provided!"}})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': {'code': 0, 'msg': 'No accessible products.'}})
    reporter = Reporter()
    data = reporter.get_report(record_id)
    # sys_info keys are stored with ':' as separator; normalize back to '.'.
    sysInfo = data.pop('sys_info')
    si = {}
    for key in sysInfo:
        rKey = key.replace(':', '.')
        si[rKey] = sysInfo[key]
    if 'android.os.Build.PRODUCT' not in si:
        return wrapResults({'error': {'code': 0, 'msg': 'Permission denial.'}})
    if si['android.os.Build.PRODUCT'] not in accessible['products']:
        return wrapResults({'error': {'code': 0, 'msg': 'Permission denial.'}})
    data['sys_info'] = si
    return wrapResults(data)

def __init__(self, identity, src, sink, amount, start):
    Reporter.__init__(self, identity)
    self.source = src
    self.dest = sink
    # amount arrives in MByte: 1 MByte = 1000 KByte * 8 = 8000 kbits.
    self.size = float(amount) * 8000.0
    # start arrives in seconds: 1000 ms in a second.
    self.start_time = float(start) * 1000.0
    self.am_i_done = 0

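# A quick sanity check of the unit conversions above, with hypothetical
# values: a 2 MByte flow starting at 0.5 s becomes 16000 kbits of data
# starting at 500 ms of simulation time.
assert 2.0 * 8000.0 == 16000.0   # MByte -> kbits
assert 0.5 * 1000.0 == 500.0     # seconds -> ms
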
def error():
    '''
    About paging:
        request:  query/error?conditions&page=1&records=25&paging_token=xxx-xxx
        response: {"results": {"paging": {"totalrecords": 100,
                                          "totalpages": 10,
                                          "records": 10,
                                          "page": 1,
                                          "paging_token": "xxx-xxx"},
                               "data": [{}, {}]}}
    '''
    keyToken = 'token'
    if keyToken in request.params.keys():
        token = request.params.get(keyToken)
    else:
        return wrapResults({"error": "No token provided!"})
    accessible = auth.getAccessibleProducts(token)
    if 'error' in accessible:
        return wrapResults(accessible)
    if len(accessible['products']) == 0:
        return wrapResults({'error': 'No accessible products.'})
    paging = getPagingParameters()
    conds = getFilterConditions()
    conds.pop('token')
    if 'android.os.Build.PRODUCT' in conds:
        if conds['android.os.Build.PRODUCT'] not in accessible['products']:
            return wrapResults({'error': 'You have no rights to view the data of product: %s'
                                % conds['android.os.Build.PRODUCT']})
    result = viewer.errors(accessible['products'], paging, conds)
    if result is None:
        return wrapResults({'error': {'code': 0, 'msg': 'Result is empty! Change the conditions and try again!'}})
    reporter = Reporter()
    ids = result['data']
    records = reporter.get_batch_report(ids)
    result['data'] = records
    return wrapResults(result)

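# A minimal client sketch for the paged query documented in the docstring
# above. The host, token value, and filter conditions are placeholder
# assumptions; only the path, parameter names, and response shape come
# from the docstring.
import requests

resp = requests.get('http://example.com/query/error',
                    params={'token': 'my-token', 'page': 1, 'records': 25})
body = resp.json()
paging = body['results']['paging']    # per the docstring's response shape
records = body['results']['data']
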
def test_get_test_outcome(self):
    r = Reporter(None, None, None)
    results = [r.pass_str, r.pass_str]
    self.assertEqual(r.get_test_outcome(results), r.all_passed_str)
    results = [r.pass_str, r.fail_str]
    self.assertEqual(r.get_test_outcome(results), r.some_failed_str)
    results = [r.fail_str, r.fail_str]
    self.assertEqual(r.get_test_outcome(results), r.all_failed_str)

def __init__(self, registry, refresh_interval, filename=None):
    Reporter.__init__(self, registry, refresh_interval)
    self._filename = filename
    if filename:
        self._fh = open(filename, 'w')
    else:
        self._fh = sys.stdout
    return

def main(cmd, files, output, redis_address):
    logger = Logger(stream=(output == 'stream'))
    reporter = Reporter(**redis_address)
    perf_report = reporter.get_report(files)
    schedule = scheduler.make(perf_report, spawner.parallelism())
    perf_report = spawner.execute(cmd, schedule, logger)
    reporter.submit(perf_report)

def __init__(self, input_dir, output_dir=None, tmp_dir=None):
    self.input_dir = input_dir
    self.output_dir = output_dir
    self.tmp_dir = tmp_dir
    self.storer = Storer()
    self.name = self.__class__.__name__
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.repok.new_article()
    self.reper = Reporter(prefix="[%s - ERROR] " % self.name)
    self.reper.new_article()

def get_log(record_id):
    reporter = Reporter()
    log = reporter.get_log(record_id)
    if log is None:
        abort(404, "Cannot find the log!")
    else:
        response.set_header('Content-Type', 'application/x-download')
        response.set_header('Content-Disposition',
                            'attachment; filename=log_' + record_id + '.zip',
                            True)
        return log

def test_generate_html(self):
    r = Reporter(None, None, None)
    with NamedTemporaryFile() as html_file:
        html_output = r.generate_html(
            json_content=self.make_json(),
            output_file=html_file.name,
            past_results=[])
        content = html_file.read()
        self.assertRegexpMatches(html_output, 'AWS')
        self.assertRegexpMatches(html_output, 'Joyent')
        self.assertRegexpMatches(content, 'AWS')
        self.assertRegexpMatches(content, 'Joyent')

def test_generate(self):
    results = self.make_results()
    reporter = Reporter(bundle='git', results=results, options=None)
    with NamedTemporaryFile() as json_file:
        with NamedTemporaryFile() as html_file:
            reporter.generate(
                html_filename=html_file.name, json_filename=json_file.name)
            json_content = json_file.read()
            html_content = html_file.read()
    json_content = json.loads(json_content)
    self.assertIn('charm-proof', html_content)
    self.assertEqual(json_content["bundle"]["name"], 'git')

class Crawler():
    def __init__(self):
        self.vip_info = VipInfo()
        self.rss_links = self._load_rss_links()
        self.reporter = Reporter()

    def _load_rss_links(self):
        links = []
        directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "rss")
        for root, dirs, fs in os.walk(directory):
            for f in fs:
                links.append(os.path.join(root, f))
        return links

    def is_article_scanned(self, article):
        # Assuming all article update date-times are in GMT, this
        # comparison can be made directly on epoch seconds.
        epoch_article_time = mktime(article.updated_parsed)
        if (epoch_article_time >= CONF.last_script_run_date_time and
                epoch_article_time >= CONF.REPORT_START_DATE_TIME):
            return False
        return True

    def crawl(self):
        new_article_scanned = 0
        old_article_scanned = 0
        vip_article_found = 0
        # Note the start time of crawling for this scheduled run.
        crawl_start_time = time()
        for f in list(self.rss_links):
            text = open(f, "rb").read()
            urls = text.split(os.linesep)
            for url in urls:
                feed = feedparser.parse(url)
                for article in feed.entries:
                    if not self.is_article_scanned(article):
                        new_article_scanned += 1
                        if self.vip_info.is_there_vip_news(article):
                            vip_article_found += 1
                            self.reporter.update(article)
                    else:
                        old_article_scanned += 1
        # Update the crawl start time in the config.
        CONF.last_script_run_date_time = crawl_start_time
        # Log a summary of this run.
        print "new articles scanned:", new_article_scanned
        print "old articles skipped:", old_article_scanned
        print "vip articles found:", vip_article_found

def main(self, handlers):
    """
    This is the main handler. It prepares the ExecGraph, ContextStack and
    Reporter. TargetResolver.run is where the actual targets are executed.
    """
    assert handlers, "Need at least one static target to bootstrap"
    execution_graph = ExecGraph(handlers)
    stack = ContextStack()
    reporter = Reporter()
    self.run(execution_graph, stack, reporter)
    reporter.flush()

def __init__(self, identity, left, right, rate, delay, size):
    Reporter.__init__(self, identity)
    self.left_node = left
    self.right_node = right
    # Standardize units to kbit/ms, kbits, and ms.
    # 1000 kilobits in a megabit and 1000 ms in a second, so a rate in
    # Mbit/s is numerically identical in kbit/ms.
    self.capacity_kbit_per_ms = float(rate)
    self.ms_prop_delay = float(delay)  # Already standardized
    # 8 = conversion from bytes to bits; ignore the 1024-vs-1000 convention.
    self.kbits_in_each_buffer = 8.0 * float(size)
    self.left_buff = LinkBuffer(self.kbits_in_each_buffer)
    self.right_buff = LinkBuffer(self.kbits_in_each_buffer)
    self.bidirectional_queueing_delay_memory = (
        [-1] * constants.QUEUEING_DELAY_WINDOW)

def __init__(self, conf_file, sec_to_wait=10, max_iteration=6, timeout=30):
    with open(conf_file) as f:
        conf_json = json.load(f)
    self.headers = {
        "Authorization": "Bearer %s" % conf_json["access_token"],
        "Content-Type": "application/json"
    }
    self.id = "ORCID"
    self.name = "SPACIN " + self.__class__.__name__
    self.repok = Reporter(prefix="[%s - INFO] " % self.name)
    self.reper = Reporter(prefix="[%s - ERROR] " % self.name)
    self.__last_query_done = None
    self.sec_to_wait = sec_to_wait
    self.max_iteration = max_iteration
    self.timeout = timeout

def report_put():
    '''
    HTTP PUT to upload a data file.
    '''
    contentType = request.get_header('Content-Type')
    if contentType is None:
        abort(500, 'missing Content-Type')
    dataTypes = contentType.split(';')
    recordId = request.get_header('record-id')
    reporter = Reporter()
    result = reporter.report_put(dataTypes, recordId, request.body)
    if 'error' in result:
        abort(500, result['error'])
    else:
        return result

def test_generate_svg(self):
    tempdir = mkdtemp()
    svg_file = os.path.join(tempdir, 'foo')
    r = Reporter(None, None, None, bundle_yaml='foo')
    fake_request = FakeRequest()
    with patch('reporter.requests.post', autospec=True,
               return_value=fake_request) as mock_r:
        svg = r.generate_svg(svg_file)
    svg_path = "{}.svg".format(svg_file)
    with open(svg_path) as fp:
        content = fp.read()
    self.assertEqual(content, 'svg content')
    mock_r.assert_called_once_with('http://svg.juju.solutions', 'foo')
    self.assertEqual(svg, svg_path)
    rmtree(tempdir)

def __init__(self, args, task_id=None, xml_filename=None):
    parser = TestCaseParser()
    self.test_case_suites = parser.parse_from_csv(args)
    self.reporter = Reporter()
    self.feedback = Feedback(task_id)
    self.test_result = {}
    self.test_summary = {}

def get_reporter(from_user):
    reporter = Reporter.get(id=from_user.id)
    if reporter:
        reporter.first_name = from_user.first_name
        reporter.last_name = from_user.last_name
        reporter.username = from_user.username
    return reporter

def start(self):
    """Starts the crawling. Cleans up after the crawler is interrupted."""
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(self._setup())
    except KeyboardInterrupt:
        Reporter.info('Crawler stopping...')
    finally:
        loop.run_until_complete(self._close())
        # Next 2 lines are needed for aiohttp resource cleanup.
        loop.stop()
        loop.run_forever()
        loop.close()

def main():
    """Main driver."""
    args = parse_args()
    args.reporter = Reporter()
    life_cycle = check_config(args.reporter, args.source_dir)
    # Pre-alpha lessons should report without error.
    if life_cycle == "pre-alpha":
        args.permissive = True
    check_source_rmd(args.reporter, args.source_dir, args.parser)
    args.references = {}
    if not using_remote_theme(args.source_dir):
        args.references = read_references(args.reporter, args.reference_path)
    docs = read_all_markdown(args.source_dir, args.parser)
    check_fileset(args.source_dir, args.reporter, list(docs.keys()))
    check_unwanted_files(args.source_dir, args.reporter)
    for filename in list(docs.keys()):
        checker = create_checker(args, filename, docs[filename])
        checker.check()
    args.reporter.report()
    if args.reporter.messages and not args.permissive:
        exit(1)

def __init__(self):
    super(Nodule, self).__init__()
    # Take config from disk - first attempt, only need the basics.
    self.config = CONFIG
    # UID identifies this node on the manager and logger.
    self.UID = get_uid()
    self.client = mqtt.Client(client_id=self.UID)
    # ChannelMgr provides easy access to channel URLs.
    self.channel_mgr = ChannelMgr(self.UID)
    # Reporter manages sending reports/readings over MQTT, or buffering
    # them in case of connectivity issues.
    self.reporter = Reporter(self)
    # Easy-access reporting functions delegate to the reporter.
    self.publish = self.reporter.publish
    self.debug = self.reporter.debug
    self.log_error = self.reporter.log_error
    # TODO: update internal time if hw == espx
    # We've just woken, so try to refresh config from the source of truth.
    self.fetch_remote_config()
    # Load config from disk now that jobs, components etc. are up to date.
    self.config = load_config_from_disk()
    # Connect to the Mosquitto broker - used to send readings/reports and
    # receive triggers from the manager.
    self.start_mqtt()
    # Set up sensors/actuators/external components according to config.
    self.gpio_set = GPIO_Set(self)
    # Set up jobs/schedules according to config.
    self.jobs = JobList(self)
    # We will sometimes report this / track uptime.
    self.wake_time = datetime.now()
    return

def send_report(self, e):
    uid = self.rep_inp.value
    if len(uid) >= 7:
        rep = Reporter(self.hook,
                       self.profile_manager.prf_cfg['img_dir'] + "config.xml")
        res = rep.send_report(uid)
        if res:
            self._show_info("Thanks for Reporting!")
            self.rep_img.image = toga.Image("resources/logo.png")
            self.rep_inp.value = ""
        else:
            self._throw_error("Player with ID {} doesn't exist!".format(uid))
            self.rep_img.image = toga.Image("resources/logo.png")
            self.rep_inp.value = ""

def setup(self):
    self.reporter = Reporter(self.host, self.min_wait, self.max_wait,
                             "pagination")
    locust.events.request_success += self.reporter.request_success
    locust.events.request_failure += self.reporter.request_failure
    locust.events.hatch_complete += self.reporter.hatch_complete
    locust.events.quitting += self.reporter.stop

def main():
    arg_parser = get_arg_parser()
    args = arg_parser.parse_args()
    booker = Booker()
    if args.email and args.password:
        booker.login_to_page(args.email, args.password)
    else:
        booker.login_to_page()
    if args.output:
        Reporter.set_output_file(args.output)
    booker.book_session()

def main():
    """Run the graph generation script."""
    import os
    import sys
    from optparse import OptionParser

    from gmxtree import GromacsTree
    from reporter import Reporter

    parser = OptionParser()
    parser.add_option('-S', '--source-root',
                      help='Source tree root directory')
    parser.add_option('-B', '--build-root',
                      help='Build tree root directory')
    parser.add_option('--ignore-cycles',
                      help='Set file with module dependencies to ignore in cycles')
    parser.add_option('-o', '--outdir', default='.',
                      help='Specify output directory for graphs')
    parser.add_option('-q', '--quiet', action='store_true',
                      help='Do not write status messages')
    options, args = parser.parse_args()

    reporter = Reporter(quiet=True)

    if not options.quiet:
        sys.stderr.write('Scanning source tree...\n')
    tree = GromacsTree(options.source_root, options.build_root, reporter)
    if not options.quiet:
        sys.stderr.write('Reading source files...\n')
    tree.scan_files()
    if options.ignore_cycles:
        tree.load_cycle_suppression_list(options.ignore_cycles)
    if not options.quiet:
        sys.stderr.write('Reading Doxygen XML files...\n')
    tree.load_xml(only_files=True)

    if not options.quiet:
        sys.stderr.write('Writing graphs...\n')
    graphbuilder = GraphBuilder(tree)
    if not os.path.exists(options.outdir):
        os.mkdir(options.outdir)

    filename = os.path.join(options.outdir, 'module-deps.dot')
    graph = graphbuilder.create_modules_graph()
    with open(filename, 'w') as outfile:
        graph.write(outfile)

    # Skip some modules that are too big to make any sense.
    skippedmodules = ('gmxlib', 'mdlib', 'gmxana', 'gmxpreprocess')
    for module in tree.get_modules():
        if not module.get_name()[7:] in skippedmodules:
            filename = '{0}-deps.dot'.format(module.get_name())
            filename = os.path.join(options.outdir, filename)
            graph = graphbuilder.create_module_file_graph(module)
            with open(filename, 'w') as outfile:
                graph.write(outfile)

def test_get_locations_by_periods2():
    reporter = Reporter(output_path_template="output/test")
    prediction_index = np.array([('Japan', '', '2020-01-23'),
                                 ('Japan', '', '2020-01-27'),
                                 ('Japan', '', '2020-01-29'),
                                 ('Mainland China', 'Beijing', '2020-01-23'),
                                 ('Mainland China', 'Beijing', '2020-01-27')],
                                dtype=object)
    start_dates, end_dates, countries, regions = \
        reporter.get_locations_by_periods(prediction_index)
    assert start_dates == ['2020-01-23', '2020-01-29']
    assert end_dates == ['2020-01-27', '2020-01-29']
    assert countries == [{'Mainland China', 'Japan'}, {'Japan'}]
    assert regions == [{'', 'Beijing'}, {''}]

def test_get_past_test_results(self):
    temp = mkdtemp()
    files = [os.path.join(temp, 'git-2015-12-02T22:22:21-result.json'),
             os.path.join(temp, 'git-2015-12-02T22:22:21-result.html'),
             os.path.join(temp, 'git-2015-12-02T22:22:22-result.json'),
             os.path.join(temp, 'foo-2015-12-02T22:22:23-result.json'),
             os.path.join(temp, 'git-2015-12-02T22:22:25-result.json')]
    for f in files:
        with open(f, 'w') as fp:
            fp.write(self.make_json())
    r = Reporter('git', None, None)
    results, past_files = r.get_past_test_results(filename=files[0])
    self.assertItemsEqual(past_files, [files[2], files[4]])
    json_test_result = json.loads(self.make_json())
    self.assertItemsEqual(results, [json_test_result, json_test_result])
    rmtree(temp)

def main():
    sm = sys.argv[1].lower()
    begin_state = sys.argv[2].split(",")
    begin_state = tuple(map(int, begin_state))
    size = int(math.sqrt(len(begin_state)))
    hard_state = PuzzleState(begin_state, size)
    args = {
        "client_defined_expand": Utilities.expand,
        "client_defined_goal_state_check": Utilities.goal_state_check,
        "client_defined_hashed_state": Utilities.hashed_state,
        "client_defined_compute_state_cost": Utilities.compute_state_cost,
        "start_state_hash": sm,
        "start_state": hard_state,
    }
    if sm == "bfs":
        result = Algorithms.search_wrapper(**args, search_type="bfs")
    elif sm == "dfs":
        result = Algorithms.search_wrapper(**args, search_type="dfs")
    elif sm == "ast":
        result = Algorithms.search_wrapper(**args, search_type="astar")
    else:
        # Without a valid search type there is no result to report.
        print("Enter valid command arguments!")
        return
    Reporter.write_output(file_name="output.txt", **result)

def report_post():
    '''
    HTTP POST to upload a report string or data file.
    '''
    reporter = Reporter()
    contentType = request.get_header('Content-Type')
    if contentType is None:
        abort(500, 'missing Content-Type')
    dataTypes = contentType.split(';')
    recordId = request.get_header('record-id')
    result = reporter.report_post(dataTypes, recordId, request.body,
                                  request.json)
    if 'error' in result:
        abort(500, result['error'])
    else:
        return result

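# A minimal client sketch for the upload endpoints above. The host, path,
# file name, content type, and record id are placeholder assumptions; the
# 'Content-Type' and 'record-id' headers mirror what report_put and
# report_post read from the request.
import requests

with open('report.zip', 'rb') as fh:
    requests.put('http://example.com/report',
                 headers={'Content-Type': 'application/zip',
                          'record-id': 'some-record-id'},
                 data=fh)
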
def ids():
    '''
    Get the id(s) for the given criteria.
    Parameters:
        starttime=seconds
        endtime=seconds
    Return:
        Headers:
            Content-Type: application/json
        Body:
            A JSON document.
            document: {"start": id, "end": id}
            A start id of -1 indicates an error.
    '''
    reporter = Reporter()
    return reporter.ids()

def set_ticket_url(record_id, url):
    print "brstore_api.set_ticket_url(%s)" % record_id
    # A JSON body, if present, overrides the url argument.
    datatype = request.headers.get('Content-Type').split(';')[0]
    if datatype == 'application/json':
        data = request.json
        if "url" in data:
            url = data["url"]
            print "url=%s" % url
    if not url:
        abort(500, "No url parameter!")
    reporter = Reporter()
    return reporter.set_ticket_url(record_id, url)

def main():
    puzzle = sys.argv[1]
    output_csv_file_name = "output.txt"
    solver = "AC3"
    results = Sudoku.solve(puzzle, solver)
    # Fall back to backtracking search if AC-3 alone cannot solve the puzzle.
    if results is None:
        solver = "BTS"
        results = Sudoku.solve(puzzle, solver)
    # Write lines to the output file.
    Reporter.write_output(
        file_name=output_csv_file_name,
        content=" ".join([results, solver]),
        should_overwrite_file=True
    )

def main():
    input_csv_file_name = sys.argv[1]
    output_csv_file_name = sys.argv[2]
    # Input values are in the form of [feature_1, feature_2, label].
    input_values = Reader.csv(input_csv_file_name)
    # Track previous weights so the latest weights can be compared against
    # them to check for convergence.
    previous_weights = [0, 0, 0]
    weights = None
    Reporter.write_output(file_name=output_csv_file_name, content="",
                          should_overwrite_file=True)
    training_inputs = [[x[0], x[1]] for x in input_values]
    results = [x[2] for x in input_values]
    iterations = 0
    while previous_weights != weights:
        # Past the initial iteration, remember the current weights for the
        # next convergence comparison.
        if weights is not None:
            previous_weights = weights
        # weights is a list in the form [b (i.e. w_0), w_1, w_2].
        weights = PerceptronLearning.run(training_inputs=training_inputs,
                                         results=results,
                                         initial_weights=previous_weights,
                                         iterations=1)
        # Append this iteration's weights to the output file.
        Reporter.write_output(
            file_name=output_csv_file_name,
            content=','.join(map(str, [weights[1], weights[2], weights[0]])) + "\n",
        )
        # Create PNG images of the figures.
        Visualizer.draw_chart(input_values=input_values, weights=weights,
                              file_name="figures/figure_" + str(iterations))
        iterations += 1

def release_products():
    '''
    Intel request.
    '''
    imei = request.params.get("imei")
    ret = viewer.getLatestId(imei)
    if ret is None:
        return '-1'
    reporter = Reporter()
    for id in ret:
        data = reporter.getData(id)
        if 'ro:build:revision' in data['sys_info']:
            if 'unknown' not in data['sys_info']['ro:build:revision'].lower():
                return data['sys_info']['ro:build:revision']
    return '-1'

def set_ticket_url(record_id):
    print "brstore_api.set_ticket_url(%s)" % record_id
    # The url must be supplied in a JSON body; initialize it so the check
    # below fails cleanly when it is missing.
    url = None
    datatype = request.headers.get('Content-Type').split(';')[0]
    if datatype == 'application/json':
        data = request.json
        if "url" in data:
            url = data["url"]
            print "url=%s" % url
    if not url:
        abort(500, "No url parameter!")
    reporter = Reporter()
    return reporter.set_ticket_url(record_id, url)

def __init__(self):
    self.settings = Settings()
    self.logger = Logger(self.settings.logfile)
    self.reporter = Reporter(self.settings)
    self.setup = Setup(self.settings, self.logger)
    self.grader = Grader(self.settings, self.logger, self.setup)
    self.analyser = Analyser(self.settings, self.reporter, self.logger,
                             self.setup, self.grader)

def export():
    contentType = request.headers.get('Content-Type')
    if not contentType:
        return {'error': 'Missing Content-Type'}
    datatype = contentType.split(';')[0]
    if datatype == 'application/json':
        ids = request.json
        # Covers both a missing body and an empty id list.
        if not ids:
            return {'error': "Invalid parameter!"}
        reporter = Reporter()
        return reporter.export(ids)
    else:
        return {'error': "Invalid Content-Type!"}

def test_prepare_data_and_prediction_for_report():
    reporter = Reporter(output_path_template="output/test")
    prediction_name = 'Test'
    merged_data_and_predictions = reporter.prepare_data_and_prediction_for_report(
        ['predictions/' + prediction_name + '.csv'])
    assert (expected_merged_data_and_predictions.index.values ==
            merged_data_and_predictions.index.values).all()
    assert (expected_merged_data_and_predictions.columns.values ==
            merged_data_and_predictions.columns.values).all()
    assert (expected_merged_data_and_predictions.fillna(-1) ==
            merged_data_and_predictions.fillna(-1)).all().all()

def sync():
    '''
    Data sync interface for other databases.
    Parameters:
        startid=id (starts from 1)
        count=count
    Return:
        Headers:
            Content-Type: application/json
        Body:
            A JSON array; every element in the array is a JSON document.
            array: [{},{}]
            document: {_id, category, type, name, info, occur_time,
                       receive_time, uuid, sys_info: {}}
    '''
    reporter = Reporter()
    return reporter.sync()  # TODO: update the return data

def __init__(self, config, secrets):
    for strategy in config['strategies']:
        self.strategies.append(self.STRATEGIES[strategy](self))
    self.exchange = self.EXCHANGES[config['exchange']](
        secrets['keys'][config['exchange']]['key'],
        secrets['keys'][config['exchange']]['secret'])
    self.symbol = config['symbol'].lower()
    self.interval = config['interval']
    self.reporter = Reporter(config['starting_amount'], self.exchange.FEE)
    self.type = config['type']
    if self.type == 'live':
        self.paper_trade = config['paper_trade']
    if not self.exchange.check_symbol_exists(self.symbol):
        raise SystemExit(
            'ERROR: Symbol "{symbol}" not found on exchange "{id}"'.format(
                symbol=self.symbol, id=self.exchange.ID))

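# A minimal sketch of the config and secrets shapes the constructor above
# expects. Every key is read in that code; the concrete values ('macd',
# 'binance', the symbol, etc.) are hypothetical placeholders, not part of
# the original.
example_config = {
    'strategies': ['macd'],        # keys into self.STRATEGIES
    'exchange': 'binance',         # key into self.EXCHANGES
    'symbol': 'BTCUSDT',
    'interval': '1h',
    'starting_amount': 1000,
    'type': 'live',
    'paper_trade': True,           # only read when type == 'live'
}
example_secrets = {'keys': {'binance': {'key': '...', 'secret': '...'}}}
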
def setup(self):
    # Don't record metric information for stress tests at the moment;
    # Grafana is not set up to display them.
    if 'STRESS_TEST' not in env:
        self.reporter = Reporter(self.host, self.min_wait, self.max_wait,
                                 "general")
        locust.events.request_success += self.reporter.request_success
        locust.events.request_failure += self.reporter.request_failure
        locust.events.hatch_complete += self.reporter.hatch_complete
        locust.events.quitting += self.reporter.stop

def main():
    '''Run as the main program.'''
    if len(sys.argv) != 2:
        print(USAGE, file=sys.stderr)
        sys.exit(1)
    root_dir = sys.argv[1]
    index_file = os.path.join(root_dir, 'index.html')
    config_file = os.path.join(root_dir, '_config.yml')
    reporter = Reporter()
    check_config(reporter, config_file)
    check_unwanted_files(root_dir, reporter)
    with open(index_file, encoding='utf-8') as reader:
        data = reader.read()
    check_file(reporter, index_file, data)
    reporter.report()

def run():
    configuration = Configurator('QA')
    database_path = configuration.get_database()
    connector = Connector(database_path)
    reporter = Reporter()
    processor = Processor(configuration, connector, reporter)
    processor.execute()

def __init__(self, registry, host='0.0.0.0', port=9091):
    Reporter.__init__(self, registry)
    self._snapshot = {}
    reporter = self

    class ReporterView(FlaskView):
        route_base = '/'

        def __init__(self):
            FlaskView.__init__(self)
            return

        @route(route_base)
        def index(self):
            return HttpReporter.generate_output(reporter.snapshot)

        @route('/metrics/')
        def metrics(self):
            return HttpReporter.generate_output(
                reporter.snapshot.get('metrics'))

        @route('/health/')
        def health(self):
            return HttpReporter.generate_output(reporter.snapshot['health'])

    def start_app():
        app = Flask(__name__)
        ReporterView.register(app)
        app.run(host=host, port=port)
        return

    self._http_thread = Thread(
        name='metrics_web_server',
        target=start_app,
        verbose=True,
    )
    return

def test(testing_data, model, args, epoch, **kwargs):
    print("************* Start Testing ***************")
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    model.to(device)
    predict_length = args.prediction_length
    random_rotation_angle = args.random_rotation_angle
    reporter = Reporter("ws://localhost:8080") if args.remote_monitor else None
    total_loss = 0
    total_batches = 0
    for batch_id, batch in enumerate(
            tqdm(testing_data, desc="Testing on validation set...")):
        if random_rotation_angle is not None:
            batch = rotate_trajectories(batch, random_rotation_angle)
        batch = torch.from_numpy(batch).to(device).double()
        observation, target = (batch[:, :-(predict_length + 1)],
                               batch[:, -predict_length:])
        # Add one extra dimension indicating that the sequence is beginning.
        pad = torch.ones(*observation.shape[:-1], 1).to(device).double()
        observation = torch.cat((observation, pad), axis=2)
        # Run the observed part of the trajectory through the model first.
        _, hs, cs = model(observation)
        # Then predict the remaining steps from the resulting hidden state.
        predicted, hs, cs = model(
            torch.zeros(batch.size(0), predict_length, 3).to(device),
            (hs, cs))
        loss = SocialModelLoss(predicted, target)
        if reporter:
            reporter.report(model_name=kwargs['model_name'] + '_test',
                            loss=loss.item(), batch_id=batch_id, epoch=epoch)
        num_batches = observation.size(0)
        total_loss += loss.item() * num_batches
        total_batches += num_batches
    return total_loss / total_batches

def save_report(self):
    report = Reporter(self.info['option'], self.info, self.result, self)
    result = report.saver()
    msg = QMessageBox()
    msg.setWindowIcon(self.windowIcon())
    msg.setWindowTitle("3W-Scanner | Save Report")
    if not result:
        self.saved = True
        msg.setIcon(QMessageBox.Information)
        msg.setText('Your Report Was Saved Successfully!')
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()
    elif result == 'canceled':
        pass
    else:
        msg.setIcon(QMessageBox.Critical)
        msg.setText('Error occurred while saving report!\n' + result)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()