def main(modes: T.Set[str]) -> None:
    """Run each requested subsystem, selected by name in *modes*.

    Modules are imported lazily so only the requested subsystems pay
    their import cost. Unknown mode names are silently ignored.
    """
    logging.config.fileConfig(LOGGING_CONFIG, disable_existing_loggers=False)
    logger = logging.getLogger("manager")

    if "reporter" in modes:
        # Run report generator
        logger.info("Running reporter")
        import report
        report.main()

    if "inspector" in modes:
        # Run the inspector, defaulting to adding to SQL DB
        logger.info("Running inspector")
        import project_inspector
        project_inspector.main(tosql=True)

    if "puppeteer" in modes:
        # Run puppeteer - defaults to all volumes
        logger.info("Running puppeteer")
        import puppeteer
        puppeteer.main()

    if "users" in modes:
        # Run the user_reporter - defaults to all volumes
        logger.info("Running user reporter")
        import user_reporter
        user_reporter.main()

    if "splitter" in modes:
        # Run the group splitter module - defaults to upload to S3
        logger.info("Running group splitter")
        import group_splitter
        group_splitter.main()
def runtests(testlist, compileproc=1): failedtests= [] i=1 n_test=len(testlist) for test in testlist: setup.compile(test, compileproc) #Run and gather the output of the tests print "Running test " +test.testName+ "...\t(",i,"of",n_test,")" runtest(test) i=i+1 #perform the testing logic resultOfTest=checktest.main(test) report.main(test, resultOfTest) if (len(resultOfTest[0]) > 0 or len(resultOfTest[1]) > 0 or test.status > 0): failedtests.append(test.testName) return failedtests
def main():
    """
    Run all scripts to process the data, generate optimised networks and
    save figures and a formatted report for a local authority, as defined
    by the parameters in `config.yml`.
    """
    # Each stage module exposes a main() entry point; order matters:
    # data processing, then network optimisation, then figures, then report.
    pipeline = (
        sensors_urb_obs,
        networks_single_obj,
        networks_multi_objs,
        networks_two_objs,
        figs_demographics,
        figs_single_obj,
        figs_urb_obs,
        figs_multi_objs,
        figs_two_objs,
        report,
    )
    for stage in pipeline:
        stage.main()
parser.add_option("-l", "--long", action="store_false", default=True) parser.add_option("-i", "--hidestatus", help="hide verified/unverified status online", action="store_true", default=False) parser.add_option("-e", "--hidesupported", help="hide the support status in generated html pages", action="store_true", default=False) (opts, args) = parser.parse_args() if len(args) < 1: error("must specify an action") elif len(args) > 1: error("only 1 argument allowed") else: action = args[0] if action != "conform" and opts.app == "all": error("all is only a valid action for conform") try: report.main(action, opts.architecture, opts.app, opts.version, opts.build, opts.recurse, opts.long, opts.hidestatus, opts.hidesupported, opts.diff, opts.debug) except IOError, e: print e
"""
Backfilling many days of records. See backfill.sh
"""
import sys

import report

# The day count/offset arrives as the first CLI argument.
report.main(int(sys.argv[1]))  # wrstat
# report.main(int(sys.argv[1]) * 4)  # mpistat
def job():
    """Fire the alert, then regenerate the report."""
    alert()
    report.main()
import odg_hsi_analytic_wheel.analytics.utilities.binaryhadoop as binaryhadoop
import sys
import report

# Stream the blob-spectra partition out of HDFS.
stream = binaryhadoop.readFromHDFS('/user/steve/blobspectra-test-Scan_0000/part-00000')

cmrecs = []
for entry in stream:
    # Echo the raw record for debugging while we build the CSV rows.
    sys.stdout.write('%s\n' % repr(entry))
    fields = entry[2].split(',')
    # NOTE(review): %d applied to a float truncates toward zero — looks
    # deliberate, but confirm field 0 is meant to lose its fraction.
    cmrecs.append('%d, %i, %i, %i, %i, %i' % (float(fields[0]), 0,
                                              int(fields[3]), int(fields[5]),
                                              int(fields[4]), int(fields[6])))

report.main(cmrecs)
import sys

import report

if __name__ == '__main__':
    # Bug fix: `sys` was used without being imported, so this script
    # crashed with NameError before reaching report.main(). The exit
    # status now propagates report.main()'s return value as intended.
    sys.exit(report.main())
import odg_hsi_analytic_wheel.analytics.utilities.binaryhadoop as binaryhadoop
import sys
import report

# Read every record of the blob-spectra partition straight from HDFS.
records = binaryhadoop.readFromHDFS('/user/steve/blobspectra-test-Scan_0000/part-00000')

rows = []
for record in records:
    # Mirror each raw record to stdout as we go.
    sys.stdout.write('%s\n' % repr(record))
    cols = record[2].split(',')
    rows.append('%d, %i, %i, %i, %i, %i' % (float(cols[0]), 0,
                                            int(cols[3]), int(cols[5]),
                                            int(cols[4]), int(cols[6])))

# The first CLI argument is forwarded to the reporter with the rows.
report.main(rows, sys.argv[1])