def serverLoop(self, opts):
    """Start the fuzzing server and run it until it stops or fails.

    On any failure (including Ctrl-C) the server socket is released and the
    exception is reported before being re-raised to the caller.
    """
    from net.server import Server
    srv = Server(opts.port)
    try:
        banner = "started v" + __version__ + "\n" + DB().onlineReport()
        reporter.Reporter().report("FuzzerServer", banner)
        self.reportDate = datetime.now()
        return srv.run(self.serverCallback)
    except (Exception, KeyboardInterrupt) as err:
        # Release the port before reporting, then propagate.
        srv.stop()
        reporter.Reporter().reportExc("FuzzerServer", err)
        raise
def test_reporter_have_password_create(faker):
    """Creating a Reporter from user credentials should yield an access token."""
    request_id = faker.uuid4()
    user_id = faker.email()
    password = faker.password()
    access_token = faker.uuid4()
    # First reply: the interactive confirmation prompt.
    prompt = (
        b'If you generate a new access token, your existing token will be '
        b'deleted. You will need to save your new access token within your'
        b' properties file. Do you still want to continue? (y/n): ')
    responses.add(
        responses.POST,
        sales_url,
        body=prompt,
        status=200,
        headers={
            'SERVICE_REQUEST_ID': request_id,
        },
    )
    # Second reply: the freshly generated token.
    token_xml = f'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n<ViewToken>\n <AccessToken>{access_token}</AccessToken>\n <ExpirationDate>2018-09-24</ExpirationDate>\n <Message>Your new access token has been generated.</Message>\n</ViewToken>\n'
    responses.add(responses.POST, sales_url, status=200, body=token_xml)
    rep = reporter.Reporter(user_id=user_id, password=password)
    assert type(rep) is reporter.Reporter
    assert rep.access_token
def redebug(patch_path, source_path):
    """Match patch hunks against a source tree and render an HTML report.

    Returns [npatch, nmatch, exact_nmatch, html, elapsed_seconds].
    """
    t0 = time.time()
    # Shared MIME-detection cookie used by the loaders below.
    common.magic_cookie = magic.open(magic.MAGIC_MIME)
    common.magic_cookie.load()
    # Collect patch hunks, then scan the source tree against them.
    patch = patchloader.PatchLoader()
    npatch = patch.traverse(patch_path)
    source = sourceloader.SourceLoader()
    nmatch = source.traverse(source_path, patch)
    # Render the report from the matched patch/source pair.
    exact_nmatch, html = reporter.Reporter(patch, source).output()
    common.magic_cookie.close()
    return [npatch, nmatch, exact_nmatch, html, time.time() - t0]
def test_account_number_is_passed():
    """The account number must appear (url-encoded) in the request body."""
    responses.add(responses.POST, sales_url, status=200)
    rep = reporter.Reporter(
        user_id='*****@*****.**', account='654321', password='******')
    result = rep._make_request('sales', 'getVendors', {})
    assert quote_plus("a=654321") in result.request.body
def test_error_handling():
    """An HTTP 400 from the service must surface as requests' HTTPError."""
    body = """
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Error>
    <Code>101</Code>
    <Message>Invalid command.</Message>
</Error>"""
    responses.add(responses.POST, sales_url, status=400, body=body)
    rep = reporter.Reporter(user_id='*****@*****.**', password='******')
    with pytest.raises(HTTPError):
        rep.access_token
def test_reporter_have_token_create(faker):
    """A Reporter built from an existing token keeps token and account as-is."""
    token = faker.uuid4()
    acct = faker.pyint()
    rep = reporter.Reporter(access_token=token, account=acct)
    assert type(rep) is reporter.Reporter
    assert rep.access_token == token
    assert rep.account == acct
def test_account_number_is_passed():
    """The account number must be url-encoded into the outgoing request body."""
    responses.add(responses.POST, sales_url, status=200)
    rep = reporter.Reporter(user_id="*****@*****.**", account="654321",
                            password="******")
    result = rep.make_request("sales", "getVendors", {})
    assert quote_plus("a=654321") in result.request.body
def run_process(empd):
    """Start a Reporter for one 'email,password' pair, optionally via a
    randomly chosen proxy from the module-level `proxies` list."""
    try:
        parts = empd.split(",")
        email = parts[0].strip()
        passwd = parts[1].strip()
        proxy = ''
        count = len(proxies)
        if count > 1:
            # Pick one proxy at random; skip blank entries.
            candidate = proxies[randint(0, count - 1)]
            if not candidate.isspace():
                proxy = candidate.strip()
                print("Using proxy = ", proxy)
        reporter.Reporter(email, passwd, proxy)
    except Exception as c:
        print("Exception (start_reporting) ", str(c))
def __init__(self):
    """Bootstrap the tool: environment, global services, UI and callbacks.

    Order matters: the environment must be set before any service is built.
    """
    # set environment
    self.set_environment()
    # initialize global services
    self.spore_globals = settings.SporeGlobals()
    self.spore_manager = manager.SporeManager()
    self.spore_reporter = reporter.Reporter()
    # self.spore_globals = self.parse_prefs()
    self.logger = self.get_logger()
    self.menu = self.build_menu()
    self.callbacks = self.add_callbacks()
    self.set_tracking_dir()
def test_vendor_numbers(faker):
    """Reporter.vendors must expose exactly the vendor numbers in the reply."""
    access_token = faker.uuid4()
    count = faker.random_int()
    vendor_numbers = [
        str(faker.random_int(800000, 899999)) for _ in range(count)
    ]
    vendor_tags = ''.join(
        '<Vendor>{num}</Vendor>'.format(num=num) for num in vendor_numbers)
    response_xml = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Vendors>
    {vendors}
</Vendors>
'''.format(vendors=vendor_tags)
    responses.add(responses.POST, sales_url, body=response_xml, status=200)
    rep = reporter.Reporter(access_token=access_token)
    assert rep.vendors == vendor_numbers
def test_reporter_unexpired_token(faker):
    """A token whose expiration date lies in the future is used unchanged."""
    user_id = faker.email()
    password = faker.password()
    access_token = faker.uuid4()
    expires = datetime.now().date() + timedelta(days=2)
    future_date_str = expires.strftime("%Y-%m-%d")
    token_xml = f'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n<ViewToken>\n <AccessToken>{access_token}</AccessToken>\n <ExpirationDate>{future_date_str}</ExpirationDate>\n</ViewToken>\n'
    responses.add(responses.POST, sales_url, status=200, body=token_xml)
    rep = reporter.Reporter(user_id=user_id, password=password)
    assert type(rep) is reporter.Reporter
    assert rep.access_token == access_token
def __init__(self):
    """Bootstrap the tool; UI-bound services are created only in GUI mode.

    Order matters: the environment must be set before any service is built.
    """
    # set environment
    self.set_environment()
    # initialize global services
    self.spore_globals = settings.SporeGlobals()
    # initialize ui only in gui mode
    # (mel returns the main-window name; it is empty/falsy in batch mode)
    windowed = mel.eval('$temp1=$gMainWindow')
    if windowed:
        self.spore_manager = manager.SporeManager()
        self.spore_reporter = reporter.Reporter()
    # self.spore_globals = self.parse_prefs()
    self.logger = self.get_logger()
    self.menu = self.build_menu()
    self.callbacks = self.add_callbacks()
    self.set_tracking_dir()
def test_reporter_have_password_create(faker):
    """Expired token flow: stale token -> confirmation prompt -> fresh token."""
    request_id = faker.uuid4()
    user_id = faker.email()
    password = faker.password()
    access_token = faker.uuid4()
    stale_token = faker.uuid4()
    past = datetime.now().date() - timedelta(days=2)
    past_str = past.strftime("%Y-%m-%d")
    # Reply 1: an already-expired token.
    responses.add(
        responses.POST,
        sales_url,
        status=200,
        body=f'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n<ViewToken>\n <AccessToken>{stale_token}</AccessToken>\n <ExpirationDate>{past_str}</ExpirationDate>\n</ViewToken>\n',
    )
    # Reply 2: the interactive confirmation prompt.
    responses.add(
        responses.POST,
        sales_url,
        body=(
            b"If you generate a new access token, your existing token will be "
            b"deleted. You will need to save your new access token within your"
            b" properties file. Do you still want to continue? (y/n): "),
        status=200,
        headers={
            "SERVICE_REQUEST_ID": request_id,
        },
    )
    # Reply 3: the freshly generated token.
    responses.add(
        responses.POST,
        sales_url,
        status=200,
        body=f'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n<ViewToken>\n <AccessToken>{access_token}</AccessToken>\n <ExpirationDate>2018-09-24</ExpirationDate>\n <Message>Your new access token has been generated.</Message>\n</ViewToken>\n',
    )
    rep = reporter.Reporter(user_id=user_id, password=password)
    assert type(rep) is reporter.Reporter
    assert rep.access_token == access_token
def main():
    """Screen an assembly for foreign sequence and emit a report.

    Pipeline: load assembly -> pseudoreads -> kraken classification ->
    phylogeny -> origin calls -> foreign-region indices -> report.
    """
    opts = arguments()
    handle, genome_name = handle_input(opts.assembly, opts.genome_name)
    query, pseudoread_counts = format_query(handle, genome_name, opts.fragment)
    classifications = classify(query, opts.cores, opts.kraken_database)
    phylogeny = parse_phylogeny(classifications)
    origins = determine_origin(phylogeny, pseudoread_counts, opts.organism)
    foreign_indices = identify_foreign(origins, opts.threshold, opts.fragment)
    reporter.Reporter(handle, foreign_indices, phylogeny, opts.fragment,
                      opts.nt_database, opts.cores, opts.top, opts.fast,
                      opts.cache).report()
def finish_up(self):
    """Move the last bit of collected data and then start the Analyzer,
    Comparer and Reporter"""
    print("Collection is Complete")
    # NOTE(review): shell=True with string-built Windows `move` commands; the
    # paths come from the `vars` config module — confirm they can never carry
    # untrusted input, otherwise this is shell-injectable.
    call("move " + vars.tmp_trgt_dir + "\\CollectedData*.* " +
         vars.outputdir + "\\", shell=True)
    #call("move " + vars.tmp_trgt_dir + "\\systeminfo.txt " + vars.outputdir + "\\SysInfo\\", shell=True)
    if vars.dumpm == "yes":
        # Memory images get their own MemInfo subfolder.
        call("mkdir " + vars.outputdir + "\\MemInfo\\", shell=True)
        call("move " + vars.tmp_trgt_dir + "\\physmem*.* " +
             vars.outputdir + "\\MemInfo\\", shell=True)
    if vars.gpg_enc == "yes":
        # Encrypted copies were written under a gpg subfolder.
        call("move " + vars.tmp_trgt_dir + "\\gpg\\CollectedData*.* " +
             vars.outputdir + "\\", shell=True)
    # Post-processing chain: analyze, optionally compare to baseline, report.
    analyze.Analyzer()
    if vars.basecomp_run == "yes":
        comparer.Comparer()
    report.Reporter()
    self.all_done()
def clientLoop(self, opts):
    """Discover the fuzzing server, fetch the assigned task, and run it.

    Any failure (including Ctrl-C) is reported and re-raised. (Python 2.)
    """
    # client job
    from net.client import Client
    try:
        self.dir = os.getcwd()
        self.client = Client(opts.port)
        print "discovering server..."
        res = self.client.discover(opts.hostId, opts.hostName)
        print "Server discovered at:", self.client.server
        hostId = res['hostId']
        self.client.hostId = hostId
        task = res['task']
        print "Host id:", hostId
        print "Task:", task
        if not task:
            raise Exception("Task is None")
        # First token of the task names the class to run; the rest are args.
        args = task.split()
        klass = self.importClass(args[0])
        return klass().run(args[1:], self.clientCallback)
    except (Exception, KeyboardInterrupt) as e:
        reporter.Reporter().reportExc("FuzzerClient", e)
        raise
def __init__(self, name, logger, blinkyAgent):
    """Delivery manager: owns nginx control, reporting, resource management
    and the config agent that feeds configurations through a queue."""
    self.__name = name
    self.__log = logger.createLogger(G_NAME_MODULE_DELIVERY,
                                     G_NAME_GROUP_DELIVERY_DELIVERY_MNG)
    # nginx control surface: lifecycle, status polling and commands.
    self.__nginxManager = nginx_manager.NginxManager("nginx-manager", logger)
    self.__ngxStatus = nginx_status.NginxStatus("nginx-status", logger)
    self.__ngxCmd = nginx_command.NginxCommand("nginx-cmd", logger)
    self.__reporter = reporter.Reporter("delivery-reporter", logger)
    self.__resourceManager = resource_manager.ResourceManager(
        "resource-manager", logger)
    self.__isStop = False
    self.__conf = None
    self.__blinkyAgent = blinkyAgent
    # Configurations arrive asynchronously on this queue from the config agent.
    self.__confQueue = Queue.Queue()
    self.__configAgent = config_agent.ConfigAgent("config-agent", logger,
                                                  blinkyAgent,
                                                  self.__confQueue)
    self.__duringConfig = False
    self.__timeService = utils.TimesSerivce()
    self.__ipTablesClient = delivery_volume.IpTablesClient(logger)
# Flask web app wiring: ROS package lookup, chat-audio storage, health checks
# and a monitor-log endpoint.
from configs import *
from subprocess import Popen
import datetime
from monitor import get_logs
from rospkg import RosPack
from pi_face_tracker.msg import Faces, Face
rp = RosPack()
import rospy
json_encode = json.JSONEncoder().encode
app = Flask(__name__, static_folder='../public/')
app.config['CHAT_AUDIO_DIR'] = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), os.pardir, 'chat_audio')
# Reporter runs the health checks declared next to this file.
rep = reporter.Reporter(
    os.path.dirname(os.path.abspath(__file__)) + '/checks.yaml')
config_root = rp.get_path('robots_config')
performance_dir = os.path.join(rp.get_path('performances'), 'robots')
app.add_url_rule('/monitor/logs/<loglevel>', None, get_logs, methods=['POST'])


def get_pub():
    # Lazily create and cache one DB connection on the Flask `g` context.
    db = getattr(g, '_database', None)
    if db is None:
        db = g._database = connect_to_database()
    return db


@app.route('/')
# NOTE(review): send_index's body is truncated in this chunk.
def send_index():
def dump(self, trac_base, range_start, range_end):
    """Pull SlimTimer entries for every reporting user into trac's datastore.

    :param trac_base: path to the trac environment
    :param range_start: start of the date range to fetch
    :param range_end: end of the date range to fetch
    (Python 2.)
    """
    #
    # Get the trac environment.
    #
    env = trac.env.open_environment(trac_base)
    if not env:
        print "Couldn't open trac environment at: %s" % trac_base
        return
    #
    # Setup a logger
    #
    log_file = env.config.get('slimtimer', 'report_log')
    if log_file:
        logger = logging.getLogger("ReportDumper")
        if not os.path.exists(os.path.dirname(log_file)):
            print "Couldn't find log file: %s. Aborting. " % log_file
            return
        handler = \
            logging.handlers.RotatingFileHandler(log_file, 'a', 20000, 5)
        formatter = \
            logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
    else:
        # Fall back to trac's own logger when no report log is configured.
        logger = env.log
    #
    # Check we have some basic stuff for connecting to ST
    #
    st_api_key = env.config.get('slimtimer', 'api_key')
    if not st_api_key:
        logger.warn("API key for connecting to SlimTimer was not found.")
        return
    #
    # Connect to the database
    #
    db_host = env.config.get('slimtimer', 'db_host')
    db_username = env.config.get('slimtimer', 'db_username')
    db_password = env.config.get('slimtimer', 'db_password')
    db_dsn = env.config.get('slimtimer', 'db_dsn')
    missing = []
    if not db_host:
        missing.append('host')
    if not db_username:
        missing.append('username')
    if not db_dsn:
        missing.append('database name')
    if len(missing):
        logger.warn("Missing %s for connecting to the datastore" %
                    ','.join(missing))
        return
    try:
        ts = time_store.TimeStore(host=db_host,
                                  user=db_username,
                                  password=db_password,
                                  database=db_dsn)
    # NOTE(review): bare except hides the real connection error.
    except:
        logger.error("Couldn't connect to database %s (%s@%s)" %
                     (db_dsn, db_username, db_host))
        return
    #
    # Load the list of users
    #
    users_config_file = os.path.join(trac_base, 'conf', 'users.xml')
    usrs = users.Users(users_config_file)
    #
    # Get the trac db to update
    #
    trac_db = env.get_db_cnx()
    if not trac_db:
        logger.warn("Could not fetch the trac database. "
                    "trac will not be updated.")
    #
    # Iterate through users for whom we should do reporting
    #
    for user in usrs.users.values():
        if user.get('report', False) and user.get('st_user', ''):
            st_username = user['st_user']
            st_password = user.get('st_pass', '')
            try:
                st = slimtimer.SlimTimerSession(st_username, st_password,
                                                st_api_key)
            except:
                logger.error(
                    "Could not log in to SlimTimer with username %s,"\
                    " password %s character(s) in length, and API "\
                    "key %s." % (st_username, len(st_password),
                                 st_api_key))
                return None
            rpt = reporter.Reporter(ts, st, usrs, logger, trac_db)
            rpt.fetch_entries(range_start, range_end)
    logger.debug("Dumped records from %s to %s" % (range_start, range_end))
def _reporter(self):
    """Build a Reporter wired to this instance's formatter."""
    formatter = self._formatter()
    return reporter.Reporter(formatter)
def components():
    """Construct and wire all robot subsystems; returns a name->component dict.

    Each subsystem receives a Config class whose class attributes are the
    already-constructed wpilib hardware objects and buttons it uses.
    """
    leftJoy = wpilib.Joystick(1)
    rightJoy = wpilib.Joystick(2)
    components = {}

    class DriveConfig(object):
        # Drive train: two Talons, quadrature encoders, PID per side.
        left_motors = wpilib.Talon(1)
        right_motors = wpilib.Talon(2)
        left_encoder = wpilib.Encoder(2, 3)
        left_PID_encoder = DistanceEncoder(left_encoder)
        left_PID_controller = wpilib.PIDController(0, 0, 0, left_PID_encoder,
                                                   left_motors)
        right_encoder = wpilib.Encoder(4, 5)
        right_PID_encoder = DistanceEncoder(right_encoder)
        right_PID_controller = wpilib.PIDController(0, 0, 0,
                                                    right_PID_encoder,
                                                    right_motors)
        robot_drive = DriveBase(left_motors, right_motors, True,
                                left_encoder, right_encoder,
                                left_PID_controller, right_PID_controller)
        #robot_drive = wpilib.RobotDrive(left_motors, right_motors)
        left_shifter = wpilib.DoubleSolenoid(1, 2)
        right_shifter = wpilib.DoubleSolenoid(3, 4)
        forward = wpilib.DoubleSolenoid.kForward
        reverse = wpilib.DoubleSolenoid.kReverse
        drive_joy = leftJoy
        align_button = Button(leftJoy, 6)
        front_left_photo_switch = wpilib.DigitalInput(14)
        front_right_photo_switch = wpilib.DigitalInput(12)
        back_left_photo_switch = wpilib.DigitalInput(13)
        back_right_photo_switch = wpilib.DigitalInput(11)
        # Buttons
        squared_drive_stick = Button(leftJoy, 1)
        shift_button = Button(leftJoy, 9)
    components['drive'] = drive.Drive(DriveConfig)

    class PickupConfig(object):
        pickup_motor = wpilib.Talon(4)
        solenoid = wpilib.DoubleSolenoid(5, 6)
        # TODO: figure out if forward is pickup-up or pickup-down.
        # Rename these variables once we know
        forward = wpilib.DoubleSolenoid.kForward
        reverse = wpilib.DoubleSolenoid.kReverse
        pickup_switch = Button(rightJoy, 3)
        motor_button = Button(rightJoy, 2)
        pickup_fast_preset = Button(rightJoy, 10)
        pickup_slow_preset = Button(rightJoy, 11)
        pass_slow_preset = Button(rightJoy, 12)
        pass_fast_preset = Button(rightJoy, 13)
    components['pickup'] = pickup.Pickup(PickupConfig)

    class ShooterConfig(object):
        motors = wpilib.Jaguar(3)
        shoot_button = Button(rightJoy, 1)
        manual_reset_button = Button(rightJoy, 4)
        low_shot_preset_button = Button(rightJoy, 8)
        high_shot_preset_button = Button(rightJoy, 7)
        reset_hall_effect = HallEffect(wpilib.DigitalInput(6))
        preset_hall_effect_counters = []
        # Counters count falling edges on the shooter's hall-effect sensors.
        low_shot_hall_effect_counter = wpilib.Counter()
        low_shot_hall_effect_counter.SetUpSource(7)
        low_shot_hall_effect_counter.SetUpSourceEdge(False, True)
        low_shot_hall_effect_counter.Start()
        high_shot_hall_effect_counter = wpilib.Counter()
        high_shot_hall_effect_counter.SetUpSource(8)
        high_shot_hall_effect_counter.SetUpSourceEdge(False, True)
        high_shot_hall_effect_counter.Start()
    components['shooter'] = shooter.Shooter(ShooterConfig)

    class UtilConfig(object):
        reload_code_button = Button(leftJoy, 8)
        compressor = wpilib.Compressor(1, 1)
    components['util'] = utilComponent.UtilComponent(UtilConfig)

    # The reporter observes every other subsystem's config.
    components['reporter'] = reporter.Reporter(DriveConfig, PickupConfig,
                                               ShooterConfig, UtilConfig)
    return components
def serverCallback(self):
    """Periodically push the DB online report.

    Throttled to at most once every self.reportTime minutes; a reportTime of
    zero disables reporting entirely.
    """
    minutes = (datetime.now() - self.reportDate).total_seconds() * 1.0 / 60
    if self.reportTime == 0 or minutes < self.reportTime:
        return
    self.reportDate = datetime.now()
    reporter.Reporter().report("FuzzerServer", DB().onlineReport())
def buildDump(link):
    # Fetch the page at *link* and return the generated news report. (Python 2.)
    print link
    my_reporter = reporter.Reporter()
    my_reporter.read(url=link)
    # NOTE(review): report_news() is called twice (once printed, once
    # returned); if it is not a pure function the two results may differ —
    # confirm intent.
    print my_reporter.report_news()
    return my_reporter.report_news()
""" # LIBRARIES import numpy as np import datetime # USER LIBRARIES import lib import logger import errors import reporter import base # Define instances Logger = logger.Logger("Profiles/BG.py") Reporter = reporter.Reporter() class PastBG(base.PastProfile): def __init__(self): """ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INIT ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ # Start initialization super(PastBG, self).__init__() # Initialize number of valid recent BGs self.n = 0
def test_finanical_vendors_and_regions(faker):
    """vendors_and_regions must mirror the Vendor/Region structure of the XML."""
    access_token = faker.uuid4()
    response_xml = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<VendorsAndRegions>
    <Vendor>
        <Number>80012345</Number>
        <Region>
            <Code>US</Code>
            <Reports>
                <Report>Financial</Report>
            </Reports>
        </Region>
        <Region>
            <Code>JP</Code>
            <Reports>
                <Report>Financial</Report>
            </Reports>
        </Region>
    </Vendor>
    <Vendor>
        <Number>80067891</Number>
        <Region>
            <Code>US</Code>
            <Reports>
                <Report>Financial</Report>
            </Reports>
        </Region>
    </Vendor>
</VendorsAndRegions>
'''
    responses.add(responses.POST, financial_url, body=response_xml, status=200)

    def region(code):
        # Every region in the fixture carries exactly one Financial report.
        return {'code': code, 'reports': ['Financial']}

    expected_result = {
        '80012345': {'id': '80012345', 'regions': [region('US'),
                                                   region('JP')]},
        '80067891': {'id': '80067891', 'regions': [region('US')]},
    }
    rep = reporter.Reporter(access_token=access_token)
    assert rep.vendors_and_regions == expected_result
def main():
    """CLI entry point for fuel_stat: build the DB, load CSV data, report."""
    parser = argparse.ArgumentParser(prog="fuel_stat",
                                     description="""Creating statistics based
            on generated tables""")
    parser.add_argument("--recreate", action="store_true",
                        help="re-create data base")
    parser.add_argument("-l", "--load", action="store_true",
                        help="paste data from files to tables")
    parser.add_argument("--log", action="store", default="info",
                        help="enable logging")
    parser.add_argument("-r", "--report", action="store_true", default=False,
                        help="generates a report on fuel use")
    parser.add_argument("-s", "--startdate", action="store",
                        default="1000-00-00",
                        help="set the start date for the report")
    parser.add_argument("-e", "--enddate", action="store",
                        default="9999-00-00",
                        help="set the end date for the report")
    parser.add_argument("-f", "--filename", action="store", default="report",
                        help="set the name of the report file")
    parser.add_argument("-g", "--gasname", action="append", default=None,
                        help="set gas names for the report")
    parser.add_argument("-i", "--info", action="store_true", default=True,
                        help="display information about refueling in a report")
    parser.add_argument("--statistic", action="store_true", default=True,
                        help="display statistics about refueling in a report")
    args = parser.parse_args()
    logging.basicConfig(
        filename="logging.log",
        level=args.log.upper(),
        format="%(asctime)s %(name)s [%(levelname)s] : %(message)s")
    logger = logging.getLogger("MAIN")
    # Create the refueling database
    db = database.DataBase("data/database.db", args.recreate)
    if args.recreate:
        # Create the refueling tables
        recreate(db, args)
    if args.load:
        # Insert data from the text file into the table
        db.insert_list("fuel", "name, price", db.csv_to_list("data/fuel.csv"))
        # Collect the transaction rows from the file
        data = []
        for row in db.csv_to_list("data/trans.csv"):
            # Parse each row: look up the station id by station name.
            # NOTE(review): the name is interpolated straight into the WHERE
            # clause — fine for trusted CSV input, but not parameterized.
            fuel_id = db.select("fuel", "id",
                                "name = '" + row[2] + "'").fetchone()
            if fuel_id is None:
                logger.warning("Unknown gas " "name")
                continue
            data.append(tuple([row[0], row[1], fuel_id[0], row[3]]))
        db.insert_list("trans", "dtime, odometer, fuel_id, amount", data)
        db.commit()
    db.disconnect()
    if args.report is True:
        r = reporter.Reporter(args.startdate, args.enddate, args.gasname,
                              args.filename)
        r.create_report(args.info, args.statistic)
from matplotlib.backends.backend_pdf import PdfPages
import model as CMEmodel
import reporter as CMEreporter

# Driver for CME cost model.
if __name__ == "__main__":
    # Cost model configured with a zero intercept and a spread term.
    cm = CMEmodel.CMECostModel(zero_intcpt=True, spread_term=True)
    # pdf = PdfPages('../../figs/model_diagnostic_default.pdf')
    pdf = PdfPages('../../figs/fig2.pdf')
    # Reporter writes figures into the PDF; closeonExit=True presumably
    # closes the PdfPages handle when it finishes — confirm in reporter.
    r = CMEreporter.Reporter('.', 'All', qc=True, pdf=pdf, closeonExit=True)
    cm.accept(r)
def setUp(self):
    """Create a fresh Reporter before each test case."""
    # TODO: refactor reporter class.
    self.reporter = reporter.Reporter()
    # (continues a try: block opened above this chunk)
    common.magic_cookie = magic.open(magic.MAGIC_MIME)
    common.magic_cookie.load()
except AttributeError:
    # Newer python-magic API has no magic.open(); use magic.Magic instead.
    common.magic_cookie = magic.Magic(mime=True, uncompress=True)
common.verbose_print('[-] initialized magic cookie\n')

# traverse patch files
patch = patchloader.PatchLoader()
npatch = patch.traverse(patch_path)
if npatch == 0:
    print('[!] no patch to be queried')
    sys.exit(1)

# traverse source files
source = sourceloader.SourceLoader()
nmatch = source.traverse(source_path, patch)
if nmatch == 0:
    print('[!] no match to be checked')
    sys.exit(1)

# generate a report
report = reporter.Reporter(patch, source)
exact_nmatch = report.output()
if exact_nmatch == 0:
    print('[!] no exact match found')
    sys.exit(1)

elapsed_time = time.time() - start_time
# NOTE(review): Python 2 print statement — this chunk mixes py2/py3 prints.
print '[+] %d matches given %d patches ... %.1fs' % (exact_nmatch, npatch, elapsed_time)
import reporter
import csv

# Build one Reporter per Technology company listed in NASDAQ.csv and run at
# most 5 of them concurrently: start the first window of 5, then start the
# next reporter each time the oldest running one signals driver_done.
with open('NASDAQ.csv') as company_file:
    # Column 5 is the sector; keep only Technology companies.
    company_list = list(csv.reader(company_file))

t_list = [reporter.Reporter(company)
          for company in company_list
          if company[5] == 'Technology']

list_len = len(t_list)
# Start up to the first five reporters (the original assumed at least 5
# entries and crashed on shorter lists).
for r in t_list[:5]:
    r.start()

# Index of the most recently started reporter.
counter = min(4, list_len - 1)
report_done = list_len == 0
while not report_done:
    if t_list[counter].driver_done:
        counter += 1
        if counter == list_len:
            # BUG FIX: the original called t_list[counter].start() before
            # this bounds check, raising IndexError on the final reporter.
            report_done = True
        else:
            t_list[counter].start()