def get_pass():
    password_feild.delete(0, END)
    if (length_feild.get() == ''):
        messagebox.showerror(
            "No value Entered", "Please Fill the Entry Fields"
        )  # This is to make a default password, this must be changed
    else:
        try:
            tem_len = int(length_feild.get())
            tem_spe = int(special_feild.get())
            tem_dig = int(digit_feild.get())
        except ValueError as e:
            messagebox.showerror("Input Error", "Please enter a number")
        else:
            data = ((tem_len - (tem_dig + tem_spe), tem_dig, tem_spe))  # Doing the calculation and processing
            length_feild.delete(0, END)
            digit_feild.delete(0, END)
            special_feild.delete(0, END)
            generate = Setup(data)  # This is from the class Setup which is imported and an instance is created
            pass_word = generate.get_password()  # Calling the method
            password_feild.insert(0, pass_word)
            pyperclip.copy(pass_word)  # This is for copying the password directly into the clipboard
            copied_status = Label(root, text="The password is copied to the clipboard",
                                  bg="yellow", fg="black")
            copied_status.grid(row=5)
def on_service_state_change(zeroconf, service_type, name, state_change):
    logger.info("Service %s of type %s state changed: %s" % (name, service_type, state_change))
    logger.info("State change: %s" % state_change)
    if state_change is ServiceStateChange.Added:
        info = zeroconf.get_service_info(service_type, name)
        if info:
            logger.info(" Address: %s:%d" % (socket.inet_ntoa(info.address), info.port))
            logger.info(" Weight: %d, priority: %d" % (info.weight, info.priority))
            logger.info(" Server: %s" % (info.server,))
            ip = ("%s:%d" % (socket.inet_ntoa(info.address), info.port))
            device = Setup(name, ip)
            device.register()
            if info.properties:
                logger.info(" Properties are:")
                for key, value in info.properties.items():
                    logger.info(" %s: %s" % (key, value))
            else:
                logger.info(" No properties")
        else:
            logger.info(" No info")
        logger.info('\n')
    elif state_change is ServiceStateChange.Removed:
        logger.info("Removing %s" % (name))
async def test_file_upload():
    NUM_PEERS = 10
    conn_matrix = np.zeros((NUM_PEERS, NUM_PEERS))
    np.fill_diagonal(conn_matrix[:, 1:], 1)
    np.fill_diagonal(conn_matrix[1:, :], 1)
    s = Setup(PEERSTER_ROOT, NUM_PEERS, conn_matrix, anti_entropy=2)
    s.run_all()
    await asyncio.sleep(.5)

    # Upload files
    files = []
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "w+") as f:
            to_file = "12345678" * 1000 + peerster.name + "12345678" * 1000
            files.append(to_file)
            f.write(to_file)
        peerster.upload_file(f"test{i}.txt")
    await asyncio.sleep(.5)
    await s.stop_all()

    # Assert files are uploaded
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "rb") as f:
            h = Tests._calc_hash(f)
        assert h in peerster.uploaded_files
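# Editorial note (not part of the original test): the two fill_diagonal calls above
# build a line-topology adjacency matrix, i.e. each peer is connected only to its
# immediate neighbours. A minimal standalone sketch of what they produce, assuming
# only numpy, for four peers:
import numpy as np

example_matrix = np.zeros((4, 4))
np.fill_diagonal(example_matrix[:, 1:], 1)  # superdiagonal: peer i -> peer i+1
np.fill_diagonal(example_matrix[1:, :], 1)  # subdiagonal: peer i -> peer i-1
# example_matrix is now:
# [[0. 1. 0. 0.]
#  [1. 0. 1. 0.]
#  [0. 1. 0. 1.]
#  [0. 0. 1. 0.]]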
def main():
    connect()
    Setup.setTable(cur)
    Setup.setData(cur)
    appNum = input("\nChoose application number below\n\t1. New Vehicle Registration\n\t2. Auto Transaction\n\t3. Driver Licence Registration\n\t4. Violation Record\n\t5. Search Engine\n\n------> ")
    status = False
    while status == False:
        if appNum == '1':
            NewVehicle()
            break
        elif appNum == '2':
            Trans()
            break
        elif appNum == '3':
            DLReg()
            break
        elif appNum == '4':
            VioRcd()
            break
        elif appNum == '5':
            Search(cur)
            break
        else:
            print("\n\nInvalid application number!")
            appNum = input('Choose application number again\n------> ')
    connection.commit()
    connection.close()
async def test_public_messages():
    NUM_PEERS = 50
    conn_matrix = np.zeros((NUM_PEERS, NUM_PEERS))
    np.fill_diagonal(conn_matrix[:, 1:], 1)
    np.fill_diagonal(conn_matrix[1:, :], 1)
    print(conn_matrix)
    s = Setup(PEERSTER_ROOT, NUM_PEERS, conn_matrix, anti_entropy=2)
    s.run_all()
    await asyncio.sleep(1)
    for i, peerster in enumerate(s.peersters):
        peerster.send_public_message(f"Test{i}")
    await asyncio.sleep(10)
    await s.stop_all()
    for i, peerster in enumerate(s.peersters):
        print(peerster.public_messages)
        for j, other_peerster in enumerate(s.peersters):
            if i == j:
                continue
            assert peerster.public_messages[f"testPeer{j}"] == {1: f"Test{j}"}
def collect_relevant_files(self):
    self.storage = EDF_Store(files_with_categs={}, filenames=[], categ_names=[])
    self.storage.fetch_data()
    self.eeg_setup = Setup(params=self.storage.test_user_ids)
    self.eeg_setup.run()
def __init__(self, cfg, label):
    self.cfg = cfg
    self.cfg.check_parametersSet()
    self.cfg.log_parametersSet(label)
    self.setup = Setup(cfg.parametersSet)
    self.trees = self.setup.grow_trees()
    self.setup.create_observable()
    self.init_selection()
def add_relevant_files_to_database(self):
    self.eeg_setup = Setup(params=None)
    self.eeg_setup.run()
    self.process_files()
    self.storage = EDF_Store(self.eeg_setup.files_with_categs,
                             self.eeg_setup.filenames,
                             self.eeg_setup.categ_names)
    self.storage.record_data()
def main():
    if arg.signals_mit:
        try:
            gs = GetSignals()
            gs.mit(mit)
        except Exception as e:
            print(e)
    elif arg.signals_ecgid:
        try:
            gs = GetSignals()
            gs.mit(ecgid)
        except Exception as e:
            print(e)
    elif arg.signals_bmd:
        try:
            gs = GetSignals()
            gs.mit(bmd101)
        except Exception as e:
            print(e)
    elif arg.features_mit:
        try:
            feats = GetFeatures()
            feats.features('mit', mit)
        except Exception as e:
            print(e)
    elif arg.features_ecgid:
        try:
            feats = GetFeatures()
            feats.features('ecgid', mit)
        except Exception as e:
            print(e)
    elif arg.features_bmd:
        try:
            feats = GetFeatures()
            feats.features('bmd', mit)
        except Exception as e:
            print(e)
    elif arg.setup:
        try:
            su = Setup()
            su.load_signals(2500, "cnn", mit[:40], 0)
            su.load_signals(2500, "snn", mit[:40], 0)
        except Exception as e:
            print(e)
    elif arg.snn:
        try:
            snn.main()
        except Exception as e:
            print(e)
    elif arg.cnn:
        try:
            cnn.main()
        except Exception as e:
            print(e)
def start():
    puzzle = Sudoku()
    setup = Setup(puzzle)
    setup.runSetup()
    print("Setup done, solving...")
    puzzleSolver = Solver(puzzle)
    solved, iterations = puzzleSolver.solve()
    puzzle.print()
    print("Solved:", solved, iterations, "iterations")
def main():
    Helper.init()
    totalTime = 0
    duration = args.duration
    if args.nosetup:
        itopod_setup = True
    else:
        itopod_setup = False
    while True:
        if not args.notitans:
            if args.verbose:
                print('getting available titans')
            titans = Adventure.getTitans()
            if titans:  # after this needs to reset loadout and diggers and e/m
                Adventure.turnIdleOff()
                print(f'calling killTitans with args.snipe {args.snipe}')
                Adventure.killTitans(titans, verbose=args.verbose, snipe=args.snipe)
                itopod_setup = False
        if not itopod_setup:
            Setup.setup(args.setup)
            itopod_setup = True
        Navigation.menu('adventure')
        if not args.nobeast:
            if not Statistics.checkPixelColor(*coords.BEAST_MODE_ON, coords.BEAST_MODE_COLOR):
                Helper.click(*coords.BEAST_MODE)
        print('*' * 30)
        Itopod.itopodExperimental(duration=duration, optimal_floor=args.optimal)
        totalTime += duration
        print(f'total exp: {Itopod.EXP_gained}')
        print(f'total ap: {Itopod.AP_gained}')
        print(f'kills: {Itopod.kills}')
        print(f'total time: {totalTime} minutes')
        print('*' * 30)
        Navigation.menu('inventory')
        if Inventory.getEmptySlots() < 20:
            invManagement(boost=0, merge=0)
        if not args.noygg:
            Yggdrasil.harvestAll()
def __init__(self):
    Setup.__init__(self)
    self._solution = []
    self._value = 0
    self._numEval = 0
    self._pFileName = ''
    self._bestSolution = []
    self._bestMinimum = 0
    self._avgMinimum = 0
    self._sumOfNumEval = 0
    self._avgWhen = 0
def __init__(self, uplink, config, session_id, hydra_url):
    '''
    Constructor
    '''
    self.uplink = uplink
    self.config = config
    self.session_id = session_id
    self.hydra_url = hydra_url
    self.setup = Setup(self)
    if self.config.has_option('general', 'fake'):
        self.fake = (self.config.get('general', 'fake') == "yes")
    else:
        self.fake = False
def play(self, reset_gamefile):
    self.players = set()
    self.hands = set()
    self.cards = set()
    self.me = None
    self.conviction = None
    self.gamefile = open('gamefile.txt', 'w+' if reset_gamefile else 'r+', buffering=1)
    self.prompt_queue = [x.rstrip("\n") for x in self.gamefile]
    setup = Setup()
    setup.run(self)
    play = Play()
    play.run(self)
def __init__(self, label, params={}, **kwargs):
    self.label = label
    self.setup = Setup()
    self.ana = Analytics()
    if 'analysis_type' in kwargs:
        self.analysis_type = kwargs['analysis_type']
    else:
        self.analysis_type = 'dynamical'
    # set default analysis and circuit parameter
    self._set_up_circuit(params, kwargs)
    # set parameter derived from analysis and circuit parameter
    new_vars = self.setup.get_params_for_analysis(self)
    new_vars['label'] = self.label
    self._set_class_variables(new_vars)
    # set variables which require calculation in analytics class
    self._calc_variables()
async def test_file_upload():
    NUM_PEERS = 10
    s = Setup.create_line_setup(PEERSTER_ROOT, NUM_PEERS)
    s.run_all()
    await asyncio.sleep(.5)

    # Upload files
    files = []
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "w+") as f:
            to_file = "12345678" * 1000 + peerster.name + "12345678" * 1000
            files.append(to_file)
            f.write(to_file)
        peerster.upload_file(f"test{i}.txt")
    await asyncio.sleep(.5)
    await s.stop_all()

    # Assert files are uploaded
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "rb") as f:
            h = Tests._calc_hash(f)[0]
        assert h in peerster.uploaded_files
async def test_download_searched_files():
    NUM_PEERS = 10
    DIFF = 2
    s = Setup.create_line_setup(PEERSTER_ROOT, NUM_PEERS)
    s.run_all()
    await asyncio.sleep(.5)

    # Send route rumors
    for peerster in s.peersters:
        peerster.send_public_message(f" ")

    hashes = await Tests._setup_searched_files(s, DIFF)

    # Download files
    for i, peerster in enumerate(s.peersters):
        if i - DIFF >= 0 and i + DIFF < len(s.peersters):
            peerster.download_searched_file(f"test{i}_left_d.txt", hashes["left"][i][0])
            peerster.download_searched_file(f"test{i}_right_d.txt", hashes["right"][i][0])

    # Wait for files to be downloaded
    await asyncio.sleep(15)

    # Stop all peersters
    await s.stop_all()

    for i, peerster in enumerate(s.peersters):
        if i - DIFF >= 0 and i + DIFF < len(s.peersters):
            assert f"test{i}_left_d.txt" in peerster.downloaded_files
            assert f"test{i}_right_d.txt" in peerster.downloaded_files
async def test_search():
    NUM_PEERS = 10
    DIFF = 2
    BUDGET = None
    s = Setup.create_line_setup(PEERSTER_ROOT, NUM_PEERS)
    s.run_all()
    await asyncio.sleep(.5)

    # Send route rumors
    for peerster in s.peersters:
        peerster.send_public_message(f" ")

    hashes = await Tests._setup_searched_files(s, DIFF, budget=BUDGET)

    # Stop all peersters
    await s.stop_all()

    # Assert that searches are correct
    for i, peerster in enumerate(s.peersters):
        if i - DIFF >= 0 and i + DIFF < len(s.peersters):
            assert len(peerster.searches) == 1
            results = peerster.searches[0]
            assert {"file_name": f"test{i}_left.txt",
                    "peer": f"testPeer{i-DIFF}",
                    "meta": hashes["left"][i][0],
                    "chunks": [f"{i}" for i in range(1, hashes["left"][i][1]+1)]} in results
            assert {"file_name": f"test{i}_right.txt",
                    "peer": f"testPeer{i+DIFF}",
                    "meta": hashes["right"][i][0],
                    "chunks": [f"{i}" for i in range(1, hashes["right"][i][1]+1)]} in results
def new_students(self):
    """Checks data for new students."""
    print('Checking for new students...')
    # Creates list of existing students.
    existing_students = []
    with open('storage.csv', 'r') as storage:
        reader = csv.reader(storage)
        for row in reader:
            existing_students.append(row[0])

    def mask(email):
        if email in existing_students:
            return False
        return True

    # Reduces student_data DataFrame to new students only.
    new_data = self.student_data[self.student_data['Student Email'].apply(mask)]
    new_students = Data(df=new_data)
    if len(new_students.student_data) > 0:
        print('New students found.')
        print(new_students.student_data)
        Setup(new_students)
    else:
        print('No new students found.')
def __init__(self, test_utility, model, serial_manager, report):
    super().__init__()
    self.abort_btn = QPushButton("Abort")
    self.abort_btn.clicked.connect(self.abort)
    self.setButton(QWizard.CustomButton1, self.abort_btn)
    self.button(QWizard.FinishButton).clicked.connect(self.finish)
    qbtn_layout = [QWizard.Stretch, QWizard.NextButton,
                   QWizard.FinishButton, QWizard.CustomButton1]
    self.setButtonLayout(qbtn_layout)
    self.button(QWizard.NextButton).setEnabled(False)

    # This fixes a bug in the default style which hides the QWizard
    # buttons until the window is resized.
    self.setWizardStyle(0)

    setup_id = self.addPage(Setup(self, test_utility, serial_manager, model, report))
    program_id = self.addPage(Program(self, test_utility, serial_manager, model, report))
    interfaces_id = self.addPage(Interfaces(self, test_utility, serial_manager, model, report))
    final_id = self.addPage(FinalPage(self, test_utility, report))
    self.setup_page = self.page(setup_id)
    self.program_page = self.page(program_id)
    self.interfaces_page = self.page(interfaces_id)
    self.final_page = self.page(final_id)

    self.tu = test_utility
    self.report = report
def run(self, u_info):  # self, mojo_dir, tmp_dir, /// out_dir, dojoserver
    # register two data sources
    self.__segmentation = Segmentation(u_info.files_path, u_info.tmpdir, self)
    self.__image = Image(u_info.files_path, u_info.tmpdir)

    # and the controller
    self.__controller = Controller(u_info, self.__segmentation.get_database(), self)

    ####
    # and the viewer
    self.__viewer = Viewer()

    # and the controller
    if self.__segmentation:
        db = self.__segmentation.get_database()
    else:
        db = None
    self.__controller = Controller(u_info, db, self)

    # and the setup
    self.__setup = Setup(self, u_info.files_path, u_info.tmpdir)

    print('path_gfx: ', path_gfx)

    # running live
    ####
    asyncio.set_event_loop(u_info.worker_loop)
    dojo = tornado.web.Application([
        (r'/dojo/gfx/(.*)', tornado.web.StaticFileHandler, {'path': path_gfx}),
        (r'/ws', Websockets, dict(controller=self.__controller)),
        (r'/(.*)', DojoHandler, dict(logic=self))
    ], debug=True, autoreload=True)
    # (r'/dojo/gfx/(.*)', tornado.web.StaticFileHandler, {'path': '/dojo/gfx'})

    # dojo.listen(u_info.port, max_buffer_size=1024*1024*150000)
    server = tornado.httpserver.HTTPServer(dojo)
    server.listen(u_info.port)

    print('*'*80)
    print('*', 'DOJO RUNNING')
    print('*')
    print('*', 'open', '[ http://' + u_info.ip + ':' + str(u_info.port) + '/dojo/ ] ')
    print('*'*80)

    tornado.ioloop.IOLoop.instance().start()
    server.stop()

    # def sig_handler(signum, frame):
    #     IOLoop.current().add_callback_from_signal(receiver.shutdown)

    print("Tornado web server stops.")
    return
async def test_file_download():
    NUM_PEERS = 10
    conn_matrix = np.zeros((NUM_PEERS, NUM_PEERS))
    np.fill_diagonal(conn_matrix[:, 1:], 1)
    np.fill_diagonal(conn_matrix[1:, :], 1)
    s = Setup(PEERSTER_ROOT, NUM_PEERS, conn_matrix, anti_entropy=2)
    s.run_all()
    await asyncio.sleep(.5)

    # Send route rumors
    for i, peerster in enumerate(s.peersters):
        peerster.send_public_message(f" ")

    # Upload files
    files = []
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "w+") as f:
            to_file = "12345678" * 1000 + peerster.name + "12345678" * 1000
            files.append(to_file)
            f.write(to_file)
        peerster.upload_file(f"test{i}.txt")
    await asyncio.sleep(.5)

    # Don't calculate hash ourselves, but use peerster hashes: test doesn't fail
    # if hashing/chunking is done incorrectly
    metas = []
    for i, peerster in enumerate(s.peersters):
        assert len(peerster.uploaded_files) == 1
        hash = next(iter(peerster.uploaded_files))
        assert len(hash) == 64
        metas.append(hash)
    await asyncio.sleep(2)

    for i, peerster in enumerate(s.peersters):
        other = (i - 7) % len(s.peersters)
        peerster.download_file(f"test{other}_d.txt", metas[other], f"testPeer{other}")
    await asyncio.sleep(10)

    for i, peerster in enumerate(s.peersters):
        assert f"test{(i-7)%len(s.peersters)}_d.txt" in peerster.downloaded_files
    await s.stop_all()
def __init__(self):
    self.settings = Settings()
    self.logger = Logger(self.settings.logfile)
    self.reporter = Reporter(self.settings)
    self.setup = Setup(self.settings, self.logger)
    self.grader = Grader(self.settings, self.logger, self.setup)
    self.analyser = Analyser(self.settings, self.reporter, self.logger,
                             self.setup, self.grader)
def move():
    # tic = time.time()
    data = request.get_json()

    # NOTE grid_data[0] = move_grid // grid_data[1] = food_grid
    setup_process = Setup()
    helper = Helper()
    grid_data = setup_process.grid_setup(data)

    # Game States
    defend = State_Defend()
    attack = State_Attack()
    grow = State_Grow()

    # Assign the global helper methods to each state
    defend.helper = helper
    attack.helper = helper
    grow.helper = helper

    max_snake = helper.get_max_snake_length(data)
    closest_food_distance = helper.get_closest_food_dist(grid_data[1], data)
    board_width = data.get("board").get("width")

    if len(data.get("you").get("body")) > max_snake + 1:
        # Assign the attack state when we are 2 bigger than any other snake
        state = attack
    elif closest_food_distance < board_width / 1.5:
        # Assign the grow state if a piece of food is within boardlen/1.5
        # or if there are more than 5 pieces of food on the board
        state = grow
    else:
        state = defend

    # NOTE Get the next move based on the pellet
    next_move = state.get_move(grid_data, data)

    # toc = time.time()
    # print("Move for round: {}".format(data.get("turn")))
    # print("Time used: {}ms".format((toc - tic)*1000))
    # print("State: {}".format(state.name))
    # helper.print_board(grid_data[0])

    # NOTE Return the move in the JSON object wrapper
    return jsonify(move=next_move)
def setup_files(self):
    self.generate_setup_list()
    self.setup = Setup(self)
    if len(self.setup_list) > 0:
        self.setup.status = True
        self.setup.sequencer()
    else:
        print("Nothing to setup proceed to analysis")
        self.analysis = Analysis(self, self.setup)
def __init__(self, **kwargs):
    self.setup = Setup(**kwargs)
    self.t0 = dt()
    s = self.setup
    self.T = self.setup.T
    self.v_couple_shape = [(s.na_c, s.nexo_t[t], s.ntheta_coarse) for t in range(self.T)]
    self.v_sf_shape = [(s.na_s, s.n_zf) for t in range(self.T)]
    self.v_sm_shape = [(s.na_s, s.n_zm) for t in range(self.T)]
    print('setup created, time {}'.format(dt() - self.t0))
    self.solve()
def appRun():
    s = Setup()
    if s.load():
        s.showSelf()
    else:
        s.consInit()
        s.save()
    frames = JsonFileToFrames(s.inputFile)
    print('Frames count = ' + str(len(frames)))
    printer = BlenderPrinter(len(frames), s.outputDir, s.FPS)
    iter = 0
    for frame in frames:
        printer.printFrame(frame, iter + 1)
        iter += 1
async def test_private_messages():
    NUM_PEERS = 50
    conn_matrix = np.zeros((NUM_PEERS, NUM_PEERS))
    np.fill_diagonal(conn_matrix[:, 1:], 1)
    np.fill_diagonal(conn_matrix[1:, :], 1)
    s = Setup(PEERSTER_ROOT, NUM_PEERS, conn_matrix, anti_entropy=1)
    s.run_all()
    await asyncio.sleep(1)
    for peerster in s.peersters:
        peerster.send_public_message(f" ")
    await asyncio.sleep(10)

    msg_map = {}
    for i, peerster in enumerate(s.peersters):
        if i - 7 < 0 and not i + 7 >= len(s.peersters):
            other = i + 7
        else:
            other = i - 7
        msg_map[other] = msg_map.get(other, []) + [i]
        peerster.send_private_message(f"Test{i}", f"testPeer{other}")
    await asyncio.sleep(30)
    await s.stop_all()

    for i, peerster in enumerate(s.peersters):
        others = msg_map.get(i)
        if others is None:
            continue
        for other in others:
            peer = f"testPeer{other}"
            assert peer in peerster.private_messages.keys()
            assert len(peerster.private_messages[peer]) == 1
            assert peerster.private_messages[peer][0] == {"msg": f"Test{other}", "hop-lim": "3"}
def setup():
    setup = Setup()
    parser = argparse.ArgumentParser(description='Usage: simulator inst.txt data.txt reg.txt config.txt result.txt')
    parser.add_argument('inst.txt')
    parser.add_argument('data.txt')
    parser.add_argument('reg.txt')
    parser.add_argument('config.txt')
    parser.add_argument('result.txt')
    args = parser.parse_args()
    if len(vars(args)) != 5:
        print "Usage: simulator inst.txt data.txt reg.txt config.txt result.txt"
        sys.exit(0)
    inst_list, label_list = setup.parse_instructions(vars(args)['inst.txt'])
    register = setup.parse_registers(vars(args)['reg.txt'])
    memory = setup.parse_memory(vars(args)['data.txt'])
    config = setup.parse_config(vars(args)['config.txt'])
    priority = setup.return_priority(config)
    # print inst_list
    # print label_list
    # print register
    # print memory
    # print config
    return inst_list, label_list, register, memory, config, priority
def main(): """Setup and run the backup.""" awsargs = { 'region': 'eu-west-1', 'endpoint': 'http://192.168.99.100:4569', 'key': 'localstack', 'secret': 'localstack' } args = _parse_cli_args() level = logging.DEBUG if args.verbose else logging.INFO if args.verbose: for logger in { 'boto3', 'botocore', 'requests', 'botocore.vendored.requests.packages.urllib3.connectionpool' }: logging.getLogger(logger).setLevel(level) LOGGER.error('STARTTINGG') LOGGER.info('Sleeping for 5 seconds') time.sleep(5) LOGGER.info('Woke up') setup = Setup() setup.setup() _execute(args, awsargs)
def __init__(self, ipAddr=None, port=80):
    # Init Midi client and display available devices
    midiINPort = mido.get_input_names()[0]
    midiOUTPort = mido.get_output_names()[0]
    self.midiIN = mido.open_input(midiINPort)
    self.midiOUT = mido.open_output(midiOUTPort)
    self.mcu = MCU()
    self.motu = Motu(ipAddr, port)
    self.settings = Settings()
    self.hwSetup = Setup()
    self.mixerUrl = "http://{}:{}".format(ipAddr, port)
    print("Will send events to mixer at {}".format(self.mixerUrl))
    self.recall()
async def test_confirmed_file():
    NUM_PEERS = 8
    s = Setup.create_line_setup(PEERSTER_ROOT, NUM_PEERS, hw3ex2=True)
    s.run_all()
    await asyncio.sleep(.5)

    # Send route rumors
    for peerster in s.peersters:
        peerster.send_public_message(f" ")
    await asyncio.sleep(3)

    # Upload a file to each node
    hashes = []
    sizes = []
    for i, peerster in enumerate(s.peersters):
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "w+") as f:
            to_file = "12345678" * 1000 + peerster.name + "12345678" * 1000
            sizes.append(len(to_file))
            f.write(to_file)
        with open(f"{peerster.root}/_SharedFiles/test{i}.txt", "rb") as f:
            hashes.append(Tests._calc_hash(f)[0])
        peerster.upload_file(f"test{i}.txt")
    await asyncio.sleep(15)

    for i, peerster in enumerate(s.peersters):
        # check that correct file was rebroadcasted
        for re_br in peerster.re_broadcast:
            assert len(re_br["witnesses"]) > NUM_PEERS / 2
        for j, other in enumerate(s.peersters):
            if i == j:
                continue
            info = {"origin": other.name, "name": f"test{j}.txt",
                    "meta": hashes[j], "id": "3", "size": str(sizes[j])}
            assert info in peerster.confirmed

    # Now try to upload a duplicate file
    for i, peerster in enumerate(s.peersters):
        peerster.upload_file(f"test{len(s.peersters)-i-1}.txt")
    await asyncio.sleep(3)
    await s.stop_all()

    # It should not show up in uploaded files!
    for i, peerster in enumerate(s.peersters):
        assert hashes[-i-1] not in peerster.uploaded_files
def test_setup_load_properties(mock_logIt):
    obj = Setup()
    assert_equal(obj.installOxAuth, True)
    assert_equal(obj.installOxTrust, True)
    assert_equal(obj.installLdap, True)
    assert_equal(obj.installHttpd, True)
    assert_equal(obj.installSaml, False)
    assert_equal(obj.installAsimba, False)
    assert_equal(obj.installCas, False)
    assert_equal(obj.installOxAuthRP, False)

    # all false
    obj.load_properties('tests/sample1.properties')
    assert_equal(obj.installOxAuth, False)
    assert_equal(obj.installOxTrust, False)
    assert_equal(obj.installLdap, False)
    assert_equal(obj.installHttpd, False)
    assert_equal(obj.installSaml, False)
    assert_equal(obj.installAsimba, False)
    assert_equal(obj.installCas, False)
    assert_equal(obj.installOxAuthRP, False)

    # all true
    obj.load_properties('tests/sample2.properties')
    assert_equal(obj.installOxAuth, True)
    assert_equal(obj.installOxTrust, True)
    assert_equal(obj.installLdap, True)
    assert_equal(obj.installHttpd, True)
    assert_equal(obj.installSaml, True)
    assert_equal(obj.installAsimba, True)
    assert_equal(obj.installCas, True)
    assert_equal(obj.installOxAuthRP, True)

    # mix of both true and false
    obj.load_properties('tests/sample3.properties')
    assert_equal(obj.installOxAuth, False)
    assert_equal(obj.installOxTrust, True)
    assert_equal(obj.installLdap, False)
    assert_equal(obj.installHttpd, True)
    assert_equal(obj.installSaml, False)
    assert_equal(obj.installAsimba, True)
    assert_equal(obj.installCas, False)
    assert_equal(obj.installOxAuthRP, True)
def start_game(self):
    """ Hides main menu window and displays the board setup window """
    # hide the menu window, and the rules window if it is open
    self.root.withdraw()
    if self.rules_displayed:
        self.rules_window.withdraw()

    def show_callback():
        # after game finishes (by either loss or win), display the windows again
        self.root.deiconify()
        if self.rules_displayed:
            self.rules_window.deiconify()

    # create board setup window
    self.setup = Setup(self.root, show_callback)
def __init__(self, midas, time):
    df = pd.read_csv(midas)
    times = np.unique(df.filter(regex='^DA').values.flatten())
    if time not in times:
        raise ValueError("The time-point %s does not exist in the dataset. Available time-points are: %s" % (time, list(times)))
    df.drop(df.columns[0], axis=1, inplace=True)

    cond = True
    for c in df.filter(regex='^DA').columns:
        cond = cond & (df[c] == time)

    super(Dataset, self).__init__(df[cond].reset_index(drop=True))

    stimuli = map(lambda c: c[3:], filter(lambda c: self.is_stimulus(c), self.columns))
    inhibitors = map(lambda c: c[3:-1], filter(lambda c: self.is_inhibitor(c), self.columns))
    readouts = map(lambda c: c[3:], filter(lambda c: self.is_readout(c), self.columns))

    self.setup = Setup(stimuli, inhibitors, readouts)
class Analyzer():
    def __init__(self, cfg, label):
        self.cfg = cfg
        self.cfg.check_parametersSet()
        self.cfg.log_parametersSet(label)
        self.setup = Setup(cfg.parametersSet)
        self.trees = self.setup.grow_trees()
        self.setup.create_observable()
        self.init_selection()

    def init_selection(self):
        selection = Selection()
        selection.build_selection(self.setup.configuration)
        self.selection = selection.selection
        print ''
        print 'Applying the following selection:'
        print self.selection
        print ''

    def analyze(self):
        self.histograms = {}
        for process_name in self.trees:
            self.histograms[process_name] = self.setup.make_histogram(process_name)
            selection = self.selection
            if process_name.find('data') == -1:
                weight = str(self.cfg.parametersSet['lumi']) + '*weight'
                selection = '(' + selection + ')*' + weight
            self.trees[process_name].Project(self.histograms[process_name].GetName(),
                                             self.setup.observable.plot,
                                             selection)

    def format_histograms(self):
        self.formatted_histograms = {}
        stack = THStack('stack', '')
        self.legend = TLegend(0.16, 0.67, 0.4, 0.92)
        self.legend.SetFillColor(0)
        self.legend.SetLineColor(0)
        self.legend.SetBorderSize(0)
        for process_name in self.setup.order_processes():
            try:
                self.histograms[process_name]
            except KeyError:
                continue
            hist = self.histograms[process_name]
            # --> events / GeV
            for bin in range(1, hist.GetNbinsX() + 1):
                hist.SetBinContent(bin, hist.GetBinContent(bin) / hist.GetBinWidth(bin))
                hist.SetBinError(bin, hist.GetBinError(bin) / hist.GetBinWidth(bin))
            # data
            if process_name.find('data') > -1:
                legendMarker = 'p'
                hist.SetMarkerStyle(20)
                hist.SetMarkerSize(1.3)
                self.formatted_histograms['data'] = hist
            # signal
            elif process_name.find('signal') > -1:
                legendMarker = 'l'
                hist.SetLineWidth(2)
                hist.SetFillColor(0)
                hist.SetLineColor(2)
                self.formatted_histograms['signal'] = hist
            # MC
            else:
                legendMarker = 'f'
                hist.SetLineColor(processes[process_name]['color'])
                hist.SetFillColor(processes[process_name]['color'])
                stack.Add(hist)
                self.formatted_histograms['background'] = stack
            # adding to the legend
            self.legend.AddEntry(hist, processes[process_name]['label'], legendMarker)

    def draw(self, name):
        self.make_canvas(name)
        self.formatted_histograms['background'].Draw('fhist')
        self.formatted_histograms['background'].GetXaxis().SetTitle(self.setup.observable.labelX)
        self.formatted_histograms['background'].GetYaxis().SetTitle(self.setup.observable.labelY)
        self.formatted_histograms['background'].GetXaxis().SetNdivisions(505)
        self.formatted_histograms['background'].GetYaxis().SetNdivisions(505)
        try:
            self.formatted_histograms['data'].Draw('ep,same')
            self.draw_compare()
        except:
            print 'data not plotted'
        try:
            self.formatted_histograms['signal'].Draw('hist,same')
        except:
            print 'signal not plotted'
        self.legend.Draw()

    def make_canvas(self, name):
        self.canvas = MultiCanvas(name) if self.formatted_histograms.has_key('data') else TCanvas(name, name)
        SetOwnership(self.canvas, False)
class GCS:

    def __init__(self):
        # No communications or arming yet
        self.comms = None
        self.armed = False

        # Do basic Tk initialization
        self.root = Tk()
        self.root.configure(bg=BACKGROUND_COLOR)
        self.root.resizable(False, False)
        self.root.title('Hackflight Ground Control Station')
        left = (self.root.winfo_screenwidth() - DISPLAY_WIDTH) / 2
        top = (self.root.winfo_screenheight() - DISPLAY_HEIGHT) / 2
        self.root.geometry('%dx%d+%d+%d' % (DISPLAY_WIDTH, DISPLAY_HEIGHT, left, top))
        self.frame = Frame(self.root)
        self.root.wm_iconbitmap(bitmap="@media/icon.xbm")
        self.root.tk.call('wm', 'iconphoto', self.root._w, PhotoImage('icon.png'))
        self.root.protocol('WM_DELETE_WINDOW', self.quit)

        # Create panes for two rows of widgets
        self.pane1 = self._add_pane()
        self.pane2 = self._add_pane()

        # Add buttons
        self.button_connect = self._add_button('Connect', self.pane1, self._connect_callback)
        self.button_setup = self._add_button('Setup', self.pane2, self._setup_callback)
        self.button_motors = self._add_button('Motors', self.pane2, self._motors_button_callback)
        self.button_receiver = self._add_button('Receiver', self.pane2, self._receiver_button_callback)
        self.button_messages = self._add_button('Messages', self.pane2, self._messages_button_callback)
        #self.button_maps = self._add_button('Maps', self.pane2, self._maps_button_callback, disabled=False)

        # Prepare for adding ports as they are detected by our timer task
        self.portsvar = StringVar(self.root)
        self.portsmenu = None
        self.connected = False
        self.ports = []

        # Finalize Tk stuff
        self.frame.pack()
        self.canvas = Canvas(self.root, width=DISPLAY_WIDTH, height=DISPLAY_HEIGHT, background='black')
        self.canvas.pack()

        # Add widgets for motor-testing dialog; hide them immediately
        self.motors = Motors(self)
        self.motors.stop()

        # Create receiver dialog
        self.receiver = Receiver(self)

        # Create messages dialog
        self.messages = Messages(self)

        # Create setup dialog
        self.setup = Setup(self)

        self._schedule_connection_task()

        # Create a maps dialog
        #self.maps = Maps(self, yoffset=-30)

        # Create a splash image
        self.splashimage = PhotoImage(file='media/splash.png')
        self._show_splash()

        # Create a message parser
        self.parser = MSP_Parser()

        # Set up parser's request strings
        self.attitude_request = self.parser.serialize_ATTITUDE_Request()
        self.rc_request = self.parser.serialize_RC_Request()

        # No messages yet
        self.yaw_pitch_roll = 0, 0, 0
        self.rxchannels = 0, 0, 0, 0, 0

        # A hack to support display in Setup dialog
        self.active_axis = 0

    def quit(self):
        self.motors.stop()
        self.root.destroy()

    def hide(self, widget):
        widget.place(x=-9999)

    def getChannels(self):
        return self.rxchannels

    def getYawPitchRoll(self):
        # configure button to show connected
        self._enable_buttons()
        self.button_connect['text'] = 'Disconnect'
        self._enable_button(self.button_connect)
        return self.yaw_pitch_roll

    def checkArmed(self):
        if self.armed:
            self._show_armed(self.root)
            self._show_armed(self.pane1)
            self._show_armed(self.pane2)
            self._disable_button(self.button_motors)
        else:
            self._show_disarmed(self.root)
            self._show_disarmed(self.pane1)
            self._show_disarmed(self.pane2)

    def scheduleTask(self, delay_msec, task):
        self.root.after(delay_msec, task)

    def _add_pane(self):
        pane = PanedWindow(self.frame, bg=BACKGROUND_COLOR)
        pane.pack(fill=BOTH, expand=1)
        return pane

    def _add_button(self, label, parent, callback, disabled=True):
        button = Button(parent, text=label, command=callback)
        button.pack(side=LEFT)
        button.config(state='disabled' if disabled else 'normal')
        return button

    # Callback for Setup button
    def _setup_callback(self):
        self._clear()
        self.motors.stop()
        self.receiver.stop()
        self.messages.stop()
        #self.maps.stop()
        self.parser.set_ATTITUDE_Handler(self._handle_attitude)
        self.setup.start()

    def _start(self):
        self.parser.set_ATTITUDE_Handler(self._handle_attitude)
        self._send_attitude_request()
        self.setup.start()
        self.parser.set_RC_Handler(self._handle_rc)
        self._send_rc_request()

    # Sends Attitude request to FC
    def _send_attitude_request(self):
        self.comms.send_request(self.attitude_request)

    # Sends RC request to FC
    def _send_rc_request(self):
        self.comms.send_request(self.rc_request)

    # Callback for Motors button
    def _motors_button_callback(self):
        self._clear()
        self.setup.stop()
        self.parser.set_ATTITUDE_Handler(self._handle_attitude)
        self.receiver.stop()
        self.messages.stop()
        #self.maps.stop()
        self.motors.start()

    def _clear(self):
        self.canvas.delete(ALL)

    # Callback for Receiver button
    def _receiver_button_callback(self):
        self._clear()
        self.setup.stop()
        self.motors.stop()
        self.messages.stop()
        #self.maps.stop()
        self.receiver.start()

    # Callback for Messages button
    def _messages_button_callback(self):
        self._clear()
        self.setup.stop()
        self.motors.stop()
        #self.maps.stop()
        self.receiver.stop()
        self.messages.start()

    def _getting_messages(self):
        return self.button_connect['text'] == 'Disconnect'

    # Callback for Maps button
    def _maps_button_callback(self):
        self._clear()
        if self._getting_messages():
            self.receiver.stop()
            self.messages.stop()
            self.setup.stop()
            self.motors.stop()
        #self.maps.start()

    # Callback for Connect / Disconnect button
    def _connect_callback(self):
        if self.connected:
            self.setup.stop()
            #self.maps.stop()
            self.motors.stop()
            self.messages.stop()
            self.receiver.stop()
            if not self.comms is None:
                self.comms.stop()
            self._clear()
            self._disable_buttons()
            self.button_connect['text'] = 'Connect'
            self._show_splash()
        else:
            #self.maps.stop()
            self.comms = Comms(self)
            self.comms.start()
            self.button_connect['text'] = 'Connecting ...'
            self._disable_button(self.button_connect)
            self._hide_splash()
            self.scheduleTask(CONNECTION_DELAY_MSEC, self._start)
        self.connected = not self.connected

    # Gets available ports
    def _getports(self):
        allports = comports()
        ports = []
        for port in allports:
            portname = port[0]
            if 'ttyACM' in portname or 'ttyUSB' in portname or 'COM' in portname:
                ports.append(portname)
        return ports

    # Checks for changes in port status (hot-plugging USB cables)
    def _connection_task(self):
        ports = self._getports()
        if ports != self.ports:
            if self.portsmenu is None:
                self.portsmenu = OptionMenu(self.pane1, self.portsvar, *ports)
            else:
                for port in ports:
                    self.portsmenu['menu'].add_command(label=port)
            self.portsmenu.pack(side=LEFT)
            if ports == []:
                self._disable_button(self.button_connect)
                self._disable_buttons()
            else:
                self.portsvar.set(ports[0])  # default value
                self._enable_button(self.button_connect)
            self.ports = ports
        self._schedule_connection_task()

    # Mutually recursive with above
    def _schedule_connection_task(self):
        self.root.after(USB_UPDATE_MSEC, self._connection_task)

    def _disable_buttons(self):
        self._disable_button(self.button_setup)
        self._disable_button(self.button_motors)
        self._disable_button(self.button_receiver)
        self._disable_button(self.button_messages)

    def _enable_buttons(self):
        self._enable_button(self.button_setup)
        self._enable_button(self.button_motors)
        self._enable_button(self.button_receiver)
        self._enable_button(self.button_messages)

    def _enable_button(self, button):
        button['state'] = 'normal'

    def _disable_button(self, button):
        button['state'] = 'disabled'

    def sendMotorMessage(self, index, value):
        values = [1000] * 4
        values[index - 1] = value
        self.comms.send_message(self.parser.serialize_SET_MOTOR, values)

    def _show_splash(self):
        self.splash = self.canvas.create_image((400, 250), image=self.splashimage)

    def _hide_splash(self):
        self.canvas.delete(self.splash)

    def _show_armed(self, widget):
        widget.configure(bg='red')

    def _show_disarmed(self, widget):
        widget.configure(bg=BACKGROUND_COLOR)
        if self._getting_messages():
            self._enable_button(self.button_motors)

    def _handle_calibrate_response(self):
        self.setup.showCalibrated()

    def _handle_params_response(self, pitchroll_kp_percent, yaw_kp_percent):
        # Only handle params from firmware on a fresh connection
        if self.newconnect:
            self.setup.setParams(pitchroll_kp_percent, yaw_kp_percent)
        self.newconnect = False

    def _handle_attitude(self, x, y, z):
        self.yaw_pitch_roll = z, -y / 10., x / 10.
        self.messages.setCurrentMessage('Yaw/Pitch/Roll: %+3.3f %+3.3f %+3.3f' % self.yaw_pitch_roll)
        # As soon as we handle the callback from one request, send another request
        self._send_attitude_request()

    def _handle_rc(self, c1, c2, c3, c4, c5, c6, c7, c8):
        self.rxchannels = c1, c2, c3, c4, c5
        # As soon as we handle the callback from one request, send another request
        self._send_rc_request()
        self.messages.setCurrentMessage('Receiver: %04d %04d %04d %04d %04d' % (c1, c2, c3, c4, c5))

    def _handle_arm_status(self, armed):
        self.armed = armed
        self.messages.setCurrentMessage('ArmStatus: %s' % ('ARMED' if armed else 'Disarmed'))

    def _handle_battery_status(self, volts, amps):
        self.messages.setCurrentMessage('BatteryStatus: %3.3f volts, %3.3f amps' % (volts, amps))
class Circuit(object):
    """Provides functions to calculate the stationary and dynamical
    properties of a given circuit.

    Arguments:
    label: string specifying circuit, options: 'microcircuit'

    Keyword Arguments:
    params: dictionary specifying parameter of the circuit, default
            parameter given in params_circuit.py will be overwritten
    analysis_type: string specifying level of analysis that is requested
                   default: 'dynamical'
                   options:
                   - None: only circuit and default analysis parameter are set
                   - 'stationary': circuit and default analysis parameter are
                     set, mean and variance of input to each populations as
                     well as firing rates are calculated
                   - 'dynamical': circuit and default analysis parameter are
                     set, mean and variance of input to each populations as
                     well as firing rates are calculated, variables for
                     calculation of spectra are calculated including the
                     transfer function for all populations
    fmin: minimal frequency in Hz, default: 0.1 Hz
    fmax: maximal frequency in Hz, default: 150 Hz
    df: frequency spacing in Hz, default: 1.0/(2*np.pi) Hz
    to_file: boolean specifying whether firing rates and transfer functions
             are written to file, default: True
    from_file: boolean specifying whether firing rates and transfer functions
               are read from file, default: True
               if set to True and file is not found firing rates and transfer
               function are calculated
    """

    def __init__(self, label, params={}, **kwargs):
        self.label = label
        self.setup = Setup()
        self.ana = Analytics()
        if 'analysis_type' in kwargs:
            self.analysis_type = kwargs['analysis_type']
        else:
            self.analysis_type = 'dynamical'
        # set default analysis and circuit parameter
        self._set_up_circuit(params, kwargs)
        # set parameter derived from analysis and circuit parameter
        new_vars = self.setup.get_params_for_analysis(self)
        new_vars['label'] = self.label
        self._set_class_variables(new_vars)
        # set variables which require calculation in analytics class
        self._calc_variables()

    # updates variables of Circuit() and Analysis() classes, new variables
    # are specified in the dictionary new_vars
    def _set_class_variables(self, new_vars):
        for key, value in new_vars.items():
            setattr(self, key, value)
        if 'params' in new_vars:
            for key, value in new_vars['params'].items():
                setattr(self, key, value)
        self.ana.update_variables(new_vars)

    # updates class variables of variables of Circuit() and Analysis()
    # such that default analysis and circuit parameters are known
    def _set_up_circuit(self, params, args):
        # set default analysis parameter
        new_vars = self.setup.get_default_params(args)
        self._set_class_variables(new_vars)
        # set circuit parameter
        new_vars = self.setup.get_circuit_params(self, params)
        self._set_class_variables(new_vars)

    # quantities required for stationary analysis are calculated
    def _set_up_for_stationary_analysis(self):
        new_vars = self.setup.get_working_point(self)
        self._set_class_variables(new_vars)

    # quantities required for dynamical analysis are calculated
    def _set_up_for_dynamical_analysis(self):
        new_vars = self.setup.get_params_for_power_spectrum(self)
        self._set_class_variables(new_vars)

    # calculates quantities needed for analysis specified by analysis_type
    def _calc_variables(self):
        if self.analysis_type == 'dynamical':
            self._set_up_for_stationary_analysis()
            self._set_up_for_dynamical_analysis()
        elif self.analysis_type == 'stationary':
            self._set_up_for_stationary_analysis()

    def alter_params(self, params):
        """Parameter specified in dictionary params are changed.

        Changeable parameters are default analysis and circuit parameter,
        as well as label and analysis_type.

        Arguments:
        params: dictionary, specifying new parameters
        """
        self.params.update(params)
        new_vars = self.setup.get_altered_circuit_params(self, self.label)
        self._set_class_variables(new_vars)
        new_vars = self.setup.get_params_for_analysis(self)
        self._set_class_variables(new_vars)
        self._calc_variables()

    def create_power_spectra(self):
        """Returns frequencies and power spectra.

        See: Eq. 9 in Bos et al. (2015)
        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        power: power spectra for all populations,
               dimension len(self.populations) x len(freqs)
        """
        power = np.asarray(map(self.ana.spec, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(power)

    def create_power_spectra_approx(self):
        """Returns frequencies and power spectra approximated by dominant
        eigenmode.

        See: Eq. 15 in Bos et al. (2015)
        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        power: power spectra for all populations,
               dimension len(self.populations) x len(freqs)
        """
        power = np.asarray(map(self.ana.spec_approx, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(power)

    def create_eigenvalue_spectra(self, matrix):
        """Returns frequencies and frequency dependence of eigenvalues of matrix.

        Arguments:
        matrix: string specifying the matrix, options are the effective
                connectivity matrix ('MH'), the propagator ('prop') and
                the inverse of the propagator ('prop_inv')

        Output:
        freqs: vector of frequencies in Hz
        eigs: spectra of all eigenvalues,
              dimension len(self.populations) x len(freqs)
        """
        eigs = [self.ana.eigs_evecs(matrix, w)[0] for w in self.ana.omegas]
        eigs = np.transpose(np.asarray(eigs))
        return self.ana.omegas/(2.0*np.pi), eigs

    def create_eigenvector_spectra(self, matrix, label):
        """Returns frequencies and frequency dependence of eigenvectors of matrix.

        Arguments:
        matrix: string specifying the matrix, options are the effective
                connectivity matrix ('MH'), the propagator ('prop') and
                the inverse of the propagator ('prop_inv')
        label: string specifying whether spectra of left or right eigenvectors
               are returned, options: 'left', 'right'

        Output:
        freqs: vector of frequencies in Hz
        evecs: spectra of all eigenvectors,
               dimension len(self.populations) x len(freqs) x len(self.populations)
        """
        # one list entry for every eigenvector, evecs[i][j][k] is the
        # ith eigenvectors at the jth frequency for the kth component
        evecs = [np.zeros((len(self.ana.omegas), self.ana.dimension), dtype=complex)
                 for i in range(self.ana.dimension)]
        for i, w in enumerate(self.ana.omegas):
            eig, vr, vl = self.ana.eigs_evecs(matrix, w)
            if label == 'right':
                v = vr
            elif label == 'left':
                v = vl
            for j in range(self.ana.dimension):
                evecs[j][i] = v[j]
        evecs = np.asarray([np.transpose(evecs[i]) for i in range(self.ana.dimension)])
        return self.ana.omegas/(2.0*np.pi), evecs

    def reduce_connectivity(self, M_red):
        """Connectivity (indegree matrix) is reduced, while the working point
        is held constant.

        Arguments:
        M_red: matrix, with each element specifying how the corresponding
               connection is altered, e.g. the in-degree from population j to
               population i is reduced by 30% with M_red[i][j]=0.7
        """
        M_original = self.M_full[:]
        M_original_fast = self.M_full_fast[:]
        M_original_slow = self.M_full_slow[:]
        if M_red.shape != M_original.shape:
            raise RuntimeError('Dimension of mask matrix has to be the '
                               + 'same as the original indegree matrix.')
        self.M = M_original*M_red
        self.M_fast = M_original_fast*M_red
        self.M_slow = M_original_slow*M_red
        self.ana.update_variables({'M': self.M,
                                   'M_fast': self.M_fast,
                                   'M_slow': self.M_slow})

    def restore_full_connectivity(self):
        '''Restore connectivity to full connectivity.'''
        self.M = self.M_full
        self.M_fast = self.M_full_fast
        self.M_slow = self.M_full_slow
        self.ana.update_variables({'M': self.M,
                                   'M_fast': self.M_fast,
                                   'M_slow': self.M_slow})

    def get_effective_connectivity(self, freq):
        """Returns effective connectivity matrix.

        Arguments:
        freq: frequency in Hz
        """
        return self.ana.create_MH(2*np.pi*freq)

    def get_sensitivity_measure(self, freq, index=None):
        """Returns sensitivity measure.

        See: Eq. 21 in Bos et al. (2015)

        Arguments:
        freq: frequency in Hz

        Keyword arguments:
        index: specifies index of eigenmode, default: None
               if set to None the dominant eigenmode is assumed
        """
        MH = self.get_effective_connectivity(freq)
        e, U = np.linalg.eig(MH)
        U_inv = np.linalg.inv(U)
        if index is None:
            # find eigenvalue closest to one
            index = np.argmin(np.abs(e-1))
        T = np.outer(U_inv[index], U[:, index])
        T /= np.dot(U_inv[index], U[:, index])
        T *= MH
        return T

    def get_transfer_function(self):
        """Returns dynamical transfer function depending on frequency.

        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        dyn_trans_func: transfer function for all populations,
                        dimension len(self.populations) x len(freqs)
        """
        dyn_trans_func = np.asarray(map(self.ana.create_H, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(dyn_trans_func)
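# Editorial sketch (not part of the original source): a minimal usage example of the
# Circuit class above, based solely on its own docstrings. The 'microcircuit' label
# and the 'dynamical' analysis type are the options named there; availability of the
# default parameter files (params_circuit.py) is assumed.
circ = Circuit('microcircuit', analysis_type='dynamical')
freqs, power = circ.create_power_spectra()            # Eq. 9 in Bos et al. (2015)
freqs, eigs = circ.create_eigenvalue_spectra('MH')    # eigenvalues of the effective connectivity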
class Session(object):

    def __init__(self, uplink, config, session_id, hydra_url):
        '''
        Constructor
        '''
        self.uplink = uplink
        self.config = config
        self.session_id = session_id
        self.hydra_url = hydra_url
        self.setup = Setup(self)
        if self.config.has_option('general', 'fake'):
            self.fake = (self.config.get('general', 'fake') == "yes")
        else:
            self.fake = False

    def log_format(self, message):
        return self.uplink.log_format("[" + self.session_id + "] " + message)

    def prepare(self):
        logging.info(self.setup.log_format(("preparing setup")))
        if not self.fake:
            self.setup.load()
            self.setup.prepare_base()

    def add_node(self, node_id, node_name, ip_address, netmask):
        try:
            logging.info(self.setup.log_format(("add node " + str(node_id) + " '" + str(node_name) + "'")))
            if not self.fake:
                self.setup.add_node(node_id, node_name, (ip_address, netmask))
        except ValueError:
            pass

    def remove_node(self, node_id):
        logging.info(self.setup.log_format(("remove node " + str(node_id))))
        if not self.fake:
            self.setup.remove_node(node_id)

    def action(self, data):
        try:
            logging.info(self.setup.log_format(("call action: " + data)))
            if not self.fake:
                return self.setup.action(data)
        except ValueError:
            pass

    def run(self):
        """ run the nodes """
        logging.info(self.setup.log_format(("run all the nodes")))
        if not self.fake:
            self.setup.startup()

    def stop(self):
        """ stop the nodes """
        logging.info(self.setup.log_format(("stop all the nodes")))
        if not self.fake:
            self.setup.shutdown()

    def cleanup(self):
        """ cleanup the setup """
        logging.info(self.setup.log_format(("cleaning up")))
        # stop all nodes
        if not self.fake:
            self.setup.shutdown()
        # delete the setup folder
        if not self.fake:
            self.setup.cleanup()
def open_camera(self):
    self.setup = Setup(self)
    self.setup.base_name = self.base_name
    self.setup.camera()
class Source:

    def __init__(self, main_window, database, admin):
        self.main_window = main_window
        self.dbs = database.session
        self.admin = admin
        self.setup_list = []
        self.setup_frames = None
        self.analysis_frames = None
        self.background = None
        self.polygon = None
        self.base_name = None
        self.media = None
        self.video = None
        self.out = None
        self.location = None
        self.path = None
        self.analysis = None
        self.analysis_dict = None

    def open_files(self):
        self.setup_files()

    def open_camera(self):
        self.setup = Setup(self)
        self.setup.base_name = self.base_name
        self.setup.camera()

    def open(self, media):
        self.media = media
        if media == 'file':
            print("Source is a file")
            self.setup_list = []
            self.files = QtGui.QFileDialog.getOpenFileNames(
                self.main_window, 'Open video file', '',
                'Videos (*.avi *.mp4)')
            if self.files:
                self.open_files()
        if media == 'camera':
            print("Source is a camera")
            self.window = SourceWindow(self, self.main_window)

    def not_exist(self, source_path):
        s = self.dbs
        result = s.query(Video).filter(Video.path == source_path)
        print("result count :", result.count())
        if result.count() == 0:
            return True
        elif result.count() == 1:
            print("Source exist", result.first())
        else:
            print("Multiple source exist!")
        return False

    def generate_setup_list(self):
        for file in self.files:
            if self.not_exist(file):
                self.setup_list.append(file)
        print("Setup list :", self.setup_list)

    def setup_files(self):
        self.generate_setup_list()
        self.setup = Setup(self)
        if len(self.setup_list) > 0:
            self.setup.status = True
            self.setup.sequencer()
        else:
            print("Nothing to setup proceed to analysis")
            self.analysis = Analysis(self, self.setup)

    def setup_get_next_frame(self):
        ret, read_frame = self.setup_frames.read()
        if ret:
            self.setup_current_frame = read_frame
        else:
            self.setup_current_frame = None

    def analysis_get_next_frame(self):
        ret, read_frame = self.analysis_frames.read()
        if ret:
            self.analysis_current_frame = read_frame
            # save video if source is camera
            if self.media == 'camera':
                frame = copy(read_frame)

                def save_frame():
                    self.out.write(frame)

                thread = Thread(target=save_frame)
                thread.start()
        else:
            self.analysis_current_frame = None
from random import shuffle
from setup import Setup
from Knn import Knn
from OperadoresBuilder import OperadoresBuilder

Setup.setup()
atributos = Setup.atributos
estudiantes = Setup.estudiantes
valoresPosibles = Setup.valoresPosibles

operadores = {}
OperadoresBuilder.multiple(operadores, OperadoresBuilder.hamming,
                           ["school", "sex", "schoolsup", "famsup", "paid", "activities",
                            "nursery", "higher", "internet", "romantic", "address",
                            "famsize", "Pstatus", "Mjob", "Fjob", "reason", "guardian"])
OperadoresBuilder.multiple(operadores, OperadoresBuilder.rango,
                           ["age", "Medu", "Fedu", "traveltime", "studytime", "failures",
                            "famrel", "freetime", "goout", "Dalc", "Walc", "health",
                            "absences"], valoresPosibles)

knn = Knn(estudiantes, "G3", atributos, operadores)

# Multiple runs will be performed to estimate the effectiveness of the algorithm
# for any choice of training / test cases
CANTIDAD_PRUEBAS = 25

# Set aside 1/5 of the cases for testing
cantEstTest = len(estudiantes)/5
class Dataset(pd.DataFrame):
    """
    An experimental phospho-proteomics dataset extending `pandas.DataFrame`_

    Parameters
    ----------
    midas : Absolute PATH to a MIDAS file

    time : Data acquisition time-point for the early response

    Attributes
    ----------
    setup : :class:`caspo.core.setup.Setup`

    clampings : :class:`caspo.core.clamping.ClampingList`

    readouts : `pandas.DataFrame`_

    .. _pandas.DataFrame: http://pandas.pydata.org/pandas-docs/stable/dsintro.html#dataframe
    """

    def __init__(self, midas, time):
        df = pd.read_csv(midas)

        times = np.unique(df.filter(regex='^DA').values.flatten())
        if time not in times:
            raise ValueError("The time-point %s does not exist in the dataset. Available time-points are: %s" % (time, list(times)))

        df.drop(df.columns[0], axis=1, inplace=True)

        cond = True
        for c in df.filter(regex='^DA').columns:
            cond = cond & (df[c] == time)

        super(Dataset, self).__init__(df[cond].reset_index(drop=True))

        stimuli = map(lambda c: c[3:], filter(lambda c: self.is_stimulus(c), self.columns))
        inhibitors = map(lambda c: c[3:-1], filter(lambda c: self.is_inhibitor(c), self.columns))
        readouts = map(lambda c: c[3:], filter(lambda c: self.is_readout(c), self.columns))

        self.setup = Setup(stimuli, inhibitors, readouts)

    @property
    def clampings(self):
        clampings = []
        for i, row in self.filter(regex='^TR').iterrows():
            literals = []
            for v, s in row.iteritems():
                if self.is_stimulus(v):
                    literals.append(Literal(v[3:], 1 if s == 1 else -1))
                else:
                    if s == 1:
                        literals.append(Literal(v[3:-1], -1))
            clampings.append(Clamping(literals))
        return ClampingList(clampings)

    @property
    def readouts(self):
        return self.filter(regex='^DV').rename(columns=lambda c: c[3:]).astype(float)

    def is_stimulus(self, name):
        """
        Returns if the given species name is a stimulus or not

        Parameters
        ----------
        name : str

        Returns
        -------
        boolean
            True if the given name is a stimulus, False otherwise.
        """
        return name.startswith('TR') and not name.endswith('i')

    def is_inhibitor(self, name):
        """
        Returns if the given species name is an inhibitor or not

        Parameters
        ----------
        name : str

        Returns
        -------
        boolean
            True if the given name is an inhibitor, False otherwise.
        """
        return name.startswith('TR') and name.endswith('i')

    def is_readout(self, name):
        """
        Returns if the given species name is a readout or not

        Parameters
        ----------
        name : str

        Returns
        -------
        boolean
            True if the given name is a readout, False otherwise.
        """
        return name.startswith('DV')

    def to_funset(self, discrete):
        """
        Converts the dataset to a set of `gringo.Fun`_ instances

        Parameters
        ----------
        discrete : callable
            A discretization function

        Returns
        -------
        set
            Representation of the dataset as a set of `gringo.Fun`_ instances

        .. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun
        """
        fs = self.clampings.to_funset("exp")
        fs = fs.union(self.setup.to_funset())

        for i, row in self.readouts.iterrows():
            for var, val in row.iteritems():
                if not np.isnan(val):
                    fs.add(gringo.Fun('obs', [i, var, discrete(val)]))

        return fs
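# Editorial sketch (not part of the original source): how the Dataset class above
# would typically be instantiated, based only on its constructor and properties.
# The file name and time-point are hypothetical placeholders.
dataset = Dataset('experiment.midas', 10)   # keep only rows acquired at time-point 10
design = dataset.setup                      # Setup(stimuli, inhibitors, readouts)
observations = dataset.readouts             # DV* columns as floats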
def main():
    try:
        opts, extra_params = getopt.getopt(sys.argv[1:], "hgl:e:w:p:",
                                           ["help", "generate", "language=", "export=",
                                            "workspace=", "project="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    language = None
    export = None
    generate = False
    dir = None

    for opt, arg in opts:
        if opt in ['-h', '--help']:
            usage()
            sys.exit()
        else:
            if opt in ['-g', '--generate']:
                generate = True
            if opt in ['-l', '--language']:
                language = arg
            if opt in ['-e', '--export']:
                export = arg
            if opt in ['-w', '--workspace', '-p', '--project']:
                dir = arg

    if language is None:
        usage()
        sys.exit()

    # if len(extra_params) > 1:
    #     usage()
    #     sys.exit()
    # else:
    #     project_name = extra_params[0]

    if dir != None:
        if opt == '-p':
            project_name = os.path.abspath(dir)
            print ''
            print project_name

            # Set the variables
            print ''
            print 'Setting up...'
            setup = Setup(language, project_name)
            # Set the project type (C or Java)
            project = setup.getProject()
            print 'Done.'
            print ''
            print '==================================='

            # Create the work directory (pl_stats directory)
            print ''
            print 'Creating directories...'
            project.createDirectories()
            print 'Done.'
            print ''
            print '==================================='

            # Copy files from project to work directory
            print ''
            print 'Copying source files to work directory...'
            project.copyFiles()
            print 'Done.'
            print ''
            print '==================================='

            # Create XML files from project source code
            print ''
            print 'Converting source files...'
            project.createXMLFiles()
            # Remove source files from work directory
            project.moveXMLFiles()
            print 'Done.'
            print ''
            print '==================================='

            # Create the object to manipulate project statistics
            print ''
            print 'Generating statistics...'
            stats = project.getStats()
            # Get project statistics
            dict_methods, dict_features, dict_cbr, dict_vsoc, dict_decl_coupling, dict_assign_coupling = stats.getStatistics()
            print 'Done.'
            print ''
            print '==================================='

            # Export directives results to XLS file
            # print ''
            # print 'Creating directives sheet...'
            # stats.exportDirectivesToXLS(dict_methods, dict_features)
            # print 'Done.'
            # print ''
            # print '==================================='
            #
            # Export dependencies results to XLS file
            # print ''
            # print 'Creating dependencies sheet...'
            # stats.exportDependenciesToXLS(dict_decl_coupling, dict_assign_coupling)
            # print 'Done.'
            # print ''
            # print '==================================='

            # Export directives data to CSV file
            print ''
            print 'Creating directives_data csv file...'
            stats.exportDirectivesDataToCSV(dict_methods, dict_features)
            print 'Done.'
            print ''
            print '==================================='

            # Export directives results to CSV file
            print ''
            print 'Creating directives_results csv file...'
            stats.exportDirectivesResultsToCSV(dict_methods, dict_features)
            print 'Done.'
            print ''
            print '==================================='

            # Export dependencies results to CSV file
            print ''
            print 'Creating dependencies csv file...'
            stats.exportDependenciesToCSV(dict_decl_coupling, dict_assign_coupling)
            print 'Done.'
            print ''
            print '==================================='
        else:
            for project_dir in os.listdir(os.path.abspath(dir)):
                project_name = os.path.abspath(dir + project_dir)
                print ''
                print project_name

                # Set the variables
                print ''
                print 'Setting up...'
                setup = Setup(language, project_name)
                # Set the project type (C or Java)
                project = setup.getProject()
                print 'Done.'
                print ''
                print '==================================='

                # Create the work directory (pl_stats directory)
                print ''
                print 'Creating directories...'
                project.createDirectories()
                print 'Done.'
                print ''
                print '==================================='

                # Copy files from project to work directory
                print ''
                print 'Copying source files to work directory...'
                project.copyFiles()
                print 'Done.'
                print ''
                print '==================================='

                # Create XML files from project source code
                print ''
                print 'Converting source files...'
                project.createXMLFiles()
                # Remove source files from work directory
                project.moveXMLFiles()
                print 'Done.'
                print ''
                print '==================================='

                # Create the object to manipulate project statistics
                print ''
                print 'Generating statistics...'
                stats = project.getStats()
                # Get project statistics
                dict_methods, dict_features, dict_cbr, dict_vsoc, dict_decl_coupling, dict_assign_coupling = stats.getStatistics()
                print 'Done.'
                print ''
                print '==================================='

                # Export directives results to XLS file
                # print ''
                # print 'Creating directives sheet...'
                # stats.exportDirectivesToXLS(dict_methods, dict_features)
                # print 'Done.'
                # print ''
                # print '==================================='
                #
                # Export dependencies results to XLS file
                # print ''
                # print 'Creating dependencies sheet...'
                # stats.exportDependenciesToXLS(dict_decl_coupling, dict_assign_coupling)
                # print 'Done.'
                # print ''
                # print '==================================='

                # Export directives data to CSV file
                print ''
                print 'Creating directives_data csv file...'
                stats.exportDirectivesDataToCSV(dict_methods, dict_features)
                print 'Done.'
                print ''
                print '==================================='

                # Export directives results to CSV file
                print ''
                print 'Creating directives_results csv file...'
                stats.exportDirectivesResultsToCSV(dict_methods, dict_features)
                print 'Done.'
                print ''
                print '==================================='

                # Export dependencies results to CSV file
                print ''
                print 'Creating dependencies csv file...'
                stats.exportDependenciesToCSV(dict_decl_coupling, dict_assign_coupling)
                print 'Done.'
                print ''
                print '==================================='

                print '==================================='
                print '######### PL-Stats - v. 0.5 #########'
                print '-----------------------------------'
                print '# Project: ' + project_name.__str__()
                print '-----------------------------------'
                print '# Total of project methods: ' + len(dict_methods).__str__()
                print '==================================='