def setUp(self):
    self.challenge = Heartbeat.challenge_type().fromdict(
        MockValues.get_challenges_response['challenges'][0]['challenge'])
    self.heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
    self.tag = Heartbeat.tag_type().fromdict(
        MockValues.get_chunks_response['chunks'][0]['tag'])
    self.expiration = datetime.utcnow() + timedelta(
        int(MockValues.get_chunks_response['chunks'][0]['due']))
    self.client = mock.MagicMock()
    self.manager = ThreadManager()
    self.test_hash = 'hash'
    self.test_size = 100
    self.test_seed = 'seed'
    self.contract = DownstreamContract(self.client,
                                       self.test_hash,
                                       self.test_seed,
                                       self.test_size,
                                       self.challenge,
                                       self.expiration,
                                       self.tag,
                                       self.manager,
                                       os.path.join('data', 'chunks'))
    self.contract.generate_data()
def make_kernel(namespace, kernel_factory,
                out_stream_factory=None, display_hook_factory=None):
    """ Creates a kernel, redirects stdout/stderr, and installs a display hook
    and exception handler.
    """
    # If running under pythonw.exe, the interpreter will crash if more than 4KB
    # of data is written to stdout or stderr. This is a bug that has been with
    # Python for a very long time; see http://bugs.python.org/issue706263.
    if sys.executable.endswith('pythonw.exe'):
        blackhole = file(os.devnull, 'w')
        sys.stdout = sys.stderr = blackhole
        sys.__stdout__ = sys.__stderr__ = blackhole

    # Install minimal exception handling
    sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
                                 ostream=sys.__stdout__)

    # Create a context, a session, and the kernel sockets.
    io.raw_print("Starting the kernel at pid:", os.getpid())
    context = zmq.Context()
    # Uncomment this to try closing the context.
    # atexit.register(context.close)
    session = Session(username=u'kernel')

    reply_socket = context.socket(zmq.XREP)
    xrep_port = bind_port(reply_socket, namespace.ip, namespace.xrep)
    io.raw_print("XREP Channel on port", xrep_port)

    pub_socket = context.socket(zmq.PUB)
    pub_port = bind_port(pub_socket, namespace.ip, namespace.pub)
    io.raw_print("PUB Channel on port", pub_port)

    req_socket = context.socket(zmq.XREQ)
    req_port = bind_port(req_socket, namespace.ip, namespace.req)
    io.raw_print("REQ Channel on port", req_port)

    hb = Heartbeat(context, (namespace.ip, namespace.hb))
    hb.start()
    hb_port = hb.port
    io.raw_print("Heartbeat REP Channel on port", hb_port)

    # Helper to make it easier to connect to an existing kernel, until we have
    # single-port connection negotiation fully implemented.
    io.raw_print("To connect another client to this kernel, use:")
    io.raw_print("-e --xreq {0} --sub {1} --rep {2} --hb {3}".format(
        xrep_port, pub_port, req_port, hb_port))

    # Redirect input streams and set a display hook.
    if out_stream_factory:
        sys.stdout = out_stream_factory(session, pub_socket, u'stdout')
        sys.stderr = out_stream_factory(session, pub_socket, u'stderr')
    if display_hook_factory:
        sys.displayhook = display_hook_factory(session, pub_socket)

    # Create the kernel.
    kernel = kernel_factory(session=session, reply_socket=reply_socket,
                            pub_socket=pub_socket, req_socket=req_socket)
    kernel.record_ports(xrep_port=xrep_port, pub_port=pub_port,
                        req_port=req_port, hb_port=hb_port)
    return kernel
def make_kernel(namespace, kernel_factory,
                out_stream_factory=None, display_hook_factory=None):
    """ Creates a kernel, redirects stdout/stderr, and installs a display hook
    and exception handler.
    """
    # Re-direct stdout/stderr, if necessary.
    if namespace.no_stdout or namespace.no_stderr:
        blackhole = file(os.devnull, 'w')
        if namespace.no_stdout:
            sys.stdout = sys.__stdout__ = blackhole
        if namespace.no_stderr:
            sys.stderr = sys.__stderr__ = blackhole

    # Install minimal exception handling
    sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
                                 ostream=sys.__stdout__)

    # Create a context, a session, and the kernel sockets.
    io.raw_print("Starting the kernel at pid:", os.getpid())
    context = zmq.Context()
    # Uncomment this to try closing the context.
    # atexit.register(context.close)
    session = Session(username=u'kernel')

    reply_socket = context.socket(zmq.XREP)
    xrep_port = bind_port(reply_socket, namespace.ip, namespace.xrep)
    io.raw_print("XREP Channel on port", xrep_port)

    pub_socket = context.socket(zmq.PUB)
    pub_port = bind_port(pub_socket, namespace.ip, namespace.pub)
    io.raw_print("PUB Channel on port", pub_port)

    req_socket = context.socket(zmq.XREQ)
    req_port = bind_port(req_socket, namespace.ip, namespace.req)
    io.raw_print("REQ Channel on port", req_port)

    hb = Heartbeat(context, (namespace.ip, namespace.hb))
    hb.start()
    hb_port = hb.port
    io.raw_print("Heartbeat REP Channel on port", hb_port)

    # Helper to make it easier to connect to an existing kernel, until we have
    # single-port connection negotiation fully implemented.
    io.raw_print("To connect another client to this kernel, use:")
    io.raw_print("-e --xreq {0} --sub {1} --rep {2} --hb {3}".format(
        xrep_port, pub_port, req_port, hb_port))

    # Redirect input streams and set a display hook.
    if out_stream_factory:
        sys.stdout = out_stream_factory(session, pub_socket, u'stdout')
        sys.stderr = out_stream_factory(session, pub_socket, u'stderr')
    if display_hook_factory:
        sys.displayhook = display_hook_factory(session, pub_socket)

    # Create the kernel.
    kernel = kernel_factory(session=session, reply_socket=reply_socket,
                            pub_socket=pub_socket, req_socket=req_socket)
    kernel.record_ports(xrep_port=xrep_port, pub_port=pub_port,
                        req_port=req_port, hb_port=hb_port)
    return kernel
def init_heartbeat(self):
    """start the heart beating"""
    # heartbeat doesn't share context, because it mustn't be blocked
    # by the GIL, which is accessed by libzmq when freeing zero-copy messages
    hb_ctx = zmq.Context()
    self.heartbeat = Heartbeat(hb_ctx,
                               (self.transport, self.ip, self.hb_port))
    self.hb_port = self.heartbeat.port
    self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
    self.heartbeat.start()
def setUp(self):
    self.challenge = Heartbeat.challenge_type()()
    self.tag = Heartbeat.tag_type()()
    self.expiration = datetime.utcnow() + timedelta(seconds=60)
    self.contract = Contract('hash',
                             'seed',
                             12345,
                             self.challenge,
                             self.expiration,
                             self.tag)
class Chunk(object):
    def __init__(self, file_path, num_challenges, root_seed):
        self.secret = "mysecret"
        self.target_file = Heartbeat(file_path, self.secret)
        self.target_file.generate_challenges(num_challenges, root_seed)

    def challenge(self):
        return self.target_file.random_challenge()

    def response(self, answer):
        return self.target_file.check_answer(answer)
def run(func, args):
    config.init('../xmpp.conf')
    client_jid = config.get('users', 'client_jid')
    client_password = config.get('users', 'client_password')
    server_jid = config.get('users', 'server_jid')
    session = Remote.new_session(client_jid, client_password)
    endpoint = session.new_proxy(server_jid + '/rpc', TestRunner)
    job = Future()
    heartbeat = Heartbeat(job, endpoint.ping, session)
    heartbeat.start()
    getattr(endpoint.async(job), func)(*args)
def init_heartbeat(self):
    """start the heart beating"""
    # heartbeat doesn't share context, because it mustn't be blocked
    # by the GIL, which is accessed by libzmq when freeing zero-copy messages
    hb_ctx = zmq.Context()
    self.heartbeat = Heartbeat(hb_ctx,
                               (self.transport, self.ip, self.hb_port))
    self.hb_port = self.heartbeat.port
    self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
    self.heartbeat.start()
    # Helper to make it easier to connect to an existing kernel.
    # set log-level to critical, to make sure it is output
    self.log.critical("To connect another client to this kernel, use:")
def setUp(self):
    self.server_url = 'https://test.url/'
    self.address = base58.b58encode_check(b'\x00' + os.urandom(20))
    self.client = DownstreamClient(self.address)
    self.test_contract = Contract(
        MockValues.get_chunk_response['file_hash'],
        MockValues.get_chunk_response['seed'],
        MockValues.get_chunk_response['size'],
        Heartbeat.challenge_type().fromdict(
            MockValues.get_chunk_response['challenge']),
        datetime.strptime(
            MockValues.get_chunk_response['expiration'],
            '%Y-%m-%dT%H:%M:%S'),
        Heartbeat.tag_type().fromdict(
            MockValues.get_chunk_response['tag']))
    self.test_heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
def test_heartbeat_and_challenge(self):
    file1 = Heartbeat(self.file_path, "mysecret")
    file1.generate_challenges(10, self.root_seed)
    challenge = file1.random_challenge()

    # Create hash_response from seed and duplicate file
    file2 = Heartbeat(self.file_path2)
    answer = file2.meet_challenge(challenge)
    self.assertTrue(file1.check_answer(answer))

    # Create hash_answer from seed and edited file
    file3 = Heartbeat(self.file_path3)
    answer = file3.meet_challenge(challenge)
    # This should not match
    self.assertFalse(file1.check_answer(answer))
def test_connect_sign(self):
    self.client.msg = 'test message'
    self.client.sig = ('HyzVUenXXo4pa+kgm1vS8PNJM83eIXFC5r0q86FGbqFcdla6rcw'
                       '72/ciXiEPfjli3ENfwWuESHhv6K9esI0dl5I=')
    self.client.address = '19qVgG8C6eXwKMMyvVegsi3xCsKyk3Z3jV'
    self.client.token = None
    with mock.patch('downstream_farmer.client.requests.post') as patch:
        patch.return_value.json.return_value = MockValues.connect_response
        self.client.connect()
        patch.assert_called_with(
            '{0}/new/{1}'.format(self.server_url.strip('/') + self.api_path,
                                 self.client.address),
            data=json.dumps({
                "message": self.client.msg,
                "signature": self.client.sig
            }),
            headers={
                'Content-Type': 'application/json'
            },
            verify=None)
    self.assertEqual(
        self.client.token, MockValues.connect_response['token'])
    self.assertEqual(self.client.heartbeat,
                     Heartbeat.fromdict(
                         MockValues.connect_response['heartbeat']))
def __init__(self):
    load_dotenv()
    self.config = get_envvar_configuration('AMQP')
    self.heartbeat = Heartbeat(self.config)
    self.devices = DeviceManager(self.config['DEVICES']['devices'],
                                 self.config)
    loop = asyncio.get_event_loop()
    loop.run_forever()
def test_connect_working(self):
    with mock.patch('downstream_farmer.client.requests.get') as patch:
        inst = patch.return_value
        inst.json.return_value = MockValues.connect_response
        self.client.connect(self.server_url)
        self.assertEqual(self.client.token,
                         MockValues.connect_response['token'])
        self.assertEqual(self.client.heartbeat,
                         Heartbeat.fromdict(
                             MockValues.connect_response['heartbeat']))
def setUp(self):
    self.server_url = 'https://test.url/'
    self.api_path = '/api/downstream/v1'
    self.size = 100
    self.address = base58.b58encode_check(b'\x00' + os.urandom(20))
    self.token = binascii.hexlify(os.urandom(16)).decode('ascii')
    self.msg = ''
    self.sig = ''
    self.thread_manager = ShellApplication()
    self.contract_thread = ManagedThread()
    self.chunk_dir = os.path.join('data', 'chunks')
    self.client = DownstreamClient(self.server_url,
                                   self.token,
                                   self.address,
                                   self.size,
                                   self.msg,
                                   self.sig,
                                   self.thread_manager,
                                   self.chunk_dir)
    self.client.session = mock.MagicMock()
    self.test_contract = DownstreamContract(
        self.client,
        MockValues.get_chunks_response['chunks'][0]['file_hash'],
        MockValues.get_chunks_response['chunks'][0]['seed'],
        MockValues.get_chunks_response['chunks'][0]['size'],
        Heartbeat.challenge_type().fromdict(
            MockValues.get_chunks_response['chunks'][0]['challenge']),
        datetime.utcnow() + timedelta(
            seconds=int(MockValues.get_chunks_response['chunks'][0]['due'])),
        Heartbeat.tag_type().fromdict(
            MockValues.get_chunks_response['chunks'][0]['tag']),
        self.thread_manager,
        self.chunk_dir)
    self.test_heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
def _run(self): self.is_running = True self._debug('started') self.heartbeat_count = 0 heartbeat = Heartbeat(self._monitor) heartbeat_class = heartbeat.__class__ keep_running = True while keep_running: notifier, msg = self._monitor.notification() if notifier == heartbeat_class: self.heartbeat_count += 1 else: self._debug( 'hb count %d, from %s, received - %s' % (self.heartbeat_count, str(notifier).split('.')[-1], msg)) if msg == self.STOP_COMMAND: keep_running = False elif notifier == self._search_commands.__class__: if msg == SearchCommands.FINISHED_UPDATE_INMATES_STATUS: self._debug('initiate search for new inmates') self._find_new_inmates() elif msg == SearchCommands.FINISHED_FIND_INMATES: self._debug('fetch recently discharged inmate ids') self._recently_discharged_inmates_ids() elif msg == SearchCommands.FINISHED_CHECK_OF_RECENTLY_DISCHARGED_INMATES: self._debug('initiate inmates scraper finish') self._inmate_scraper.finish() else: self._debug( 'Unknown notification from %s, received - %s' % (notifier, msg)) elif notifier == self._inmate_scraper.__class__: self._debug('inmates finish') self._inmates.finish() elif notifier == self._inmates.__class__: keep_running = False elif notifier == self.__class__: if msg == self._START_COMMAND: self._debug('find active inmates') self._active_inmates() elif msg == self._RECEIVED_ACTIVE_IDS_COMMAND: self._debug('update inmates status') self._active_inmate_ids = self._inmates_response self._search_commands.update_inmates_status( self._inmates_response) elif msg == self._RECEIVED_RECENTLY_DISCHARGED_INMATES_IDS_COMMAND: self._debug( 'initiate confirmation search of recently discharged inmates' ) self._search_commands.check_if_really_discharged( self._inmates_response) else: self._debug('Unknown notification from %s, received - %s' % (notifier, msg)) self.is_running = False self._debug('stopped')
def setUp(self):
    self.challenge = Heartbeat.challenge_type().fromdict(
        MockValues.get_challenge_response['challenge'])
    self.heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
    self.tag = Heartbeat.tag_type().fromdict(
        MockValues.get_chunk_response['tag'])
    self.expiration = datetime.utcnow() + timedelta(
        int(MockValues.get_chunk_response['due']))
    self.client = mock.MagicMock()
    self.api = API()
    self.contract = DownstreamContract(self.client,
                                       'hash',
                                       'seed',
                                       100,
                                       self.challenge,
                                       self.expiration,
                                       self.tag,
                                       self.api)
def setUp(self):
    self.server_url = 'https://test.url/'
    self.api_path = '/api/downstream/v1'
    self.size = 100
    self.address = base58.b58encode_check(b'\x00' + os.urandom(20))
    self.token = binascii.hexlify(os.urandom(16)).decode('ascii')
    self.msg = ''
    self.sig = ''
    self.api = API()
    self.client = DownstreamClient(self.server_url,
                                   self.token,
                                   self.address,
                                   self.size,
                                   self.msg,
                                   self.sig,
                                   self.api)
    self.test_contract = DownstreamContract(
        self.client,
        MockValues.get_chunk_response['file_hash'],
        MockValues.get_chunk_response['seed'],
        MockValues.get_chunk_response['size'],
        Heartbeat.challenge_type().fromdict(
            MockValues.get_chunk_response['challenge']),
        datetime.utcnow() + timedelta(
            seconds=int(MockValues.get_chunk_response['due'])),
        Heartbeat.tag_type().fromdict(
            MockValues.get_chunk_response['tag']),
        self.api)
    self.test_heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
def test_usage(self):
    beat = Heartbeat()
    public_beat = beat.get_public()

    with open("files/test.txt", "rb") as file:
        (tag, state) = beat.encode(file)

    challenge = beat.gen_challenge(state)

    with open("files/test.txt", "rb") as file:
        proof = public_beat.prove(file, challenge, tag)

    is_valid = beat.verify(proof, challenge, state)
    if is_valid:
        print("file is stored by the server")
    else:
        print("file proof invalid")
    self.assertTrue(is_valid)
def __init__(self):
    if MOCK_HARDWARE:
        from hardware_mock import HardwareMock
        self._hardware = HardwareMock()
    else:
        from hardware import Hardware
        self._hardware = Hardware()

    if MOCK_HEARTBEAT:
        self._heartbeat = HeartbeatMock()
    else:
        self._heartbeat = Heartbeat()

    if MOCK_DATA:
        self._sensor_service = SensorServiceMock()
        self._registration_service = RegistrationServiceMock()
    else:
        self._sensor_service = SensorService()
        self._registration_service = RegistrationService()
def test_connect_working(self):
    with mock.patch('downstream_farmer.client.requests.get') as patch:
        patch.return_value.json.return_value = MockValues.connect_response
        self.client.connect()
        patch.assert_called_with(
            '{0}/heartbeat/{1}'.format(
                self.server_url.strip('/') + self.api_path, self.token),
            verify=None)
    self.assertEqual(self.client.token,
                     MockValues.connect_response['token'])
    self.assertEqual(
        self.client.heartbeat,
        Heartbeat.fromdict(MockValues.connect_response['heartbeat']))
def test_connect_working(self):
    self.client.session.get.return_value.json.return_value = \
        MockValues.connect_response
    self.client.connect()
    self.client.session.get.assert_called_with(
        '{0}/heartbeat/{1}'.format(
            self.server_url.strip('/') + self.api_path, self.token),
        verify=None)
    self.assertEqual(self.client.token,
                     MockValues.connect_response['token'])
    self.assertEqual(
        self.client.heartbeat,
        Heartbeat.fromdict(MockValues.connect_response['heartbeat']))
def main():
    shared.init()

    receiver_ev = Event()
    receiver_ev.set()
    receiver_thread = SerialReceiver(receiver_ev)

    heartbeat_ev = Event()
    heartbeat_ev.set()
    heartbeat_thread = Heartbeat(heartbeat_ev)

    time.sleep(2)
    START = '1'
    shared.comm.send(START)
    rcv = shared.comm.receive()
    print(rcv)

    with shared.camera:
        try:
            receiver_thread.start()
            heartbeat_thread.start()
            server = StreamingServer(shared.address, StreamingHandler)
            print('Server started on port ' + str(shared.address[1]))
            server.serve_forever()
        except Exception as err:
            print(err)
        finally:
            heartbeat_ev.clear()
            heartbeat_thread.join()
            END = '9'
            shared.comm.send(END)
            receiver_ev.clear()
            receiver_thread.join()
def job_heartbeat():
    global id
    heartbeat = Heartbeat(id)
    t1 = threading.Thread(target=job_heartbeat_failure, args=(heartbeat, ))
    t1.start()
    t = threading.Thread(target=expose_function_heartbeat,
                         args=(heartbeat, id, ))
    t.start()
    return heartbeat, t, t1
def setUp(self):
    self.challenge = Heartbeat.challenge_type().fromdict(
        MockValues.get_challenges_response['challenges'][0]['challenge'])
    self.heartbeat = Heartbeat.fromdict(
        MockValues.connect_response['heartbeat'])
    self.tag = Heartbeat.tag_type().fromdict(
        MockValues.get_chunks_response['chunks'][0]['tag'])
    self.expiration = datetime.utcnow() + timedelta(
        int(MockValues.get_chunks_response['chunks'][0]['due']))
    self.client = mock.MagicMock()
    self.manager = ThreadManager()
    self.test_hash = 'hash'
    self.test_size = 100
    self.test_seed = 'seed'
    self.contract = DownstreamContract(self.client,
                                       self.test_hash,
                                       self.test_seed,
                                       self.test_size,
                                       self.challenge,
                                       self.expiration,
                                       self.tag,
                                       self.manager,
                                       os.path.join('data', 'chunks'))
    self.contract.generate_data()
def test_challenge_working(self):
    self.client.contract = self.test_contract
    self.client.heartbeat = self.test_heartbeat
    with mock.patch('downstream_farmer.client.requests.get') as patch:
        inst = patch.return_value
        inst.json.return_value = MockValues.get_challenge_response
        self.client.get_challenge()
        self.assertEqual(self.client.contract.challenge,
                         Heartbeat.challenge_type().fromdict(
                             MockValues.get_challenge_response['challenge']))
        self.assertEqual(self.client.contract.expiration,
                         datetime.strptime(
                             MockValues.get_challenge_response['expiration'],
                             '%Y-%m-%dT%H:%M:%S'))
def __start_agent(self): server_ip = self.config.get("server","ip") server_port = int(self.config.get("server","port")) sensor_id= self.config.get("agent","id") priority = 1 system_id_file = "" Worker(self.recv_packet_queue,self.send_packet_queue).start() self.server_connection = ServerConn( server_ip, server_port, priority, sensor_id,system_id_file) server_socket = self.server_connection.connect() if server_socket == None : print "connect failed !" exit(0); Sender(self.send_packet_queue,self.server_connection).start() Heartbeat(self.send_packet_queue).start() server_socket.setblocking(False) read_buff = "" print "====" while True: readable, writable, exceptional = select.select([self.server_connection.get_connectsocket()], [], [self.server_connection.get_connectsocket()]) #handle read event try: if server_socket in readable : #construct packet chunk= server_socket.recv(1024*1024) read_buff += chunk read_buff_len = len(read_buff) print read_buff_len while( read_buff_len > 4) : packet, read_buff = self.get_bson_packet(read_buff,read_buff_len) if packet: print "push a packet" self.recv_packet_queue.put(packet) read_buff_len = len(read_buff) else : break except Exception as e: print e for s in exceptional: print "socket --has exceptional"
def _find_missing_inmates(self): self.is_running = True self._debug('find_missing_inmates started') self.heartbeat_count = 0 heartbeat = Heartbeat(self._monitor) heartbeat_class = heartbeat.__class__ keep_running = True while keep_running: notifier, msg = self._monitor.notification() if notifier == heartbeat_class: self.heartbeat_count += 1 else: self._debug( 'hb count %d, from %s, received - %s' % (self.heartbeat_count, str(notifier).split('.')[-1], msg)) if msg == self.STOP_COMMAND: keep_running = False elif notifier == self._search_commands.__class__: if msg == SearchCommands.FINISHED_FIND_INMATES: self._debug('inmates scraper finish') self._inmate_scraper.finish() else: self._debug( 'Unknown notification from %s, received - %s' % (notifier, msg)) elif notifier == self._inmate_scraper.__class__: self._debug('inmates finish') self._inmates.finish() elif notifier == self._inmates.__class__: keep_running = False elif notifier == self.__class__: if msg == self._START_COMMAND: self._debug('find known inmates') self._known_inmates() elif msg == self._RECEIVED_KNOWN_INMATES_COMMAND: self._debug('find missing inmates') self._search_commands.find_inmates( exclude_list=self._inmates_response, start_date=self._start_date_missing_inmates) else: self._debug('Unknown notification from %s, received - %s' % (notifier, msg)) self._start_date_missing_inmates = None self.is_running = False self._debug('find_missing_inmates stopped')
def __init__(self):
    """
    Constructor
    """
    global SVM_PATH

    self.my_x = 0
    self.my_y = 0
    self.my_theta = 0
    self.other_x = 0
    self.other_y = 0
    self.other_theta = 0
    self.speed = 0

    rospy.init_node('g3')

    self.i2c = I2CHandler()
    self.vs = VideoStream().start()

    # -1 represents no data --> from the start no data has been received
    self.posData = [[-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1]]
    # -1 represents leader --> from the start everyone is the leader
    self.followingData = [-1, -1, -1, -1, -1]
    # -1 represents that no data has been received
    self.fanOutData = [-1, -1, -1, -1, -1]
    self.laneToGoTo = -1
    self.fanOutFlag = False
    self.foundLaneTime = -1

    self.robot_follower = RobotFollower(100, k_p=0.5)
    #self.serial = SerialHandler('/dev/ttyUSB0', 9600)
    self.turnC = TurnCheck([640, 480], "segImage.png")
    #self.gps_other = GpsClient(self.position_callback_other, color='green')
    self.heartbeat = Heartbeat(self.heartbeat_callback,
                               self.platoon_pos_callback,
                               self.fan_out_callback,
                               self.lane_change_callback)
    self.gps = GpsClient(self.position_callback)
    self.image_recognizer = ImageRecognizer(SVM_PATH)

    self.get_ultrasound()
    time.sleep(2)
def startFactory(self):
    # Runtime vars
    self.usedIDs = []  # List of Player IDs used (can be occupied by mobs)
    self.protocols = {}  # Dictionary of Protocols indexed by ID
    self.salt = generate_salt()

    self.pingtimer = LoopingCall(self.sendPacket, PacketIDs["Ping"])
    self.pingtimer.start(1, False)

    self.heart = Heartbeat(self, self.config["heartbeat_endpoint"])
    self.heart.start()

    def save(self):
        self.world.save()
        self.sendMessage("World saved")
        self.saveConfig()

    self.savetimer = LoopingCall(save, self)
    self.savetimer.start(self.config["saveinterval"], False)

    print "SchnitzelFactory started"
def test_update_challenge_working(self):
    self.contract.answered = True
    self.client.heartbeat = self.heartbeat
    self.api = API()
    self.contract.time_remaining = mock.MagicMock()
    self.contract.time_remaining.return_value = 0
    with mock.patch('downstream_farmer.contract.requests.get') as getpatch:
        getpatch.return_value.json.return_value = \
            MockValues.get_challenge_response
        self.contract.update_challenge()
        self.assertEqual(self.contract.challenge,
                         Heartbeat.challenge_type().fromdict(
                             MockValues.get_challenge_response['challenge']),
                         self.api)
        self.assertAlmostEqual(
            (self.contract.expiration - datetime.utcnow()).total_seconds(),
            int(MockValues.get_challenge_response['due']),
            delta=0.5)
def __init__(self, platform):
    sys_clk_freq = int(75e6)
    SoCSDRAM.__init__(self, platform, clk_freq=sys_clk_freq,
                      cpu_type="picorv32",
                      integrated_rom_size=0x6000,
                      integrated_sram_size=8192)

    self.submodules.crg = crg = _CRG(platform, sys_clk_freq)

    self.submodules.bridge = UARTWishboneBridge(platform.request("dbgserial"),
                                                sys_clk_freq, baudrate=115200)
    self.add_wb_master(self.bridge.wishbone)

    self.submodules.heartbeat = Heartbeat(sys_clk_freq, 0.5,
                                          platform.request("user_led"))

    self.submodules.j600io = J600IO(platform.request("U600"),
                                    platform.request("U601"),
                                    platform.request("U604"),
                                    platform.request("U605"),
                                    None)
    self.add_wb_slave(mem_decoder(self.mem_map["j600io"]), self.j600io.bus)
    self.add_memory_region("j600io",
                           self.mem_map["j600io"] | self.shadow_base, 0x10)

    self.submodules.ethphy = LiteEthPHYRGMII(platform.request("eth_clocks", 1),
                                             platform.request("eth", 1))
    self.submodules.ethmac = LiteEthMAC(phy=self.ethphy, dw=32,
                                        interface="wishbone",
                                        endianness=self.cpu.endianness)
    self.add_wb_slave(mem_decoder(self.mem_map["ethmac"]), self.ethmac.bus)
    self.add_memory_region("ethmac",
                           self.mem_map["ethmac"] | self.shadow_base, 0x2000)

    self.ethphy.crg.cd_eth_rx.clk.attr.add("keep")
    platform.add_period_constraint(self.ethphy.crg.cd_eth_rx.clk,
                                   period_ns(75e6))
    platform.add_false_path_constraints(crg.cd_sys.clk,
                                        self.ethphy.crg.cd_eth_rx.clk)

    if not self.integrated_main_ram_size:
        self.submodules.sdrphy = GENSDRPHY(platform.request("sdram"))
        sdram_module = M12L64322A(sys_clk_freq, "1:1")
        self.register_sdram(self.sdrphy,
                            sdram_module.geom_settings,
                            sdram_module.timing_settings,
                            controller_settings=ControllerSettings(
                                with_refresh=False))
def test_usage(self):
    beat = Heartbeat()
    public_beat = beat.get_public()

    with open('files/test.txt', 'rb') as file:
        (tag, state) = beat.encode(file)

    challenge = beat.gen_challenge(state)

    with open('files/test.txt', 'rb') as file:
        proof = public_beat.prove(file, challenge, tag)

    is_valid = beat.verify(proof, challenge, state)
    if (is_valid):
        print('file is stored by the server')
    else:
        print('file proof invalid')
    self.assertTrue(is_valid)
from heartbeat import Heartbeat
from time import sleep

a = Heartbeat("192.168.1.40", 5432)

while True:
    sleep(1)
from config import Config
from display import Display
from ewh_net import Network
from heartbeat import Heartbeat
from name import Name
from mqtt import MQTT
from task import Scheduler
import ntptime

network = Network()
MQTT.init(network)

board = Board(network)
board.init()

heartbeat = Heartbeat(board.display)
scheduler = Scheduler()
config = Config(board, network, scheduler)
name = Name(config, board.display)

scheduler.register(board.display)
scheduler.register(heartbeat)
scheduler.register(network)
scheduler.register(MQTT.task)

print("Starting scheduler of version {0}".format(config.version))
scheduler.start(100)
from heartbeat import Heartbeat
from timer import Timer

#
#class Clock(object):
#    """Clock class
#    """
#    def __init__(self, name, )

timerDebug = True

heartbeat = Heartbeat('basic heart beat')
heartbeatTimer = Timer('heartbeat timer', 5, timerDebug)
heartbeatTimer.set_repeat(False)
heartbeatTimer.set_repeat(True)
heartbeatTimer.add_master(heartbeat, 'low-high', 'tick')
heartbeatTimer.start()

for i in range(50):
    heartbeat.execute('none')
    #print heartbeat
from time import sleep
import sys
sys.path.append('src')

from heartbeat import Heartbeat
from pulseDetect import PulseDetect
from accumulator import Accumulator
from ftpLocalSave import FtpLocalSave
from timedCallback import TimedCallback
from readSensorDht import ReadSensorDht

## Heartbeat
L2B1 = Heartbeat(300)
L2B2 = FtpLocalSave()
L2B1.connect_output(L2B2)

## Temperature sensor
L3B1 = TimedCallback(delay=600)
L3B2 = ReadSensorDht('Box_temperature', 21)
L3B3 = FtpLocalSave()
L3B2.connect_output(L3B3)
L3B1.callback = L3B2.read
L3B1.start()

## Humidity sensor
L4B1 = TimedCallback(delay=600)
L4B2 = ReadSensorDht('Box_humidity', 21, read_humid=True)
L4B3 = FtpLocalSave()
L4B2.connect_output(L4B3)
L4B1.callback = L4B2.read
L4B1.start()
def num_challenges(number):
    file1 = Heartbeat(self.size_path, "mysecret")
    file1.generate_challenges(number, self.root_seed)
def __init__(self, file_path):
    self.target_file = Heartbeat(file_path, "mysecret")
PROJECT_ROOT = path(__file__).abspath().dirname().dirname()
VIRTUALENV = path(sys.executable).abspath().dirname().dirname()
sys.path.append(PROJECT_ROOT)
sys.path.append(PROJECT_ROOT / 'apps')

from heartbeat import Heartbeat
from what_apps.meta.alerts import local_red_alert
import requests

HEARTBEATS = Heartbeat.__subclasses__()


class HeartBeatRunner(object):
    error_messages = []

    def run(self):
        for HeartbeatChild in HEARTBEATS:
            # HeartbeatChild here is a class (a subclass of the Heartbeat class)
            heartbeat = HeartbeatChild()
            if not heartbeat.is_skipped():
                # try:
                heartbeat.run()
                # except Exception, e:
                #     self.error_messages.append(e)
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp): name = 'ipkernel' aliases = Dict(kernel_aliases) flags = Dict(kernel_flags) classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session] # the kernel class, as an importstring kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel') kernel = Any() poller = Any( ) # don't restrict this even though current pollers are all Threads heartbeat = Instance(Heartbeat) session = Instance('IPython.kernel.zmq.session.Session') ports = Dict() # inherit config file name from parent: parent_appname = Unicode(config=True) def _parent_appname_changed(self, name, old, new): if self.config_file_specified: # it was manually specified, ignore return self.config_file_name = new.replace('-', '_') + u'_config.py' # don't let this count as specifying the config file self.config_file_specified = False # connection info: transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True) ip = Unicode( config=True, help="Set the IP or interface on which the kernel will listen.") def _ip_default(self): if self.transport == 'ipc': if self.connection_file: return os.path.splitext(self.abs_connection_file)[0] + '-ipc' else: return 'kernel-ipc' else: return LOCALHOST hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]") shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]") iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]") stdin_port = Integer(0, config=True, help="set the stdin (DEALER) port [default: random]") connection_file = Unicode( '', config=True, help= """JSON file in which to store connection info [default: kernel-<pid>.json] This file will contain the IP, ports, and authentication key needed to connect clients to this kernel. By default, this file will be created in the security dir of the current profile, but can be specified by absolute path. """) @property def abs_connection_file(self): if os.path.basename(self.connection_file) == self.connection_file: return os.path.join(self.profile_dir.security_dir, self.connection_file) else: return self.connection_file # streams, etc. no_stdout = Bool(False, config=True, help="redirect stdout to the null device") no_stderr = Bool(False, config=True, help="redirect stderr to the null device") outstream_class = DottedObjectName( 'IPython.kernel.zmq.iostream.OutStream', config=True, help="The importstring for the OutStream factory") displayhook_class = DottedObjectName( 'IPython.kernel.zmq.displayhook.ZMQDisplayHook', config=True, help="The importstring for the DisplayHook factory") # polling parent = Integer( 0, config=True, help="""kill this process if its parent dies. On Windows, the argument specifies the HANDLE of the parent process, otherwise it is simply boolean. """) interrupt = Integer(0, config=True, help="""ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. 
""") def init_crash_handler(self): # Install minimal exception handling sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor', ostream=sys.__stdout__) def init_poller(self): if sys.platform == 'win32': if self.interrupt or self.parent: self.poller = ParentPollerWindows(self.interrupt, self.parent) elif self.parent: self.poller = ParentPollerUnix() def _bind_socket(self, s, port): iface = '%s://%s' % (self.transport, self.ip) if self.transport == 'tcp': if port <= 0: port = s.bind_to_random_port(iface) else: s.bind("tcp://%s:%i" % (self.ip, port)) elif self.transport == 'ipc': if port <= 0: port = 1 path = "%s-%i" % (self.ip, port) while os.path.exists(path): port = port + 1 path = "%s-%i" % (self.ip, port) else: path = "%s-%i" % (self.ip, port) s.bind("ipc://%s" % path) return port def load_connection_file(self): """load ip/port/hmac config from JSON connection file""" try: fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir]) except IOError: self.log.debug("Connection file not found: %s", self.connection_file) # This means I own it, so I will clean it up: atexit.register(self.cleanup_connection_file) return self.log.debug(u"Loading connection file %s", fname) with open(fname) as f: s = f.read() cfg = json.loads(s) self.transport = cfg.get('transport', self.transport) if self.ip == self._ip_default() and 'ip' in cfg: # not overridden by config or cl_args self.ip = cfg['ip'] for channel in ('hb', 'shell', 'iopub', 'stdin'): name = channel + '_port' if getattr(self, name) == 0 and name in cfg: # not overridden by config or cl_args setattr(self, name, cfg[name]) if 'key' in cfg: self.config.Session.key = str_to_bytes(cfg['key']) def write_connection_file(self): """write connection info to JSON file""" cf = self.abs_connection_file self.log.debug("Writing connection file: %s", cf) write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port) def cleanup_connection_file(self): cf = self.abs_connection_file self.log.debug("Cleaning up connection file: %s", cf) try: os.remove(cf) except (IOError, OSError): pass self.cleanup_ipc_files() def cleanup_ipc_files(self): """cleanup ipc files if we wrote them""" if self.transport != 'ipc': return for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port): ipcfile = "%s-%i" % (self.ip, port) try: os.remove(ipcfile) except (IOError, OSError): pass def init_connection_file(self): if not self.connection_file: self.connection_file = "kernel-%s.json" % os.getpid() try: self.load_connection_file() except Exception: self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) self.exit(1) def init_sockets(self): # Create a context, a session, and the kernel sockets. self.log.info("Starting the kernel at pid: %i", os.getpid()) context = zmq.Context.instance() # Uncomment this to try closing the context. 
# atexit.register(context.term) self.shell_socket = context.socket(zmq.ROUTER) self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) self.iopub_socket = context.socket(zmq.PUB) self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) self.stdin_socket = context.socket(zmq.ROUTER) self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) def init_heartbeat(self): """start the heart beating""" # heartbeat doesn't share context, because it mustn't be blocked # by the GIL, which is accessed by libzmq when freeing zero-copy messages hb_ctx = zmq.Context() self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) self.hb_port = self.heartbeat.port self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) self.heartbeat.start() # Helper to make it easier to connect to an existing kernel. # set log-level to critical, to make sure it is output self.log.critical("To connect another client to this kernel, use:") def log_connection_info(self): """display connection info, and store ports""" basename = os.path.basename(self.connection_file) if basename == self.connection_file or \ os.path.dirname(self.connection_file) == self.profile_dir.security_dir: # use shortname tail = basename if self.profile != 'default': tail += " --profile %s" % self.profile else: tail = self.connection_file self.log.critical("--existing %s", tail) self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, stdin=self.stdin_port, hb=self.hb_port) def init_session(self): """create our session object""" default_secure(self.config) self.session = Session(config=self.config, username=u'kernel') def init_blackhole(self): """redirects stdout/stderr to devnull if necessary""" if self.no_stdout or self.no_stderr: blackhole = open(os.devnull, 'w') if self.no_stdout: sys.stdout = sys.__stdout__ = blackhole if self.no_stderr: sys.stderr = sys.__stderr__ = blackhole def init_io(self): """Redirect input streams and set a display hook.""" if self.outstream_class: outstream_factory = import_item(str(self.outstream_class)) sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout') sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr') if self.displayhook_class: displayhook_factory = import_item(str(self.displayhook_class)) sys.displayhook = displayhook_factory(self.session, self.iopub_socket) def init_signal(self): signal.signal(signal.SIGINT, signal.SIG_IGN) def init_kernel(self): """Create the Kernel object itself""" shell_stream = ZMQStream(self.shell_socket) kernel = Kernel( config=self.config, session=self.session, shell_streams=[shell_stream], iopub_socket=self.iopub_socket, stdin_socket=self.stdin_socket, log=self.log, profile_dir=self.profile_dir, ) kernel.record_ports(self.ports) self.kernel = kernel def init_gui_pylab(self): """Enable GUI event loop integration, taking pylab into account.""" # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` # to ensure that any exception is printed straight to stderr. # Normally _showtraceback associates the reply with an execution, # which means frontends will never draw it, as this exception # is not associated with any execute request. 
shell = self.shell _showtraceback = shell._showtraceback try: # replace pyerr-sending traceback with stderr def print_tb(etype, evalue, stb): print("GUI event loop or pylab initialization failed", file=io.stderr) print(shell.InteractiveTB.stb2text(stb), file=io.stderr) shell._showtraceback = print_tb InteractiveShellApp.init_gui_pylab(self) finally: shell._showtraceback = _showtraceback def init_shell(self): self.shell = self.kernel.shell self.shell.configurables.append(self) @catch_config_error def initialize(self, argv=None): super(IPKernelApp, self).initialize(argv) self.init_blackhole() self.init_connection_file() self.init_session() self.init_poller() self.init_sockets() self.init_heartbeat() # writing/displaying connection info must be *after* init_sockets/heartbeat self.log_connection_info() self.write_connection_file() self.init_io() self.init_signal() self.init_kernel() # shell init steps self.init_path() self.init_shell() self.init_gui_pylab() self.init_extensions() self.init_code() # flush stdout/stderr, so that anything written to these streams during # initialization do not get associated with the first execution request sys.stdout.flush() sys.stderr.flush() def start(self): if self.poller is not None: self.poller.start() self.kernel.start() try: ioloop.IOLoop.instance().start() except KeyboardInterrupt: pass
class Client(object):
    def __init__(self, file_path):
        self.target_file = Heartbeat(file_path, "mysecret")

    def answer(self, challenge):
        return self.target_file.meet_challenge(challenge)
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp): name='ipkernel' aliases = Dict(kernel_aliases) flags = Dict(kernel_flags) classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session] # the kernel class, as an importstring kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True, help="""The Kernel subclass to be used. This should allow easy re-use of the IPKernelApp entry point to configure and launch kernels other than IPython's own. """) kernel = Any() poller = Any() # don't restrict this even though current pollers are all Threads heartbeat = Instance(Heartbeat) session = Instance('IPython.kernel.zmq.session.Session') ports = Dict() # inherit config file name from parent: parent_appname = Unicode(config=True) def _parent_appname_changed(self, name, old, new): if self.config_file_specified: # it was manually specified, ignore return self.config_file_name = new.replace('-','_') + u'_config.py' # don't let this count as specifying the config file self.config_file_specified.remove(self.config_file_name) # connection info: transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True) ip = Unicode(config=True, help="Set the IP or interface on which the kernel will listen.") def _ip_default(self): if self.transport == 'ipc': if self.connection_file: return os.path.splitext(self.abs_connection_file)[0] + '-ipc' else: return 'kernel-ipc' else: return LOCALHOST hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]") shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]") iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]") stdin_port = Integer(0, config=True, help="set the stdin (ROUTER) port [default: random]") control_port = Integer(0, config=True, help="set the control (ROUTER) port [default: random]") connection_file = Unicode('', config=True, help="""JSON file in which to store connection info [default: kernel-<pid>.json] This file will contain the IP, ports, and authentication key needed to connect clients to this kernel. By default, this file will be created in the security dir of the current profile, but can be specified by absolute path. """) @property def abs_connection_file(self): if os.path.basename(self.connection_file) == self.connection_file: return os.path.join(self.profile_dir.security_dir, self.connection_file) else: return self.connection_file # streams, etc. no_stdout = Bool(False, config=True, help="redirect stdout to the null device") no_stderr = Bool(False, config=True, help="redirect stderr to the null device") outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream', config=True, help="The importstring for the OutStream factory") displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook', config=True, help="The importstring for the DisplayHook factory") # polling parent_handle = Integer(0, config=True, help="""kill this process if its parent dies. On Windows, the argument specifies the HANDLE of the parent process, otherwise it is simply boolean. """) interrupt = Integer(0, config=True, help="""ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. 
""") def init_crash_handler(self): # Install minimal exception handling sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor', ostream=sys.__stdout__) def init_poller(self): if sys.platform == 'win32': if self.interrupt or self.parent_handle: self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) elif self.parent_handle: self.poller = ParentPollerUnix() def _bind_socket(self, s, port): iface = '%s://%s' % (self.transport, self.ip) if self.transport == 'tcp': if port <= 0: port = s.bind_to_random_port(iface) else: s.bind("tcp://%s:%i" % (self.ip, port)) elif self.transport == 'ipc': if port <= 0: port = 1 path = "%s-%i" % (self.ip, port) while os.path.exists(path): port = port + 1 path = "%s-%i" % (self.ip, port) else: path = "%s-%i" % (self.ip, port) s.bind("ipc://%s" % path) return port def load_connection_file(self): """load ip/port/hmac config from JSON connection file""" try: fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir]) except IOError: self.log.debug("Connection file not found: %s", self.connection_file) # This means I own it, so I will clean it up: atexit.register(self.cleanup_connection_file) return self.log.debug(u"Loading connection file %s", fname) with open(fname) as f: s = f.read() cfg = json.loads(s) self.transport = cfg.get('transport', self.transport) if self.ip == self._ip_default() and 'ip' in cfg: # not overridden by config or cl_args self.ip = cfg['ip'] for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'): name = channel + '_port' if getattr(self, name) == 0 and name in cfg: # not overridden by config or cl_args setattr(self, name, cfg[name]) if 'key' in cfg: self.config.Session.key = str_to_bytes(cfg['key']) def write_connection_file(self): """write connection info to JSON file""" cf = self.abs_connection_file self.log.debug("Writing connection file: %s", cf) write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port, control_port=self.control_port) def cleanup_connection_file(self): cf = self.abs_connection_file self.log.debug("Cleaning up connection file: %s", cf) try: os.remove(cf) except (IOError, OSError): pass self.cleanup_ipc_files() def cleanup_ipc_files(self): """cleanup ipc files if we wrote them""" if self.transport != 'ipc': return for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port, self.control_port): ipcfile = "%s-%i" % (self.ip, port) try: os.remove(ipcfile) except (IOError, OSError): pass def init_connection_file(self): if not self.connection_file: self.connection_file = "kernel-%s.json"%os.getpid() try: self.load_connection_file() except Exception: self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) self.exit(1) def init_sockets(self): # Create a context, a session, and the kernel sockets. self.log.info("Starting the kernel at pid: %i", os.getpid()) context = zmq.Context.instance() # Uncomment this to try closing the context. 
# atexit.register(context.term) self.shell_socket = context.socket(zmq.ROUTER) self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) self.iopub_socket = context.socket(zmq.PUB) self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) self.stdin_socket = context.socket(zmq.ROUTER) self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) self.control_socket = context.socket(zmq.ROUTER) self.control_port = self._bind_socket(self.control_socket, self.control_port) self.log.debug("control ROUTER Channel on port: %i" % self.control_port) def init_heartbeat(self): """start the heart beating""" # heartbeat doesn't share context, because it mustn't be blocked # by the GIL, which is accessed by libzmq when freeing zero-copy messages hb_ctx = zmq.Context() self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) self.hb_port = self.heartbeat.port self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) self.heartbeat.start() def log_connection_info(self): """display connection info, and store ports""" basename = os.path.basename(self.connection_file) if basename == self.connection_file or \ os.path.dirname(self.connection_file) == self.profile_dir.security_dir: # use shortname tail = basename if self.profile != 'default': tail += " --profile %s" % self.profile else: tail = self.connection_file lines = [ "To connect another client to this kernel, use:", " --existing %s" % tail, ] # log connection info # info-level, so often not shown. # frontends should use the %connect_info magic # to see the connection info for line in lines: self.log.info(line) # also raw print to the terminal if no parent_handle (`ipython kernel`) if not self.parent_handle: for line in lines: io.rprint(line) self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, stdin=self.stdin_port, hb=self.hb_port, control=self.control_port) def init_session(self): """create our session object""" default_secure(self.config) self.session = Session(parent=self, username=u'kernel') def init_blackhole(self): """redirects stdout/stderr to devnull if necessary""" if self.no_stdout or self.no_stderr: blackhole = open(os.devnull, 'w') if self.no_stdout: sys.stdout = sys.__stdout__ = blackhole if self.no_stderr: sys.stderr = sys.__stderr__ = blackhole def init_io(self): """Redirect input streams and set a display hook.""" if self.outstream_class: outstream_factory = import_item(str(self.outstream_class)) sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout') sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr') if self.displayhook_class: displayhook_factory = import_item(str(self.displayhook_class)) sys.displayhook = displayhook_factory(self.session, self.iopub_socket) def init_signal(self): signal.signal(signal.SIGINT, signal.SIG_IGN) def init_kernel(self): """Create the Kernel object itself""" shell_stream = ZMQStream(self.shell_socket) control_stream = ZMQStream(self.control_socket) kernel_factory = import_item(str(self.kernel_class)) kernel = kernel_factory(parent=self, session=self.session, shell_streams=[shell_stream, control_stream], iopub_socket=self.iopub_socket, stdin_socket=self.stdin_socket, log=self.log, profile_dir=self.profile_dir, ) kernel.record_ports(self.ports) self.kernel = kernel def init_gui_pylab(self): 
"""Enable GUI event loop integration, taking pylab into account.""" # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` # to ensure that any exception is printed straight to stderr. # Normally _showtraceback associates the reply with an execution, # which means frontends will never draw it, as this exception # is not associated with any execute request. shell = self.shell _showtraceback = shell._showtraceback try: # replace pyerr-sending traceback with stderr def print_tb(etype, evalue, stb): print ("GUI event loop or pylab initialization failed", file=io.stderr) print (shell.InteractiveTB.stb2text(stb), file=io.stderr) shell._showtraceback = print_tb InteractiveShellApp.init_gui_pylab(self) finally: shell._showtraceback = _showtraceback def init_shell(self): self.shell = self.kernel.shell self.shell.configurables.append(self) @catch_config_error def initialize(self, argv=None): super(IPKernelApp, self).initialize(argv) self.init_blackhole() self.init_connection_file() self.init_session() self.init_poller() self.init_sockets() self.init_heartbeat() # writing/displaying connection info must be *after* init_sockets/heartbeat self.log_connection_info() self.write_connection_file() self.init_io() self.init_signal() self.init_kernel() # shell init steps self.init_path() self.init_shell() self.init_gui_pylab() self.init_extensions() self.init_code() # flush stdout/stderr, so that anything written to these streams during # initialization do not get associated with the first execution request sys.stdout.flush() sys.stderr.flush() def start(self): if self.poller is not None: self.poller.start() self.kernel.start() try: ioloop.IOLoop.instance().start() except KeyboardInterrupt: pass
def __init__(self, poll_interval):
    super(DreddDaemon, self).__init__(self.pid_filename)
    self.poll_interval = poll_interval
    self.daemon_location = os.path.abspath(__file__)
    self.heartbeat = Heartbeat()
from RPi import GPIO
import time

from settings import Settings
from event_publisher import EventPublisher
from screen_controller import ScreenController
from sound_listener import SoundListener
from menu import Menu
from heartbeat import Heartbeat
from distance_measurer import DistanceMeasurer

if __name__ == '__main__':
    event_publisher = EventPublisher()
    queue_events = event_publisher.queue

    Heartbeat('sound-level-detector', event_publisher)

    screen_controller = ScreenController()
    DistanceMeasurer(screen_controller).start()

    sound_listener = SoundListener(Settings.INITIAL_THRESHOLD,
                                   Settings.INITIAL_BUFFER_SIZE,
                                   queue_events)
    queue_listen = sound_listener.queue

    menu = Menu(screen_controller, queue_listen)

    # initial state
    menu.update_screen()
    screen_controller.turn_off_light()
def run(self):
    Heartbeat(self._device_id, self._mqtt_publish).start()
    self._mqtt_client.loop_forever()
class SchnitzelFactory(ServerFactory): protocol = SchnitzelProtocol def __init__(self, name): # Configuration self.configname = name try: self.loadConfig() except IOError: self.createConfig() self.saveConfig() # World self.url = "" self.world = World(self.config["world"]) print "SchnitzelFactory created" def createConfig(self): self.config = { "port": 25565, "name": "SchnitzelCraft", "motd": "", "maxplayers": 128, "saveinterval": 600, # 10 minutes "ops": [], "plugins": [], "magicwand": True, "public": True, "heartbeat_endpoint": "http://www.classicube.net/server/heartbeat", "noverify": False, "world": "world.dat", } print "Created Configuration from defaults" def loadConfig(self): with open(self.configname, "r") as fp: self.config = json.load(fp) print 'Loaded Configuration from "%s"' % self.configname def saveConfig(self): with open(self.configname, "w") as fp: json.dump(self.config, fp, indent=4) print 'Saved Configuration to "%s"' % self.configname def startFactory(self): # Runtime vars self.usedIDs = [] # List of Player IDs used (can be occupied by mobs) self.protocols = {} # Dictionary of Protocols indexed by ID self.salt = generate_salt() self.pingtimer = LoopingCall(self.sendPacket, PacketIDs["Ping"]) self.pingtimer.start(1, False) self.heart = Heartbeat(self, self.config["heartbeat_endpoint"]) self.heart.start() def save(self): self.world.save() self.sendMessage("World saved") self.saveConfig() self.savetimer = LoopingCall(save, self) self.savetimer.start(self.config["saveinterval"], False) print "SchnitzelFactory started" def stopFactory(self): print "SchnitzelFactory stopping..." for i in self.protocols.itervalues(): i.transport.loseConnection() self.world.save() self.saveConfig() print "SchnitzelFactory stopped" def sendPacket(self, *packet): for i in self.protocols.itervalues(): i.sendPacket(*packet) def sendPacketSkip(self, skip, *packet): for i in self.protocols.itervalues(): if i != skip: i.sendPacket(*packet) def sendMessage(self, msg, pid=255): while msg: msgpart = string_to_notch(msg[:64]) msg = msg[64:] self.sendPacket(PacketIDs["Message"], pid, msgpart)
class Sensor: """ Defines sensor logic. It reads the distance of object continuously and calls services on the server on different events to notify server. Events are : * OBJECT_WITHIN_THRESHOLD1 : Notify server about an object which just showed up in the not-very-close distance. * OBJECT_WITHIN_THRESHOLD2 : Notify server about an object which just showed up in the close distance. It is smart enough to not to send same event over and over again. Also, if there is a very close event for a very long time, it doesn't broadcast the event to the server. """ def __init__(self): if MOCK_HARDWARE: from hardware_mock import HardwareMock self._hardware = HardwareMock() else: from hardware import Hardware self._hardware = Hardware() if MOCK_HEARTBEAT: self._heartbeat = HeartbeatMock() else: self._heartbeat = Heartbeat() if MOCK_DATA: self._sensor_service = SensorServiceMock() self._registration_service = RegistrationServiceMock() else: self._sensor_service = SensorService() self._registration_service = RegistrationService() @staticmethod def _current_time_in_millis(): # see http://stackoverflow.com/questions/5998245/get-current-time-in-milliseconds-in-python # about getting the current time in millis return int(round(time.time() * 1000)) def _start(self): log.info("Starting program...") # first of all, check all the required settings # noinspection PyBroadException try: self._sensor_info = self._registration_service.get_sensor_info_sync() except: log.exception("Unable to register! Exiting program") return if not self._sensor_info: log.critical("Unable to get sensor info. Exiting program!") # give the token to services self._sensor_service.set_token(self._sensor_info.token) self._heartbeat.set_token(self._sensor_info.token) # since server settings are all good, send a heartbeat about starting the sensor program self._heartbeat.sendSync(Constants.HEARTBEAT_STARTING) # initialize hardware. # noinspection PyBroadException try: log.info("Initializing hardware GPIO...") self._hardware.initialize_gpio() except: # do not care about clean up, since hardware does it itself log.exception("Unable to initialize hardware GPIO. Exiting program!") # send heartbeat die with message self._heartbeat.sendSync(Constants.HEARTBEAT_DIE, "Unable to initialize GPIO.", sys.exc_info()) return # send the sync heartbeat afterwards to not to mix GPIO initialization exceptions with heartbeat exceptions self._heartbeat.sendSync(Constants.HEARTBEAT_GPIO_INITIALIZED) # start heartbeat here self._heartbeat.start_heartbeat_thread() previous_broadcasted_event_type = None previous_broadcasted_event_time = 0 # start measuring while True: event_type = None try: distance = self._hardware.measure() except: # do not care about clean up, since hardware itself does it # update heartbeat status so that server knows there is something wrong with the measurement self._heartbeat.sendSync(Constants.HEARTBEAT_DIE, "Error during measure.", sys.exc_info()) # since long time if that is the case log.exception("Error during measurement!") # re-raise and eventually exit the program raise if distance < self._sensor_info.threshold1: if distance >= self._sensor_info.threshold2: # we have event type 1 event_type = Constants.EVENT_TYPE_OBJECT_WITHIN_THRESHOLD1 log.info("Object found between threshold1 and threshold2 : {} cm".format(distance)) else: # we might have event type 2. 
but need to check if object is too close if distance <= Constants.TOO_CLOSE_DISTANCE_THRESHOLD: # ignore log.info("Object is too close : {}".format(distance)) else: # we have event type 2 log.info("Object is withing threshold2 and it is not too close : {} cm".format(distance)) event_type = Constants.EVENT_TYPE_OBJECT_WITHIN_THRESHOLD2 else: # ignore the object since it is too far away log.info("Object is too far away : {} cm".format(distance)) pass # do not broadcast the event every time! # broadcast the event if it is new. # new means: # last broadcasted event is from a different type # OR # it has been N seconds since last broadcasted event send_broadcast = False if not event_type: send_broadcast = False else: if previous_broadcasted_event_type != event_type: send_broadcast = True else: elapsed_time_since_last_broadcast = Sensor._current_time_in_millis() - previous_broadcasted_event_time if elapsed_time_since_last_broadcast > self._sensor_info.seconds_to_ignore_same_events * 1000: send_broadcast = True else: log.info( "Not broadcasting the event since same type is broadcasted very recently : " + event_type) # noinspection PyBroadException try: if send_broadcast: log.info("Gonna broadcast event : " + event_type) self._sensor_service.broadcast_event(event_type) except: log.exception("Error broadcasting event : " + event_type) # do nothing. continue with the next measurement # sleep some time before measuring again if send_broadcast: previous_broadcasted_event_type = event_type previous_broadcasted_event_time = Sensor._current_time_in_millis() # if we do broadcast, then sleep less since some time will be gone during the REST call time.sleep(Constants.SLEEP_TIME_BEFORE_NEXT_MEASUREMENT_AFTER_BROADCAST) else: # sleep more time.sleep(Constants.SLEEP_TIME_BEFORE_NEXT_MEASUREMENT_NO_BROADCAST) def _on_exit(self): # try to clean up anyway. it is safe to do that over and over again self._hardware.clean_up() # stop heartbeat thread so that we don't send heartbeats anymore self._heartbeat.stop_heartbeat_thread() # send heartbeat die self._heartbeat.sendSync(Constants.HEARTBEAT_DIE, "Exiting program") def start_program(self): # noinspection PyBroadException try: self._start() except: # catch all unhandled exceptions # that means, program wanted to terminate log.exception("Program didn't handle the exception. Probably it wanted termination.") self._on_exit()
def test_type(self):
    b = Heartbeat()
    self.assertIsInstance(b, heartbeat.Swizzle.Swizzle)
class DreddDaemon(MultiItemCycleMixin, Daemon): config = Configuration() logging = logging.getLogger('dredd') classifier_filename = 'naivebays_1433295569.pickle' pid_filename = '/tmp/dredd-questions.pid' pub_sub_app_name = 'questions' def __init__(self, poll_interval): super(DreddDaemon, self).__init__(self.pid_filename) self.poll_interval = poll_interval self.daemon_location = os.path.abspath(__file__) self.heartbeat = Heartbeat() def run(self): try: self.logging.info("Dredd is running...") self.get_classifier() while True: self.heartbeat.send_heartbeat() self._run_retryable_cycle() time.sleep(self.poll_interval) #TODO: It should wait until now to kill on ctr c except Exception as e: self.logging.exception(e) self.stop() @retry(wait_exponential_multiplier=1000, wait_exponential_max=30000) def _run_retryable_cycle(self): try: cycle_outcome = self._run_cycle() if cycle_outcome: self.logging.info("Cycle Complete") return cycle_outcome else: raise except: exc_type, exc_value, exc_traceback = sys.exc_info() self.logging.exception(str(exc_type)) raise Exception("Retry") def start(self): self.logging.info("Starting Dredd") super(DreddDaemon, self).start() def stop(self): self.logging.info("Stopping Dredd") super(DreddDaemon, self).stop() def _score_task(self, task): return EmailMessageScorer(task, self.get_classifier()) def get_q_name(self): return "%s_dredd_%s-%s_%s" % ( self.config.pub_sub_prefix, self.pub_sub_app_name, self.config.pub_sub_prefix, self.config.pub_sub_topic) def get_classifier(self): try: return self.classifier_ except AttributeError: classifier_location = os.path.abspath(os.path.join(self.daemon_location, os.pardir, os.pardir, os.pardir, "classified_output", self.classifier_filename)) file = open(classifier_location) self.classifier_ = pickle.load(file) file.close() return self.classifier_
def __init__(self, file_path, num_challenges, root_seed):
    self.secret = "mysecret"
    self.target_file = Heartbeat(file_path, self.secret)
    self.target_file.generate_challenges(num_challenges, root_seed)