import sys

import redis

import client


def handler(message):
    # Ignore integer subscribe/unsubscribe confirmations; print real payloads.
    if not isinstance(message['data'], int):
        print(message['data'].decode('utf-8'))
        print('% ', end='', flush=True)


if __name__ == "__main__":
    try:
        host, port = sys.argv[1], int(sys.argv[2])
    except (IndexError, ValueError):
        sys.exit("Usage: python3 host_addr host_port")

    red = redis.Redis(host='localhost', port=6379, db=0)
    pub = red.pubsub()
    pub.psubscribe(**{'': lambda x: x})
    pub.punsubscribe()
    thread = pub.run_in_thread(sleep_time=0.001)

    process = client.Client(host, port, red, pub)
    process.start()
    thread.stop()
def createClient(self, pid, addr):
    return client.Client(pid, addr)
def pre_handler(self, shareFile, finding_pre, connect_to, payload):
    t = threading.Thread(
        target=client.Client(shareFile, connect_to).changing_the_pre,
        args=(shareFile, payload, finding_pre))
    t.daemon = True
    t.start()
def send_cmd(msg_id, s, api_txt, cmd_txt):
    cli = client.Client()
    cli.connect(s[0], s[1])
    cli.send(pack_lua(msg_id, api_txt))
    cli.send(pack_lua(msg_id, cmd_txt))
    lstClient.append(cli)
def multiple(msgs, hostname="localhost", port=1883, client_id="", keepalive=60,
             will=None, auth=None, tls=None, protocol=mqtt.MQTTv31):
    """Publish multiple messages to a broker, then disconnect cleanly.

    This function creates an MQTT client, connects to a broker and publishes a
    list of messages. Once the messages have been delivered, it disconnects
    cleanly from the broker.

    msgs : a list of messages to publish. Each message is either a dict or a
           tuple.

           If a dict, only the topic must be present. Default values will be
           used for any missing arguments. The dict must be of the form:

           msg = {'topic':"<topic>", 'payload':"<payload>", 'qos':<qos>,
           'retain':<retain>}
           topic must be present and may not be empty.
           If payload is "", None or not present then a zero length payload
           will be published.
           If qos is not present, the default of 0 is used.
           If retain is not present, the default of False is used.

           If a tuple, then it must be of the form:
           ("<topic>", "<payload>", qos, retain)

    hostname : a string containing the address of the broker to connect to.
               Defaults to localhost.

    port : the port to connect to the broker on. Defaults to 1883.

    client_id : the MQTT client id to use. If "" or None, the Paho library will
                generate a client id automatically.

    keepalive : the keepalive timeout value for the client. Defaults to 60
                seconds.

    will : a dict containing will parameters for the client:
           will = {'topic': "<topic>", 'payload':"<payload>", 'qos':<qos>,
           'retain':<retain>}.
           Topic is required, all other parameters are optional and will
           default to None, 0 and False respectively.
           Defaults to None, which indicates no will should be used.

    auth : a dict containing authentication parameters for the client:
           auth = {'username':"******", 'password':"******"}
           Username is required, password is optional and will default to None
           if not provided.
           Defaults to None, which indicates no authentication is to be used.

    tls : a dict containing TLS configuration parameters for the client:
          dict = {'ca_certs':"<ca_certs>", 'certfile':"<certfile>",
          'keyfile':"<keyfile>", 'tls_version':"<tls_version>",
          'ciphers':"<ciphers>"}
          ca_certs is required, all other parameters are optional and will
          default to None if not provided, which results in the client using
          the default behaviour - see the paho.mqtt.client documentation.
          Defaults to None, which indicates that TLS should not be used.
    """
    if type(msgs) is not list:
        raise ValueError('msgs must be a list')

    client = mqtt.Client(client_id=client_id, userdata=msgs, protocol=protocol)
    client.on_publish = _on_publish
    client.on_connect = _on_connect

    if auth is not None:
        username = auth['username']
        try:
            password = auth['password']
        except KeyError:
            password = None
        client.username_pw_set(username, password)

    if will is not None:
        will_topic = will['topic']
        try:
            will_payload = will['payload']
        except KeyError:
            will_payload = None
        try:
            will_qos = will['qos']
        except KeyError:
            will_qos = 0
        try:
            will_retain = will['retain']
        except KeyError:
            will_retain = False
        client.will_set(will_topic, will_payload, will_qos, will_retain)

    if tls is not None:
        ca_certs = tls['ca_certs']
        try:
            certfile = tls['certfile']
        except KeyError:
            certfile = None
        try:
            keyfile = tls['keyfile']
        except KeyError:
            keyfile = None
        try:
            tls_version = tls['tls_version']
        except KeyError:
            tls_version = None
        try:
            ciphers = tls['ciphers']
        except KeyError:
            ciphers = None
        client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version,
                       ciphers=ciphers)

    client.connect(hostname, port, keepalive)
    client.loop_forever()
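# A minimal usage sketch for multiple() above, not part of the original
# snippet: it assumes paho-mqtt is installed and imported as mqtt, that the
# _on_publish/_on_connect callbacks are defined in this module, and that a
# broker is reachable on localhost:1883. The topics and payloads below are
# illustrative placeholders.
if __name__ == "__main__":
    example_msgs = [
        # dict form: only 'topic' is required
        {'topic': "sensors/temp", 'payload': "21.5", 'qos': 1, 'retain': False},
        # tuple form: (topic, payload, qos, retain)
        ("sensors/humidity", "40", 0, False),
    ]
    multiple(example_msgs, hostname="localhost", port=1883)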
def start_new(self):
    self.first_client = client.Client()
    self.second_client = client.Client()
    self.day = 1
    self.count = 0
    if len(bids) == 0:
        return
    winnerBid = max(bids, key=lambda t: t[0])[0]
    buyer = max(bids, key=lambda t: t[0])[1]
    seller.delTruck(truck)
    seller.updateProfit(winnerBid)
    buyer.addTruck(truck)
    buyer.updateProfit(-winnerBid)


if __name__ == "__main__":
    clients = []
    clients += [client.Client("Joao")]
    clients += [client.Client("Pedro")]
    clients += [client.Client("Joana")]
    clients += [client.Client("Ana")]

    companies = []
    companies += [
        company.Company("C1", 0, np.random.choice(list(world_set.districts.keys())))
    ]
    companies += [
        company.Company("C2", 0, np.random.choice(list(world_set.districts.keys())))
    ]
    companies += [
        company.Company("C3", 0,
class ClientTest(TestCase):
    test_dir = config.DATA_DIR + "test/"
    c = client.Client("userA")
    enc_keys = [
        b'\x1b\x8fL+\xd2\xfcLQ\x1a\x03:\xcf\x15\x8a\xc7+' for _ in range(10)
    ]
    enc_keys_int = [int.from_bytes(i, 'big') for i in enc_keys]
    hash_key = b'hash_key'

    @classmethod
    @patch("lib.config.RECORD_LENGTH", 5)
    @patch("lib.config.BLOOM_CAPACITY", 100)
    @patch("lib.config.BLOOM_ERROR_RATE", 10**-5)
    @patch("lib.config.RECORD_ID_LENGTH", 2)
    @patch("lib.config.ROUNDING_VEC", [3, 3])
    def setUpClass(cls) -> None:
        """Disable logging."""
        logging.getLogger().setLevel(logging.FATAL)
        shutil.rmtree(cls.test_dir, ignore_errors=True)
        os.makedirs(cls.test_dir, exist_ok=True)
        cls.records = [
            Record([0, 0, 0, 0, 0]),  # not in Bloom
            Record([1, 2, 3, 4, 5]),  # in Bloom
            Record([2, 2, 3, 4, 5]),  # in Bloom
            Record([3, 2, 3, 4, 5]),  # in Bloom
            Record([4, 2, 3, 4, 5]),  # not in Bloom
            Record([5, 2, 3, 4, 5]),  # not in Bloom
        ]
        for r in cls.records:
            r.set_hash_key(cls.hash_key)
        b = BloomFilter(100, 0.0001, cls.test_dir + "test.bloom")
        b.update([1, 2, 3, 4, 5, 6, 7, 8, 9, 'a', 'b', 'c'])
        b.add(b64encode(cls.records[1].get_long_hash()).decode())
        b.add(b64encode(cls.records[2].get_long_hash()).decode())
        b.add(b64encode(cls.records[3].get_long_hash()).decode())
        cls.b_encoded = b.to_base64().decode()
        cls.b = b
        cls.psi_ind = [
            cls.records[1].get_psi_index(),
            cls.records[2].get_psi_index(),
            cls.records[3].get_psi_index()
        ]

    def setUp(self) -> None:
        """Deactivate PSI Mode."""
        self.c._psi_mode = False

    @classmethod
    def tearDownClass(cls) -> None:
        """Remove test files."""
        shutil.rmtree(cls.test_dir, ignore_errors=True)

    @patch("lib.base_client.BaseClient.post")
    def test_get_record_success(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/retrieve_record")
        j = {
            'success': True,
            'records': [['hash', 'record1'], ['hash', 'record2']]
        }
        m.return_value.json.return_value = j
        res = self.c.get_record('hash')
        self.assertEqual(res, j['records'])
        m.assert_called_once_with(url, json={'hash': 'hash'})

    @patch("lib.base_client.BaseClient.post")
    def test_get_record_fail(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/retrieve_record")
        j = {'success': False, 'msg': "No record for hash exists: 'record'"}
        m.return_value.json.return_value = j
        with self.assertRaises(RuntimeError) as cm:
            self.c.get_record("hash")
        self.assertIn("No record for hash exists: 'record'", str(cm.exception))
        m.assert_called_once_with(url, json={'hash': 'hash'})

    @patch.object(c, "_get_enc_keys", Mock(return_value=enc_keys))
    def test_batch_get_records(self):
        records = self.records[:5]
        enc_records = []
        for i, r in enumerate(records):
            enc_records.append(
                (b64encode(r.get_long_hash()).decode(),
                 json.dumps(r.get_encrypted_record(self.enc_keys[i], b'0'))))
        with patch.object(self.c, "_batch_get_encrpyted_records",
                          Mock(return_value=enc_records)):
            res = self.c.batch_get_records(self.records[:5])
            for r in res:  # for comparison
                r.set_hash_key(self.hash_key)
            self.assertEqual(records, res)
        # Empty list
        with patch.object(self.c, "_batch_get_encrpyted_records",
                          Mock(return_value=[])):
            self.assertEqual([], self.c.batch_get_records(self.records[:5]))

    @patch("lib.base_client.BaseClient.post")
    def test__batch_get_encrpyted_records_success(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/batch_retrieve_records")
        j = {
            'success': True,
            'records': [['hash1', 'record1'], ['hash2', 'record2']]
        }
        m.return_value.json.return_value = j
        hash_list = ['hash1', 'hash2', 'hash3']
        res = self.c._batch_get_encrpyted_records(hash_list)
        self.assertEqual(res, j['records'])
        m.assert_called_once_with(url, json={'hashes': hash_list})

    @patch("lib.base_client.BaseClient.post")
    def test__batch_get_encrypted_records_fail(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/batch_retrieve_records")
        j = {'success': False, 'msg': "Missing POST value 'hashes'."}
        m.return_value.json.return_value = j
        hash_list = ['hash1', 'hash2', 'hash3']
        with self.assertRaises(RuntimeError) as cm:
            self.c._batch_get_encrpyted_records(hash_list)
        self.assertIn("Missing POST value 'hashes'.", str(cm.exception))
        m.assert_called_once_with(url, json={'hashes': hash_list})

    @patch("lib.base_client.BaseClient.get")
    def test_get_bloom_success(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/bloom")
        j = {'success': True, 'bloom': self.b_encoded}
        m.return_value.json.return_value = j
        res = self.c._get_bloom_filter()
        res_b = res.to_base64()
        self.assertEqual(res_b, self.b_encoded.encode())
        m.assert_called_once_with(url)

    @patch("lib.base_client.BaseClient.get")
    def test_get_bloom_fail(self, m):
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/bloom")
        j = {'success': False, 'msg': "Failed to retrieve bloom filter: "}
        m.return_value.json.return_value = j
        with self.assertRaises(RuntimeError) as cm:
            self.c._get_bloom_filter()
        self.assertIn("Failed to retrieve bloom filter:", str(cm.exception))
        m.assert_called_once_with(url)

    def test_compute_matches_bloom_success(self):
        for v in [True, False]:
            with patch("lib.config.PARALLEL", v):
                with patch.object(self.c, "_get_bloom_filter",
                                  return_value=self.b):
                    rec_list = [
                        self.records[1].record, self.records[2].record,
                        self.records[4].record, self.records[5].record
                    ]
                    self.c._hash_key = self.hash_key
                    m = MagicMock()
                    m.__iter__.return_value = rec_list
                    m.split.return_value = [m]
                    res = self.c.compute_matches_bloom(m)
                    self.assertEqual(res, [self.records[1], self.records[2]])

                    m.__iter__.return_value = [
                        self.records[1].record, self.records[2].record,
                        self.records[3].record, self.records[4].record,
                        self.records[5].record
                    ]
                    res = self.c.compute_matches_bloom(m)
                    self.assertEqual(
                        res,
                        [self.records[1], self.records[2], self.records[3]])

                    m.__iter__.return_value = [
                        self.records[4].record, self.records[5].record
                    ]
                    res = self.c.compute_matches_bloom(m)
                    self.assertEqual(res, [])

    def test_compute_matches_bloom_fail(self):
        self.c._psi_mode = True
        with patch.object(self.c, "_get_bloom_filter", return_value=[]):
            with self.assertRaises(RuntimeError) as e:
                self.c.compute_matches_bloom(Mock)
            self.assertIn("PSI-Mode is enabled", str(e.exception))

    def test_compute_matches_psi_success(self):
        self.c._psi_mode = True
        self.c._hash_key = self.hash_key
        with patch.object(self.c, "_perform_psi", return_value=self.psi_ind):
            res = self.c.compute_matches_psi([
                self.records[1].record, self.records[2].record,
                self.records[4].record, self.records[5].record
            ])
            self.assertEqual(res, [self.records[1], self.records[2]])

            res = self.c.compute_matches_psi([
                self.records[1].record, self.records[2].record,
                self.records[3].record, self.records[4].record,
                self.records[5].record
            ])
            self.assertEqual(
                res, [self.records[1], self.records[2], self.records[3]])

            res = self.c.compute_matches_psi(
                [self.records[4].record, self.records[5].record])
            self.assertEqual(res, [])
    @patch("lib.config.RECORD_LENGTH", 10)
    @patch("lib.config.RECORD_ID_LENGTH", 10)
    def test_compute_matches_psi_fail(self):
        # No PSI Mode
        with patch.object(self.c, "_perform_psi", return_value=[]):
            with self.assertRaises(RuntimeError) as e:
                self.c.compute_matches_psi([])
            self.assertIn("PSI-Mode is not enabled", str(e.exception))

            self.c._psi_mode = True
            with self.assertRaises(RuntimeError) as e:
                self.c.compute_matches_psi(
                    RelativeOffsetIterator(
                        [float(i + 1) for i in range(config.RECORD_LENGTH)],
                        10, [6 for _ in range(config.RECORD_ID_LENGTH)]))
            self.assertIn("too large for PSI", str(e.exception))

    @patch.object(c, "_receive_psi", Mock(return_value=[1, 2, 3]))
    @patch("lib.base_client.BaseClient.get")
    @patch("lib.config.EVAL", False)
    def test__perform_psi(self, m):
        self.assertEqual([], self.c._perform_psi([]))
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/client/"
               f"psi")
        for tls in [True, False]:
            with patch("lib.config.PSI_TLS", tls):
                j = {
                    'success': True,
                    'tls': tls,
                    'host': '127.0.0.1',
                    'port': 1234,
                    'setSize': 100,
                    'msg': 'blub'
                }
                m.return_value.json.return_value = j
                res = self.c._perform_psi([1, 2, 3, 4, 5, 6])
                self.assertEqual([1, 2, 3], res)
                m.assert_called_once_with(url)
                m.reset_mock()

                j['success'] = False
                m.return_value.json.return_value = j
                with self.assertRaises(RuntimeError) as e:
                    self.c._perform_psi([1, 2, 3, 4, 5, 6])
                m.assert_called_once_with(url)
                m.reset_mock()
                self.assertEqual("PSI failed: blub", str(e.exception))

                j['success'] = True
                j['setSize'] = 1
                m.return_value.json.return_value = j
                with self.assertRaises(RuntimeError) as e:
                    self.c._perform_psi([1, 2, 3, 4, 5, 6])
                m.assert_called_once_with(url)
                m.reset_mock()
                self.assertEqual("Client Set larger than PSI Setsize.",
                                 str(e.exception))

                j['setSize'] = 100
                j['tls'] = not tls
                m.return_value.json.return_value = j
                with self.assertRaises(RuntimeError) as e:
                    self.c._perform_psi([1, 2, 3, 4, 5, 6])
                m.assert_called_once_with(url)
                m.reset_mock()
                self.assertIn("Mismatch", str(e.exception))

    @patch("lib.similarity_metrics.AbsoluteOffsetIterator")
    def test_compute_candidates(self, m):
        # Default
        r = [1, 2, 3]
        self.c.compute_candidates(r)
        m.assert_called_once_with(r, 1)
        # Non default
        self.c.compute_candidates(r, "offset-7.77")
        m.assert_called_with(r, 7.77)

    def test_parser(self):
        # Just syntax errors
        p = client.get_client_parser()
        self.assertTrue(isinstance(p, argparse.ArgumentParser))

    @responses.activate
    @patch("lib.config.OT_TLS", False)
    @patch("lib.config.EVAL", False)
    @patch("lib.config.OT_SETSIZE", 10)
    @patch("client.Client._receive_ots", Mock(return_value=enc_keys_int[:3]))
    @patch("client.Client.get_token", Mock(return_value="token"))
    def test_full_retrieve(self):
        c = client.Client("userA")
        target = [2.0, 2.0, 3.0, 4.0, 5.0]
        # Server Records:
        sr: List[Record] = [
            [2.01, 2.01, 3.3, 4.4, 5.0],   # Match
            [2.5, 4.4, 3.9, 5.0, 5.0],     # No Match
            [2.0, 7.0, 3.0, 4.0, 5.0],     # No Match
            [2.0, 2.0, 10.6, 10.0, 5.0],   # Match
            [3.0, 2.0, 3.0, 4.0, 5.0],     # No Match
            [2.01, 2.004, 5, 9, 5.0],      # Match
            [2.0, 2.0, 3.0, 4.0, 5.0]      # No Match
        ]
        # Server Bloom Filter
        tmp = tempfile.NamedTemporaryFile(delete=False)
        b = BloomFilter(len(sr), 0.00001, tmp.name)
        c.metric = "offset-0.01"
        for i, r in enumerate(sr):
            sr[i]: Record = Record(r)
            sr[i].set_hash_key(self.hash_key)
        matches = [sr[0], sr[3], sr[5]]
        for m in matches:
            b.add(b64encode(m.get_long_hash()).decode())
        b_encoded = b.to_base64().decode()

        # Responses
        # -----------------------------------------------------------
        # 1. Hash Key
        url = f"https://*****:*****

    @patch("client.log.debug", Mock(side_effect=RuntimeError))
    def test_full_error(self):
        with self.assertRaises(RuntimeError):
            self.c.full_retrieve([1, 2, 3])
                     kwargs=self.kwargs).start()


if __name__ == '__main__':
    config = configparser.ConfigParser()
    argument_parser = argparse.ArgumentParser()
    argument_parser.add_argument('config', help='Config file')
    args = argument_parser.parse_args()
    config_path = os.path.realpath(args.config)
    config.read_file(open(config_path))
    host = config.get('general', 'host')
    port = config.getint('general', 'port')

    server = Server(host, port)
    st = threading.Thread(target=server.serve)
    st.start()

    print('Started server at "{0}" on port "{1}". '.format(host, port), end='')
    print('Close this window or use "CTRL+C" to stop server ', end='', flush=True)
    while st.is_alive():
        try:
            for cursor in '\\|/-':
                time.sleep(0.5)
                sys.stdout.write('\b{}'.format(cursor))
                sys.stdout.flush()
        except KeyboardInterrupt:
            client.Client(host, port).comunicate('QUIT')
            print('\nSignal received, finishing pending tasks', end='')
    print('\nServer stopped', flush=True)
client.init_clients()
while True:
    print("\n\t[1] RUN")
    print("\t[2] Add a terminal")
    print("\t[3] Remove a terminal")
    print("\t[4] Show terminals")
    print("\t[0] Exit\n")
    user_input = input("Choose an action: ")
    print()
    if user_input == "1":
        client.run_clients()
    elif user_input == "2":
        new_client = client.Client(input("Enter the name of the new terminal: "))
        client.add_client(new_client)
        print("\nTerminal added!")
    elif user_input == "3":
        client.print_clients()
        client.remove_client(input("\nEnter the name of the terminal to remove: "))
        print("\nTerminal removed!")
    elif user_input == "4":
        client.print_clients()
        time.sleep(1)
    else:
        break
    def test_full_retrieve(self):
        c = client.Client("userA")
        target = [2.0, 2.0, 3.0, 4.0, 5.0]
        # Server Records:
        sr: List[Record] = [
            [2.01, 2.01, 3.3, 4.4, 5.0],   # Match
            [2.5, 4.4, 3.9, 5.0, 5.0],     # No Match
            [2.0, 7.0, 3.0, 4.0, 5.0],     # No Match
            [2.0, 2.0, 10.6, 10.0, 5.0],   # Match
            [3.0, 2.0, 3.0, 4.0, 5.0],     # No Match
            [2.01, 2.004, 5, 9, 5.0],      # Match
            [2.0, 2.0, 3.0, 4.0, 5.0]      # No Match
        ]
        # Server Bloom Filter
        tmp = tempfile.NamedTemporaryFile(delete=False)
        b = BloomFilter(len(sr), 0.00001, tmp.name)
        c.metric = "offset-0.01"
        for i, r in enumerate(sr):
            sr[i]: Record = Record(r)
            sr[i].set_hash_key(self.hash_key)
        matches = [sr[0], sr[3], sr[5]]
        for m in matches:
            b.add(b64encode(m.get_long_hash()).decode())
        b_encoded = b.to_base64().decode()

        # Responses
        # -----------------------------------------------------------
        # 1. Hash Key
        url = f"https://localhost:" \
              f"{config.KEY_API_PORT}/client/hash_key"
        j = {'success': True, 'hash_key': b64encode(self.hash_key).decode()}
        responses.add(responses.GET, url, json=j, status=200)
        # 2. PSI
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/client/psi")
        j = {
            'success': True,
            'tls': False,
            'host': '127.0.0.1',
            'port': 1234,
            'setSize': 10
        }
        responses.add(GET, url, status=200, json=j)
        # 3. Bloom filter
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/bloom")
        j = {'success': True, 'bloom': b_encoded}
        responses.add(GET, url, status=200, json=j)
        # 4. Encryption Keys
        url = f"https://localhost:" \
              f"{config.KEY_API_PORT}/client/key_retrieval?totalOTs=3"
        j = {
            'success': True,
            'port': 5000,
            'host': "127.0.0.1",
            'totalOTs': 3,
            'tls': False
        }
        responses.add(responses.GET, url, json=j, status=200)
        # 5. Ciphertexts
        url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
               f"{config.STORAGE_API_PORT}/"
               f"{UserType.CLIENT}/batch_retrieve_records")
        j = {
            'success': True,
            'records': [(b64encode(m.get_long_hash()).decode(),
                         json.dumps(m.get_encrypted_record(self.enc_keys[i], b'0')))
                        for i, m in enumerate(matches)]
        }
        responses.add(POST, url, status=200, json=j)
        # ---------------------------------------------------------------------
        for psi in [True, False]:
            with patch.object(
                    c, "_receive_psi",
                    Mock(return_value=[m.get_psi_index() for m in matches])):
                c._psi_mode = psi
                res = c.full_retrieve(target)
                # Set hash key for comparison
                for r in res:
                    r.set_hash_key(self.hash_key)
                # Compare
                self.assertEqual(matches, res)
def setUp(self):
    self.srv = srv.Server(srvf.IP_ADDR, srvf.scan_sense_port)
    self.cli = cli.Client(srvf.IP_ADDR, srvf.scan_sense_port)
#!/usr/bin/env python
import os
import sys
import json
import time
import copy
import cPickle
from optparse import OptionParser

import numpy as np

import hopt
import client

parser = OptionParser()
parser.add_option("-m", "--model", dest="model",
                  help="which model python file to use",
                  default="models/mnist_cnn_h.py")
parser.add_option("-d", "--dataset", dest="dataset",
                  help="which dataset python file to use",
                  default="datasets/mnist.py")
parser.add_option("-u", "--uri", dest="uri", help="controller URI",
                  default=open(os.getenv("HOME") + "/.dh_uri", "r").read())
(options, args) = parser.parse_args()

a = time.time()
c = client.Client(options.uri)
print options.dataset, options.model
c.add_task(options.dataset, options.model)
results = c.wait_task_finish()

fn = "results_%s.pkl" % (str(time.time()))
cPickle.dump(results, open(fn, "wb"))
print "Finished in %ds, wrote results to %s" % (time.time() - a, fn)
def __init__(self):
    self.client = client.Client()
    self.proxy = proxy.Proxy()
    self.t = time.time()
    self.client_id = self.client.nextClientId()
    self.proxy_address = self.proxy.nextProxy()
def launch_calculation(s1, name, cid, phase, epsilon, atoms, geom, charge,
                       receiver_email, user_name, output):
    # Authentication
    # to get authentication tokens send an email to [email protected]
    USER = ""
    API_KEY = ""
    URL_SERVER = ""

    ## Initialize client
    TC = client.Client(url=URL_SERVER, user=USER, api_key=API_KEY,
                       engine="terachem", verbose=False)

    ## Set the job specification
    tcc_options = {
        # TCC options
        'runtype': 'energy',
        'jobname': 'TerX calculation',
        'units': 'angstrom',
        # TeraChem engine options
        'atoms': atoms,
        'charge': charge,
        'spinmult': 1,
        'closed_shell': True,
        'restricted': True,
        'method': 'pbe0',
        'basis': '3-21g',
        'convthre': 3.0e-3,
        'precision': 'single',
        'dftgrid': 0,
    }
    if epsilon:
        tcc_options['pcm'] = 'cosmo'
        tcc_options['epsilon'] = epsilon
    if s1:
        tcc_options['cis'] = 'yes'
        tcc_options['cisnumstates'] = 2
        tcc_options['cisconvtol'] = 1.0e-2

    result = TC.compute(geom, tcc_options)

    if not s1:
        final_number = result['dipole_moment']
        output_speech = 'The dipole moment of {} in {} is: '.format(name, phase)
        output_speech += '{:.1f} Debye. '.format(final_number)
        output_speech += 'What else can I do for you?'
        Tdip = None
    else:
        Tdip = [
            math.sqrt(result['cis_transition_dipoles'][i][0]**2 +
                      result['cis_transition_dipoles'][i][1]**2 +
                      result['cis_transition_dipoles'][i][2]**2)
            for i in range(2)
        ]
        bright = Tdip.index(max(Tdip)) + 1  # convert to 1 indexed notation
        if bright == 1:
            bright_state = "first"
        elif bright == 2:
            bright_state = "second"
        final_number = [
            ((result['energy'][bright] - result['energy'][0]) * 27.2114),
            result['energy'], Tdip
        ]
        output_speech = 'The brightest excited state of {} in {} is the {} state. '.format(
            name, phase, bright_state)
        output_speech += 'Its energy is {:.1f} electronVolt. '.format(
            final_number[0])
        output_speech += 'What else can I do for you?'

    # Send email with Amazon SES
    # if receiver_email is not None:
    #     if not s1:
    #         prop = "dipole moment"
    #     else:
    #         prop = "excitation energy"
    #     subject = 'ChemVox'
    #     message = body_text(name, phase, epsilon, prop, cid, atoms, geom,
    #                         charge, user_name, result['energy'],
    #                         result['dipole_moment'], Tdip)
    #     pool = ThreadPool(processes=1)
    #     async_result = pool.apply_async(send_email, (receiver_email, message))

    if output:
        return output_speech
    else:
        return final_number
#!/usr/bin/python
import os
import multiprocessing as mp
import time

from client import ComponentCode as cc
import hub
import client
import actuator
import sensor

# initialize the hub and the components
h = hub.HUB()
cl = client.Client(0, 0)
sensores = [sensor.Sensor(cc.temperature_sensor.value, 1),
            sensor.Sensor(cc.humidity_sensor.value, 1),
            sensor.Sensor(cc.co2_sensor.value, 1)]
atuador = [actuator.Actuator(cc.heater.value, 1),
           actuator.Actuator(cc.cooler.value, 1),
           actuator.Actuator(cc.irrigator.value, 1),
           actuator.Actuator(cc.co2_injector.value, 1)]

# start the hub handshake and wait half a second
processes = [mp.Process(target=h.handshake, args=())]
processes[0].start()
time.sleep(0.5)

# start the sensor handshakes
for s in sensores:
    processes.append(mp.Process(target=s.handshake, args=()))
    processes[-1].start()
        time.sleep(0.2)
        # check whether any clients are connected
        if len(client.Collection.connected_sockets) > 0:
            # returns the lists of sockets that are ready
            ready_to_read, ready_to_write, in_error = select.select(
                client.Collection.connected_sockets, [], [], 1)
            # loop over every socket that is ready to be read
            for socket in ready_to_read:
                client_temp = client.Collection.getBySocket(socket)
                error = False
                try:
                    data = socket.recv(1024)
                except ConnectionAbortedError:
                    error = True
                except ConnectionResetError:
                    error = True
                if error or len(data) == 0:
                    client.Collection.connected_sockets.remove(socket)
                    print("client", client_temp.IDL, "disconnected")
                    client_temp.unsubAll()
                    continue
                client_temp.handleIncomingJSON(data)


client_monitor_thread = threading.Thread(target=monitorClients, args=())
client_monitor_thread.start()

while 1:
    (client_socket, address) = serversocket.accept()
    print("new client!")
    client_new = client.Client(client_socket, address)
    client.Collection.addClient(client_new)
    # csvdata = [serial_num, latitude, longitude, json_extractor[0]['rssi'],
    #            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())],
    # for value in csvdata:
    #     csvtable.append(value)
    # print(csvtable)
    with open(filename, 'a', newline='') as csvFile:  # write to CSV
        writer = csv.writer(csvFile)
        # Title and Data
        writer.writerow([
            serial_num, latitude, longitude, json_extractor[0]['rssi'],
            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        ])
        print("Save")
        writer.writerows(csvtable)


# Connect MQTT info
client = mqtt.Client(client_id="", protocol=mqtt.MQTTv31)
client.on_connect = on_connect
client.on_message = on_message
client.username_pw_set("", password="")
client.connect("140.127.196.120", port=1883, keepalive=60)  # MQTT IP address

if __name__ == '__main__':
    channel = establishCommandChannel()
    client.loop_forever()
    while True:
        pass
def main(args):
    context = client.make_context()
    # Users may want to use this client to control more than one ship
    game = client.Client(args.hostname, args.lobby_port, args.control_port,
                         args.state_port, context)
    my_name = args.ship_name
    base_ship = ship.Ship(game, my_name, args.team_name, args.password)
    response = game.lobby.register(args.ship_name, args.team_name, args.password)
    print(response)
    # Subscribe to the game state
    game.state.subscribe(response.game)

    # load the map:
    mapname = response['map']
    map_path = '../../maps/'
    maps = map_path + mapname
    # load the flow field
    # you may also want + '_wnormx' + '_wnormy' + '_flowx' + '_flowy'
    # _goal or _end
    x_map = np.load(maps + '_flowx.npy')
    y_map = np.load(maps + '_flowy.npy')
    occupancy0 = np.load(maps + '_occupancy.npy')

    ctrl = 0
    dt = 0.01
    go = True
    timeStart = 0.
    while True:
        print('Getting state')
        state = game.state.recv()
        print('Got state')

        # Idle until game start
        if state['state'] != 'running':
            if not go:
                print('Race over...')
                break
            time.sleep(0.1)
            continue
        if go:
            go = False
            timeStart = time.time()
            continue

        # Control the attitude of the craft
        alldata = state['data']
        data = alldata[my_name]  # find self in the list
        ship_x = data['x']
        ship_y = data['y']
        ship_vx = data['vx']
        ship_vy = data['vy']
        flow_xh = x_map[int(ship_y), int(ship_x)]
        flow_yh = y_map[int(ship_y), int(ship_x)]
        ship_theta = data['theta']
        cos_th = np.cos(ship_theta)  # ship x component should be this
        sin_th = np.sin(ship_theta)  # ship y component should be this
        fwd_vel = ship_vx * cos_th + ship_vy * sin_th
        # Cross product of ship vector with flow
        cross = (-flow_xh * sin_th + flow_yh * cos_th)
        omega = data['omega']

        # your ai goes here -
        thrust = 1  # ... [0 or 1]
        rotation = -1  # ... [-1, 0 or 1]
        # I suggest some sort of pulse modulation eg
        # thrust = int(np.random.random() < thrust_level)

        # Send results
        print((thrust, rotation))
        base_ship.send_control(thrust, rotation)
        time.sleep(dt)
    return ()
import time
import sys
import argparse
import signal

import client

args = argparse.ArgumentParser(
    description="Laika client example",
    epilog="You can use http://user:pass@host:port/path format for convenience",
)
args.add_argument("-e", "--env", dest="environment", required=True,
                  help="Environment name")
args.add_argument("-u", "--user", dest="username", required=False, help="Username")
args.add_argument("-p", "--pass", dest="password", required=False, help="Password")
args.add_argument("url", help="URL for the laika")
parsed = args.parse_args()

sys.running = True
lk = client.Client(parsed.url, parsed.username, parsed.password, parsed.environment)
lk.init_timer(interval=5.0)


def interrupt_handler(signum, frame):
    print("quitting:", signum, frame)
    sys.running = False
    lk.stop()
    return


signal.signal(signal.SIGINT, interrupt_handler)

# send raw request using client
# re = client.request(method="POST", url="https://src.n0pe.me/", json=dict(hello="world"))

gen = lk.generator("hello")
    # pin_setup.setup_valve()
    # pin_setup.setup_reservoir()
    pass

tiers = [None for x in range(num_tiers)]
for i in range(num_tiers):
    # tiers[i] = Tier('pi6.' + str(i + 1), pin_setup.DHT_PINS[i],
    #                 light=pin_setup.light_pwm[i], fan=pin_setup.fan_pwm[i],
    #                 heat=pin_setup.heat_pwm[i], exhaust=pin_setup.exhaust_pwm[i])
    tiers[i] = Tier('pi5', pin_setup.DHT_PIN,
                    light=pin_setup.dict[pin_setup.LED_HIGH],
                    fan=pin_setup.dict[pin_setup.INTAKE_FAN],
                    heat=pin_setup.dict[pin_setup.HEAT_PIN],
                    exhaust=pin_setup.dict[pin_setup.EXHAUST_FAN],
                    circ=pin_setup.dict[pin_setup.RE_FAN],
                    tray_out=pin_setup.dict[pin_setup.TRAY_OUT],
                    tray_read=pin_setup.dict[pin_setup.TRAY_READ],
                    valve=pin_setup.dict[pin_setup.VALVE_PIN])

for tier in tiers:
    lightButtonThread(tier).start()

h_res_read = pin_setup.dict[pin_setup.HRES_READ]
h_res_out = pin_setup.dict[pin_setup.HRES_OUT]
m_res_read = pin_setup.dict[pin_setup.MRES_READ]
m_res_out = pin_setup.dict[pin_setup.MRES_OUT]
tray_read = pin_setup.dict[pin_setup.TRAY_READ]
tray_out = pin_setup.dict[pin_setup.TRAY_OUT]

mqttc = client.Client(tiers)

while True:
    for tier in tiers:
        tier.big_loop()
        if mqttc.connflag != False:
            tier.update_cloud()
        if mqttc.connflag == False:
            mqttc.reconnect()
            print("connflag false, reconnecting")

GPIO.cleanup()
mqttc.disconnect()
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
WINDOW_X = 350
WINDOW_Y = 350
square_size = 24
MARGIN = 1
AMOUNT = WINDOW_Y // (square_size + MARGIN)
WINDOW_X = WINDOW_Y * 2 + 50
SIZE_OF_UNDER = 100
TEXT_X = 10
WINDOW_Y = WINDOW_Y + SIZE_OF_UNDER
TEXT_Y = WINDOW_Y - SIZE_OF_UNDER

while True:
    socClient = client.Client()
    PLAYER1 = player.Player(socClient.player_id)
    PLAYER1.gameID = socClient.game_id
    PLAYER1.firstTurn = socClient.goFirst
    PLAYER1.make_grid(AMOUNT)  # creates grid for player1 (top)
    p1 = BoardGame(WINDOW_X, WINDOW_Y, square_size, MARGIN, AMOUNT)
    p1.make_window()
    main_LOOP(p1)
    # only player1's status is broadcasted
    if PLAYER1.win:
        if PLAYER1.serverCrash:
            p1.textChange("Opponent has disconnected. Check Terminal.")
        else:
def main():
    # noinspection PyPep8
    try:
        # start and wait until client thread is ready
        the_client = client.Client()
        the_client.start()

        # messages _____________________________________________
        messages = OrderedDict()
        messages['read_GPIOs'] = {
            'message_type': 'command',
            'command': 'read GPIOs',
            'kwargs': {
                'pins': [5, 12, 13, 14, 15, 16]
            },
            'need_result': True
        }
        messages['blink_led'] = {
            'message_type': 'command',
            'command': 'blink led',
            'kwargs': {
                'times': 3,
                'forever': False,
                'on_seconds': 0.1,
                'off_seconds': 0.1
            }
        }
        # messages['write_GPIOs'] = {'message_type': 'command',
        #                            'command': 'write GPIOs',
        #                            'kwargs': {'pins_and_values': [(2, 0), (2, 1), (2, 0),]}}
        # messages['test eval'] = {'message_type': 'eval',
        #                          'to_evaluate': '2+3',
        #                          'need_result': True}
        # messages['test exec'] = {'message_type': 'exec',
        #                          'to_exec': 'print("Testing exec !")'}
        # with open('script_to_deploy.py') as f:
        #     script = f.read()
        # messages['test upload script'] = {'message_type': 'script',
        #                                   'script': script}

        while not the_client.status['Is connected']:
            time.sleep(1)
            print('Node not ready yet.')

        # nodes _________________________________________________
        message = {
            'message_type': 'command',
            'command': 'list connections by name',
            'need_result': True
        }
        _, asynch_result = the_client.request('Hub', message)
        remote_nodes = sorted(list(asynch_result.get().keys()))

        print('\n[____________ Connected nodes ____________]\n')
        print('\nConnected nodes:\n{}\n'.format(remote_nodes))
        print('\n[______________ Sending messages ______________]\n')

        results = []
        # send out the messages
        for message in messages.values():
            for remote_node in remote_nodes:
                if remote_node != the_client.node.worker.name:
                    time.sleep(0.1)  # PyCharm needs this delay.
                    formatted_message, asynch_result = the_client.request(
                        remote_node, message)
                    results.append((formatted_message, asynch_result))

        # collect and print results
        print('\n[_________ Wait few seconds for reply _________]\n')
        for (message, result) in results:
            try:
                if message.get('need_result'):
                    print('\n[Result for request]:\n___Request___:\n{0}\n___Result____:\n{1}\n'
                          .format(message, result.get() if result else None))
            except Exception as e:
                print('\n[{}]\nMessage:\n{}'.format(e, message))

        # Wait a while
        time.sleep(3)

        # Stopping
        the_client.stop()
        the_client = None
        print('\n[________________ Demo stopped ________________]\n')

    except KeyboardInterrupt:
        print("Ctrl C - Stopping.")
        # noinspection PyUnboundLocalVariable
        the_client.stop()
        # noinspection PyUnusedLocal
        the_client = None
        sys.exit(1)
import client

client.Client("", "")
import mexicanpizza
import ingredient
import pizzeria
from mexican_bake import MexicanBake
from pizza_system import PizzaSystem
import client

pizza = mexicanpizza.MexicanPizza(50, 50, [
    ingredient.Ingredient("sausage", "Tasty", 25),
    ingredient.Ingredient("cheese", "very tasty", 30)
], "mexican")
print(pizza.price, pizza.ingredients)

new_pizzeria = pizzeria.Pizzeria("Celentano", "Kyiv", [
    pizza.name, pizza.name, pizza.name, pizza.name, pizza.name, pizza.name,
    pizza.name
])
new_pizzeria.add_bake(
    "mexican",
    MexicanBake(50, 50, [ingredient.Ingredient("cheese", "very tasty", 30)]))

system = PizzaSystem([new_pizzeria])
print(system.get_pizzas_list("Kyiv"))

new_client = client.Client("Leha", "Sex", "Kyiv", system)
order = {"mexican": [ingredient.Ingredient("cheese", "very tasty", 30)]}
print(system.get_pizzas_list("Kyiv"))
print(system.process(order))
def start_server_connection(self):
    self.client_session = client.Client()
# Aya Maguire
# July 2015
# This is the main program. It just needs to be run.
# It just runs the "run" method from an instance of the class "Client".

import server
import socket
import sys
import SocketServer
import client
import os

if __name__ == "__main__":
    HOST, PORT = "localhost", 9999
    a_client = client.Client(HOST, PORT)
    a_client.run()
server = raw_input("Server: ").lower()
if not server in data["servers"]:
    sys.exit("Server not found")
if "ip" in data:
    login_ip = game_ip = data["ip"]
    login_port = data["login"]
    game_port = data["servers"][server]
else:
    login_ip, login_port = data["login"].split(':')
    login_port = int(login_port)
    game_ip, game_port = data["servers"][server].split(':')
    game_port = int(game_port)
magic = data["magic"] if "magic" in data else None

client = client.Client(login_ip, login_port, game_ip, game_port, magic, True)
if not client.log:
    print "Connecting..."
error = client.connect(user, password, encrypted)
if error:
    if error == 603:
        remove_penguin(cpps, user)
    sys.exit("Failed to connect")
print "Connected!"

commands = {
    "help": help,
    "log": log,
    "internal": internal,
    "id": id,
    "room": room,
    "igloo": igloo,
def next_next_handler(self, shareFile, payload, connect_to):
    t = threading.Thread(
        target=client.Client(shareFile, connect_to).change_your_next_next,
        args=(shareFile, payload))
    t.daemon = True
    t.start()
def test_something(self):
    user = client.Client()
    id = user.identity
    print('Our public key: ' + id)
    self.assertIsNotNone(id)