def test_eps_drawer():
    ca = CA(50, 20)
    ca.start_single()
    ca.loop(19)
    drawer = EPSDrawer()
    drawer.draw(ca)
    drawer.save(filename='eps_draw_test.eps')
def first_ca_test():
    ca = CA(147, 20)
    ca.start_single()
    ca.loop(19)
    drawer = PyplotDrawer()
    drawer.draw(ca)
    drawer.show()
def test(self):
    agent_objects = {
        "agent_name1": agent_name1(),
        "agent_name2": agent_name2()
    }
    patch_object = Patch()
    CA_obj = CA(agent_objects, patch_object, settings)
    self.assertEqual(CA_obj.run(), True)
def figure1(rule=18, n=64):
    ca = CA(rule, n)
    ca.start_single()
    ca.loop(n - 1)
    drawer = CADrawer.PyplotDrawer()
    drawer.draw(ca)
    drawer.show()
    drawer.draw(ca)
    drawer.save('rule18.png')
def main(script, rule=30, n=100, *args):
    rule = int(rule)
    n = int(n)
    ca = CA(rule, n)
    # ca = CircularCA(rule, n)

    if 'random' in args:
        ca.start_random()
    else:
        ca.start_single()
    ca.loop(n - 1)

    if 'eps' in args:
        drawer = CADrawer.EPSDrawer()
    elif 'pil' in args:
        drawer = CADrawer.PILDrawer()
    else:
        drawer = CADrawer.PyplotDrawer()

    if 'trim' in args:
        drawer.draw(ca, start=n // 2, end=3 * n // 2 + 1)
    else:
        drawer.draw(ca)
    drawer.show()
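# Usage sketch (an assumption, not part of the original script): the signature
# main(script, rule=30, n=100, *args) mirrors sys.argv, so the module could be
# driven from the command line roughly like this; the file name is hypothetical:
#
#     python ca_draw.py 30 100 random eps trim
#
if __name__ == '__main__':
    import sys
    main(*sys.argv)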
def main():
    # get the message to send to the server;
    # if no message is supplied to the script, 'Hello' is sent
    MESSAGE = sys.argv[1] if len(sys.argv) > 1 else 'Hello'

    # initialize the Certificate Authority
    ca = CA()

    # register the server with the CA
    ca.register(Server.name, Server.publicKey)

    # the client will initiate contact with the server;
    # initialize the client
    client = Client(ca, Server.port)

    # send the message to the server
    response = client.send(MESSAGE)

    # print the response from the server
    print('Server response = {}'.format(response))
def fractal_dimension(rule=18, n=512, save=False):
    """Estimates the fractal dimension for a given rule and number of steps."""
    ca = CA(rule, n)
    ca.start_single()
    ca.loop(n - 1)

    if save:
        filename = 'fractal-%d-%d.eps' % (rule, n)
        print('Writing', filename)
        save_ca(ca, filename)

    ts, ys = count(ca)

    if save:
        filename = 'fractal_dim-%d-%d.pdf' % (rule, n)
        print('Writing', filename)
        plot_loglog(ts, ys, filename)

    slope, inter = fit_loglog(ts, ys, n // 2)
    return slope
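# Usage sketch (an assumption, not part of the original code): for a rule that
# approximates a Sierpinski triangle, such as rule 18, the fitted slope should
# be close to the triangle's fractal dimension, log(3)/log(2) ≈ 1.585.
if __name__ == '__main__':
    import math
    slope = fractal_dimension(rule=18, n=512)
    print('estimated dimension: %.3f (Sierpinski: %.3f)' % (slope, math.log(3, 2)))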
def sierpinski(n):
    """Rules 18 and 90 should approximate a Sierpinski triangle; test that here."""
    ca1 = CA(18, n)
    ca1.start_single()
    ca1.loop(n - 1)
    drawer = PyplotDrawer()
    drawer.draw(ca1)
    drawer.show()

    ca2 = CA(90, n)
    ca2.start_single()
    ca2.loop(n - 1)
    drawer = PyplotDrawer()
    drawer.draw(ca2)
    drawer.show()
def try_login(username, password):
    """
    Step 1: search for the LDAP entry using the cn and the userPassword.
    """
    conn = get_ldap_connection()
    print('get connection')

    # hash the password with sha256, matching the form stored in LDAP
    password = hashlib.sha256(password.encode()).hexdigest()

    # search for the LDAP entry
    conn.search('dc=chatroom,dc=com',
                '(&(cn=%s)(userPassword=%s))' % (username, password),
                attributes=['userCertificate', 'givenName', 'sn', 'uid'])

    if not conn.entries:
        return 'error no entry found', 400
    else:
        # Step 2: extract the userCertificate from LDAP and verify its validity.
        # get a PEM-format certificate from the search output
        cert_pem, uid, sn, givenName = get_certificate_uid_sn_givenName_from_entry(conn.entries[0])

        # verify the certificate using the CA certificate and the CA private key
        certificate_obj = CA.verify(cert_pem)
        if certificate_obj is not None:
            # Step 3: extract the subject from the certificate and compare it with the username.
            # extract the subject, the issuer, the public key and the signature algorithm
            subject = str(certificate_obj.get_subject()).split('CN=')[1].split('/')[0]
            pubkey = get_pubkey_from_certifcate_pem(cert_pem)
            pubkey_str = crypto.dump_publickey(crypto.FILETYPE_PEM, pubkey).decode()
            issuer = str(certificate_obj.get_issuer()).split('CN=')[1].split('/')[0]
            signature_algorithm = certificate_obj.get_signature_algorithm().decode()

            if subject == username:
                print("username and the certificate subject are identical")
                # generate the access token
                expires = datetime.timedelta(days=30)
                access_token = create_access_token(identity=str(subject), expires_delta=expires)
                return {'token': access_token,
                        'certificate': cert_pem,
                        'cn': subject,
                        'givenName': givenName,
                        'sn': sn,
                        'uid': uid,
                        'issuer': issuer,
                        'signature_algorithm': signature_algorithm,
                        'pubkey': pubkey_str}, 200
            return "username and the certificate subject are not identical", 400
        return 'invalid certificate', 400
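# Hedged side note (not part of the original code): the filter above interpolates
# raw user input, which is prone to LDAP filter injection. The conn.search /
# conn.entries usage suggests the ldap3 library, whose escape_filter_chars helper
# could be applied first; build_login_filter below is a hypothetical helper, shown
# only as a sketch.
from ldap3.utils.conv import escape_filter_chars

def build_login_filter(username, password_hash):
    # escape LDAP filter metacharacters such as '*', '(' and ')'
    return '(&(cn=%s)(userPassword=%s))' % (escape_filter_chars(username),
                                            escape_filter_chars(password_hash))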
def try_signup(cn, givenName, sn, telephoneNumber, userPassword, userCertificateRequest):
    # generate a random uid and a working directory for this client
    uid = generate_random_id()
    path = 'clients/' + uid + '-' + cn
    os.mkdir(path, 0o777)

    # load the PEM-encoded certificate signing request sent by the client
    try:
        csr = x509.load_pem_x509_csr(bytes(userCertificateRequest, 'utf-8'), default_backend())
    except Exception:
        print('CSR INVALID')
        return 400

    print(csr.subject)

    # sign the CSR with the CA and convert the certificate to DER for LDAP
    signed = CA.sign(csr, path)
    cert_der = crypto.dump_certificate(crypto.FILETYPE_ASN1, signed)

    # add the new user entry to LDAP
    conn = get_ldap_connection()
    conn.add('cn=%s,ou=myusers,dc=chatroom,dc=com' % cn,
             'inetOrgPerson',
             {'givenName': givenName,
              'sn': sn,
              'telephoneNumber': telephoneNumber,
              'userPassword': hashlib.sha256(userPassword.encode()).hexdigest(),
              'uid': uid,
              'userCertificate;binary': cert_der})

    result = conn.result['description']
    if result == 'success':
        return 'Success', 200
    else:
        return result, 400
def start(self):
    print("Account Generator Ready")
    location = input('Enter Location US UK CA AU \t')
    x = int(input('Number of accounts to be made:\t'))
    domain = input('Enter in your domain:')

    if location == 'US':
        USGEN.US(x, domain)
    elif location == 'UK':
        UKGEN.UK(x, domain)
    elif location == 'AU':
        AU(x, domain)
    elif location == 'CA':
        CA(x, domain)
def main(script, rule=30, n=100, *args):
    # figure1()
    rule = int(rule)
    n = int(n)
    ca = CA(rule, n)
    filename = 'rule-%d-%d' % (rule, n)

    if 'random' in args:
        filename += '-random'
        ca.start_random()
    else:
        ca.start_single()
    ca.loop(n - 1)

    if 'eps' in args:
        drawer = CADrawer.EPSDrawer()
        filename += '.eps'
    elif 'pil' in args:
        drawer = CADrawer.PILDrawer()
        filename += '.png'
    else:
        drawer = CADrawer.PyplotDrawer()
        filename += '.pdf'

    if 'trim' in args:
        drawer.draw(ca, start=n // 2, end=3 * n // 2 + 1)
    else:
        drawer.draw(ca)

    # drawer.show()
    print('Writing', filename)
    drawer.save(filename)
def CA_RUN(cluster_num=100, iterations=50):
    # Read data (in_centers.csv and in_data.csv are the only required inputs)

    # A1 Synthetic Dataset Example
    in_centers = pd.read_csv('data/A1-Dataset/random-centers-100.csv', header=None)
    in_data = pd.read_csv('data/A1-Dataset/a1.csv', header=None)
    actual_classification_file = pd.read_csv("data/A1-Dataset/a1-ga.csv", header=None)
    actual_classification_vector = actual_classification_file[0].tolist()
    actual_clusters = pd.read_csv("data/A1-Dataset/a1-ga-cb.csv", header=None).to_numpy().tolist()

    # MATLAB Example (for this example, cluster_num must be 10)
    """
    cluster_num = 10
    in_centers = pd.read_csv('data/MATLAB-Data-Example/InCenters.csv', header=None)
    in_data = pd.read_csv('data/MATLAB-Data-Example/InData.csv', header=None)
    actual_classification_file = pd.read_csv("data/MATLAB-Data-Example/ActualClassification.csv", header=None)
    actual_classification_vector = actual_classification_file[0].tolist()
    actual_clusters = pd.read_csv("data/MATLAB-Data-Example/OutCenters.csv", header=None)
    """

    # Calculate vector number and dimensions from the data shape
    vector_num = in_data.shape[0]
    dimensions = in_data.shape[1]
    in_centers = in_centers[0:cluster_num]

    # -------------------------- CA -----------------------------------------
    print("Starting CA Algorithm:")
    CA_start_time = int(round(time.time() * 1000))
    CA_cluster_num, CA_center_list, CA_classification_list = CA.CA(
        in_data, in_centers,
        max_iterations=iterations,
        cluster_num=cluster_num,
        vector_num=vector_num,
        dimensions=dimensions)
    CA_end_time = int(round(time.time() * 1000))
    CA_time = CA_end_time - CA_start_time

    CA_accuracy = matrixAccuracy(actual_clusters, actual_classification_vector,
                                 CA_center_list, CA_classification_list)
    print("CA final cluster number: {}".format(CA_cluster_num))
    print("CA time: {} ms".format(CA_time))
    print("CA accuracy: {}%\n".format(CA_accuracy * 100))

    # Optional: output results to files
    """
    pd.DataFrame(CA_center_list).to_csv('CA_center_list.csv', index=False)
    pd.DataFrame(CA_classification_list).to_csv('CA_classification_list.csv', index=False)
    all_data = pd.concat([in_data, actual_classification_file, pd.DataFrame(CA_classification_list)], axis=1)
    all_data.to_csv("all_data.csv")
    cluster_data = pd.concat([actual_clusters, pd.DataFrame(CA_center_list)], axis=1)
    cluster_data.to_csv("cluster_data.csv", index=False)
    """

    return (CA_cluster_num, CA_center_list)
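# Usage sketch (an assumption, not part of the original code): CA_RUN returns the
# final cluster count and the list of cluster centers, assuming the A1 dataset
# files referenced above exist at the same relative paths.
if __name__ == '__main__':
    final_cluster_num, final_centers = CA_RUN(cluster_num=100, iterations=50)
    print('clusters found:', final_cluster_num)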
def run_generative_model(
        learning_rate=0.1,
        dataset='mnist.pkl.gz',
        n_epochs=5,
        batch_size=20,
        display_step=1000,
        n_visible=28 * 28,          # MNIST pixels
        n_hidden=500,
        corruption_level=0.3,       # DA
        contraction_level=0.1,      # CA
        k=5,                        # RBM
        chains=10,                  # RBM
        output_folder='Generative_plots',
        img_shape=(28, 28),         # image shape of MNIST for tile_raster_images
        model_name='AutoEncoder'):
    """This demo is tested on MNIST.

    :type learning_rate: float
    :param learning_rate: learning rate used for training the denoising autoencoder

    :type n_epochs: int
    :param n_epochs: number of epochs used for training

    :type dataset: string
    :param dataset: path to the pickled dataset
    """
    # numpy random generator
    rng = np.random.RandomState(123)
    # create a Theano random generator that gives symbolic random values
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)

    #############
    # Load Data #
    #############
    datasets = load_data(dataset)
    train_set_x, train_set_y = datasets[0]
    # valid_set_x, valid_set_y = datasets[1]
    test_set_x, test_set_y = datasets[2]

    ###################################
    # Calculate number of minibatches #
    ###################################
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
    # n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size
    n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size

    ############################################
    # Allocate symbolic variables for the data #
    ############################################
    index = T.lscalar()    # index to a [mini]batch
    x = T.matrix('x')      # the data is presented as rasterized images

    ###############
    # BUILD MODEL #
    ###############
    print('... building the model')
    if model_name == 'AutoEncoder':
        model = AutoEncoder(numpy_rng=rng, theano_rng=theano_rng, input=x,
                            n_visible=n_visible, n_hidden=n_hidden)
    elif model_name == 'DA':
        model = DA(numpy_rng=rng, theano_rng=theano_rng, input=x,
                   n_visible=n_visible, n_hidden=n_hidden)
    elif model_name == 'CA':
        model = CA(numpy_rng=rng, theano_rng=theano_rng, input=x,
                   n_visible=n_visible, n_hidden=n_hidden, batch_size=batch_size)
    elif model_name == 'RBM':
        model = RBM(input=x, numpy_rng=rng, theano_rng=theano_rng,
                    n_visible=n_visible, n_hidden=n_hidden)

    #####################
    # Training Function #
    #####################
    # cost & updates
    if model_name == 'AutoEncoder':
        cost, updates = model.get_cost_updates(learning_rate=learning_rate)
    elif model_name == 'DA':
        cost, updates = model.get_cost_updates(corruption_level=corruption_level,
                                               learning_rate=learning_rate)
    elif model_name == 'CA':
        cost, updates = model.get_cost_updates(contraction_level=contraction_level,
                                               learning_rate=learning_rate)
    elif model_name == 'RBM':
        # initialize storage for the persistent chain (state = hidden layer of chain)
        persistent_chain = theano.shared(np.zeros(shape=(batch_size, model.n_hidden),
                                                  dtype=theano.config.floatX),
                                         borrow=True)
        # get the cost and the gradient corresponding to one step of CD-k
        cost, updates = model.get_cost_updates(learning_rate=learning_rate,
                                               persistent=persistent_chain, k=k)

    # training function
    train_model = theano.function(
        inputs=[index],
        outputs=cost,
        updates=updates,
        givens={x: train_set_x[index * batch_size:(index + 1) * batch_size]})

    ###############
    # TRAIN MODEL #
    ###############
    print('... training')
    plotting_time = 0.
    start_time = timeit.default_timer()

    # go through training epochs
    for epoch in range(n_epochs):
        minibatch_avg_cost = []
        for minibatch_index in range(n_train_batches):
            minibatch_avg_cost.append(train_model(minibatch_index))
            # iteration number
            iter = epoch * n_train_batches + minibatch_index
            if iter % display_step == 0:
                print('training @ iter = ', iter)

        print('Training epoch %d, cost ' % epoch,
              np.mean(minibatch_avg_cost, dtype='float64'))

        # Plot filters after each training epoch
        plotting_start = timeit.default_timer()
        # Construct an image from the weight matrix
        image = Image.fromarray(
            tile_raster_images(X=model.W.get_value(borrow=True).T,
                               img_shape=img_shape,
                               tile_shape=(10, 10),
                               tile_spacing=(1, 1)))
        image.save('filters_at_epoch_%i.png' % epoch)
        plotting_stop = timeit.default_timer()
        plotting_time += (plotting_stop - plotting_start)

    end_time = timeit.default_timer()
    pretraining_time = (end_time - start_time) - plotting_time
    print('Training took %f minutes' % (pretraining_time / 60.))
    print(('The code for file ' + os.path.split(__file__)[1] +
           ' ran for %.2fm' % ((end_time - start_time) / 60.)),
          file=sys.stderr)

    image = Image.fromarray(
        tile_raster_images(X=model.W.get_value(borrow=True).T,
                           img_shape=img_shape,
                           tile_shape=(10, 10),
                           tile_spacing=(1, 1)))
    image.save('trained_filters.png')

    ###########################
    # Sampling from the Model #
    ###########################
    # if model_name == 'RBM':
    #     sample_RBM(model=model, test_set_x=test_set_x, chains=20)

    ####################
    # Change Directory #
    ####################
    os.chdir('../')
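# Usage sketch (an assumption, not part of the original code): model_name selects
# one of the four models built above; 'CA' trains the autoencoder with the
# contraction_level penalty, and filter images are written to output_folder.
if __name__ == '__main__':
    run_generative_model(model_name='CA',
                         n_epochs=5,
                         contraction_level=0.1,
                         output_folder='Generative_plots')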