def DD(rid, LSAs, seqno):
    """Build an OSPF Database Description (type 2) packet as a bit-string.

    rid    -- router ID (dotted quad) for the common header
    LSAs   -- concatenated LSA headers, already encoded as a bit-string
    seqno  -- DD sequence number
    """
    # Payload length in bytes: the LSA headers (8 bits per byte) plus the
    # 8-byte DD preamble (interface MTU, options, DD sequence number).
    payload_len = len(LSAs) // 8 + 8
    header = makeHeader(2, rid, payload_len)
    # Interface MTU fixed at 1500, options zeroed, then the sequence number.
    preamble = pad(1500, 16) + pad(0, 16) + pad(seqno, 32)
    return header + preamble + LSAs
def LSR(rid, LSAHeaderl):
    """Build an OSPF Link State Request (type 3) packet as a bit-string.

    rid        -- router ID (dotted quad) for the common header
    LSAHeaderl -- list of LSA headers (bit-strings) to request
    """
    # Each request entry is 12 bytes: LS type (4) + Link State ID (4)
    # + Advertising Router (4).
    header = makeHeader(3, rid, 12 * len(LSAHeaderl))
    # LS type is fixed to 1; bits 32..96 of each LSA header carry the
    # Link State ID and Advertising Router fields.
    entries = "".join(pad(1, 32) + hdr[32:96] for hdr in LSAHeaderl)
    return header + entries
def encryptAES_CBC(decrypt: bytes, key: bytes, iv: bytes):
    """Encrypt *decrypt* under AES-CBC by composing the ECB helper with XOR chaining.

    Raises ValueError if the IV is not exactly one 16-byte block.
    """
    if len(iv) != 16: raise ValueError('IV must have a 16 bytes block size.')
    # Pad the plaintext up to the 16-byte AES block size before chaining.
    decrypt = pad(decrypt, 16)
    encrypt = bytes()
    for i in range(int(len(decrypt) / 16)):
        # CBC chaining: the first block is XORed with the IV, every later block
        # with the previous ciphertext block sliced from the accumulated output.
        # NOTE(review): encryptAES_ECB pads its input again; if that pad always
        # appends a full block, each call returns 32 bytes and the
        # encrypt[(i - 1) * 16:i * 16] slice would no longer be the previous
        # ciphertext block — confirm pad() is a no-op on 16-byte-aligned input.
        block = xor(decrypt[i * 16:(i + 1) * 16], iv if i == 0 else encrypt[(i - 1) * 16:i * 16])
        encrypt += encryptAES_ECB(block, key)
    return encrypt
def decryptAES_CBC(encrypt: bytes, key: bytes, iv: bytes):
    """Decrypt AES-CBC ciphertext: ECB-decrypt all blocks, then undo the XOR chain.

    Raises ValueError if the IV is not 16 bytes or the ciphertext length is not
    a multiple of the 16-byte block size. Returns the unpadded plaintext.
    """
    if len(iv) != 16: raise ValueError('IV must have a 16 bytes block size.')
    if len(encrypt) % 16 != 0: raise ValueError('The encrypted data must be 16 bytes padded.')
    # NOTE(review): padding the ECB-decrypted output looks redundant — the loop
    # below only reads the first len(encrypt) bytes of middecrypt. Presumably it
    # compensates for decryptAES_ECB shortening its result (e.g. unpadding
    # internally); confirm against that helper before removing.
    middecrypt = pad(decryptAES_ECB(encrypt, key), 16)
    decrypt = bytes()
    for i in range(int(len(encrypt) / 16)):
        # Undo CBC chaining: XOR each decrypted block with the IV (first block)
        # or with the preceding *ciphertext* block.
        decrypt += xor(middecrypt[i * 16:(i + 1) * 16], iv if i == 0 else encrypt[(i - 1) * 16:i * 16])
    return unpad(decrypt)
def makeHeader(type, rid, plen):
    """Build the 24-byte OSPFv2 common packet header as a bit-string.

    Type  Description
    ----  ------------------------------
      1   Hello
      2   Database Description
      3   Link State Request
      4   Link State Update
      5   Link State Acknowledgment

    plen is the payload length in bytes; the encoded length field adds the
    24-byte header on top of it.
    """
    fields = [
        pad(2, 8),                       # version: OSPFv2
        pad(int(type), 8),               # packet type (table above)
        pad(plen + 24, 16),              # total length = payload + header
        pad(int(IPv4Address(rid)), 32),  # router ID
        pad(0, 32),                      # area ID
        pad(0, 16),                      # checksum (left zero)
        pad(0, 16),                      # auth type: none
        pad(0, 32),                      # authentication word 1
        pad(0, 32),                      # authentication word 2
    ]
    return "".join(fields)
def HelloPacket(rid, netmask, neighL):
    """Build an OSPF Hello (type 1) packet as a bit-string.

    rid     -- router ID (dotted quad)
    netmask -- network mask of the sending interface (dotted quad)
    neighL  -- list of neighbour router IDs (dotted quads)
    """
    neighbors = "".join(pad(int(IPv4Address(n)), 32) for n in neighL)
    body = (
        pad(int(IPv4Address(netmask)), 32)  # network mask
        + pad(1000, 16)                     # hello interval
        + pad(0, 8)                         # options
        + pad(0, 8)                         # router priority
        + pad(2000, 32)                     # router dead interval
        + pad(0, 32)                        # designated router
        + pad(0, 32)                        # backup designated router
        + neighbors
    )
    # Payload length in bytes — the bit-string carries 8 bits per byte.
    return makeHeader(1, rid, len(body) // 8) + body
# Build one-writer validation sets from the two books.
# NOTE(review): eureka_validation / huck_validation are computed here but never
# read below, while pym_validation / tom_validation are read without any visible
# assignment in this chunk — this looks like a copy-paste rename mismatch
# (eureka->pym, huck->tom), unless those names are defined earlier in the file.
# Confirm before running.
eureka_set = eureka.get_base_training_set()
eureka_validation = Book.one_writer_set(eureka_set, sentences_per_sample=3)
huck_set = huck.get_base_training_set()
huck_validation = Book.one_writer_set(huck_set, sentences_per_sample=3)
# read the model
with open(MODEL_FILE, "rb") as infile:
    sequential_model = pickle.load(infile)
# for simplicity we ignore paragraphs that are longer than the longest one found in training
max_expected_length = sequential_model.input_shape[1]
# apply the model to pym: drop over-length samples, pad the rest to a uniform
# length, and score the fraction classified as writer 0.
pym_validation = pad(
    [u for u in pym_validation if u.shape[0] <= max_expected_length], max_expected_length)
pym_validation = np.asarray(pym_validation)
pym_predictions = sequential_model.predict(pym_validation)
pym_accuracy = sum([probs[0] > 0.5 for probs in pym_predictions]) / len(pym_predictions)
nlp_logger.warning("Accuracy for Poe/pym: {:.4f}".format(pym_accuracy))
# apply the model to tom: same pipeline, scored against writer 1's output unit.
tom_validation = pad(
    [u for u in tom_validation if u.shape[0] <= max_expected_length], max_expected_length)
tom_validation = np.asarray(tom_validation)
tom_predictions = sequential_model.predict(tom_validation)
tom_accuracy = sum([probs[1] > 0.5 for probs in tom_predictions]) / len(tom_predictions)
import pickle
import random
from biblio_eater import BiblioEater
from constants import *
from the_logger import nlp_logger
from padder import pad

# Load training set from disk first
with open(TRAINING_SET_FILE, "rb") as infile:
    training_set = pickle.load(infile)
# pad with zero rows up to max sentence length so every sample has the same
# shape (required by the sequential net's fixed input shape)
max_length = max([s.shape[0] for s in training_set])
training_set = pad(training_set, max_length)
# matching writer labels, pickled alongside the training set
with open(LABELS_FILE, "rb") as infile:
    writer_labels = pickle.load(infile)

# Sequential network
nlp_logger.warning("Shape of training set ({}, {})".format(
    training_set[0].shape[0], training_set[0].shape[1]))

# Prepare net
biblio_eater = BiblioEater()
# all sentences are padded to same length, although Keras has a padding option that we are not using
biblio_eater.design_sequential_net(training_set[0].shape[0], training_set[0].shape[1])
biblio_eater.train_sequential_net(training_set, writer_labels)
def LSU(rid, LSAList):
    """Build an OSPF Link State Update (type 4) packet as a bit-string.

    rid     -- router ID (dotted quad) for the common header
    LSAList -- list of encoded LSAs (bit-strings) to advertise
    """
    body = "".join(LSAList)
    # Payload = 4-byte "number of LSAs" field plus the concatenated LSAs.
    header = makeHeader(4, rid, len(body) // 8 + 4)
    lsa_count = pad(len(LSAList), 32)
    return header + lsa_count + body
def LSA(lsid, seqno, linkarray):
    """Build a router LSA (LS type 1) as a bit-string.

    lsid      -- link-state ID (dotted quad); also used as the advertising router
    seqno     -- LS sequence number
    linkarray -- iterable of link records indexed as
                 [0] link ID (dotted quad), [1] link data (dotted quad or falsy),
                 [2] link type, [3] metric
    """
    ls_age = pad(random.randint(0, 65535), 16)  # randomised LS age field
    options = pad(0, 8)
    ls_type = pad(1, 8)  # router LSA
    ls_id = pad(int(IPv4Address(lsid)), 32)
    adv_router = ls_id  # advertising router mirrors the link-state ID here
    seq = pad(seqno, 32)
    checksum = pad(0, 16)  # left zero
    veb = pad(0, 16)       # V/E/B flags + reserved byte, all zero

    records = ""
    for link in linkarray:
        # Falsy link data encodes as zero, otherwise as a packed IPv4 address.
        data = pad(int(IPv4Address(link[1])), 32) if link[1] else pad(0, 32)
        records += (
            pad(int(IPv4Address(link[0])), 32)  # link ID
            + data                              # link data
            + pad(link[2], 8)                   # link type
            + pad(0, 8)                         # number of TOS metrics
            + pad(link[3], 16)                  # TOS-0 metric
        )

    n_links = pad(len(linkarray), 16)
    # LSA length in bytes: everything except the 16-bit length field itself,
    # hence the explicit +2.
    prefix = ls_age + options + ls_type + ls_id + adv_router + seq + checksum
    length = pad(len(prefix + veb + n_links + records) // 8 + 2, 16)
    return prefix + length + veb + n_links + records
def encryptAES_ECB(decrypt: bytes, key: bytes):
    """AES-ECB encrypt *decrypt*, padding it to the 16-byte block size first."""
    padded = pad(decrypt, 16)
    cipher = AES.new(key, AES.MODE_ECB)
    return cipher.encrypt(padded)