Example #1
import json
import os

# script_path, parse_args() and Flows are assumed to be defined elsewhere
# in the same module.
def main():
    # Load the JSON configuration that sits next to this script.
    with open(os.path.join(script_path, 'config.json'), 'r',
              encoding='utf8') as f:
        config = json.load(f)

    options = parse_args()
    flows = Flows(config)

    if options.prepare_data:
        flows.prepare_data()

    if options.prepare_build_and_evaluate:
        flows.prepare_build_and_evaluate()

    if options.build_and_evaluate:
        flows.build_and_evaluate()

    if options.tune_alpha_beta:
        flows.tune_alpha_beta()
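
main() relies on a parse_args() helper that is not shown in this example. A minimal argparse-based sketch of what it might look like, with flag names inferred from the option attributes main() reads (an assumption, not the original implementation):

import argparse

def parse_args():
    # Hypothetical sketch: one boolean flag per pipeline step, matching the
    # attributes used in main() above.
    parser = argparse.ArgumentParser(description="Run selected flow steps.")
    parser.add_argument("--prepare-data", dest="prepare_data", action="store_true")
    parser.add_argument("--prepare-build-and-evaluate", dest="prepare_build_and_evaluate", action="store_true")
    parser.add_argument("--build-and-evaluate", dest="build_and_evaluate", action="store_true")
    parser.add_argument("--tune-alpha-beta", dest="tune_alpha_beta", action="store_true")
    return parser.parse_args()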
Example #2
import logging
import unittest

# request, Flows and tables are assumed to come from the project under test.


class TestFlows(unittest.TestCase):
    def setUp(self):
        self.filename = "./flow_dumps.1.txt"
        self.data = request.get_from_file(self.filename)
        self.flows = Flows(self.data, logging.DEBUG)

    def test_process_data(self):
        pdata = self.flows.process_data()
        # print("parsed data:\n{}".format(pformat(pdata)))

    def test_format_data(self):
        fdata = self.flows.format_data()
        # print("formatted data:\n{}".format(pformat(fdata)))

    def test_write_file(self):
        self.flows.write_fdata("/tmp/flow_dumps.out.txt")

    def test_get_table_name(self):
        print "table: {} is the {} table".format(17, tables.get_table_name(17))
Example #3
class RED(object):
    def __init__(self, host="http://localhost:1880", strategy=None):

        self.strategy = strategy
        self.host = host
        self.flows = Flows(host, strategy)

    def __str__(self):
        response = ""
        for flow in self.flows:
            response += "\"{0}\" ({1}) -> {2}\n\n".format(
                blue(flow.get('label')),
                orange(flow.get('id')),
                pretty(flow.get('nodes')),
            )
        return response

    def update(self, flow):
        self.flows.update(flow)
Example #4
def emptyNet():
    # Assumes the Mininet API (Mininet, OVSSwitch, TCLink, RemoteController, CLI),
    # functools.partial and os, plus the project-local SpineLeaf topology, Flows,
    # capture() helper and module-level `controllers` list.
    switch = partial(OVSSwitch, protocols='OpenFlow13')
    link = partial(TCLink, bw=100)
    topo = SpineLeaf(leaves=3)
    f = Flows()

    net = Mininet(controller=RemoteController,
                  switch=switch,
                  link=link,
                  topo=topo,
                  build=False,
                  autoSetMacs=True)

    controllers.append(
        net.addController('c1',
                          controller=RemoteController,
                          ip="172.17.0.5",
                          port=6633))
    controllers.append(
        net.addController('c2',
                          controller=RemoteController,
                          ip="172.17.0.6",
                          port=6633))
    controllers.append(
        net.addController('c3',
                          controller=RemoteController,
                          ip="172.17.0.7",
                          port=6633))

    capture("inicio-conexao-com-testes", "docker0", timeout=30)

    net.build()
    net.start()

    f.test()

    h1 = net.getNodeByName("h1")
    h3 = net.getNodeByName("h3")

    os.system("killall tcpdump")
    #    CLI(net)
    net.stop()
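
A typical way to run such a Mininet script is shown below; this entry point is an assumption and is not part of the original example:

if __name__ == '__main__':
    from mininet.log import setLogLevel
    setLogLevel('info')   # standard Mininet logging setup
    controllers = []      # module-level list that emptyNet() appends to
    emptyNet()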
Example #5
class RED(object):

    def __init__(self, host="http://localhost:1880", strategy=None):

        self.strategy = strategy
        self.host = host
        self.flows = Flows(host, strategy)

    def __str__(self):
        response = ""
        for flow in self.flows:
            response += "\"{0}\" ({1}) -> {2}\n\n".format(
                blue(flow.get('label')),
                orange(flow.get('id')),
                pretty(flow.get('nodes')),
            )
        return response

    def update(self, flow):
        self.flows.update(flow)
Example #6
def topology(n=1):
    # Assumes the same Mininet imports and module-level `controllers` list
    # as Example #4, plus the project-local Flows class.
    switch = partial(OVSSwitch, protocols='OpenFlow13')

    topo = LinearTopo(k=n, n=1)

    f = Flows()

    net = Mininet(controller=RemoteController, switch=switch, topo=topo,
                  build=False, autoSetMacs=True)

    for i in range(n):
        controllers.append(net.addController('c{0}'.format(i + 1),
                                             controller=RemoteController,
                                             ip="172.17.0.2", port=6633))


    net.build()
    net.start()


    CLI(net)
    net.stop()
Example #7
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

from flows import Flows
from scipy.stats import halfcauchy

MODEL_FILENAME = 'temp_weights_eight_schools.h5'

d = 3
DATA_SHAPE = (2500, d)
N_FLOWS = 30
# Rebuild the flow model and restore the trained weights.
q = Flows(d=d, n_flows=N_FLOWS, shape=DATA_SHAPE)
q(tf.zeros(DATA_SHAPE))  # build the model so the weights can be loaded
q.load_weights(MODEL_FILENAME)
z0, zk, log_det_jacobian, mu, log_var = q(tf.zeros(DATA_SHAPE))

# The third latent dimension is interpreted as log(tau).
thetas0, mu0, log_tau0 = z0[:, 0], z0[:, 1], z0[:, 2]
thetas, mu, log_tau = zk[:, 0], zk[:, 1], zk[:, 2]

# Prior
N = 5000
mu_prior = np.random.normal(loc=0, scale=5, size=N)
tau_prior = halfcauchy.rvs(loc=0, scale=5, size=N)
thetas_prior = np.random.normal(loc=mu_prior, scale=tau_prior, size=N)
mask_tau = (np.log(tau_prior) > -2) & (np.log(tau_prior) < 2.8)

plt.figure()
Example #8
def setUp(self):
    self.filename = "./flow_dumps.1.txt"
    self.data = request.get_from_file(self.filename)
    self.flows = Flows(self.data, logging.DEBUG)
Example #9
import sys

import tensorflow as tf
import tensorflow_probability as tfp
from distributions import *
from flows import Flows

sys.path.append("..")
from psis import psislw

DISTRIBUTION_NAME = 'figure_eight'
MODEL_FILENAME = f"temp_weights_{DISTRIBUTION_NAME}.h5"

DATA_SHAPE = (5000, 2)
flows = Flows(d=2, n_flows=10, shape=DATA_SHAPE)
flows(tf.zeros(DATA_SHAPE))
flows.load_weights(MODEL_FILENAME)
z0, zk, log_det_jacobian, mu, log_var = flows(tf.zeros(DATA_SHAPE))

normal = tfp.distributions.Normal(loc=mu, scale=tf.math.exp(0.5 * log_var))
log_q0 = normal.log_prob(z0)
log_qk = tf.math.reduce_sum(log_q0) - tf.math.reduce_sum(log_det_jacobian)
log_p = tf.math.log(pdf_2D(zk, DISTRIBUTION_NAME))

lw_out, kss = psislw(log_p - log_qk)
print(lw_out, kss)
Example #10
import pickle
import sys

import numpy as np

# Reader and Flows are assumed to come from the same project.


class Preprocessor(object):

    ########################################################################
    #                         Class initialisation                         #
    ########################################################################

    def __init__(self, verbose=False):
        """Preprocessor object for preprocessing flows from pcap files."""
        # Initialise Reader object
        self.reader = Reader(verbose)
        # Initialise Flow object
        self.flows = Flows()

    ########################################################################
    #                       Process files and labels                       #
    ########################################################################

    def process(self, files, labels):
        """Extract data from files and attach given labels.

            Parameters
            ----------
            files : iterable of string
                Paths from which to extract data.

            labels : iterable of int
                Label corresponding to each path.

            Returns
            -------
            X : np.array of shape=(n_samples, n_features)
                Features extracted from files.

            y : np.array of shape=(n_samples,)
                Labels for each flow extracted from files.
            """
        # Initialise X and y
        X, y = list(), list()

        # Loop over all given files
        for file, label in zip(files, labels):
            # Stop processing the remaining files on KeyboardInterrupt.
            try:
                data = np.array(list(self.extract(file).values()))
            except KeyboardInterrupt:
                break
            except Exception:
                print("Reading {} failed".format(file), file=sys.stderr)
                continue
            # Append data to X
            X.append(data)
            # Append label to y
            y.append(np.array([label] * data.shape[0]))

        # Filter empty entries from array
        X = list(filter(lambda x: x.shape[0] != 0, X))
        y = list(filter(lambda x: x.shape[0] != 0, y))

        # Append both X and y
        try:
            X = np.concatenate(X)
            y = np.concatenate(y)
        except Exception:
            X = np.array([], dtype=object)
            y = np.array([], dtype=object)

        # Return result
        return X, y

    ########################################################################
    #                         Process single file                          #
    ########################################################################

    def extract(self, infile):
        """Extract flows from given pcap file.

            Parameters
            ----------
            infile : string
                Path to input file.

            Returns
            -------
            result : dict
                Dictionary of flow_key -> flow.
            """
        # Read packets
        result = self.reader.read(infile)
        # Combine packets into flows
        result = self.flows.combine(result)
        # Return result
        return result

    ########################################################################
    #                             I/O methods                              #
    ########################################################################

    def save(self, outfile, X, y):
        """Save data to given outfile.

            Parameters
            ----------
            outfile : string
                Path of file to save data to.

            X : np.array of shape=(n_samples, n_features)
                Features extracted from files.

            y : np.array of shape=(n_samples,)
                Labels for each flow extracted from files.
            """
        with open(outfile, 'wb') as outfile:
            pickle.dump((X, y), outfile)

    def load(self, infile):
        """Load data from given infile.

            Parameters
            ----------
            infile : string
                Path of file from which to load data.

            Returns
            -------
            X : np.array of shape=(n_samples, n_features)
                Features extracted from files.

            y : np.array of shape=(n_samples,)
                Labels for each flow extracted from files.
            """
        with open(infile, 'rb') as infile:
            return pickle.load(infile)
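
A minimal usage sketch for the Preprocessor class above; the pcap paths, labels and output file are hypothetical placeholders:

if __name__ == "__main__":
    preprocessor = Preprocessor(verbose=True)
    # Hypothetical input captures with one integer label per file.
    X, y = preprocessor.process(
        files=["benign.pcap", "malicious.pcap"],
        labels=[0, 1],
    )
    print("Extracted {} labelled flows".format(len(y)))
    # Persist the extracted features and labels for later reuse.
    preprocessor.save("flows.p", X, y)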
Example #11
def __init__(self, verbose=False):
    """Preprocessor object for preprocessing flows from pcap files."""
    # Initialise Reader object
    self.reader = Reader(verbose)
    # Initialise Flow object
    self.flows = Flows()
Example #12
import tensorflow as tf

from flows import Flows

# compute_apply_gradients() performs a single optimisation step and is defined
# elsewhere in the project; a hypothetical sketch follows this example.


def train(flows, epochs=10):
    optimizer = tf.keras.optimizers.Adam(1e-2)
    for epoch in range(1, epochs + 1):
        loss = compute_apply_gradients(flows, optimizer)

        if epoch % 100 == 0:
            print('Epoch {}, loss: {}'.format(epoch, loss))

    return flows


if __name__ == '__main__':

    # PARAMETERS
    d = 3
    DATA_SHAPE = (5000, d)
    N_FLOWS = 30
    EPOCHS = 10000
    # MISC
    TRAIN = True
    SAVE_MODEL = True
    MODEL_FILENAME = 'temp_weights_eight_schools.h5'

    # Train
    flows = Flows(d=d, n_flows=N_FLOWS, shape=DATA_SHAPE)
    flows(tf.zeros(DATA_SHAPE))  # build model
    if TRAIN:
        flows = train(flows, epochs=EPOCHS)
    if SAVE_MODEL:
        flows.save_weights(MODEL_FILENAME)
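
train() above calls compute_apply_gradients(), which is not included in this example. A rough sketch of what one such optimisation step could look like, assuming the negative-ELBO objective implied by Example #9; target_log_prob (the joint log-density of the model) is a hypothetical placeholder, not the original implementation:

import tensorflow_probability as tfp

def compute_apply_gradients(flows, optimizer):
    # Hypothetical single gradient step on the negative ELBO; the real project
    # defines its own version. `target_log_prob` and the module-level
    # DATA_SHAPE constant are assumptions.
    with tf.GradientTape() as tape:
        z0, zk, log_det_jacobian, mu, log_var = flows(tf.zeros(DATA_SHAPE))
        base = tfp.distributions.Normal(loc=mu, scale=tf.math.exp(0.5 * log_var))
        # Variational log-density of the batch after the flow transformations.
        log_q = tf.reduce_sum(base.log_prob(z0)) - tf.reduce_sum(log_det_jacobian)
        # Negative ELBO, up to the model's unknown normalising constant.
        loss = log_q - tf.reduce_sum(target_log_prob(zk))
    gradients = tape.gradient(loss, flows.trainable_variables)
    optimizer.apply_gradients(zip(gradients, flows.trainable_variables))
    return loss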
Example #13
    def __init__(self, host="http://localhost:1880", strategy=None):

        self.strategy = strategy
        self.host = host
        self.flows = Flows(host, strategy)