Example #1
def create_data(config, data_path):
    dataset = LJSpeech(data_path)

    # Hold out the first valid_size examples for validation; train on the rest.
    train_dataset = SliceDataset(dataset, config["valid_size"], len(dataset))
    train_collator = DataCollector(config["p_pronunciation"])
    train_sampler = RandomSampler(train_dataset)
    train_cargo = DataCargo(train_dataset,
                            train_collator,
                            batch_size=config["batch_size"],
                            sampler=train_sampler)
    train_loader = DataLoader.from_generator(
        capacity=10, return_list=True).set_batch_generator(train_cargo)

    # Validation: pronunciation probability fixed at 1.0, batch size 1,
    # sequential order.
    valid_dataset = SliceDataset(dataset, 0, config["valid_size"])
    valid_collator = DataCollector(1.)
    valid_sampler = SequentialSampler(valid_dataset)
    valid_cargo = DataCargo(valid_dataset,
                            valid_collator,
                            batch_size=1,
                            sampler=valid_sampler)
    valid_loader = DataLoader.from_generator(
        capacity=2, return_list=True).set_batch_generator(valid_cargo)
    return train_loader, valid_loader
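
A hedged call sketch for create_data: the three config keys are exactly the ones the function reads above, while the values and the dataset path are made-up placeholders.

# Hypothetical invocation; the values and the path are illustrative only.
config = {"valid_size": 16, "p_pronunciation": 0.99, "batch_size": 4}
train_loader, valid_loader = create_data(config, "data/LJSpeech-1.1")

Iterating the loaders (for batch in train_loader(): ...) would happen inside whatever dygraph or executor context the surrounding training script sets up.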
Example #2
    n_mels = data_config["n_mels"]
    train_clip_seconds = data_config["train_clip_seconds"]
    transform = Transform(sample_rate, n_fft, win_length, hop_length, n_mels)
    ljspeech = TransformDataset(ljspeech_meta, transform)

    valid_size = data_config["valid_size"]
    ljspeech_valid = SliceDataset(ljspeech, 0, valid_size)
    ljspeech_train = SliceDataset(ljspeech, valid_size, len(ljspeech))

    model_config = config["model"]
    n_loop = model_config["n_loop"]
    n_layer = model_config["n_layer"]
    filter_size = model_config["filter_size"]
    context_size = 1 + n_layer * sum([filter_size**i for i in range(n_loop)])
    print("context size is {} samples".format(context_size))
    train_batch_fn = DataCollector(context_size, sample_rate, hop_length,
                                   train_clip_seconds)
    valid_batch_fn = DataCollector(context_size,
                                   sample_rate,
                                   hop_length,
                                   train_clip_seconds,
                                   valid=True)

    batch_size = data_config["batch_size"]
    train_cargo = DataCargo(ljspeech_train,
                            train_batch_fn,
                            batch_size,
                            sampler=RandomSampler(ljspeech_train))

    # only batch_size=1 is supported for validation
    valid_cargo = DataCargo(ljspeech_valid,
                            valid_batch_fn,
                            batch_size=1,
                            sampler=SequentialSampler(ljspeech_valid))
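
As a quick sanity check on the context_size formula above, here is a standalone numeric example; the hyperparameter values (n_loop=10, n_layer=3, filter_size=2) are illustrative, not taken from any real config.

# Illustrative values only: n_layer groups of n_loop dilated convolutions.
n_loop, n_layer, filter_size = 10, 3, 2
context_size = 1 + n_layer * sum(filter_size**i for i in range(n_loop))
print(context_size)  # 1 + 3 * 1023 = 3070 samples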
Example #3
                          clip_norm)
    ljspeech = TransformDataset(meta, transform)

    # =========================dataiterator=========================
    # use meta data's text length as a sort key for the sampler
    train_config = config["train"]
    batch_size = train_config["batch_size"]
    text_lengths = [len(example[2]) for example in meta]
    sampler = PartialyRandomizedSimilarTimeLengthSampler(text_lengths,
                                                         batch_size)

    # some hyperparameters affect how we process data, so create a data collector!
    model_config = config["model"]
    downsample_factor = model_config["downsample_factor"]
    r = model_config["outputs_per_step"]
    collector = DataCollector(downsample_factor=downsample_factor, r=r)
    ljspeech_loader = DataCargo(
        ljspeech, batch_fn=collector, batch_size=batch_size, sampler=sampler)

    # =========================model=========================
    if args.device == -1:
        place = fluid.CPUPlace()
    else:
        place = fluid.CUDAPlace(args.device)

    with dg.guard(place):
        # =========================model=========================
        n_speakers = model_config["n_speakers"]
        speaker_dim = model_config["speaker_embed_dim"]
        speaker_embed_std = model_config["speaker_embedding_weight_std"]
        n_vocab = en.n_vocab
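
Assuming DataCargo is a plain Python iterable that yields already-collated batches (as the loader setup in Example #1 suggests), a minimal consumption sketch might look like this; none of it is taken from the original script.

# Hypothetical loop; assumes DataCargo yields one collated batch per step.
for i, batch in enumerate(ljspeech_loader):
    if i == 0:
        print("first batch has {} fields".format(len(batch)))
        break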
Example #4
class Simulator(object):

    def __init__(self, network_file_path, configs):
        self.configs = configs
        self.file_path = network_file_path

        # Initialize the event manager
        self.mgr = EventManager()
        self.collector = DataCollector()

        # Read network in from file.
        with open(network_file_path, 'r') as yaml_file:
            network = yaml.safe_load(yaml_file)

        # Initialize the network components
        self.hosts = {}
        for host_name in network['hosts']:
            self.hosts[host_name] = Host(host_name, self, self.collector)

        self.routers = {}
        if 'routers' in network:
            for router_name in network['routers']:
                self.routers[router_name] = Router(router_name, self)

        self.flows = {}
        for flow_dict in network['flows']:
            flow_name = list(flow_dict.keys())[0]
            flow_desc = flow_dict[flow_name]

            self.flows[flow_name] = Flow(
                flow_name,
                self,
                self.collector,
                self.hosts[flow_desc['source']],
                self.hosts[flow_desc['destination']],
                flow_desc['amount'],
                flow_desc['start']
            )

        self.links = {}
        host_prefixes = ['H', 'S', 'T']
        for link_dict in network['links']:
            link_name = list(link_dict.keys())[0]
            link_desc = link_dict[link_name]

            aname = link_desc['anode']
            bname = link_desc['bnode']

            ahost = self.hosts[aname] \
                if aname[0] in host_prefixes else self.routers[aname]
            bhost = self.hosts[bname] \
                if bname[0] in host_prefixes else self.routers[bname]

            link = Link(
                link_name,
                self,
                self.collector,
                link_desc['rate'] / 8.0,
                link_desc['delay'],
                # The buffer size appears twice, presumably one per direction.
                link_desc['buffer'],
                link_desc['buffer'],
                ahost,
                bhost
            )

            self.links[link_name] = link

            ahost.set_link(link)
            bhost.set_link(link)

            self.mgr.add_event(0.0, link.calculate_throughput, [])

        # Add initial events for each of the flows
        for flow in self.flows.values():
            self.mgr.add_event(flow.start_t, flow.send_packet, [])

            if not configs['tcpreno']:
                self.mgr.add_event(flow.start_t, flow.tcp_fast_update, [])

        # Initialize the routing tables
        for router in self.routers.values():
            router.init_routing_table()

        if len(self.routers) > 0:
            self.mgr.add_event(0.0, self.routers['R1'].send_rout_packet, [])
            self.mgr.add_event(self.mgr.t + configs['dynamic_routing_step'],
                               self.start_dynamic_routing, [])

    def get_data(self):
        """Telling all of the objects to report their data."""

        for flow in self.flows.values():
            self.mgr.add_event(0.0, flow.report_rates, [0.1, 0, 0])
            self.mgr.add_event(0.0, flow.report_link_rates, [0.2, {}])

        for link in self.links.values():
            self.mgr.add_event(0.0, link.report_occupancy, [0.1])
            self.mgr.add_event(0.0, link.report_dropped, [0.1])
            self.mgr.add_event(0.0, link.report_throughput, [0.5])

        for host in self.hosts.values():
            self.mgr.add_event(0.0, host.report_rates, [0.1, 0, 0])

    def start_dynamic_routing(self):
        # Initialize the routing tables and send out routing packets
        for router in self.routers.values():
            router.init_routing_table(temp_table=True)
            router.start_dynamic_routing()
            router.send_rout_packet(True)

        self.mgr.add_event(self.mgr.t + self.configs['dynamic_routing_step'],
                           self.start_dynamic_routing, [])

    def check_done_dynamic_routing(self):
        time_step = 0.01
        num_not_done = 0

        for router in self.routers.values():
            if not router.done_d_routing:
                num_not_done += 1

        if num_not_done == 0:
            for router in self.routers.values():
                router.finish_dynamic_routing()

            for link in self.links.values():
                link.time_through_link_update()

    def run(self):

        # Starting data collection
        self.get_data()

        while self.mgr.has_events():
            (t, event, args) = self.mgr.pop_event()
            self.mgr.t = t
            event(*args)

            # Check if all our flows have terminated
            flows_not_done = len(self.flows)
            for flow in self.flows.values():
                if flow.last_acknowledged == flow.max_seq:
                    flows_not_done -= 1

            if flows_not_done == 0:
                break

        print('Done with all events at time: {}'.format(self.mgr.t))

    def plot(self):
        # Get only the file name, without the .yml extension
        file_name = self.file_path.split("/")[-1]
        if file_name.endswith(".yml"):
            file_name = file_name[:-len(".yml")]

        # Print the plots to pdf
        plot_dir = "plots/"

        # Whether it's Reno or FAST TCP
        if self.configs['tcpreno']:
            mode = '_reno'
        else:
            mode = '_fast'

        # PDF output page
        pdf_path = plot_dir + file_name + mode + '.pdf'
        pp = PdfPages(pdf_path)

        print('Plotting to {}'.format(pdf_path))

        collecs = sorted(self.collector.collection.keys())

        for g in collecs:
            self.collector.graph_data(pp, g, g)

        if file_name == 'testcase0':
            # The congestion window for the flow
            self.collector.graph_data(pp, 'F1 Congestion Window', 'F1_cwnd')

        elif file_name == 'testcase1':
            # The congestion window for the flow
            self.collector.graph_data(pp, 'F1 Congestion Window', 'F1_cwnd')

            # The link rates of L1 and L2
            self.collector.combine_graphs(
                pp, 'Test Case 1 Link Rates',
                ['F1_L1_rate', 'F1_L2_rate'],
                labels=['L1', 'L2']
            )

            # The link rates of L3 and L4
            self.collector.combine_graphs(
                pp,
                'Test Case 1 Link Rates',
                ['F1_L3_rate', 'F1_L4_rate'],
                labels=['L3', 'L4']
            )

        elif file_name == 'testcase2':
            self.collector.combine_graphs(
                pp,
                'Test Case 2 Flow Congestion Windows',
                ['F1_cwnd', 'F2_cwnd', 'F3_cwnd'],
                labels=['F1', 'F2', 'F3']
            )

            self.collector.combine_graphs(
                pp,
                'Test Case 2 Buffer Occupancy',
                ['L1_a_occupancy', 'L2_a_occupancy', 'L3_a_occupancy'],
                labels=['L1a', 'L2a', 'L3a']
            )

        pp.close()
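
For context, a minimal hedged driver for the Simulator class above: the YAML path is a made-up placeholder, and the two config keys (tcpreno, dynamic_routing_step) are the only ones the class reads.

# Hypothetical driver; the path and the step value are illustrative.
configs = {'tcpreno': True, 'dynamic_routing_step': 5.0}
sim = Simulator('testcases/testcase1.yml', configs)
sim.run()   # process events until every flow is fully acknowledged
sim.plot()  # writes plots/testcase1_reno.pdf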