Exemplo n.º 1
0
    def LoSC_Chang(arrivals, sc: ServiceCurve, weights, foi, M):
        """Chang's leftover service curve for flow `foi` w.r.t. subset M.

        M contains the indices of the flows kept together with the foi;
        every flow outside M is first deconvolved with its PG leftover
        curve and subtracted from `sc` as cross traffic.
        Returns a RateLatency curve.
        """
        # Deconvolve each flow not in M with its PG leftover curve:
        # a token bucket (b, r) becomes (b + r * delay, r).
        deconvolved = []
        for ix, arr in enumerate(arrivals):
            if ix in M:
                continue
            pg_delay = GPS.LoSC_PG(sc, ix, weights).delay
            deconvolved.append(
                TokenBucket(b=arr.b + arr.r * pg_delay, r=arr.r, t=1))
        aggregate = ArrivalCurve.aggregateTokenBuckets(*deconvolved)

        # Leftover service after subtracting the aggregated cross traffic.
        leftover_rate = sc.rate - aggregate.r
        sc_minus_gamma: ServiceCurve = RateLatency(
            R=leftover_rate,
            t=1,
            T=(aggregate.b + (sc.rate * sc.delay)) / leftover_rate)

        # The foi receives its weighted share among the flows in M.
        weights_in_M = [w for ix, w in enumerate(weights) if ix in M]
        res = RateLatency(R=(weights[foi] / sum(weights_in_M)) *
                          sc_minus_gamma.rate,
                          T=sc_minus_gamma.delay,
                          t=1)

        # sanity check for including PG: when M covers every flow,
        # Chang's bound must coincide with the plain PG bound.
        if len(M) == len(arrivals):
            loscPG = GPS.LoSC_PG(sc, foi, weights)
            if not (res.rate == loscPG.rate and res.delay == loscPG.delay):
                raise Exception(
                    "sanity check failed. Chang doesn't include PG")

        return res
Exemplo n.º 2
0
    def LoSC_Bouillard(arrivals, sc: ServiceCurve, weights, new_foi, j):
        """Bouillard's leftover service curve for the flow at `new_foi`.

        `j` is the cut point into the (pre-sorted) flow list: flows with
        1-based index in [1, j] are aggregated as cross traffic and
        subtracted from `sc`; flows in [j+1, new_foi] share the leftover
        in proportion to their weights.  Returns a RateLatency curve.
        """
        # Work on deep copies so the caller's lists are untouched; a
        # dummy zero flow/weight is prepended so indices become 1-based,
        # and new_foi is shifted accordingly (undone before returning).
        arrivals_current = copy.deepcopy(arrivals)
        weights_current = copy.deepcopy(weights)
        arrivals_current.insert(0, TokenBucket(r=0, b=0))
        weights_current.insert(0, 0)
        new_foi += 1
        # Cross traffic: flows with shifted index 1..j.
        arr_k = [
            arr for ix, arr in enumerate(arrivals_current)
            if ix in range(1, j + 1)
        ]
        arrivals_currentAgg = ArrivalCurve.aggregateTokenBuckets(*arr_k)
        # Weights of the flows that share the leftover: j+1..new_foi.
        weights_current_k = [
            w for ix, w in enumerate(weights_current)
            if ix in range(j + 1, new_foi + 1)
        ]
        # Leftover curve after subtracting the aggregated cross traffic.
        sc_minus_gamma: ServiceCurve = RateLatency(
            R=sc.rate - arrivals_currentAgg.r,
            t=1,
            T=(arrivals_currentAgg.b +
               (sc.rate * sc.delay)) / (sc.rate - arrivals_currentAgg.r))

        # Weighted share of the leftover for the flow of interest.
        ret = RateLatency(
            R=(weights_current[new_foi] / sum(weights_current_k)) *
            sc_minus_gamma.rate,
            t=1,
            T=sc_minus_gamma.delay)
        new_foi -= 1

        if j == 0:
            # all flows are included so Bouillard and PG has to match
            # NOTE(review): this comparison reads .R/.T while the rest of
            # the file compares .rate/.delay — confirm RateLatency exposes
            # both attribute spellings.
            pg_losc = GPS.LoSC_PG(sc, new_foi, weights_current_k)
            if pg_losc.R == ret.R and pg_losc.T == ret.T:
                # all fine
                pass
            else:
                raise Exception("Bouillard does not include PG")

        return ret
Exemplo n.º 3
0
def setupInputs(number_of_flows, weight_mode: WeightsMode, target_util=0.75):
    """Build a GPS scenario with random-burst token-bucket arrivals.

    Each flow gets burst b ~ U(0.1, 1.5) and rate r = 1 (seeded, so the
    scenario is reproducible); the server rate is chosen so aggregate
    utilization equals `target_util`.

    Returns (t, alphas, alphas_weights, foi_index, beta).
    Raises Exception for an unrecognized weight_mode.
    """
    t = 1
    # b=random(1,5) Mb, r=random(3,30) Mb/s, R=2400 Mb/s (2.4 Gb/s), T=2.0 seconds
    #  [max number of flows would be 79 for stability condition to be satisfied]
    random.seed(10)
    # random burst in [0.1, 1.5], unit rate for every flow
    alphas = [
        TokenBucket(b=random.uniform(0.1, 1.5), r=1, t=t)
        for _ in range(number_of_flows)
    ]

    if weight_mode == WeightsMode.EQUAL:
        alphas_weights = [1 for _ in alphas]
    elif weight_mode == WeightsMode.RPPS:
        alphas_weights = [__a.r for __a in alphas]
    elif weight_mode == WeightsMode.RANDOM:
        alphas_weights = [random.uniform(0.0, 1.0) for _ in alphas]
    else:
        # BUGFIX: `"..." + weight_mode` raised TypeError (str + enum)
        # instead of the intended message; format explicitly.
        raise Exception(f"WeightMode not recognized : {weight_mode}")

    foi_index = 1
    agg_arr_rate = sum(a.r for a in alphas)
    # latency is usually boring, but add the sanity check, if safe set it to zero
    beta = RateLatency(R=agg_arr_rate / target_util, T=0.0, t=t)
    # stability: server must be strictly faster than the aggregate input
    assert beta.rate - agg_arr_rate > 0, 'sys not stable'

    return t, alphas, alphas_weights, foi_index, beta
Exemplo n.º 4
0
    def LoSC_Bouillard_optimizeByDelayBound_new(arrivals, sc: ServiceCurve,
                                                weights, foi):
        """Search all cut points j for the Bouillard leftover curve of
        `foi` that minimises the token-bucket/rate-latency delay bound.

        The non-foi flows are ordered in place via GPS.Bouillard_ti_new
        (selection-sort style swaps), the foi is re-appended last, and
        every j in [0, new_foi) is evaluated; the candidate with the
        smallest non-negative delay bound wins.

        Returns (best RateLatency curve or None, 'best_j=<j>').
        NOTE: mutates the `arrivals` and `weights` lists passed in.
        """
        assert (
            arrivals and len(arrivals) and weights and len(weights)
            and len(arrivals) == len(weights) and weights[foi]
        ), 'pre-conditions failed for GPS.LoSC_Bouillard_optimizeByDelayBound(...)'
        # re-indexing and sorting: take the foi out, prepend a dummy zero
        # flow/weight so the remaining flows are 1-based while sorting
        arr_foi = arrivals.pop(foi)
        weights_foi = weights.pop(foi)

        arrivals.insert(0, TokenBucket(r=0, b=0))
        weights.insert(0, 0)

        # selection sort: slot i+1 receives the flow Bouillard_ti_new
        # picks among the not-yet-placed ones
        for i in range(len(arrivals) - 1):
            ti_p1, ti_p1_ix = GPS.Bouillard_ti_new(weights, sc, arrivals, i)
            # swap
            ai_p1 = arrivals[i + 1]
            wi_p1 = weights[i + 1]
            arrivals[i + 1] = arrivals[ti_p1_ix]
            arrivals[ti_p1_ix] = ai_p1
            weights[i + 1] = weights[ti_p1_ix]
            weights[ti_p1_ix] = wi_p1
            # after the loop "arrivals" and "weights" are already sorted

        arrivals.pop(0)
        weights.pop(0)

        # the flow of interest goes last; its new index is new_foi
        arrivals.append(arr_foi)
        weights.append(weights_foi)
        new_foi = len(arrivals) - 1

        beta_i = None
        min_delay = None
        # in terms of delay
        best_j = None
        _iter = 1
        start = datetime.datetime.now()
        for j in range(new_foi):
            # BUGFIX: was `_iter % 5 <= 5`, which is always true, so the
            # progress bar was redrawn on every single iteration; report
            # only every 5th iteration as intended.
            if _iter % 5 == 0:
                clear_last_line()
                logging.debug(f"j: {_iter} of {new_foi}")
                percentage = round(_iter / new_foi * 100)
                print(
                    f"calculating {'#'*percentage}{'-'*(abs(100-percentage))} {percentage}%"
                )

            beta_candidate = GPS.LoSC_Bouillard_new(arrivals, sc, weights,
                                                    new_foi, j)
            delay_candidate = NC.delay_bound_token_bucket_rate_latency(
                arrivals[new_foi], beta_candidate)
            if min_delay is None or delay_candidate <= min_delay:
                # we ignore negative delay bounds as they are not reasonable
                if delay_candidate >= 0:
                    beta_i = beta_candidate
                    min_delay = delay_candidate
                    best_j = j
            _iter += 1

        write(f"j: {_iter-1} of {new_foi}")
        duration = datetime.datetime.now() - start
        print_write(
            "total computation time: ", ":".join([
                str(round(float(i))).zfill(2) for i in str(duration).split(":")
            ]))
        return beta_i, f'best_j={best_j}'
Exemplo n.º 5
0
def homo_arr_analysis_OBSCRate(number_of_flows):
    """Compare leftover-service-curve strategies (PG, Chang, Bouillard,
    Burchard-Liebeherr) on a homogeneous scenario, optimising by the
    leftover service-curve rate, then print the results and a markdown
    table row.
    """
    result = dict()
    t = 1
    # b=1 Mb, r=30 Mb/s, C=2400 Mb/s (2.4 Gb/s) [max number of flows would be 79 for stability
    # condition to be satisfied]
    alphas = [TokenBucket(b=1, r=30, t=t) for _ in range(number_of_flows)]
    # let's use RPPS
    __arrival_rates = [a.r for a in alphas]
    alphas_weights = __arrival_rates
    foi_index = 1
    beta = RateLatency(R=2400, T=2, t=t)
    # stability
    assert beta.rate - (sum(__arrival_rates)) > 0, 'sys not stable'

    # Baseline: plain PG leftover curve and its delay bound for the foi.
    result["PG (General)"] = {
        'LoSC': GPS.LoSC_PG(sc=beta, index=foi_index, weights=alphas_weights)
    }
    result["PG (General)"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index], beta=result['PG (General)']['LoSC'])

    # Chang: optimise over length-distinct subsets M of the flows.
    subsets = length_distinct_subsets(alphas)
    result["Chang (homogeneous-optimised)"] = {
        'LoSC':
        GPS.LoSC_Chang_optimizeByMaxOverM(arrivals=alphas,
                                          sc=beta,
                                          weights=alphas_weights,
                                          foi=foi_index,
                                          subsetlist=subsets)
    }
    result["Chang (homogeneous-optimised)"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index],
            beta=result['Chang (homogeneous-optimised)']['LoSC'])

    result['Bouillard'] = {
        'LoSC':
        GPS.LoSC_Bouillard_optimizeByMaxOverM(arrivals=alphas,
                                              sc=beta,
                                              weights=alphas_weights,
                                              foi=foi_index)
    }
    result['Bouillard'][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index], beta=result['Bouillard']['LoSC'])

    # Burchard-Liebeherr: subsets are drawn from the flow indices with
    # the foi removed (M excludes the flow of interest here).
    arrivals_index = list(range(len(alphas)))
    arrivals_index.pop(foi_index)
    subset_Burchard_Liebeherr = length_distinct_subsets(
        arrivals_index, return_type=ReturnType.ITEM)
    result["Burchard, Liebeherr"] = {
        'LoSC':
        GPS.LoSC_BL_optimizeByMaxOverM(arrivals=alphas,
                                       sc=beta,
                                       weights=alphas_weights,
                                       foi=foi_index,
                                       subsetlist=subset_Burchard_Liebeherr)
    }
    result["Burchard, Liebeherr"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index],
            beta=result['Burchard, Liebeherr']['LoSC'])

    # presumably the sleep lets the progress output above settle before
    # the summary is printed — TODO confirm
    time.sleep(0.5)
    print()
    print("number of arrivals:", len(alphas))
    for key, value in result.items():
        print(f'{key: <30}', ": ", value)

    print('\n', 'markdown table row:')
    print(
        f"| {len(alphas)} | homogeneous | doesn't matter | "
        f"rate={round(result['PG (General)']['LoSC'].rate,4)} "
        f"latency={round(result['PG (General)']['LoSC'].delay,4)} "
        f"delay_bound={result['PG (General)']['delay bound']} | "
        f"rate={round(result['Chang (homogeneous-optimised)']['LoSC'].rate,4)} "
        f"latency={round(result['Chang (homogeneous-optimised)']['LoSC'].delay,4)} "
        f"delay_bound={result['Chang (homogeneous-optimised)']['delay bound']} |"
        f"rate={round(result['Bouillard']['LoSC'].rate,4)} "
        f"latency={round(result['Bouillard']['LoSC'].delay,4)} "
        f"delay_bound={result['Bouillard']['delay bound']} | "
        f"rate={round(result['Burchard, Liebeherr']['LoSC'].rate,4)} "
        f"latency={round(result['Burchard, Liebeherr']['LoSC'].delay,4)} "
        f"delay_bound={result['Burchard, Liebeherr']['delay bound']} |")
Exemplo n.º 6
0
def homo_arr_analysis_OBDB(number_of_flows):
    """Compare leftover-service-curve strategies (PG, Chang, Bouillard,
    Burchard-Liebeherr) on a homogeneous scenario, optimising by the
    resulting delay bound, then print the results.
    """
    result = dict()
    t = 1
    # b=1 Mb, r=30 Mb/s, C=2400 Mb/s (2.4 Gb/s)
    # [max number of flows would be 79 for stability condition to be satisfied]
    random.seed(30)
    alphas = [TokenBucket(b=2, r=30, t=t) for _ in range(number_of_flows)]
    # weights have to be equal1
    alphas_weights = [1 for _ in alphas]
    foi_index = 1
    # beta = WorkConservingLink(c=2400,t=t)
    beta = RateLatency(R=2400, T=2.0, t=t)
    # stability
    assert beta.rate - (sum([a.r for a in alphas])) > 0, 'sys not stable'

    # Baseline: plain PG leftover curve and its delay bound for the foi.
    result["PG (General)"] = {
        'LoSC': GPS.LoSC_PG(sc=beta, index=foi_index, weights=alphas_weights)
    }
    result["PG (General)"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index], beta=result['PG (General)']['LoSC'])

    subsets = length_distinct_subsets(alphas,
                                      return_type=ReturnType.INDEX,
                                      subseteq=True)
    result["Chang (homogeneous-optimised)"] = {
        'LoSC':
        GPS.LoSC_Chang_optimizeByDelayBound(arrivals=alphas,
                                            sc=beta,
                                            weights=alphas_weights,
                                            foi=foi_index,
                                            subsetlist=subsets)
    }
    # the *_optimizeByDelayBound helpers return a tuple whose first
    # element is the curve, hence the [0] below
    result["Chang (homogeneous-optimised)"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index],
            beta=result['Chang (homogeneous-optimised)']['LoSC'][0])

    # Bouillard mutates its input lists, so pass deep copies.
    result['Bouillard'] = {
        'LoSC':
        GPS.LoSC_Bouillard_optimizeByDelayBound(
            arrivals=copy.deepcopy(alphas),
            sc=beta,
            weights=copy.deepcopy(alphas_weights),
            foi=foi_index)
    }
    result['Bouillard'][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index], beta=result['Bouillard']['LoSC'][0])

    # we change N\{i} to N such that always i in M to make its semantic consistent with chang
    # semantic
    _subset_BL = length_distinct_subsets(alphas,
                                         return_type=ReturnType.INDEX,
                                         subseteq=True)
    subset_BL = list(filter(lambda x: foi_index in x, _subset_BL))
    result["Burchard, Liebeherr"] = {
        'LoSC':
        GPS.LoSC_BL_optimizeByDelayBound(arrivals=alphas,
                                         sc=beta,
                                         weights=alphas_weights,
                                         foi=foi_index,
                                         subsetlist=subset_BL)
    }
    result["Burchard, Liebeherr"][
        'delay bound'] = NC.delay_bound_token_bucket_rate_latency(
            alpha=alphas[foi_index],
            beta=result['Burchard, Liebeherr']['LoSC'][0])

    # presumably the sleep lets the progress output above settle before
    # the summary is printed — TODO confirm
    time.sleep(0.5)
    print()
    print("number of arrivals:", len(alphas))
    for key, value in result.items():
        print(f'{key: <30}', ": ", value)
Exemplo n.º 7
0
def setupInputs(number_of_flows,
                weight_mode: WeightsMode,
                target_util=0.75,
                seed=None,
                perFlowStability="all",
                flow_bursts=None,
                flow_rates=None,
                flow_weights=None,
                server_rate=None,
                server_delay=None):
    """Build a configurable GPS scenario.

    Flow rates/bursts/weights may be supplied explicitly; otherwise
    defaults are generated (unit rates, a fixed burst table, weights
    per `weight_mode`).  When `target_util` is a float the server is
    sized/adjusted for stability of all flows or just the foi,
    depending on `perFlowStability`.

    Returns (t, alphas, flow_weights, foi_index, beta).
    Raises Exception for an unrecognized weight_mode.
    """
    t = 1
    # BUGFIX: the list parameters used mutable defaults ([]); use None
    # sentinels instead.  `if not x` below covers both None and [].
    if seed:
        random.seed(seed)
    # b=random(1,5) Mb, r=random(3,30) Mb/s, R=2400 Mb/s (2.4 Gb/s), T=2.0 seconds
    #  [max number of flows would be 79 for stability condition to be satisfied]
    if not flow_rates:
        flow_rates = [1.0 for _ in range(number_of_flows)]
    if not flow_bursts:
        # fixed burst table reused across experiments for reproducibility
        flow_bursts = [
            1.40, 1.5, 1.41, 0.28, 0.19, 1.07, 1.11, 1.01, 0.34, 1.04
        ]
    alphas = [
        TokenBucket(r=flow_rates[_i], b=flow_bursts[_i], t=t)
        for _i in range(number_of_flows)
    ]

    if weight_mode == WeightsMode.EQUAL:
        flow_weights = [1 for _ in alphas]
    elif weight_mode == WeightsMode.RPPS:
        flow_weights = [__a.r for __a in alphas]
    elif weight_mode == WeightsMode.RANDOM:
        flow_weights = [random.uniform(0.0, 1.0) for _ in alphas]
    elif weight_mode == WeightsMode.FIX:
        if not flow_weights:
            flow_weights = [
                0.95, 0.20, 0.25, 0.96, 0.61, 0.91, 0.33, 0.98, 0.72, 0.78
            ]
    else:
        # BUGFIX: `"..." + weight_mode` raised TypeError (str + enum)
        # instead of the intended message; format explicitly.
        raise Exception(f"WeightMode not recognized : {weight_mode}")

    foi_index = 1
    if not isinstance(server_rate, float):
        # no explicit rate given: size the server for target_util
        agg_arr_rate = sum(a.r for a in alphas)
        server_rate = agg_arr_rate / target_util
    if not server_delay:
        server_delay = 0.0
    # latency is usually boring, but add the sanity check, if safe set it to zero
    beta = RateLatency(R=server_rate, T=server_delay, t=t)
    if isinstance(target_util, float):
        if perFlowStability == "all":
            # fix per flow stability
            fixPerFLowStability_all(beta, alphas, flow_weights, target_util)
            # total stability
            assert beta.rate - (sum([a.r
                                     for a in alphas])) > 0, 'sys not stable'
        else:
            # fix stability only for foi
            fixPerFLowStability_foi(beta, foi_index, alphas, flow_weights,
                                    target_util)

    return t, alphas, flow_weights, foi_index, beta