def test_costs_non_free():
    """The same workload must cost more when the free tier is disabled."""
    reqs = 1000000
    lam_free = awscosts.Lambda()
    lam_paid = awscosts.Lambda(use_free_tier=False)
    cost_free = lam_free.get_cost(reqs)
    cost_paid = lam_paid.get_cost(reqs)
    assert cost_free < cost_paid, 'Cost1 {} < cost2 {}'.format(cost_free, cost_paid)
def get_lambda_cost(df: pd.DataFrame, MB_per_request=128, ms_per_req=200):
    """Append AWS Lambda cost columns to a per-time-unit requests DataFrame.

    Args:
        df (pandas.DataFrame): one row per time unit, with a 'requests' column.
        MB_per_request (int): memory consumption (in MiB) of the Lambda
            function; if it is not an actual AWS flavour, a greater valid
            one is chosen by awscosts.Lambda.
        ms_per_req (int): duration of the Lambda function in milliseconds.

    Returns:
        The same DataFrame with two new columns:
            'lambda_cost': cost of the requests in that row.
            'lambda_sum': cumulative cost up to and including that row.
    """
    pricer = awscosts.Lambda(MB_per_req=MB_per_request, ms_per_req=ms_per_req)
    # Per-row cost from the 'requests' column, then a running total.
    df['lambda_cost'] = df['requests'].apply(lambda reqs: pricer.get_cost(reqs=reqs))
    df['lambda_sum'] = df['lambda_cost'].cumsum()
    df = df.round({'lambda_cost': 2, 'lambda_sum': 2})
    return df
def test_costs_free():
    """Doubling requests until cumulative cost reaches $1 lands on 6553600."""
    my_lambda = awscosts.Lambda()
    reqs, cost = 100, 0
    while cost < 1:
        cost += my_lambda.get_cost(reqs)
        reqs *= 2
    assert reqs == 6553600, 'Requests needed to get 1$: {}'.format(reqs)
def generate_costs_in_month(requests_range, flavors, memory, time,
                            throughput_ratio=1):
    """Generates EC2 and Lambda costs in a month by a list of requests per second.

    Args:
        requests_range (:obj:`list` of :obj:`int`): list of reqs/s.
        flavors (:obj:`list` of :obj:`str`): list of valid EC2 flavors.
        memory (int): memory consumption (in MiB) of each request; passed
            as ``MB_per_req`` to both the EC2 and Lambda cost models.
        time (int): duration (in milliseconds) of the Lambda request.
        throughput_ratio (int, optional): passed through to ``awscosts.EC2``;
            defaults to 1.

    Returns:
        Cost dict:
            - 'lambda':
                - REQS/S_1: COST_IN_DOLLARS
                - REQS/S_2: COST_IN_DOLLARS
                ...
                - REQS/S_n: COST_IN_DOLLARS
            - FLAVOR_1:
                - REQS/S_1: COST_IN_DOLLARS
                ...
            ...
    """
    SECONDS_IN_A_MONTH = 3600 * 24 * 30
    cost = dict()

    # EC2: per-second cost scaled to a month, one sub-dict per flavor.
    for flavor in flavors:
        myec2 = awscosts.EC2(
            flavor,
            MB_per_req=memory,
            ms_per_req=time,
            throughput_ratio=throughput_ratio,
        )
        cost[flavor] = {
            reqs_per_second:
                myec2.get_cost_per_second(reqs_per_second) * SECONDS_IN_A_MONTH
            for reqs_per_second in requests_range
        }

    # Lambda: billed per request; reset the free tier for each rate so every
    # entry is an independent monthly estimate.
    mylambda = awscosts.Lambda(
        MB_per_req=memory,
        ms_per_req=time,
    )
    cost['lambda'] = {
        reqs_per_second:
            mylambda.get_cost(reqs_per_second * SECONDS_IN_A_MONTH,
                              reset_free_tier=True)
        for reqs_per_second in requests_range
    }
    return cost
def test_no_free_tier():
    """Disabling the free tier zeroes out the free-tier allowance."""
    lam = awscosts.Lambda(use_free_tier=False)
    assert (0, 0) == lam.free_tier
def test_non_standard_mem():
    """A non-flavour memory size (250) is rounded up to the next flavour."""
    lam = awscosts.Lambda(MB_per_req=250)
    assert lam.mem == 256
def test_init():
    """The default Lambda memory flavour is 128 MiB."""
    assert awscosts.Lambda().mem == 128