Code example #1 — file: CeleryLambda.py (project: Heronalps/Seneca)
    def run(self):
        """Benchmark Lambda invocations for `batch_number` rounds.

        Each round clears the function's logs, fires `invoke_time`
        invocations using the (celery_async, lambda_async) mode pair,
        records wall-clock dispatch time in milliseconds, waits for the
        CloudWatch logs to settle, then reports via `show_result`.
        """
        for _batch in range(self.batch_number):
            clean_logs(self.lambda_path)

            if not self.celery_async:
                # Sequential invocations issued directly from this host.
                start_time = time.time()
                if not self.lambda_async:
                    for _call in range(self.invoke_time):
                        invoke_lambda(function_name=self.lambda_name,
                                      sync=True,
                                      payload=self.sync_payload,
                                      decoder=self.decoder)
                else:
                    # Fire-and-forget ("Event") invocations.
                    for _call in range(self.invoke_time):
                        invoke_lambda(function_name=self.lambda_name,
                                      sync=False,
                                      payload=self.async_payload)
                host_execu_time = 1000 * (time.time() - start_time)
            else:
                # Invocations fanned out through a Celery group.
                start_time = time.time()
                if not self.lambda_async:
                    tasks = group(
                        invoke_lambda.s(function_name=self.lambda_name,
                                        sync=True,
                                        payload=self.sync_payload,
                                        decoder=self.decoder)
                        for _ in range(self.invoke_time))
                    print("===Async Tasks start===")
                    tasks.apply_async()
                    # NOTE(review): results are deliberately not joined here,
                    # so this measures dispatch time only.
                    host_execu_time = 1000 * (time.time() - start_time)
                    print("===Async Tasks end===")
                else:
                    tasks = group(
                        invoke_lambda.s(function_name=self.lambda_name,
                                        sync=False,
                                        payload=self.async_payload)
                        for _ in range(self.invoke_time))
                    print("===Async Tasks start===")
                    async_result = tasks.apply_async()
                    async_result.join()
                    host_execu_time = 1000 * (time.time() - start_time)
                    print("===Async Tasks end===")

            # Give CloudWatch time to flush the invocation logs.
            time.sleep(25)
            show_result(self.lambda_path, self.celery_async, self.lambda_async,
                        host_execu_time)
Code example #2 — file: CeleryLambda.py (project: Heronalps/Seneca)
 def sqs_trigger(self):
     """Drive the Lambda through SQS and report timing per batch.

     Publishes `invoke_time` "refreshConfig" messages per batch to the
     container-test queue; the queue consumer is expected to invoke the
     Lambda using the requested invocation type.  Waits 25s for logs to
     flush before reporting via `show_result`.
     """
     client = boto3.client('sqs')
     # The consumer reads this to pick fire-and-forget vs. synchronous.
     mode = "Event" if self.lambda_async else "RequestResponse"
     # The message body is invariant, so serialize it once up front.
     message = json.dumps({
         "messageType": "refreshConfig",
         "invokeType": mode
     })
     for _batch in range(self.batch_number):
         self.identifiers = []
         clean_logs(self.lambda_path)
         start_time = time.time()
         for _msg in range(self.invoke_time):
             client.send_message(
                 QueueUrl=
                 'https://sqs.us-west-2.amazonaws.com/603495292017/container-test-queue',
                 MessageBody=message)
         host_execu_time = 1000 * (time.time() - start_time)
         # Give CloudWatch time to flush the invocation logs.
         time.sleep(25)
         show_result(self.lambda_path, self.celery_async, self.lambda_async,
                     host_execu_time)
Code example #3 — file: run_centaurus.py (project: Heronalps/Seneca)
    args.experiment_time,
    "max_k":
    1,
    "covars": [
        "full-tied", "full-untied", "diag-tied", "diag-untied", "spher-tied",
        "spher-untied"
    ],
    "columns": ["Dimension 1", "Dimension 2"],
    "scale":
    True,
    "s3_file_key":
    "normal.csv"
}
lambda_name = "/aws/lambda/worker"

# Run the Centaurus job `batch_number` times, timing each submission.
for _batch in range(args.batch_number):
    print("===Centaurus jobs start===")
    # Fresh logs and a clean task table so each run's metrics are isolated.
    clean_logs(lambda_name)
    clean_dynamodb('kmeansservice', 'job_id', 'task_id')

    t0 = time.time()
    invoke_lambda(function_name='create_job',
                  sync=True,
                  payload=payload,
                  decoder='utf-8')
    host_execu_time = 1000 * (time.time() - t0)
    # Allow CloudWatch logs to flush before collecting results.
    time.sleep(25)

    show_result(lambda_name, True, True, host_execu_time)
    print("===Centaurus jobs end===")
Code example #4 — project: Heronalps/Seneca
def grid_search_controller(config_path):
    """Fan out a regression grid search over Lambda via Celery.

    Loads the config at `config_path`, builds one invocation payload per
    (dataset, target) combination, invokes the worker Lambda for each in
    parallel through a Celery group, then selects the event with the
    lowest reported metric and parses the worker's CloudWatch logs.

    Fixes vs. original: removed ~20 lines of commented-out dead code and
    the unused `metrics` accumulator.  Behavior is otherwise unchanged.
    """
    # Dynamically import the config module from config_path.
    config = load(config_path)

    # The worker Lambda's name lives in the Hyperparameter section.
    LAMBDA_NAME = getattr(config.Hyperparameter, "LAMBDA_NAME")

    # Clean the function's logs so this run's CloudWatch entries can be
    # parsed in isolation afterwards.
    clean_logs('/aws/lambda/' + LAMBDA_NAME)

    # Dynamically load the dataset / target parameters.
    TARGETS = getattr(config.Hyperparameter, 'TARGETS')
    DATASETS = list(getattr(config.Hyperparameter, 'DATASETS'))

    # One invocation payload per parameter combination.
    payload_list = create_event(config, DATASETS, TARGETS)

    min_metric = float('inf')
    chosen_model_event = None

    start = time.time()
    print ("=====Time Stamp======")
    print (start)
    job = group(invoke_lambda.s(
                    function_name = LAMBDA_NAME,
                    sync = True,
                    payload = payload
                    ) for payload in payload_list)
    print("===Async Tasks start===")
    result = job.apply_async()
    # Persist the group in the result backend, restore it, and poll until
    # every task has finished before collecting results.
    result.save()
    from celery.result import GroupResult
    saved_result = GroupResult.restore(result.id)

    while not saved_result.ready():
        time.sleep(0.1)
    model_list = saved_result.get(timeout=None)

    print("===Async Tasks end===")
    print (time.time() - start)

    # Keep the event whose reported metric is smallest (lower is better).
    for item in model_list:
        payload = item['Payload']
        if payload['metric'] < min_metric:
            chosen_model_event = payload['event']
            min_metric = payload['metric']

    print (chosen_model_event)

    from src.celery_lambda import measurement
    measurement.parse_log("/aws/lambda/multi_regression_worker")
Code example #5 — project: Heronalps/Seneca
def grid_search_controller(config_path):
    """Grid-search Prophet hyperparameters on Lambda, then forecast.

    Loads the config at `config_path`, builds one invocation payload per
    hyperparameter/cross-validation combination, fans the invocations out
    through a Celery group, selects the event with the lowest average
    metric, and finally re-invokes the Lambda with a non-zero forecast
    horizon so the winning model uploads its graphs to S3.

    Fixes vs. original: removed commented-out dead code, the unused
    `metrics` accumulator, and replaced manual append loops with
    comprehensions.  Behavior is otherwise unchanged.
    """
    # Dynamically import the config module from config_path.
    config = load(config_path)

    # The worker Lambda's name lives in the Cross_Validation section.
    LAMBDA_NAME = getattr(config.Cross_Validation, "LAMBDA_NAME")

    # Clean the function's logs so this run's CloudWatch entries can be
    # parsed in isolation afterwards.
    clean_logs('/aws/lambda/' + LAMBDA_NAME)

    # Config sections expose their settings as UPPERCASE attributes.
    PARAMETERS = [key for key in dir(config.Hyperparameter) if key.isupper()]
    CV_SETTINGS = [key for key in dir(config.Cross_Validation) if key.isupper()]

    # One invocation payload per hyperparameter combination.
    payload_list = create_event(config, PARAMETERS, CV_SETTINGS)

    min_metric = float('inf')
    chosen_model_event = None

    start = time.time()
    print ("=====Time Stamp======")
    print (start)
    job = group(invoke_lambda.s(
                    function_name = LAMBDA_NAME,
                    sync = True,
                    payload = payload
                    ) for payload in payload_list)
    print("===Async Tasks start===")
    result = job.apply_async()
    # Persist the group in the result backend, restore it, and poll until
    # every task has finished before collecting results.
    result.save()
    from celery.result import GroupResult
    saved_result = GroupResult.restore(result.id)

    while not saved_result.ready():
        time.sleep(0.1)
    model_list = saved_result.get(timeout=None)

    print("===Async Tasks end===")
    print (time.time() - start)

    # Keep the event with the lowest cross-validation metric.
    for item in model_list:
        payload = item['Payload']
        if payload['average_metric'] < min_metric:
            chosen_model_event = payload['event']
            min_metric = payload['average_metric']

    from src.celery_lambda import measurement
    measurement.parse_log("/aws/lambda/prophet_worker")

    # Non-zero forecast period makes lambda upload graphs to s3
    chosen_model_event['forecast'] = getattr(config.Cross_Validation, "FORECAST")

    # Re-invoke synchronously with the winning configuration.
    response = invoke_lambda(function_name = LAMBDA_NAME,
                             sync=True,
                             payload=chosen_model_event)
    print ("=======The Execution Time===========")
    print (time.time() - start)
    print (response)
Code example #6 — project: Heronalps/Seneca
def grid_search_controller(config_path):
    """Grid-search neural-network hyperparameters on Lambda via Celery.

    Loads the config at `config_path`, builds one invocation payload per
    hyperparameter combination, fans the invocations out through a Celery
    group, then selects the event with the HIGHEST metric (the metric is
    an accuracy score, so larger is better) and parses the worker's
    CloudWatch logs.

    Fixes vs. original: removed commented-out dead code, the unused
    `metrics` accumulator, and replaced manual append loops with
    comprehensions.  Behavior is otherwise unchanged.
    """
    # Dynamically import the config module from config_path.
    config = load(config_path)

    # The worker Lambda's name lives in the Config section.
    LAMBDA_NAME = getattr(config.Config, "LAMBDA_NAME")

    # Clean the function's logs so this run's CloudWatch entries can be
    # parsed in isolation afterwards.
    clean_logs('/aws/lambda/' + LAMBDA_NAME)

    # Config sections expose their settings as UPPERCASE attributes.
    PARAMETERS = [key for key in dir(config.Hyperparameter) if key.isupper()]
    CONFIG = [key for key in dir(config.Config) if key.isupper()]

    # One invocation payload per hyperparameter combination.
    payload_list = create_event(config, PARAMETERS, CONFIG)

    max_metric = float('-inf')
    chosen_model_event = None

    start = time.time()
    print("=====Time Stamp======")
    print(start)
    job = group(
        invoke_lambda.s(function_name=LAMBDA_NAME, sync=True, payload=payload)
        for payload in payload_list)
    print("===Async Tasks start===")
    result = job.apply_async()
    # Persist the group in the result backend, restore it, and poll until
    # every task has finished before collecting results.
    result.save()
    from celery.result import GroupResult
    saved_result = GroupResult.restore(result.id)

    while not saved_result.ready():
        time.sleep(0.1)
    model_list = saved_result.get(timeout=None)

    print("===Async Tasks end===")
    print(time.time() - start)

    for item in model_list:
        payload = item['Payload']
        # Metric is Accuracy Score => Large than
        if payload['metric'] > max_metric:
            chosen_model_event = payload['event']
            max_metric = payload['metric']

    print(max_metric)
    print(chosen_model_event)

    from src.celery_lambda import measurement
    measurement.parse_log("/aws/lambda/neural_network_worker")