Example #1
def lambda_handler(event, context):
    datasets = event['params']['Datasets']
    status = None
    arns = {}
    for dataset in datasets:
        dataset_name = dataset['DatasetName']
        arns[dataset_name] = ARN.format(account=ACCOUNTID,
                                        name=dataset_name,
                                        region=environ['AWS_REGION'])
    event['params']['misc']['AccountID'] = ACCOUNTID
    #If Dataset Already Exists
    try:
        status = []
        for arn in arns.values():
            stat = LOADER.forecast_cli.describe_dataset(DatasetArn=arn)
            status.append(stat['Status'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info('Dataset not found! Will follow to create dataset.')
        status = []
        for idx, dataset in enumerate(datasets):
            LOADER.forecast_cli.create_dataset(
                Domain=dataset['Domain'],
                DatasetType=dataset['DatasetType'],
                DatasetName=dataset['DatasetName'],
                DataFrequency=dataset['DataFrequency'],
                Schema=dataset['Schema'])
            stat = LOADER.forecast_cli.describe_dataset(
                DatasetArn=arns[dataset['DatasetName']])
            status.append(stat['Status'])
    for idx, dataset in enumerate(datasets):
        event['params']['Datasets'][idx]['DatasetArn'] = arns[
            dataset['DatasetName']]
    actions.take_action(status)
    return event
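Example #1 and the other Forecast handlers below rely on module-level objects that the snippets never show: LOADER, ACCOUNTID, the ARN template and the actions module. A minimal sketch of what that setup might look like, assuming the usual boto3 clients; the template string is an assumption inferred from how ARN.format is called above.

# A minimal sketch, not the original module: the names LOADER, ACCOUNTID and ARN
# come from the snippets, but the wiring and the template string are assumptions.
import logging
from os import environ  # the handlers read AWS_REGION and role ARNs from environment variables

import boto3


class Loader:
    def __init__(self):
        self.forecast_cli = boto3.client('forecast')        # Amazon Forecast control-plane client
        self.personalize_cli = boto3.client('personalize')  # Amazon Personalize control-plane client
        self.account_id = boto3.client('sts').get_caller_identity()['Account']
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.INFO)


LOADER = Loader()
ACCOUNTID = LOADER.account_id
# Assumed shape of a Forecast dataset ARN: arn:aws:forecast:<region>:<account>:dataset/<name>
ARN = 'arn:aws:forecast:{region}:{account}:dataset/{name}'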
Example #2
def lambda_handler(event, context):
    params = event['params']
    status = None
    event['DatasetImportJobArn'] = ARN.format(
        account=event['AccountID'],
        date=event['currentDate'],
        name=params['Datasets'][0]['DatasetName'],
        region=environ['AWS_REGION'])
    try:
        status = LOADER.forecast_cli.describe_dataset_import_job(
            DatasetImportJobArn=event['DatasetImportJobArn'])

    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset import job not found! Will follow to create new job.')

        LOADER.forecast_cli.create_dataset_import_job(
            DatasetImportJobName='{name}_{date}'.format(
                name=params['Datasets'][0]['DatasetName'],
                date=event['currentDate']),
            DatasetArn=event['DatasetArn'],
            DataSource={
                'S3Config': {
                    'Path':
                    's3://{bucket}/train/'.format(bucket=event['bucket']),
                    'RoleArn': environ['FORECAST_ROLE']
                }
            },
            TimestampFormat=params['TimestampFormat'])
        status = LOADER.forecast_cli.describe_dataset_import_job(
            DatasetImportJobArn=event['DatasetImportJobArn'])

    actions.take_action(status['Status'])
    return event
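Here ARN is a different template from Example #1: it also takes a date, because the import job is named '{name}_{date}'. Assuming the standard dataset-import-job ARN shape, the template could plausibly be:

# Assumed template for Example #2, consistent with the '{name}_{date}' job name above:
# arn:aws:forecast:<region>:<account>:dataset-import-job/<datasetName>/<importJobName>
ARN = ('arn:aws:forecast:{region}:{account}:'
       'dataset-import-job/{name}/{name}_{date}')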
Example #3
def lambda_handler(event, context):
    status = None
    predictor = event['params']['Predictor']
    event['PredictorArn'] = ARN.format(account=event['AccountID'],
                                       date=event['currentDate'],
                                       name=predictor['PredictorName'],
                                       region=environ['AWS_REGION'])
    try:
        status = LOADER.forecast_cli.describe_predictor(
            PredictorArn=event['PredictorArn'])

    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Predictor not found! Will follow to create new predictor.')
        if 'InputDataConfig' in predictor.keys():
            predictor['InputDataConfig']['DatasetGroupArn'] = event[
                'DatasetGroupArn']
        else:
            predictor['InputDataConfig'] = {
                'DatasetGroupArn': event['DatasetGroupArn']
            }
        LOADER.forecast_cli.create_predictor(**predictor)
        status = LOADER.forecast_cli.describe_predictor(
            PredictorArn=event["PredictorArn"])
    actions.take_action(status['Status'])
    return event
Example #4
def lambda_handler(event, context):
    dataset_group = event['params']['DatasetGroup']
    status = None
    event['params']['DatasetGroup']['DatasetGroupArn'] = ARN.format(
        account=event['params']['misc']['AccountID'],
        name=dataset_group['DatasetGroupName'],
        region=environ['AWS_REGION'])
    try:
        status = LOADER.forecast_cli.describe_dataset_group(
            DatasetGroupArn=event['params']['DatasetGroup']['DatasetGroupArn'])

    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset Group not found! Will follow to create Dataset Group.')
        dataset_arns = []
        datasets = event['params']['Datasets']
        for dataset in datasets:
            dataset_arns.append(dataset['DatasetArn'])
        LOADER.forecast_cli.create_dataset_group(
            DatasetGroupName=dataset_group['DatasetGroupName'],
            Domain=dataset_group['Domain'],
            DatasetArns=dataset_arns)
        status = LOADER.forecast_cli.describe_dataset_group(
            DatasetGroupArn=event['params']['DatasetGroup']['DatasetGroupArn'])
    actions.take_action(status['Status'])
    return event
Example #5
def lambda_handler(event, context):
    # return event
    datasetImportJobArn = ARN.format(region=environ['AWS_REGION'],
                                     account=LOADER.account_id,
                                     date=event['date'],
                                     type=event['datasetType'])

    try:
        status = LOADER.personalize_cli.describe_dataset_import_job(
            datasetImportJobArn=datasetImportJobArn)['datasetImportJob']

    except LOADER.personalize_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset import job not found! Will follow to create new job.')
        LOADER.personalize_cli.create_dataset_import_job(
            jobName='{datasetType}_{date}'.format(**event),
            datasetArn=event['datasetArn'],
            dataSource={
                'dataLocation': 's3://{bucket}/{datasetType}/'.format(**event)
            },
            roleArn=environ['PERSONALIZE_ROLE'])
        status = LOADER.personalize_cli.describe_dataset_import_job(
            datasetImportJobArn=datasetImportJobArn)['datasetImportJob']

    actions.take_action(status['status'])
    return datasetImportJobArn
Example #6
def lambda_handler(event, context):
    # return event
    status = LOADER.personalize_cli.describe_solution_version(
        solutionVersionArn=event['solutionVersionArn'])['solutionVersion']

    actions.take_action(status['status'])
    return status['status']
Example #7
def lambda_handler(event, context):
    # return event
    dataset = event['datasets'][event['datasetType']]
    datasetArn = DATASET_ARN.format(
        region=environ['AWS_REGION'],
        account=LOADER.account_id,
        datasetGroupName=event['datasetGroupName'],
        type=str.upper(event['datasetType'])
    )
    try:
        status = LOADER.personalize_cli.describe_dataset(
            datasetArn=datasetArn)['dataset']

    except LOADER.personalize_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset not found! Will follow to create schema and dataset.'
        )
        LOADER.personalize_cli.create_dataset(
            name=dataset['name'],
            schemaArn=create_schema(
                dataset['schema']['name'], dataset['schema']
            ),
            datasetGroupArn=event['datasetGroupArn'],
            datasetType=event['datasetType']
        )
        status = LOADER.personalize_cli.describe_dataset(
            datasetArn=datasetArn)['dataset']

    actions.take_action(status['status'])
    return datasetArn
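create_schema is a helper defined outside this snippet. A hedged sketch of what it might do with the Personalize CreateSchema API; the helper name and its two arguments come from the call above, the body is an assumption.

import json


def create_schema(name, schema):
    # Hypothetical sketch of the unshown helper: register the Avro-style schema
    # with Amazon Personalize and return its ARN for create_dataset.
    response = LOADER.personalize_cli.create_schema(
        name=name,
        schema=json.dumps(schema))
    return response['schemaArn']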
Example #8
def lambda_handler(event, _):
    """
    Lambda function handler
    """
    event['ForecastExportJobName'] = FORECAST_EXPORT_JOB_NAME.format(
        project_name=event['ProjectName'], date=event['TriggeredAt'])
    event['ForecastExportJobArn'] = FORECAST_EXPORT_JOB_ARN.format(
        region=event['Region'],
        account=event['AccountID'],
        forecast_name=event['ForecastName'],
        export_job_name=event['ForecastExportJobName'])

    try:
        response = forecast_client.describe_forecast_export_job(
            ForecastExportJobArn=event['ForecastExportJobArn'])
        logger.info({
            'message': 'forecast_client.describe_forecast_export_job called',
            'response': response
        })
    except forecast_client.exceptions.ResourceNotFoundException:
        logger.info({
            'message': 'creating new forecast export job',
            'forecast_export_job_arn': event['ForecastExportJobArn']
        })
        response = forecast_client.create_forecast_export_job(
            ForecastExportJobName=event['ForecastExportJobName'],
            ForecastArn=event['ForecastArn'],
            Destination={
                'S3Config': {
                    'Path':
                    's3://{bucket}/{folder}/'.format(
                        bucket=environ['S3_BUCKET_NAME'],
                        folder=environ['TGT_S3_FOLDER']),
                    'RoleArn':
                    environ['FORECAST_EXPORT_JOB_ROLE_ARN']
                }
            })
        logger.info({
            'message': 'forecast_client.create_forecast_export_job called',
            'response': response
        })

        response = forecast_client.describe_forecast_export_job(
            ForecastExportJobArn=event['ForecastExportJobArn'])
        logger.info({
            'message': 'forecast_client.describe_forecast_export_job called',
            'response': response
        })

    # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
    # ResourcePending exception will be thrown and this Lambda function will be retried.
    actions.take_action(response['Status'])

    logger.info({
        'message': 'forecast export job was created',
        'forecast_export_job_arn': event['ForecastExportJobArn']
    })

    return event
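Most handlers in this collection end with actions.take_action(status), and the comment above spells out the contract: while the resource is still being created, a retryable exception is raised so the state machine invokes the Lambda again. A minimal sketch of that pattern; the exception classes and module layout are assumptions, only the take_action name comes from the snippets.

class ResourcePending(Exception):
    """Raised while a resource is still being created; the state machine retries on it."""


class ResourceFailed(Exception):
    """Raised when a resource ended up in a failed state."""


def take_action(status):
    # Sketch of the behaviour described in the comments: keep retrying while the
    # resource is pending, fail fast on a failed state, fall through once ACTIVE.
    statuses = status if isinstance(status, list) else [status]
    for stat in statuses:
        if 'PENDING' in stat or 'IN_PROGRESS' in stat or 'IN PROGRESS' in stat:
            raise ResourcePending(stat)
        if 'FAILED' in stat:
            raise ResourceFailed(stat)
    return True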
Example #9
def lambda_handler(event, _):
    """
    Lambda function handler
    """
    predictor_name = PREDICTOR_NAME.format(project_name=event['ProjectName'],
                                           date=event['TriggeredAt'])
    predictor_arn = PREDICTOR_ARN.format(region=event['Region'],
                                         account=event['AccountID'],
                                         predictor_name=predictor_name)
    try:
        response = forecast_client.describe_predictor(
            PredictorArn=predictor_arn)
        logger.info({
            'message': 'forecast_client.describe_predictor called',
            'response': response
        })

    except forecast_client.exceptions.ResourceNotFoundException:
        logger.info({
            'message': 'creating new predictor',
            'predictor_arn': predictor_arn
        })
        if 'InputDataConfig' in event['Predictor'].keys():
            event['Predictor']['InputDataConfig']['DatasetGroupArn'] = \
                event['DatasetGroupArn']
        else:
            event['Predictor']['InputDataConfig'] = {
                'DatasetGroupArn': event['DatasetGroupArn']
            }
        response = forecast_client.create_predictor(
            **event['Predictor'], PredictorName=predictor_name)
        logger.info({
            'message': 'forecast_client.create_predictor called',
            'response': response
        })

        response = forecast_client.describe_predictor(
            PredictorArn=predictor_arn)
        logger.info({
            'message': 'forecast_client.describe_predictor called',
            'response': response
        })

    # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
    # ResourcePending exception will be thrown and this Lambda function will be retried.
    actions.take_action(response['Status'])

    # Completed creating Predictor.
    logger.info({
        'message': 'predictor was created',
        'predictor_arn': predictor_arn
    })

    # Post accuracy information to CloudWatch Metrics
    post_metric(
        forecast_client.get_accuracy_metrics(PredictorArn=predictor_arn))

    return event
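post_metric, used here to publish predictor accuracy to CloudWatch Metrics, is also defined elsewhere. A hedged sketch assuming the documented GetAccuracyMetrics response shape; the namespace and dimension names are placeholders.

import boto3

cloudwatch = boto3.client('cloudwatch')


def post_metric(metrics):
    # Hypothetical sketch: publish each test window's RMSE to CloudWatch.
    # 'Forecast/Accuracy' is a made-up namespace.
    for result in metrics['PredictorEvaluationResults']:
        for window in result['TestWindows']:
            cloudwatch.put_metric_data(
                Namespace='Forecast/Accuracy',
                MetricData=[{
                    'MetricName': 'RMSE',
                    'Dimensions': [{'Name': 'AlgorithmArn',
                                    'Value': result.get('AlgorithmArn', 'unknown')}],
                    'Value': window['Metrics']['RMSE'],
                }])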
Example #10
def lambda_handler(event, _):
    """
    Lambda function handler
    """
    event['ForecastName'] = FORECAST_NAME.format(
        project_name=event['ProjectName'], date=event['TriggeredAt'])
    event['ForecastArn'] = FORECAST_ARN.format(
        region=event['Region'],
        account=event['AccountID'],
        forecast_name=event['ForecastName'])

    try:
        response = forecast_client.describe_forecast(
            ForecastArn=event['ForecastArn'])
        logger.info({
            'message': 'forecast_client.describe_forecast called',
            'response': response
        })

    except forecast_client.exceptions.ResourceNotFoundException:
        latest_predictor_arn = event['LatestPredictorArn']

        logger.info({
            'message': 'creating new forecast',
            'forecast_arn': event['ForecastArn'],
            'predictor_arn': latest_predictor_arn
        })
        response = forecast_client.create_forecast(
            **event['Forecast'],
            ForecastName=event['ForecastName'],
            PredictorArn=latest_predictor_arn)
        logger.info({
            'message': 'forecast_client.create_forecast called',
            'response': response
        })

        response = forecast_client.describe_forecast(
            ForecastArn=event['ForecastArn'])
        logger.info({
            'message': 'forecast_client.describe_forecast called',
            'response': response
        })

    # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
    # ResourcePending exception will be thrown and this Lambda function will be retried.
    actions.take_action(response['Status'])

    logger.info({
        'message': 'forecast was created',
        'forecast_arn': event['ForecastArn']
    })

    return event
Example #11
def lambda_handler(event, _):
    """
    Lambda function handler
    """
    event['DatasetGroupName'] = DATASET_GROUP_NAME.format(
        project_name=event['ProjectName'], date=event['TriggeredAt'])
    event['DatasetGroupArn'] = DATASET_GROUP_ARN.format(
        region=event['Region'],
        account=event['AccountID'],
        dataset_group_name=event['DatasetGroupName'],
    )
    try:
        response = forecast_client.describe_dataset_group(
            DatasetGroupArn=event['DatasetGroupArn'])
        logger.info({
            'message': 'forecast_client.describe_dataset_group called',
            'response': response
        })

    except forecast_client.exceptions.ResourceNotFoundException:
        logger.info({
            'message': 'creating new dataset group',
            'dataset_group_arn': event['DatasetGroupArn']
        })
        response = forecast_client.create_dataset_group(
            **event['DatasetGroup'],
            DatasetGroupName=event['DatasetGroupName'],
            DatasetArns=[
                dataset['DatasetArn'] for dataset in event['Datasets']
            ])
        logger.info({
            'message': 'forecast_client.create_dataset_group called',
            'response': response
        })

        response = forecast_client.describe_dataset_group(
            DatasetGroupArn=event['DatasetGroupArn'])
        logger.info({
            'message': 'forecast_client.describe_dataset_group called',
            'response': response
        })

    # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
    # ResourcePending exception will be thrown and this Lambda function will be retried.
    actions.take_action(response['Status'])

    logger.info({
        'message': 'dataset group was created',
        'dataset_group_arn': event['DatasetGroupArn']
    })

    return event
Example #12
    def navigate(self, setup):

        comb_id = 0

        while comb_id < len(self.combinations):
            comb = self.combinations[comb_id]
            comb = list(comb)
            arguments = []
            for option in comb:
                option.click()
                arguments.append(option.text)
                #print(option.text)

            el = self.driver.find_element_by_id(DEPARTMENT_FIELD)
            self.depts = [
                dept for dept in el.find_elements_by_tag_name('option')
            ]

            i = 0

            while i < len(self.depts):
                #print("dept",i)
                dept = self.depts[i]
                dept.click()
                deptName = dept.text
                self.submit_button.click()
                sleep(0.1)

                source = self.driver.page_source
                outBool = check_results(source)
                print(deptName, arguments[3:5])
                if outBool:
                    print("True")
                    args = copy(arguments)
                    args.append(deptName)
                    take_action(args, setup)

                self.driver.back()
                self.submit_button = self.driver.find_element_by_id('submit1')
                el = self.driver.find_element_by_id(DEPARTMENT_FIELD)
                self.depts = [
                    dept for dept in el.find_elements_by_tag_name('option')
                ]

                i += 1

            self.populate()
            comb_id += 1
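check_results and take_action are helpers that live outside this class. A purely illustrative sketch of check_results; the 'No records found' marker is hypothetical and would need to match the real page.

def check_results(source):
    # Hypothetical helper: return True when the submitted query produced results.
    return 'No records found' not in source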
Example #13
def lambda_handler(event, context):
    datasetGroupArn = ARN.format(account=LOADER.account_id,
                                 name=event['datasetGroup']['name'],
                                 region=environ['AWS_REGION'])
    try:
        status = LOADER.personalize_cli.describe_dataset_group(
            datasetGroupArn=datasetGroupArn)['datasetGroup']

    except LOADER.personalize_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset Group not found! Will follow to create Dataset Group.')
        LOADER.personalize_cli.create_dataset_group(**event['datasetGroup'])
        status = LOADER.personalize_cli.describe_dataset_group(
            datasetGroupArn=datasetGroupArn)['datasetGroup']

    actions.take_action(status['status'])
    return datasetGroupArn
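Because create_dataset_group is called with **event['datasetGroup'], the incoming event is expected to carry the CreateDatasetGroup keyword arguments directly. An illustrative payload, with placeholder values:

# Illustrative input event for Example #13; field names mirror the Personalize
# CreateDatasetGroup parameters, the values are placeholders.
event = {
    'datasetGroup': {
        'name': 'retail-demo',   # also used to build datasetGroupArn above
        'domain': 'ECOMMERCE'    # optional domain for a domain dataset group
    }
}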
Example #14
def lambda_handler(event, context):
    params = event['params']
    datasets = event['params']['Datasets']
    status = None
    arns = {}
    for dataset in datasets:
        dataset_name = dataset['DatasetName']
        arns[dataset_name] = ARN.format(account=params['misc']['AccountID'],
                                        date=params['misc']['currentDate'],
                                        name=dataset_name,
                                        region=environ['AWS_REGION'])

    try:
        status = []
        for dataset in datasets:
            stat = LOADER.forecast_cli.describe_dataset_import_job(
                DatasetImportJobArn=arns[dataset['DatasetName']])
            print(stat['Status'])
            status.append(stat['Status'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Dataset import job not found! Will follow to create new job.')
        status = []
        for dataset in datasets:
            LOADER.forecast_cli.create_dataset_import_job(
                DatasetImportJobName='{name}_{date}'.format(
                    name=dataset['DatasetName'],
                    date=params['misc']['currentDate']),
                DatasetArn=dataset['DatasetArn'],
                DataSource={
                    'S3Config': {
                        'Path': dataset['s3path'],
                        'RoleArn': environ['FORECAST_ROLE']
                    }
                },
                TimestampFormat=params['misc']['TimestampFormat'])
            stat = LOADER.forecast_cli.describe_dataset_import_job(
                DatasetImportJobArn=arns[dataset['DatasetName']])
            status.append(stat['Status'])
    for idx, dataset in enumerate(datasets):
        event['params']['Datasets'][idx]['DatasetImportJobArn'] = arns[
            dataset['DatasetName']]
    actions.take_action(status)
    return event
Example #15
def lambda_handler(event, context):
    datasets = event['params']['Datasets']
    status = None
    event['DatasetArn'] = ARN.format(account=ACCOUNTID,
                                     name=datasets[0]['DatasetName'],
                                     region=environ['AWS_REGION'])
    event['AccountID'] = ACCOUNTID
    try:
        status = LOADER.forecast_cli.describe_dataset(
            DatasetArn=event['DatasetArn'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info('Dataset not found! Will follow to create dataset.')
        for dataset in datasets:
            LOADER.forecast_cli.create_dataset(**dataset)
        status = LOADER.forecast_cli.describe_dataset(
            DatasetArn=event['DatasetArn'])

    actions.take_action(status['Status'])
    return event
Example #16
def lambda_handler(event, context):
    campaignArn = ARN.format(region=environ['AWS_REGION'],
                             account=LOADER.account_id,
                             name=event['campaign']['name'])
    try:
        status = LOADER.personalize_cli.describe_campaign(
            campaignArn=campaignArn)['campaign']

    except LOADER.personalize_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Campaign not found! Will follow to create a new campaign.')
        LOADER.personalize_cli.create_campaign(
            name=event['campaign']['name'],
            solutionVersionArn=event['solutionVersionArn'],
            minProvisionedTPS=event['campaign']['minProvisionedTPS'])
        status = LOADER.personalize_cli.describe_campaign(
            campaignArn=campaignArn)['campaign']

    actions.take_action(status['status'])
    return campaignArn
Example #17
def lambda_handler(event, context):
    listETResponse = LOADER.personalize_cli.list_event_trackers(
        datasetGroupArn=event['datasetGroupArn'])
    if (len(listETResponse['eventTrackers']) > 0):
        eventTrackerArn = listETResponse['eventTrackers'][0]['eventTrackerArn']
        status = LOADER.personalize_cli.describe_event_tracker(
            eventTrackerArn=eventTrackerArn)['eventTracker']
    else:
        LOADER.logger.info('Event tracker not found!')
        event['eventTracker']['datasetGroupArn'] = event['datasetGroupArn']
        createStatus = LOADER.personalize_cli.create_event_tracker(
            **event['eventTracker'])
        eventTrackerArn = createStatus['eventTrackerArn']
        status = LOADER.personalize_cli.describe_event_tracker(
            eventTrackerArn=eventTrackerArn)['eventTracker']

    actions.take_action(status['status'])
    return eventTrackerArn
Example #18
def lambda_handler(event, context):
    campaignArn = ARN.format(region=environ['AWS_REGION'],
                             account=LOADER.account_id,
                             name=event['campaign']['name'])
    try:
        status = LOADER.personalize_cli.describe_campaign(
            campaignArn=campaignArn)['campaign']
        # Point to new campaign if the new solution version is not the one listed in the campaign
        if (status['solutionVersionArn'] != event['solutionVersionArn']):
            try:
                newStatus = LOADER.personalize_cli.update_campaign(
                    campaignArn=campaignArn,
                    solutionVersionArn=event['solutionVersionArn'],
                    minProvisionedTPS=event['campaign']['minProvisionedTPS'])
                status = LOADER.personalize_cli.describe_campaign(
                    campaignArn=campaignArn)['campaign']
                actions.take_action(status['latestCampaignUpdate']['status'])
                return campaignArn
            except LOADER.personalize_cli.exceptions.ResourceInUseException:
                actions.take_action(status['latestCampaignUpdate']['status'])
                return campaignArn

    except LOADER.personalize_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info(
            'Campaign not found! Will follow to create a new campaign.')
        LOADER.personalize_cli.create_campaign(
            name=event['campaign']['name'],
            solutionVersionArn=event['solutionVersionArn'],
            minProvisionedTPS=event['campaign']['minProvisionedTPS'])
        status = LOADER.personalize_cli.describe_campaign(
            campaignArn=campaignArn)['campaign']

    actions.take_action(status['status'])
    return campaignArn
Example #19
def lambda_handler(event, context):
    forecast = event['params']['Forecast']
    status = None
    event['ForecastArn'] = ARN.format(account=event['AccountID'],
                                      name=forecast['ForecastName'],
                                      region=environ['AWS_REGION'])
    event['ForecastExportJobArn'] = JOB_ARN.format(
        account=event['AccountID'],
        name=forecast['ForecastName'],
        date=event['currentDate'],
        region=environ['AWS_REGION'])

    # Creates Forecast and export Predictor metrics if Forecast does not exist yet.
    # Will throw an exception while the forecast is being created.
    try:
        actions.take_action(
            LOADER.forecast_cli.describe_forecast(
                ForecastArn=event['ForecastArn'])['Status'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        post_metric(
            LOADER.forecast_cli.get_accuracy_metrics(
                PredictorArn=event['PredictorArn']))
        LOADER.logger.info('Forecast not found. Creating new forecast.')
        LOADER.forecast_cli.create_forecast(**forecast,
                                            PredictorArn=event['PredictorArn'])
        actions.take_action(
            LOADER.forecast_cli.describe_forecast(
                ForecastArn=event['ForecastArn'])['Status'])

    # Creates forecast export job if it does not exist yet. Will throw an exception
    # while the forecast export job is being created.
    try:
        status = LOADER.forecast_cli.describe_forecast_export_job(
            ForecastExportJobArn=event['ForecastExportJobArn'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info('Forecast export not found. Creating new export.')
        LOADER.forecast_cli.create_forecast_export_job(
            ForecastExportJobName='{name}_{date}'.format(
                name=forecast['ForecastName'], date=event['currentDate']),
            ForecastArn=event['ForecastArn'],
            Destination={
                'S3Config': {
                    'Path':
                    's3://{bucket}/tmp/'.format(bucket=event['bucket']),
                    'RoleArn': environ['EXPORT_ROLE']
                }
            })
        status = LOADER.forecast_cli.describe_forecast_export_job(
            ForecastExportJobArn=event['ForecastExportJobArn'])

    actions.take_action(status['Status'])
    return event
Example #20
def lambda_handler(event, context):
    forecasts = event['params']['Forecast']
    predictors = event['params']['Predictor']
    status = None
    forecast_obj = {}
    forecast_idx = {}
    forecast_arns = {}
    export_arns = {}
    forecast_s3 = {}
    for idx, forecast in enumerate(forecasts):
        predictor_name = forecast['PredictorName']
        forecast_name = forecast['ForecastName']
        curr_date = event['params']['misc']['currentDate']
        forecast_arns[forecast_name] = ARN.format(
            account=event['params']['misc']['AccountID'],
            name=forecast_name,
            region=environ['AWS_REGION'])
        export_arns[forecast_name] = JOB_ARN.format(
            account=event['params']['misc']['AccountID'],
            name=forecast_name,
            date=event['params']['misc']['currentDate'],
            region=environ['AWS_REGION'])
        forecast_s3[
            forecast_name] = 's3://{bucket}/{forecast_name}_{date}/'.format(
                bucket=event['params']['misc']['bucket'],
                forecast_name=forecast_name,
                date=curr_date)
        forecast_obj[predictor_name] = forecast
        forecast_idx[predictor_name] = idx

    # Creates Forecast and export Predictor metrics if Forecast does not exist yet.
    # Will throw an exception while the forecast is being created.
    try:
        # If a resource is pending, it will be retried later by Step-Wise.
        # If a resource is not found, the except block creates the forecasts and posts predictor metrics.
        for arn in forecast_arns.values():
            actions.take_action(
                LOADER.forecast_cli.describe_forecast(
                    ForecastArn=arn)['Status'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info('Forecast not found. Creating new forecast.')
        for predictor in predictors:
            predictor_name = predictor['PredictorName']
            metrics = LOADER.forecast_cli.get_accuracy_metrics(
                PredictorArn=predictor['PredictorArn'])
            #Post metrics to cloudwatch
            post_metric(metrics)
            forecast = forecast_obj[predictor_name]
            LOADER.forecast_cli.create_forecast(
                ForecastName=forecast['ForecastName'],
                ForecastTypes=forecast['ForecastTypes'],
                PredictorArn=predictor['PredictorArn'])
        for arn in forecast_arns.values():
            actions.take_action(
                LOADER.forecast_cli.describe_forecast(
                    ForecastArn=arn)['Status'])

    # Creates forecast export job if it does not exist yet. Will throw an exception
    # while the forecast export job is being created.
    try:
        status = []
        for export_arn in export_arns.values():
            stat = LOADER.forecast_cli.describe_forecast_export_job(
                ForecastExportJobArn=export_arn)
            status.append(stat['Status'])
    except LOADER.forecast_cli.exceptions.ResourceNotFoundException:
        LOADER.logger.info('Forecast export not found. Creating new export.')
        status = []
        for idx, forecast in enumerate(forecasts):
            forecast_name = forecast['ForecastName']
            curr_date = event['params']['misc']['currentDate']
            LOADER.forecast_cli.create_forecast_export_job(
                ForecastExportJobName='{name}_{date}'.format(
                    name=forecast_name, date=curr_date),
                ForecastArn=forecast_arns[forecast_name],
                Destination={
                    'S3Config': {
                        'Path': forecast_s3[forecast_name],
                        'RoleArn': environ['EXPORT_ROLE']
                    }
                })
            stat = LOADER.forecast_cli.describe_forecast_export_job(
                ForecastExportJobArn=export_arns[forecast_name])
            status.append(stat['Status'])

    #Put results on payload
    for predictor in predictors:
        predictor_name = predictor['PredictorName']
        metrics = LOADER.forecast_cli.get_accuracy_metrics(
            PredictorArn=predictor['PredictorArn'])
        results = extract_metrics(metrics)
        idx = forecast_idx[predictor_name]
        event['params']['Forecast'][idx]['metric'] = results

    for idx, forecast in enumerate(forecasts):
        forecast_name = forecast['ForecastName']
        event['params']['Forecast'][idx]['ForecastArn'] = forecast_arns[
            forecast_name]
        event['params']['Forecast'][idx]['ForecastExportJobArn'] = export_arns[
            forecast_name]
        event['params']['Forecast'][idx]['s3path'] = forecast_s3[forecast_name]
    actions.take_action(status)
    return event
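extract_metrics, used in the 'Put results on payload' loop above, is not shown either. A hedged sketch that flattens the GetAccuracyMetrics response into plain numbers; the exact layout of the returned structure is an assumption.

def extract_metrics(metrics):
    # Hypothetical sketch: collect the headline accuracy numbers per test window
    # so they can be embedded in the Step Functions payload.
    results = []
    for result in metrics['PredictorEvaluationResults']:
        for window in result['TestWindows']:
            results.append({
                'RMSE': window['Metrics']['RMSE'],
                'WeightedQuantileLosses': window['Metrics']['WeightedQuantileLosses'],
            })
    return results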
Example #21
def lambda_handler(event, _):
    """
    Lambda function handler
    """
    # Create datasets
    for dataset in event['Datasets']:
        dataset_name = DATASET_NAME.format(
            project_name=event['ProjectName'],
            dataset_type=dataset['DatasetType'],
            date=event['TriggeredAt']
        )

        dataset_arn = DATASET_ARN.format(
            region=event['Region'],
            account=event['AccountID'],
            dataset_name=dataset_name
        )

        try:
            response = forecast_client.describe_dataset(
                DatasetArn=dataset_arn
            )
            logger.info({
                'message': 'forecast_client.describe_dataset called',
                'response': response,
                'dataset_arn': dataset_arn
            })

        except forecast_client.exceptions.ResourceNotFoundException:
            logger.info({
                'message': 'creating new dataset',
                'dataset_arn': dataset_arn
            })
            response = forecast_client.create_dataset(
                **dataset,
                DatasetName=dataset_name
            )
            logger.info({
                'message': 'forecast_client.create_dataset called',
                'response': response,
                'dataset_arn': dataset_arn
            })

            dataset['DatasetName'] = dataset_name
            dataset['DatasetArn'] = dataset_arn

    # Check dataset status
    for dataset in event['Datasets']:
        response = forecast_client.describe_dataset(
            DatasetArn=dataset['DatasetArn']
        )
        logger.info({
            'message': 'forecast_client.describe_dataset called',
            'response': response,
            'dataset_arn': dataset['DatasetArn']
        })

        # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
        # ResourcePending exception will be thrown and this Lambda function will be retried.
        actions.take_action(response['Status'])

    # All datasets were created
    logger.info({
        'message': 'all datasets were created',
        'dataset_arns': [dataset['DatasetArn'] for dataset in event['Datasets']]
    })
    return event
Example #22
def lambda_handler(event, _):
    """
    Lambda function handler
    """

    import_job_arns = []

    # In this sample, dataset import jobs are created sequentially because the default limit of
    # 'Maximum parallel running CreateDatasetImportJob tasks' is small. Increase this limit to
    # create dataset import jobs in parallel.
    # https://docs.aws.amazon.com/forecast/latest/dg/limits.html

    for dataset in event['Datasets']:
        dataset_name = dataset['DatasetName']
        dataset_arn = dataset['DatasetArn']
        import_job_name = IMPORT_JOB_NAME.format(
            date=event['TriggeredAt']
        )
        import_job_arn = IMPORT_JOB_ARN.format(
            region=event['Region'],
            account=event['AccountID'],
            dataset_name=dataset_name,
            import_job_name=import_job_name
        )
        import_job_arns.append(import_job_arn)

        filename = ''
        for job in event['DatasetImportJobs']:
            if job['DatasetType'] == dataset['DatasetType']:
                filename = job['Filename']
        if filename == '':
            raise Exception(
                'failed to find "Filename" for dataset import job.')
        try:
            response = forecast_client.describe_dataset_import_job(
                DatasetImportJobArn=import_job_arn
            )
            logger.info({
                'message': 'forecast_client.describe_dataset_import_job called',
                'response': response,
                'dataset_import_job_arn': import_job_arn
            })

        except forecast_client.exceptions.ResourceNotFoundException:
            logger.info({
                'message': 'creating new dataset import job',
                'dataset_import_job_arn': import_job_arn
            })

            response = forecast_client.create_dataset_import_job(
                DatasetImportJobName=import_job_name,
                DatasetArn=dataset_arn,
                DataSource={
                    'S3Config':
                        {'Path':
                            's3://{bucket}/{folder}/{file}'.format(
                                bucket=environ['S3_BUCKET_NAME'],
                                folder=environ['S3_SRC_FOLDER'],
                                file=filename
                            ),
                            'RoleArn':
                                environ['FORECAST_IMPORT_JOB_ROLE_ARN']
                         }
                },
                TimestampFormat=event['DatasetTimestampFormat']
            )
            logger.info({
                'message': 'forecast_client.create_dataset_import_job called',
                'response': response,
                'dataset_import_job_arn': import_job_arn
            })

            response = forecast_client.describe_dataset_import_job(
                DatasetImportJobArn=import_job_arn
            )
            logger.info({
                'message': 'forecast_client.describe_dataset_import_job called',
                'response': response,
                'dataset_import_job_arn': import_job_arn
            })

        # When the resource is in CREATE_PENDING or CREATE_IN_PROGRESS,
        # ResourcePending exception will be thrown and this Lambda function will be retried.
        actions.take_action(response['Status'])

    logger.info({
        'message': 'dataset import job was created',
        'dataset_import_job_arns': import_job_arns
    })

    return event