Code example #1
    def do_inference(self, subscription, model_id, model_dir, parameters):
        log.info('Start to inference {}'.format('Demo'))
        try:
            # Read the instance parameter and the inference time range from the request.
            amplifier = parameters['instance']['params']['amplifier']
            end_time = str_to_dt(parameters['endTime'])
            if 'startTime' in parameters:
                start_time = str_to_dt(parameters['startTime'])
            else:
                start_time = end_time

            # Pull the raw series for the configured series sets.
            series = self.tsanaclient.get_timeseries(parameters['apiKey'],
                                                     parameters['seriesSets'],
                                                     start_time, end_time)

            # Scale a copy of the data and write it back as the inference result.
            copied = copy.deepcopy(series)
            for data in copied:
                data.value = data.value * amplifier

            self.tsanaclient.save_inference_result(parameters, copied)

            return STATUS_SUCCESS, ''
        except Exception as e:
            log.error('Exception thrown by inference: ' + repr(e))
            return STATUS_FAIL, 'Exception thrown by inference: ' + repr(e)
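
For orientation, here is a hypothetical shape of the `parameters` payload this handler reads. Only the keys accessed in the code above appear; every value is invented for illustration.

# Illustrative payload only: keys mirror what do_inference reads above, values are made up.
sample_parameters = {
    'apiKey': '<api-key>',
    'endTime': '2020-01-02T00:00:00Z',
    'startTime': '2020-01-01T00:00:00Z',   # optional; defaults to endTime when absent
    'seriesSets': [],                      # series set definitions as configured for the instance
    'instance': {
        'params': {
            'amplifier': 2                 # every point is multiplied by this factor
        }
    }
}
# status, message = service.do_inference(subscription, model_id, model_dir, sample_parameters)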
Code example #2
    def do_inference(self, model_dir, parameters, context):
        log.info('Start to inference {}'.format('Demo'))
        try:
            # Read the instance parameter and the inference time range from the request.
            amplifier = parameters['instance']['params']['amplifier']
            end_time = str_to_dt(parameters['endTime'])
            if 'startTime' in parameters:
                start_time = str_to_dt(parameters['startTime'])
            else:
                start_time = end_time

            # This variant passes the API endpoint explicitly along with the key.
            series = self.tsanaclient.get_timeseries(parameters['apiEndpoint'],
                                                     parameters['apiKey'],
                                                     parameters['seriesSets'],
                                                     start_time, end_time)

            # Flatten every point of every series into one result record, scaled by the amplifier.
            res = []
            for data in series or []:
                for value in data.value or []:
                    v = {
                        'dim': data.dim,
                        'metric_id': data.metric_id,
                        'series_id': data.series_id,
                        'value': value['value'] * amplifier,
                        'timestamp': value['timestamp']
                    }
                    res.append(v)

            self.tsanaclient.save_inference_result(parameters, res)

            return STATUS_SUCCESS, ''
        except Exception as e:
            log.error('Exception thrown by inference: ' + repr(e))
            return STATUS_FAIL, 'Exception thrown by inference: ' + repr(e)
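
Both variants lean on a str_to_dt helper for the ISO-8601 timestamps in the request. Its real implementation is not part of this excerpt; a minimal sketch that would satisfy the usage above (assuming the startTime/endTime fields are UTC strings like '2020-01-01T00:00:00Z') could be:

from datetime import datetime, timezone

def str_to_dt(value):
    # Sketch only: parse an ISO-8601 UTC timestamp such as '2020-01-01T00:00:00Z'
    # into an aware datetime. The helper shipped with the sample may accept more formats.
    return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)

def dt_to_str(value):
    # Inverse of str_to_dt, used when writing result timestamps back (see code example #7).
    return value.strftime('%Y-%m-%dT%H:%M:%SZ')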
Code example #3
    # Inner function of an exception-handling decorator: `fn` is the wrapped Flask view.
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception as e:
            # Log the failure and turn any uncaught exception into a JSON 502 response.
            log.error("-----Exception----- %s" % str(e))
            return jsonify(
                dict(status=STATUS_FAIL,
                     message='Unknown error, please check your request. ' +
                     str(e))), 502
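
Since only the inner function is shown, here is a self-contained sketch of how such a decorator might be assembled and applied to a Flask route. The decorator name try_except and the /health route are placeholders, not the sample's actual names.

import logging
from functools import wraps
from flask import Flask, jsonify

log = logging.getLogger(__name__)
STATUS_FAIL = 'Fail'
app = Flask(__name__)

def try_except(fn):
    # Hypothetical enclosing decorator: converts any uncaught exception into a JSON 502.
    @wraps(fn)
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception as e:
            log.error("-----Exception----- %s", str(e))
            return jsonify(
                dict(status=STATUS_FAIL,
                     message='Unknown error, please check your request. ' + str(e))), 502
    return wrapped

@app.route('/health')
@try_except
def health():
    return jsonify(dict(status='OK')), 200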
Code example #4
    def __init__(self):
        # Load the service configuration pointed to by SERVICE_CONFIG_FILE.
        config_file = environ.get('SERVICE_CONFIG_FILE')
        config = load_config(config_file)
        if config is None:
            log.error("No configuration '%s', or the configuration is not in JSON format." % (config_file))
            exit()
        self.config = config
        self.tsanaclient = TSANAClient(config.tsana_api_endpoint, config.series_limit)

        # Start the background monitor, re-run it every 10 seconds, and clean up on exit.
        init_monitor(config)
        sched.add_job(func=lambda: run_monitor(config), trigger="interval", seconds=10)
        sched.start()
        atexit.register(lambda: stop_monitor(config))
        atexit.register(lambda: sched.shutdown())
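
load_config itself is not shown in these excerpts. From the way it is used (a JSON file path in, an object with attribute access out, None on failure), a plausible minimal sketch could look like the following; the attribute names tsana_api_endpoint, series_limit and az_tsana_meta_table are the ones the surrounding code reads, everything else is an assumption.

import json
from types import SimpleNamespace

def load_config(path):
    # Sketch only: read a JSON config file and expose its top-level keys as attributes,
    # leaving nested sections (e.g. the lstm block used in code example #7) as plain dicts.
    # Returns None on any failure so the caller can log and exit, as in code example #4.
    if not path:
        return None
    try:
        with open(path) as f:
            return SimpleNamespace(**json.load(f))
    except (OSError, ValueError, TypeError):
        return None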
Code example #5
def get_meta(config, subscription, model_id):
    """Look up a model's metadata entity; storage credentials come from environment variables."""
    try:
        azure_table = AzureTable(environ.get('AZURE_STORAGE_ACCOUNT'),
                                 environ.get('AZURE_STORAGE_ACCOUNT_KEY'))
        if not azure_table.exists_table(config.az_tsana_meta_table):
            raise Exception('Meta table does not exist')

        entity = azure_table.get_entity(config.az_tsana_meta_table,
                                        subscription, model_id)
        return entity
    except Exception as e:
        log.error(
            "Get entity error from %s with model_id %s and subscription %s, exception: %s."
            % (config.az_tsana_meta_table, model_id, subscription, str(e)))
        return None
Code example #6
def get_meta(config, subscription, model_key):
    """Same lookup as above, but storage credentials are read from the loaded config."""
    try:
        azure_table = AzureTable(config.az_storage_account,
                                 config.az_storage_account_key)
        if not azure_table.exists_table(config.az_tsana_meta_table):
            raise Exception('Meta table does not exist')

        entity = azure_table.get_entity(config.az_tsana_meta_table,
                                        subscription, model_key)
        return entity
    except Exception as e:
        log.error(
            "Get entity error from %s with model_key %s and subscription %s, exception: %s."
            % (config.az_tsana_meta_table, model_key, subscription, str(e)))
        return None
Code example #7
    def do_inference(self, subscription, model_id, model_dir, parameters):
        log.info("Start to inference %s", model_dir)
        params = parameters['instance']['params']
        inference_window = params['windowSize']

        # Resolve the target metric's granularity; it drives every time offset below.
        meta = self.tsanaclient.get_metric_meta(parameters['apiKey'], params['target']['metricId'])
        if meta is None:
            return STATUS_FAIL, 'Metric is not found. '
        gran = (meta['granularityName'], meta['granularityAmount'])

        end_time = str_to_dt(parameters['endTime'])
        if 'startTime' in parameters:
            start_time = str_to_dt(parameters['startTime'])
        else:
            start_time = end_time
        cur_time = start_time

        # Pull enough history ahead of start_time to fill the model's input window.
        data_end_time = get_time_offset(end_time, gran, +1)
        data_start_time = get_time_offset(start_time, gran, -inference_window * 2)

        factor_def = parameters['seriesSets']
        factors_data = self.tsanaclient.get_timeseries(parameters['apiKey'], factor_def, data_start_time, data_end_time)

        target_def = [params['target']]
        target_data = self.tsanaclient.get_timeseries(parameters['apiKey'], target_def, data_start_time, data_end_time)

        # Fall back to the service-level LSTM defaults for any tuning knob the instance omits.
        model, window = load_inference_model(model_dir=model_dir,
                                             target_size=params['step'],
                                             window=inference_window,
                                             metric_sender=MetricSender(self.config, subscription, model_id),
                                             epoc=params.get('epoc', self.config.lstm['epoc']),
                                             validation_freq=params.get('validation_freq', self.config.lstm['validation_freq']),
                                             validation_ratio=params.get('validation_ratio', self.config.lstm['validation_ratio']))

        input_data = load_inference_input_data(target_series=target_data[0],
                                               factor_series=factors_data,
                                               model=model,
                                               gran=Gran[meta['granularityName']],
                                               custom_in_seconds=meta['granularityAmount'],
                                               fill_type=Fill[params['fill']] if 'fill' in params else Fill.Previous,
                                               fill_value=params.get('fillValue', 0))

        # Slide forward one granularity step at a time and score each timestamp.
        while cur_time <= end_time:
            try:
                result = inference(input_data=input_data, window=window, timestamp=cur_time,
                                   target_size=params['step'], model=model)

                if len(result) > 0:
                    # Shift the result timestamps back by the configured target offset.
                    if 'target_offset' in params:
                        offset = int(params['target_offset'])
                        for idx in range(len(result)):
                            result[idx]['timestamp'] = dt_to_str(get_time_offset(cur_time, gran, -offset + idx))
                    self.tsanaclient.save_inference_result(parameters, result)
                else:
                    log.error("No result for this inference %s, key %s" % (dt_to_str(cur_time), model_dir))
            except Exception as e:
                log.error("Inference exception at %s: %s" % (dt_to_str(cur_time), str(e)))

            cur_time = get_time_offset(cur_time, gran, +1)
        return STATUS_SUCCESS, ''
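
The loop above is paced by get_time_offset, which shifts a timestamp by a signed number of granularity steps. Its implementation is not part of this excerpt; a simplified sketch covering the common granularity names (the exact names and the custom-granularity handling in the real helper may differ) might look like this:

from datetime import timedelta

def get_time_offset(timestamp, gran, offset):
    # Sketch only: shift `timestamp` by `offset` steps of the given granularity.
    # `gran` is a (granularityName, granularityAmount) tuple as used in code example #7;
    # granularityAmount is assumed to be the step length in seconds for 'Custom'.
    name, amount = gran
    seconds_per_step = {
        'Minutely': 60,
        'Hourly': 3600,
        'Daily': 86400,
        'Weekly': 7 * 86400,
        'Custom': amount or 1,
    }
    if name not in seconds_per_step:
        raise ValueError('Unsupported granularity: %s' % name)
    return timestamp + timedelta(seconds=offset * seconds_per_step[name])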