def save_metadata(self):
    """Persist the column index and column metadata to the metadata JSON file.

    Writes {"_col_index": ..., "_col_metadata": ...} to
    self.metadata_file_path via save_to_JSON.  On any failure, logs a
    warning (including the offending path) and re-raises the exception.
    """
    try:
        json_ = {"_col_index": self.col_index, "_col_metadata": self.col_metadata}
        # Bug fix: the format string had only {0}, so the file path argument
        # was silently dropped and log_prefix replaced the intended prefix
        # slot.  Match the "{0}: ..." pattern used by the warnings below.
        log.info("{0}: Saving Column Metadata to {1}".format(self.log_prefix, self.metadata_file_path))
        save_to_JSON(self.metadata_file_path, json_)
    except Exception as e:  # 'as' syntax is valid on Python 2.6+ and 3.x
        log.warning("{0}: Failed to save Column Metadata.".format(self.log_prefix))
        log.warning(e)
        log.warning("{0}: File path was: {1}".format(self.log_prefix, self.metadata_file_path))
        raise  # bare raise preserves the original traceback (raise e truncated it on Py2)
# Example #2
	def generate_d3_JSON_ParallelCoords(self, hndl_Res):
		"""Write the three d3 JSON files for hndl_Res: the historical line
		graph, the parallel-coordinates prediction points, and the model
		metadata keyed by model id."""
		# Historical line graph: one JSON record per (date, value) pair.
		history_dates, history_values = hndl_Res.get_resp_word_raw_values()
		history_json = [
			DATA_SERIES_TO_JSON(day, val)
			for day, val in zip(history_dates, history_values)
		]
		save_to_JSON(get_json_history_path(hndl_Res.file_name), history_json)
		# Parallel coordinates: collect every (model, date, value) point and
		# register each model id seen (its metadata is filled in afterwards).
		models = {}
		prediction_points = []
		for pred_date in hndl_Res.get_prediction_dates():
			model_idxs, pred_values = hndl_Res.get_values_by_row(pred_date)
			models.update(dict.fromkeys(model_idxs))
			for model_id, value in zip(model_idxs, pred_values):
				prediction_points.append({
					JSON_MODEL_ID: model_id,
					JSON_DATE_KEY: dt_epoch_to_str_Y_M_D(pred_date),
					JSON_VALUE_KEY: value
				})
		save_to_JSON(get_json_predictions_path(hndl_Res.file_name), prediction_points)
		# Model metadata: resolve each registered model id to its record.
		for model_id in models.keys():
			models[model_id] = hndl_Res.get_model_metadata(model_id)
		save_to_JSON(get_json_model_path(hndl_Res.file_name), models)
		
	def generate_d3_JSON_ParallelCoords(self):
		"""Generate synthetic d3 test data and write it under the name 'test1'.

		Builds a 10-year monthly 'history' series plus 12 months of random
		'model predictions' (40 fake models per date), then writes the three
		JSON files: history line, parallel-coordinates points, and model
		metadata.

		NOTE(review): this redefines generate_d3_JSON_ParallelCoords declared
		earlier in the class, silently shadowing that implementation; the
		commented-out __main__ block calls this no-argument form.  Consider
		giving this test-data generator a distinct name.
		"""
		# Generate 'History'
		n = 120    # 10 years of monthly observations
		n_fd = 12  # number of forward months to predict
		series = util_Tst.create_test_data_correlated_returns(n=n, numDims=1, includeResponse=False)
		dt = util_Tst.create_monthly_date_range(n=n)
		vals = series['data']
		json_history = [DATA_SERIES_TO_JSON(d, v) for (d, v) in zip(dt, vals)]
		# Generate Predictions: random walk whose stdev grows with the square
		# root of the number of periods ahead, scaled by the historical
		# first-order-difference volatility.
		std = np_std(transform_FOD_BackwardLooking(vals, {utl_Trns.FIRST_ORDER_DIFF_TIME: 1}))
		end_val = vals[-1, 0]
		def get_random_prediction_values(per_fd):
			# NOTE(review): per_fd == 0 on the first prediction date, which
			# makes every model predict exactly end_val there — confirm that
			# zero variance on the first forward month is intended.
			numPreds = 40
			preds = []
			for _ in xrange(numPreds):
				preds.append(end_val + normal() * std * sqrt(per_fd))
			return (range(numPreds), preds)
		def get_model_metadata(model_idx):
			# Minimal fake metadata record for one model.
			return {
				JSON_MODEL_ID: model_idx,
				JSON_MODEL_CONFIDENCE: random(),
				JSON_MODEL_DESC: 'junkdesc ' + str(model_idx)
			}
		end_dt = dt[-1]
		prd_dt = util_Tst.create_monthly_date_range(n=n_fd+1, startEpoch=end_dt+10000)  # hacky, but end of next month
		models = {}
		preds = []
		# Loop variable renamed from 'dt' so it no longer shadows the
		# history date range computed above.
		for (i, pred_dt) in enumerate(prd_dt):
			(model_idxs, pred_values) = get_random_prediction_values(i)
			models.update(dict.fromkeys(model_idxs))
			for (md, vl) in zip(model_idxs, pred_values):
				preds.append({
					JSON_MODEL_ID: md,
					JSON_DATE_KEY: dt_epoch_to_str_Y_M_D(pred_dt),
					JSON_VALUE_KEY: vl
				})
		# Iterate a snapshot of the keys while mutating the dict: required
		# under Python 3 (keys() is a live view), harmless under Python 2.
		for md in list(models):
			models[md] = get_model_metadata(md)
		# Save data
		dataName = 'test1'
		save_to_JSON(get_json_history_path(dataName), json_history)
		save_to_JSON(get_json_predictions_path(dataName), preds)
		save_to_JSON(get_json_model_path(dataName), models)


# if __name__ == '__main__':
# 	generator = EMF_TestDataGenerator()
# 	generator.generate_d3_JSON_ParallelCoords()