Example #1
import numpy as np
from openmdao.components.meta_model_structured_comp import MetaModelStructuredComp

def propeller_map_highpower(vec_size=1):
    # Data from https://frautech.wordpress.com/2011/01/28/design-fridays-thats-a-big-prop/
    J = np.linspace(0.0,4.0,9)
    cp = np.linspace(0.0,2.5,13)

    # data = np.array([[0.28,0.51,0.65,0.66,0.65,0.64,0.63,0.62,0.61],
    #                         [0.27,0.50,0.71,0.82,0.81,0.70,0.68,0.67,0.66],
    #                         [0.26,0.49,0.72,0.83,0.86,0.85,0.75,0.70,0.69],
    #                         [0.25,0.45,0.71,0.82,0.865,0.875,0.84,0.79,0.72],
    #                         [0.24,0.42,0.69,0.815,0.87,0.885,0.878,0.84,0.80],
    #                         [0.23,0.40,0.65,0.81,0.865,0.89,0.903,0.873,0.83],
    #                         [0.22,0.38,0.61,0.78,0.85,0.88,0.91,0.90,0.86],
    #                         [0.21,0.34,0.58,0.73,0.83,0.876,0.904,0.91,0.88],
    #                         [0.20,0.31,0.53,0.71,0.81,0.87,0.895,0.91,0.882]])
    data = np.array([[0.28,0.51,0.65,0.66,0.65,0.64,0.63,0.62,0.61],
                            [0.20,0.50,0.71,0.82,0.81,0.70,0.68,0.67,0.66],
                            [0.19,0.49,0.72,0.83,0.86,0.85,0.75,0.70,0.69],
                            [0.18,0.45,0.71,0.82,0.865,0.875,0.84,0.79,0.72],
                            [0.17,0.42,0.69,0.815,0.87,0.885,0.878,0.84,0.80],
                            [0.155,0.40,0.65,0.81,0.865,0.89,0.903,0.873,0.83],
                            [0.13,0.38,0.61,0.78,0.85,0.88,0.91,0.90,0.86],
                            [0.12,0.34,0.58,0.73,0.83,0.876,0.904,0.91,0.88],
                            [0.10,0.31,0.53,0.71,0.81,0.87,0.895,0.91,0.882],
                            [0.08,0.25,0.44,0.62,0.75,0.84,0.88,0.89,0.87],
                            [0.06,0.18,0.35,0.50,0.68,0.79,0.86,0.86,0.85],
                            [0.05,0.14,0.25,0.40,0.55,0.70,0.79,0.80,0.72],
                            [0.04,0.12,0.19,0.29,0.40,0.50,0.60,0.60,0.50]])

    data[:,0] = np.zeros(13)  # zero propulsive efficiency at J = 0 for every cp
    # Create regular grid interpolator instance
    interp = MetaModelStructuredComp(method='cubic',extrapolate=False,vec_size=vec_size)
    interp.add_input('cp', 0.3, cp)
    interp.add_input('J', 1, J)
    interp.add_output('eta_prop', 0.8, data)
    return interp
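
A minimal usage sketch (not from the original listing) showing how the returned component can be wired into a model; with vec_size=3 a single execution interpolates three (cp, J) pairs at once:

import numpy as np
from openmdao.api import Problem, Group

prob = Problem(Group())
prob.model.add_subsystem('prop_map', propeller_map_highpower(vec_size=3),
                         promotes=['*'])
prob.setup()
prob['cp'] = np.array([0.2, 0.5, 1.0])
prob['J'] = np.array([1.0, 2.0, 3.0])
prob.run_model()
print(prob['eta_prop'])   # one efficiency per (cp, J) pair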
Example #2
def propeller_map_quadratic(vec_size=1):
    #enter three points in xyz array format:
    #the center of the efficiency bucket (J, cp, eta)
    #two more points (J, cp, eta)
    #points = np.array([[2.2,0.18,0.93], [0.2,0.03,0.40], [0.2,0.7,0.02]])
    points = np.array([[3.1,1.55,0.91], [0,0,0.3], [1.0,2.0,0.5]])
    #solve a linear system for the constants in AJ^2+BJ+C*cp^2+D*cp+E = eta
    #the first point meets the value and has zero gradient
    #the second and third points meet value
    vals = np.column_stack((points[:,0]**2,points[:,0],points[:,1]**2,points[:,1],np.ones((3,))))
    vals = np.vstack((vals,np.array([2*points[0,0],1,0,0,0]),np.array([0,0,2*points[0,1],1,0])))
    rhs = np.concatenate([points[:,2],np.zeros(2)])
    coeffs = np.linalg.solve(vals,rhs)
    Jvec = np.linspace(0,4.0,20)
    cpvec = np.linspace(0,2.0,10)
    J,cp=np.meshgrid(Jvec,cpvec)
    eta = coeffs[0]*J**2+coeffs[1]*J+coeffs[2]*cp**2+coeffs[3]*cp+coeffs[4]
    debug=False
    if debug:
        import matplotlib.pyplot as plt
        CS = plt.contour(J,cp,eta)
        plt.clabel(CS, inline=1, fontsize=10)
        plt.show()
    interp = MetaModelStructuredComp(method='cubic',extrapolate=False,vec_size=vec_size)
    interp.add_input('cp', 0.3, cpvec)
    interp.add_input('J', 1, Jvec)
    interp.add_output('eta_prop', 0.8, eta)
    return interp
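
As a sanity check (a standalone sketch, not part of the original), the 5x5 system can be verified directly: the fitted paraboloid should reproduce all three anchor points and have zero gradient in both J and cp at the bucket center:

import numpy as np

points = np.array([[3.1, 1.55, 0.91], [0.0, 0.0, 0.3], [1.0, 2.0, 0.5]])
A = np.column_stack((points[:, 0]**2, points[:, 0],
                     points[:, 1]**2, points[:, 1], np.ones(3)))
A = np.vstack((A,
               [2 * points[0, 0], 1, 0, 0, 0],    # d(eta)/dJ  = 0 at point 0
               [0, 0, 2 * points[0, 1], 1, 0]))   # d(eta)/dcp = 0 at point 0
rhs = np.concatenate([points[:, 2], np.zeros(2)])
c = np.linalg.solve(A, rhs)

def eta(J, cp):
    return c[0]*J**2 + c[1]*J + c[2]*cp**2 + c[3]*cp + c[4]

assert np.allclose([eta(*p[:2]) for p in points], points[:, 2])
# gradient at the bucket center is (2*A*J + B, 2*C*cp + D) = (0, 0)
assert np.allclose([2*c[0]*points[0, 0] + c[1],
                    2*c[2]*points[0, 1] + c[3]], 0.0)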
Example #3
    def setUp(self):

        model = Group()
        ivc = IndepVarComp()

        mapdata = SampleMap()

        params = mapdata.param_data
        x, y, z = params
        outs = mapdata.output_data
        z = outs[0]
        ivc.add_output('x', x['default'], units=x['units'])
        ivc.add_output('y', y['default'], units=y['units'])
        ivc.add_output('z', z['default'], units=z['units'])

        model.add_subsystem('des_vars', ivc, promotes=["*"])

        comp = MetaModelStructuredComp(method='slinear', extrapolate=True)

        for param in params:
            comp.add_input(param['name'], param['default'], param['values'])

        for out in outs:
            comp.add_output(out['name'], out['default'], out['values'])

        model.add_subsystem('comp', comp, promotes=["*"])
        self.prob = Problem(model)
        self.prob.setup()
        self.prob['x'] = 1.0
        self.prob['y'] = 0.75
        self.prob['z'] = -1.7
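
The fixture above only builds and primes the model; a test method would then run it and read the interpolated outputs, e.g. (a sketch; `f` and `g` are the SampleMap output names used in the later examples):

    def test_interp_values(self):
        self.prob.run_model()
        # interpolated outputs defined by SampleMap
        print(self.prob['f'], self.prob['g'])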
Example #4
    def setup(self):
        """Add the old trajectory as a meta model."""
        nn = self.options['num_nodes']
        nv = self.options['num_vehicles']
        separation = self.options['separation']

        # old_t, old_X, old_Y (and `limit` below) are assumed to be module-level
        # arrays/constants holding the previously solved trajectory.
        mm = MetaModelStructuredComp(method='slinear',
                                     vec_size=nn,
                                     extrapolate=True)
        mm.add_input('t', val=np.zeros(nn), training_data=old_t)
        mm.add_output('interp_x', val=np.zeros(nn), training_data=old_X)
        mm.add_output('interp_y', val=np.zeros(nn), training_data=old_Y)
        self.add_subsystem('mm', mm, promotes=['*'])

        # now add in trajectories to be solved with dymos
        self.add_subsystem('vehicles',
                           Vehicles(num_nodes=nn, num_v=nv),
                           promotes=['*'])

        # add in distance calcs for solved trajectories
        self.add_subsystem('distances1',
                           GridDistComp(num_nodes=nn, num_v=nv, limit=limit),
                           promotes=['*'])

        # add in distance calcs for solved trajectories to the fixed ones
        self.add_subsystem('distances2',
                           SingleDistance(num_nodes=nn, num_v=nv),
                           promotes=['*'])
        self.connect('interp_x', 'fixed_x')
        self.connect('interp_y', 'fixed_y')
Example #5
def static_propeller_map_Raymer(vec_size=1):
    # Data from Raymer for static thrust of 3-bladed propeller
    cp = np.linspace(0.0,0.60,25)
    raymer_static_data = np.array([2.5,3.0,2.55,2.0,1.85,1.5,1.25,1.05,0.95,0.86,
                                   0.79,0.70,0.62,0.53,0.45,0.38,0.32,0.28,0.24,
                                   0.21,0.18,0.16,0.14,0.12,0.10])
    interp = MetaModelStructuredComp(method='cubic',extrapolate=True,vec_size=vec_size)
    interp.add_input('cp',0.15,cp)
    interp.add_output('ct_over_cp',1.5,raymer_static_data)
    return interp
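
Not part of the original source: a sketch of turning the ct_over_cp output back into dimensional static thrust. Using the standard definitions CP = P / (rho n^3 D^5) and CT = T / (rho n^2 D^4), thrust reduces to T = (CT/CP) * P / (n D); all numeric values below are illustrative.

from openmdao.api import Problem, Group

rho = 1.225   # air density, kg/m^3 (illustrative)
n = 30.0      # propeller speed, rev/s (illustrative)
D = 2.0       # propeller diameter, m (illustrative)
P = 180e3     # shaft power, W (illustrative)

cp = P / (rho * n**3 * D**5)   # power coefficient, ~0.17 here

prob = Problem(Group())
prob.model.add_subsystem('static_map', static_propeller_map_Raymer(),
                         promotes=['*'])
prob.setup()
prob['cp'] = cp
prob.run_model()

T = float(prob['ct_over_cp']) * P / (n * D)   # static thrust, N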
Example #6
def static_propeller_map_highpower(vec_size=1):
    # Factor up the Raymer static thrust data to match the high power data
    cp = np.linspace(0.0,1.0,41)
    factored_raymer_static_data = np.array([2.5,3.0,2.55,2.0,1.85,1.5,1.25,1.05,0.95,0.86,
                                            0.79,0.70,0.62,0.53,0.45,0.38,0.32,0.28,0.24,0.21,
                                            0.18,0.16,0.14,0.12,0.10,0.09,0.08,0.08,0.08,0.08,
                                            0.08,0.08,0.08,0.08,0.08,0.08,0.08,0.08,0.08,0.08,
                                            0.08])
    factored_raymer_static_data[6:] = factored_raymer_static_data[6:]*1.2
    interp = MetaModelStructuredComp(method='cubic',extrapolate=True,vec_size=vec_size)
    interp.add_input('cp',0.15,cp)
    interp.add_output('ct_over_cp',1.5,factored_raymer_static_data)
    return interp
Example #7
    def test_shape(self):
        import numpy as np
        from numpy.testing import assert_almost_equal
        from openmdao.api import Group, Problem, IndepVarComp
        from openmdao.components.meta_model_structured_comp import MetaModelStructuredComp

        # create input param training data, of sizes 25, 5, and 10 points resp.
        p1 = np.linspace(0, 100, 25)
        p2 = np.linspace(-10, 10, 5)
        p3 = np.linspace(0, 1, 10)

        # can use meshgrid to create a 3D array of test data
        P1, P2, P3 = np.meshgrid(p1, p2, p3, indexing='ij')
        f = np.sqrt(P1) + P2 * P3

        # verify the shape matches the order and size of the input params
        print(f.shape)

        # Create regular grid interpolator instance
        interp = MetaModelStructuredComp(method='cubic')
        interp.add_input('p1', 0.5, training_data=p1)
        interp.add_input('p2', 0.0, training_data=p2)
        interp.add_input('p3', 3.14, training_data=p3)

        interp.add_output('f', 0.0, training_data=f)

        # Set up the OpenMDAO model
        model = Group()
        model.add_subsystem('comp', interp, promotes=["*"])
        prob = Problem(model)
        prob.setup()

        # set inputs
        prob['p1'] = 55.12
        prob['p2'] = -2.14
        prob['p3'] = 0.323

        prob.run_model()

        computed = prob['f']
        actual = 6.73306472

        assert_almost_equal(computed, actual)

        # we can verify all gradients by checking against finite-difference
        prob.check_partials(compact_print=True)
Example #8
def propeller_map_scaled(vec_size=1,design_J=2.2,design_cp=0.2):
    # Data from Raymer, Aircraft Design A Conceptual Approach, 4th Ed pg 498 fig 13.12 extrapolated in low cp range
    # For a 3 bladed constant-speed propeller, scaled for higher design Cp
    J = np.linspace(0.2,2.8*design_J/2.2,14)
    cp = np.array([0,0.1,0.2,0.3,0.4,0.5])*design_cp/0.2

    #raymer_data = np.ones((9,14))*0.75
    raymer_data = np.array([[0.45,0.6,0.72,0.75,0.70,0.65,0.6,0.55,0.5,0.45,0.40,0.35,0.3,0.25],
                            [0.35,0.6,0.74,0.83,0.86,0.88,0.9,0.9,0.88,0.85,0.83,0.8,0.75,0.7],
                            [0.2,0.35,0.55,0.7,0.8,0.85,0.87,0.9,0.91,0.92,0.9,0.9,0.88,0.87],
                            [0.12,0.22,0.36,0.51,0.66,0.75,0.8,0.85,0.87,0.88,0.91,0.905,0.902,0.9],
                            [0.07,0.15,0.29,0.36,0.45,0.65,0.73,0.77,0.83,0.85,0.87,0.875,0.88,0.895],
                            [0.05,0.12,0.25,0.32,0.38,0.50,0.61,0.72,0.77,0.79,0.83,0.85,0.86,0.865]])
    # Create regular grid interpolator instance
    interp = MetaModelStructuredComp(method='cubic',extrapolate=True,vec_size=vec_size)
    interp.add_input('cp', 0.3, cp)
    interp.add_input('J', 1, J)
    interp.add_output('eta_prop', 0.8, raymer_data)
    return interp
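
A brief usage note (a sketch, not from the original): design_J and design_cp linearly stretch the Raymer axes so the table's efficiency bucket lands at the requested design point.

# Build a map whose peak-efficiency region sits near J = 2.6, cp = 0.3
# instead of the baseline Raymer values of J = 2.2, cp = 0.2:
interp = propeller_map_scaled(vec_size=1, design_J=2.6, design_cp=0.3)
# The grid axes scale with the design point:
#   J  axis: np.linspace(0.2, 2.8*2.6/2.2, 14)
#   cp axis: np.array([0,0.1,0.2,0.3,0.4,0.5])*0.3/0.2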
Example #9
def propeller_map_Raymer(vec_size=1):
    # Data from Raymer, Aircraft Design A Conceptual Approach, 4th Ed pg 498 fig 13.12 extrapolated in low cp range
    # For a 3 bladed constant-speed propeller
    J = np.linspace(0.2,2.8,14)
    cp = np.array([0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8])

    #raymer_data = np.ones((9,14))*0.75
    raymer_data = np.array([[0.45,0.6,0.72,0.75,0.70,0.65,0.6,0.55,0.5,0.45,0.40,0.35,0.3,0.25],
                            [0.35,0.6,0.74,0.83,0.86,0.88,0.9,0.9,0.88,0.85,0.83,0.8,0.75,0.7],
                            [0.2,0.35,0.55,0.7,0.8,0.85,0.87,0.9,0.91,0.92,0.9,0.9,0.88,0.87],
                            [0.12,0.22,0.36,0.51,0.66,0.75,0.8,0.85,0.87,0.88,0.91,0.905,0.902,0.9],
                            [0.07,0.15,0.29,0.36,0.45,0.65,0.73,0.77,0.83,0.85,0.87,0.875,0.88,0.895],
                            [0.05,0.12,0.25,0.32,0.38,0.50,0.61,0.72,0.77,0.79,0.83,0.85,0.86,0.865],
                            [0.04,0.11,0.19,0.26,0.33,0.40,0.51,0.61,0.71,0.74,0.78,0.815,0.83,0.85],
                            [0.035,0.085,0.16,0.22,0.28,0.35,0.41,0.52,0.605,0.69,0.74,0.775,0.8,0.82],
                            [0.03,0.06,0.13,0.19,0.24,0.31,0.35,0.46,0.52,0.63,0.71,0.75,0.78,0.8]])
    # Create regular grid interpolator instance
    interp = MetaModelStructuredComp(method='cubic',extrapolate=True,vec_size=vec_size)
    interp.add_input('cp', 0.3, cp)
    interp.add_input('J', 1, J)
    interp.add_output('eta_prop', 0.8, raymer_data)
    return interp
Example #10
    def test_meta_model(self):
        from openmdao.components.tests.test_meta_model_structured_comp import SampleMap
        from openmdao.components.meta_model_structured_comp import MetaModelStructuredComp

        # PYXDSM_OUT, QUIET and SHOW are module-level constants in the original test file
        filename = 'pyxdsm_meta_model'
        out_format = PYXDSM_OUT
        model = Group()
        ivc = IndepVarComp()

        mapdata = SampleMap()

        params = mapdata.param_data
        x, y, z = params
        outs = mapdata.output_data
        z = outs[0]
        ivc.add_output('x', x['default'], units=x['units'])
        ivc.add_output('y', y['default'], units=y['units'])
        ivc.add_output('z', z['default'], units=z['units'])

        model.add_subsystem('des_vars', ivc, promotes=["*"])

        comp = MetaModelStructuredComp(method='slinear', extrapolate=True)

        for param in params:
            comp.add_input(param['name'], param['default'], param['values'])

        for out in outs:
            comp.add_output(out['name'], out['default'], out['values'])

        model.add_subsystem('comp', comp, promotes=["*"])
        prob = Problem(model)
        prob.setup(check=False)
        prob.final_setup()

        write_xdsm(prob,
                   filename=filename,
                   out_format=out_format,
                   quiet=QUIET,
                   show_browser=SHOW,
                   show_parallel=True)
        # Check if file was created
        self.assertTrue(os.path.isfile('.'.join([filename, out_format])))
Example #11
    def test_training_gradient(self):
        model = Group()
        ivc = IndepVarComp()

        mapdata = SampleMap()

        params = mapdata.param_data
        outs = mapdata.output_data

        ivc.add_output('x', np.array([-0.3, 0.7, 1.2]))
        ivc.add_output('y', np.array([0.14, 0.313, 1.41]))
        ivc.add_output('z', np.array([-2.11, -1.2, 2.01]))

        ivc.add_output('f_train', outs[0]['values'])
        ivc.add_output('g_train', outs[1]['values'])

        comp = MetaModelStructuredComp(training_data_gradients=True,
                                       method='cubic',
                                       num_nodes=3)
        for param in params:
            comp.add_input(param['name'], param['default'], param['values'])

        for out in outs:
            comp.add_output(out['name'], out['default'], out['values'])

        model.add_subsystem('ivc', ivc, promotes=["*"])
        model.add_subsystem('comp',
                            comp,
                            promotes=["*"])


        prob = Problem(model)
        prob.setup()
        prob.run_model()

        val0 = np.array([ 50.26787317,  49.76106232,  19.66117913])
        val1 = np.array([-32.62094041, -31.67449135, -27.46959668])

        tol = 1e-5
        assert_rel_error(self, prob['f'], val0, tol)
        assert_rel_error(self, prob['g'], val1, tol)
        self.run_and_check_derivs(prob)
Example #12
    def test_raise_out_of_bounds_error(self):
        model = Group()
        ivc = IndepVarComp()

        mapdata = SampleMap()

        params = mapdata.param_data
        x, y, z = params
        outs = mapdata.output_data
        z = outs[0]
        ivc.add_output('x', x['default'], units=x['units'])
        ivc.add_output('y', y['default'], units=y['units'])
        ivc.add_output('z', z['default'], units=z['units'])

        model.add_subsystem('des_vars', ivc, promotes=["*"])

        # Need to make sure extrapolate is False for bounds to be checked
        comp = MetaModelStructuredComp(method='slinear', extrapolate=False)

        for param in params:
            comp.add_input(param['name'], param['default'], param['values'])

        for out in outs:
            comp.add_output(out['name'], out['default'], out['values'])

        model.add_subsystem('comp', comp, promotes=["*"])
        self.prob = Problem(model)
        self.prob.setup()

        self.prob['x'] = 1.0
        self.prob['y'] = 0.75
        self.prob['z'] = 9.0 # intentionally set to be out of bounds

        # The output name is given as a regex because the exception could be raised
        # for either f or g first; the evaluation order follows the dict keys, which
        # are only guaranteed to preserve insertion order in Python 3.6+.
        msg = r"Error interpolating output '[f|g]' in 'comp' because input 'comp.z' was " \
              r"out of bounds \('.*', '.*'\) with value '9.0'"
        with assertRaisesRegex(self, ValueError, msg):
            self.run_and_check_derivs(self.prob)
Example #13
    def test_xor(self):
        import numpy as np
        from numpy.testing import assert_almost_equal
        from openmdao.api import Group, Problem, IndepVarComp
        from openmdao.components.meta_model_structured_comp import MetaModelStructuredComp

        # Create regular grid interpolator instance
        xor_interp = MetaModelStructuredComp(method='slinear')

        # set up inputs and outputs
        xor_interp.add_input('x', 0.0, training_data=np.array([0.0, 1.0]), units=None)
        xor_interp.add_input('y', 1.0, training_data=np.array([0.0, 1.0]), units=None)

        xor_interp.add_output('xor', 1.0, training_data=np.array([[0.0, 1.0], [1.0, 0.0]]), units=None)

        # Set up the OpenMDAO model
        model = Group()
        ivc = IndepVarComp()
        ivc.add_output('x', 0.0)
        ivc.add_output('y', 1.0)
        model.add_subsystem('ivc', ivc, promotes=["*"])
        model.add_subsystem('comp', xor_interp, promotes=["*"])
        prob = Problem(model)
        prob.setup()

        # Now test out a 'fuzzy' XOR
        prob['x'] = 0.9
        prob['y'] = 0.001242

        prob.run_model()

        computed = prob['xor']
        actual = 0.8990064

        assert_almost_equal(computed, actual)

        # we can verify all gradients by checking against finite-difference
        prob.check_partials(compact_print=True)
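
The expected value in this test can be checked by hand (not in the original): with method='slinear', the component bilinearly interpolates the XOR corner table, which gives x(1-y) + y(1-x):

x, y = 0.9, 0.001242
# bilinear interpolation of the corner table [[0, 1], [1, 0]]
xor_fuzzy = x * (1 - y) + y * (1 - x)
print(xor_fuzzy)   # 0.8990064, the test's expected value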
class MetaModelVisualization(object):
    """
    Top-level container for the Meta Model Visualization.

    Attributes
    ----------
    prob : Problem
        OpenMDAO Problem instance used to evaluate the meta model
    meta_model : MetaModel
        Reference to the empty meta model component
    resolution : int
        Number used to calculate width and height of contour plot
    is_structured_meta_model : bool
        Flag signaling whether the meta model is structured or unstructured
    slider_source : ColumnDataSource
        Data source containing dictionary of sliders
    contour_training_data_source : ColumnDataSource
        Data source containing dictionary of training data points
    bottom_plot_source : ColumnDataSource
        Data source containing data for the bottom subplot
    bottom_plot_scatter_source : ColumnDataSource
        Data source containing scatter point data for the bottom subplot
    right_plot_source : ColumnDataSource
        Data source containing data for the right subplot
    right_plot_scatter_source : ColumnDataSource
        Data source containing scatter point data for the right subplot
    contour_plot_source : ColumnDataSource
        Data source containing data for the contour plot
    input_names : list
        List of input data titles as strings
    output_names : list
        List of output data titles as strings
    training_inputs : dict
        Dictionary of input training data
    x_input_select : Select
        Bokeh Select object containing a list of inputs for the x axis
    y_input_select : Select
        Bokeh Select object containing a list of inputs for the y axis
    output_select : Select
        Bokeh Select object containing a list of inputs for the outputs
    x_input_slider : Slider
        Bokeh Slider object containing a list of input values for the x axis
    y_input_slider : Slider
        Bokeh Slider object containing a list of input values for the y axis
    slider_dict : dict
        Dictionary of slider names and their respective slider objects
    predict_inputs : dict
        Dictionary containing training data points to predict at.
    num_inputs : int
        Number of inputs
    num_outputs : int
        Number of outputs
    limit_range : array
        Array containing the range of each input
    scatter_distance : TextInput
        Text input for user to enter custom value to calculate distance of training points around
        slice line
    right_alphas : array
        Array of points containing alpha values for right plot
    bottom_alphas : array
        Array of points containing alpha values for bottom plot
    dist_range : float
        Value taken from scatter_distance used for calculating distance of training points around
        slice line
    x_index : int
        Value of x axis column
    y_index : int
        Value of y axis column
    output_variable : int
        Value of output axis column
    sliders_and_selects : layout
        Layout containing the sliders and select elements
    doc_layout : layout
        Contains first row of plots
    doc_layout2 : layout
        Contains second row of plots
    Z : array
        A 2D array containing contour plot data
    """

    def __init__(self, model, resolution=50, doc=None):
        """
        Initialize parameters.

        Parameters
        ----------
        model : MetaModelComponent
            Reference to meta model component
        resolution : int
            Value used to calculate the size of contour plot meshgrid
        doc : Document
            The bokeh document to build.
        """
        self.prob = Problem()
        self.resolution = resolution
        logging.getLogger("bokeh").setLevel(logging.ERROR)

        # If the incoming surrogate model is unstructured
        if isinstance(model, MetaModelUnStructuredComp):
            self.is_structured_meta_model = False

            # Create list of input names, check if it has more than one input, then create list
            # of outputs
            self.input_names = [name[0] for name in model._surrogate_input_names]
            if len(self.input_names) < 2:
                raise ValueError('Must have more than one input value')
            self.output_names = [name[0] for name in model._surrogate_output_names]

            # Create reference for unstructured component
            self.meta_model = MetaModelUnStructuredComp(
                default_surrogate=model.options['default_surrogate'])

        # If the incoming surrogate model is structured
        elif isinstance(model, MetaModelStructuredComp):
            self.is_structured_meta_model = True

            self.input_names = [name for name in model._var_rel_names['input']]

            if len(self.input_names) < 2:
                raise ValueError('Must have more than one input value')

            self.output_names = [name for name in model._var_rel_names['output']]

            self.meta_model = MetaModelStructuredComp(
                distributed=model.options['distributed'],
                extrapolate=model.options['extrapolate'],
                method=model.options['method'],
                training_data_gradients=model.options['training_data_gradients'],
                vec_size=1)

        # Pair input list names with their respective data
        self.training_inputs = {}

        self._setup_empty_prob_comp(model)

        # Set up dropdown menus for the x/y inputs and the output value
        self.x_input_select = Select(title="X Input:", value=[x for x in self.input_names][0],
                                     options=[x for x in self.input_names])
        self.x_input_select.on_change('value', self._x_input_update)

        self.y_input_select = Select(title="Y Input:", value=[x for x in self.input_names][1],
                                     options=[x for x in self.input_names])
        self.y_input_select.on_change('value', self._y_input_update)

        self.output_select = Select(title="Output:", value=[x for x in self.output_names][0],
                                    options=[x for x in self.output_names])
        self.output_select.on_change('value', self._output_value_update)

        # Create sliders for each input
        self.slider_dict = {}
        self.predict_inputs = {}
        for title, values in self.training_inputs.items():
            slider_data = np.linspace(min(values), max(values), self.resolution)
            self.predict_inputs[title] = slider_data
            # Calculates the distance between slider ticks
            slider_step = slider_data[1] - slider_data[0]
            slider_object = Slider(start=min(values), end=max(values), value=min(values),
                                   step=slider_step, title=str(title))
            self.slider_dict[title] = slider_object

        self._slider_attrs()

        # Length of inputs and outputs
        self.num_inputs = len(self.input_names)
        self.num_outputs = len(self.output_names)

        # Precalculate the problem bounds.
        limits = np.array([[min(value), max(value)] for value in self.training_inputs.values()])
        self.limit_range = limits[:, 1] - limits[:, 0]

        # Positional indices
        self.x_index = 0
        self.y_index = 1
        self.output_variable = self.output_names.index(self.output_select.value)

        # Data sources are filled with initial values
        # Slider Column Data Source
        self.slider_source = ColumnDataSource(data=self.predict_inputs)

        # Contour plot Column Data Source
        self.contour_plot_source = ColumnDataSource(data=dict(
            z=np.random.rand(self.resolution, self.resolution)))
        self.contour_training_data_source = ColumnDataSource(
            data=dict(x=np.repeat(0, self.resolution), y=np.repeat(0, self.resolution)))

        # Bottom plot Column Data Source
        self.bottom_plot_source = ColumnDataSource(data=dict(
            x=np.repeat(0, self.resolution), y=np.repeat(0, self.resolution)))
        self.bottom_plot_scatter_source = ColumnDataSource(data=dict(
            bot_slice_x=np.repeat(0, self.resolution), bot_slice_y=np.repeat(0, self.resolution)))

        # Right plot Column Data Source
        self.right_plot_source = ColumnDataSource(data=dict(
            x=np.repeat(0, self.resolution), y=np.repeat(0, self.resolution)))
        self.right_plot_scatter_source = ColumnDataSource(data=dict(
            right_slice_x=np.repeat(0, self.resolution),
            right_slice_y=np.repeat(0, self.resolution)))

        # Text input to change the search distance used when looking for nearest data points
        self.scatter_distance = TextInput(value="0.1", title="Scatter Distance")
        self.scatter_distance.on_change('value', self._scatter_input)
        self.dist_range = float(self.scatter_distance.value)

        # Grouping all of the sliders and dropdowns into one column
        sliders = [value for value in self.slider_dict.values()]
        sliders.extend(
            [self.x_input_select, self.y_input_select, self.output_select, self.scatter_distance])
        self.sliders_and_selects = row(
            column(*sliders))

        # Layout creation
        self.doc_layout = row(self._contour_data(), self._right_plot(), self.sliders_and_selects)
        self.doc_layout2 = row(self._bottom_plot())

        if doc is None:
            doc = curdoc()

        doc.add_root(self.doc_layout)
        doc.add_root(self.doc_layout2)
        doc.title = 'Meta Model Visualization'

    def _setup_empty_prob_comp(self, metamodel):
        """
        Copy data from the surrogate reference into a new surrogate model inside an empty Problem.

        Parameters
        ----------
        metamodel : MetaModelComponent
            Reference to meta model component

        """
        # Check for structured or unstructured
        if self.is_structured_meta_model:
            # Loop through the input names
            for idx, name in enumerate(self.input_names):
                # Check for no training data
                try:
                    # Append the input data/titles to a dictionary
                    self.training_inputs[name] = metamodel.inputs[idx]
                    # Also, append the data as an 'add_input' to the model reference
                    self.meta_model.add_input(name, 0.,
                                              training_data=metamodel.inputs[idx])
                except TypeError:
                    msg = "No training data present for one or more parameters"
                    raise TypeError(msg)

            # Add the outputs to the model reference
            for idx, name in enumerate(self.output_names):
                self.meta_model.add_output(
                    name, 0.,
                    training_data=metamodel.training_outputs[name])

        else:
            for name in self.input_names:
                try:
                    self.training_inputs[name] = {
                        title for title in metamodel.options['train:' + str(name)]}
                    self.meta_model.add_input(
                        name, 0.,
                        training_data=[
                            title for title in metamodel.options['train:' + str(name)]])
                except TypeError:
                    msg = "No training data present for one or more parameters"
                    raise TypeError(msg)

            for name in self.output_names:
                self.meta_model.add_output(
                    name, 0.,
                    training_data=[
                        title for title in metamodel.options['train:' + str(name)]])

        # Add the subsystem and setup
        self.prob.model.add_subsystem('interp', self.meta_model)
        self.prob.setup()

    def _slider_attrs(self):
        """
        Assign data to slider objects and callback functions.

        Parameters
        ----------
        None

        """
        for name, slider_object in self.slider_dict.items():
            # Checks if there is a callback previously assigned and then clears it
            if len(slider_object._callbacks) == 1:
                slider_object._callbacks.clear()

            # Check if the name matches the 'x input' title
            if name == self.x_input_select.value:
                # Set the object and add an event handler
                self.x_input_slider = slider_object
                self.x_input_slider.on_change('value', self._scatter_plots_update)

            # Check if the name matches the 'y input' title
            elif name == self.y_input_select.value:
                # Set the object and add an event handler
                self.y_input_slider = slider_object
                self.y_input_slider.on_change('value', self._scatter_plots_update)
            else:
                # If it is not an x or y input then just assign it the event handler
                slider_object.on_change('value', self._update)

    def _make_predictions(self, data):
        """
        Run the data parameter through the surrogate model which is given in prob.

        Parameters
        ----------
        data : dict
            Dictionary containing training points.

        Returns
        -------
        array
            np.stack of predicted points.
        """
        # Create dictionary with an empty list
        outputs = {name: [] for name in self.output_names}

        # Parse the dict into an array of shape [resolution**2, num_inputs]
        inputs = np.empty([self.resolution**2, self.num_inputs])
        for idx, values in enumerate(data.values()):
            inputs[:, idx] = values.flatten()

        # Check for structured or unstructured
        if self.is_structured_meta_model:
            # Assign each row of the data coming in to a tuple. Loop through the tuple, and append
            # the name of the input and value.
            for idx, tup in enumerate(inputs):
                for name, val in zip(data.keys(), tup):
                    self.prob[self.meta_model.name + '.' + name] = val
                self.prob.run_model()
                # Append the predicted value(s)
                for title in self.output_names:
                    outputs[title].append(
                        np.array(self.prob[self.meta_model.name + '.' + title]))

        else:
            for idx, tup in enumerate(inputs):
                for name, val in zip(data.keys(), tup):
                    self.prob[self.meta_model.name + '.' + name] = val
                self.prob.run_model()
                for title in self.output_names:
                    outputs[title].append(
                        float(self.prob[self.meta_model.name + '.' + title]))

        return stack_outputs(outputs)

    def _contour_data_calcs(self):
        """
        Parse input data into a dictionary to be predicted at.

        Parameters
        ----------
        None

        Returns
        -------
        dict
            Dictionary of training data to be predicted at.
        """
        # Create initial data array of training points
        resolution = self.resolution
        x_data = np.zeros((resolution, resolution, self.num_inputs))

        self._slider_attrs()

        # Broadcast the inputs to every row of x_data array
        x_data[:, :, :] = np.array(self.input_point_list)

        # Find the x/y input titles and match their index positions
        for idx, (title, values) in enumerate(self.slider_source.data.items()):
            if title == self.x_input_select.value:
                self.xlins_mesh = values
                x_index_position = idx
            if title == self.y_input_select.value:
                self.ylins_mesh = values
                y_index_position = idx

        # Make meshgrid from the x/y inputs to be plotted
        X, Y = np.meshgrid(self.xlins_mesh, self.ylins_mesh)
        # Move the x/y inputs to their respective positions in x_data
        x_data[:, :, x_index_position] = X
        x_data[:, :, y_index_position] = Y

        pred_dict = {}
        for idx, title in enumerate(self.slider_source.data):
            pred_dict.update({title: x_data[:, :, idx]})

        return pred_dict

    def _contour_data(self):
        """
        Create a contour plot.

        Parameters
        ----------
        None

        Returns
        -------
        Bokeh Image Plot
        """
        resolution = self.resolution
        # Output data array initialization
        y_data = np.zeros((resolution, resolution, self.num_outputs))
        self.input_point_list = [point.value for point in self.slider_dict.values()]

        # Pass the dict to make predictions and then reshape the output to
        # (resolution, resolution, number of outputs)
        y_data[:, :, :] = self._make_predictions(self._contour_data_calcs()).reshape(
            (resolution, resolution, self.num_outputs))
        # Use the output variable to pull the correct column of data from the predicted
        # data (y_data)
        self.Z = y_data[:, :, self.output_variable]
        # Reshape it to be 2D
        self.Z = self.Z.reshape(resolution, resolution)

        # Update the data source with new data
        self.contour_plot_source.data = dict(z=[self.Z])

        # Min to max of training data
        self.contour_x_range = xlins = self.xlins_mesh
        self.contour_y_range = ylins = self.ylins_mesh

        # Color bar formatting
        color_mapper = LinearColorMapper(
            palette="Viridis11", low=np.amin(self.Z), high=np.amax(self.Z))
        color_bar = ColorBar(color_mapper=color_mapper, ticker=BasicTicker(), label_standoff=12,
                             location=(0, 0))

        # Contour Plot
        self.contour_plot = contour_plot = figure(
            match_aspect=False,
            tooltips=[(self.x_input_select.value, "$x"), (self.y_input_select.value, "$y"),
                      (self.output_select.value, "@z")], tools='')
        contour_plot.x_range.range_padding = 0
        contour_plot.y_range.range_padding = 0
        contour_plot.plot_width = 600
        contour_plot.plot_height = 500
        contour_plot.xaxis.axis_label = self.x_input_select.value
        contour_plot.yaxis.axis_label = self.y_input_select.value
        contour_plot.min_border_left = 0
        contour_plot.add_layout(color_bar, 'right')
        contour_plot.x_range = Range1d(min(xlins), max(xlins))
        contour_plot.y_range = Range1d(min(ylins), max(ylins))
        contour_plot.image(image='z', source=self.contour_plot_source, x=min(xlins), y=min(ylins),
                           dh=(max(ylins) - min(ylins)), dw=(max(xlins) - min(xlins)),
                           palette="Viridis11")

        # Adding training data points overlay to contour plot
        if self.is_structured_meta_model:
            data = self._structured_training_points()
        else:
            data = self._unstructured_training_points()

        if len(data):
            # Add training data points overlay to contour plot
            data = np.array(data)
            if self.is_structured_meta_model:
                self.contour_training_data_source.data = dict(x=data[:, 0], y=data[:, 1],
                                                              z=self.meta_model.training_outputs[
                                                              self.output_select.value].flatten())
            else:
                self.contour_training_data_source.data = dict(x=data[:, 0], y=data[:, 1],
                                                              z=self.meta_model._training_output[
                                                              self.output_select.value])

            training_data_renderer = self.contour_plot.circle(
                x='x', y='y', source=self.contour_training_data_source,
                size=5, color='white', alpha=0.50)

            self.contour_plot.add_tools(HoverTool(renderers=[training_data_renderer], tooltips=[
                (self.x_input_select.value + " (train)", '@x'),
                (self.y_input_select.value + " (train)", '@y'),
                (self.output_select.value + " (train)", '@z'), ]))

        return self.contour_plot

    def _right_plot(self):
        """
        Create the right side subplot to view the projected slice.

        Parameters
        ----------
        None

        Returns
        -------
        Bokeh figure
        """
        # List of the current positions of the sliders
        self.input_point_list = [point.value for point in self.slider_dict.values()]

        # Find the title of the y input and match it with the data
        y_idx = self.y_input_select.value
        y_data = self.predict_inputs[y_idx]
        # Find the position of the x_input slider
        x_value = self.x_input_slider.value

        # Rounds the x_data to match the predict_inputs value
        subplot_value_index = np.where(
            np.around(self.predict_inputs[self.x_input_select.value], 5) ==
            np.around(x_value, 5))[0]

        # Make slice in Z data at the point calculated before and add it to the data source
        z_data = self.Z[:, subplot_value_index].flatten()

        x = z_data
        y = self.slider_source.data[y_idx]

        # Update the data source with new data
        self.right_plot_source.data = dict(x=x, y=y)

        # Create and format figure
        self.right_plot_fig = right_plot_fig = figure(
            plot_width=250, plot_height=500,
            title="{} vs {}".format(y_idx, self.output_select.value), tools="pan")
        right_plot_fig.xaxis.axis_label = self.output_select.value
        right_plot_fig.yaxis.axis_label = y_idx
        right_plot_fig.xaxis.major_label_orientation = math.pi / 9
        right_plot_fig.line(x='x', y='y', source=self.right_plot_source)
        right_plot_fig.x_range.range_padding = 0.1
        right_plot_fig.y_range.range_padding = 0.02

        # Determine distance and alpha opacity of training points
        if self.is_structured_meta_model:
            data = self._structured_training_points(compute_distance=True, source='right')
        else:
            data = self._unstructured_training_points(compute_distance=True, source='right')

        self.right_alphas = 1.0 - data[:, 2] / self.dist_range

        # Training data scatter plot
        scatter_renderer = right_plot_fig.scatter(x=data[:, 3], y=data[:, 1], line_color=None,
                                                  fill_color='#000000',
                                                  fill_alpha=self.right_alphas.tolist())

        right_plot_fig.add_tools(HoverTool(renderers=[scatter_renderer], tooltips=[
            (self.output_select.value + " (train)", '@x'),
            (y_idx + " (train)", '@y'),
        ]))

        span_width = self.dist_range * (max(y_data) - min(y_data))

        # Set the right_plot data source to new values
        self.right_plot_scatter_source.data = dict(
            right_slice_x=np.repeat(x_value, self.resolution), right_slice_y=y_data,
            left_dashed=[i - span_width for i in np.repeat(x_value, self.resolution)],
            right_dashed=[i + span_width for i in np.repeat(x_value, self.resolution)])

        self.contour_plot.line(
            'right_slice_x', 'right_slice_y', source=self.right_plot_scatter_source,
            color='black', line_width=2)
        self.contour_plot.line(
            'left_dashed', 'right_slice_y', line_dash='dashed',
            source=self.right_plot_scatter_source, color='black', line_width=2)
        self.contour_plot.line(
            'right_dashed', 'right_slice_y', line_dash='dashed',
            source=self.right_plot_scatter_source, color='black', line_width=2)

        return self.right_plot_fig

    def _bottom_plot(self):
        """
        Create the bottom subplot to view the projected slice.

        Parameters
        ----------
        None

        Returns
        -------
        Bokeh figure
        """
        # List of the current positions of the sliders
        self.input_point_list = [point.value for point in self.slider_dict.values()]

        # Find the title of the x input and match it with the data
        x_idx = self.x_input_select.value
        x_data = self.predict_inputs[x_idx]
        # Find the position of the y_input slider
        y_value = self.y_input_slider.value

        # Rounds the y_data to match the predict_inputs value
        subplot_value_index = np.where(
            np.around(self.predict_inputs[self.y_input_select.value], 5) ==
            np.around(y_value, 5))[0]

        # Make slice in Z data at the point calculated before and add it to the data source
        z_data = self.Z[subplot_value_index, :].flatten()

        x = self.slider_source.data[x_idx]
        y = z_data

        # Update the data source with new data
        self.bottom_plot_source.data = dict(x=x, y=y)

        # Create and format figure
        self.bottom_plot_fig = bottom_plot_fig = figure(
            plot_width=550, plot_height=250,
            title="{} vs {}".format(x_idx, self.output_select.value), tools="")
        bottom_plot_fig.xaxis.axis_label = x_idx
        bottom_plot_fig.yaxis.axis_label = self.output_select.value
        bottom_plot_fig.line(x='x', y='y', source=self.bottom_plot_source)
        bottom_plot_fig.x_range.range_padding = 0.02
        bottom_plot_fig.y_range.range_padding = 0.1

        # Determine distance and alpha opacity of training points
        if self.is_structured_meta_model:
            data = self._structured_training_points(compute_distance=True)
        else:
            data = self._unstructured_training_points(compute_distance=True)

        self.bottom_alphas = 1.0 - data[:, 2] / self.dist_range

        # Training data scatter plot
        scatter_renderer = bottom_plot_fig.scatter(x=data[:, 0], y=data[:, 3], line_color=None,
                                                   fill_color='#000000',
                                                   fill_alpha=self.bottom_alphas.tolist())

        bottom_plot_fig.add_tools(HoverTool(renderers=[scatter_renderer], tooltips=[
            (x_idx + " (train)", '@x'),
            (self.output_select.value + " (train)", '@y'),
        ]))

        span_width = self.dist_range * (max(x_data) - min(x_data))

        # Set the bottom_plot data source to new values
        self.bottom_plot_scatter_source.data = dict(
            bot_slice_x=x_data, bot_slice_y=np.repeat(y_value, self.resolution),
            upper_dashed=[i + span_width for i in np.repeat(y_value, self.resolution)],
            lower_dashed=[i - span_width for i in np.repeat(y_value, self.resolution)])

        self.contour_plot.line(
            'bot_slice_x', 'bot_slice_y', source=self.bottom_plot_scatter_source, color='black',
            line_width=2)
        self.contour_plot.line(
            'bot_slice_x', 'upper_dashed', line_dash='dashed',
            source=self.bottom_plot_scatter_source, color='black', line_width=2)
        self.contour_plot.line(
            'bot_slice_x', 'lower_dashed', line_dash='dashed',
            source=self.bottom_plot_scatter_source, color='black', line_width=2)

        return self.bottom_plot_fig

    def _unstructured_training_points(self, compute_distance=False, source='bottom'):
        """
        Calculate the training points and return an array containing the position and alpha.

        Parameters
        ----------
        compute_distance : bool
            If true, compute the distance of training points from surrogate line.
        source : str
            Which subplot the method is being called from.

        Returns
        -------
        array
            The array of training points and their alpha opacity with respect to the surrogate line
        """
        # Input training data and output training data
        x_training = self.meta_model._training_input
        training_output = np.squeeze(stack_outputs(self.meta_model._training_output), axis=1)

        # Index of input/output variables
        x_index = self.x_input_select.options.index(self.x_input_select.value)
        y_index = self.y_input_select.options.index(self.y_input_select.value)
        output_variable = self.output_names.index(self.output_select.value)

        # Vertically stack the x/y inputs and then transpose them
        infos = np.vstack((x_training[:, x_index], x_training[:, y_index])).transpose()
        if not compute_distance:
            return infos

        points = x_training.copy()

        # Normalize so each dimension spans [0, 1]
        points = np.divide(points, self.limit_range)
        dist_limit = np.linalg.norm(self.dist_range * self.limit_range)
        scaled_x0 = np.divide(self.input_point_list, self.limit_range)

        # Nearest points to the x slice
        if x_training.shape[1] < 3:

            tree = cKDTree(points)
            # Query the nearest neighbors tree for the closest points to the scaled x0 array
            dists, idxs = tree.query(
                scaled_x0, k=len(x_training), distance_upper_bound=self.dist_range)

            # kdtree query always returns requested k even if there are not enough valid points
            idx_finite = np.where(np.isfinite(dists))
            dists = dists[idx_finite]
            idxs = idxs[idx_finite]

        else:
            dists, idxs = self._multidimension_input(scaled_x0, points, source=source)

        # data contains:
        # [x_value, y_value, ND-distance, func_value]

        data = np.zeros((len(idxs), 4))
        for dist_index, j in enumerate(idxs):
            data[dist_index, 0:2] = infos[j, :]
            data[dist_index, 2] = dists[dist_index]
            data[dist_index, 3] = training_output[j, output_variable]

        return data

    def _structured_training_points(self, compute_distance=False, source='bottom'):
        """
        Calculate the training points and return an array containing the position and alpha.

        Parameters
        ----------
        compute_distance : bool
            If true, compute the distance of training points from surrogate line.
        source : str
            Which subplot the method is being called from.

        Returns
        -------
        array
            The array of training points and their alpha opacity with respect to the surrogate line
        """
        # Create tuple of the input parameters
        input_dimensions = tuple(self.meta_model.inputs)

        # Input training data and output training data
        x_training = np.array([z for z in product(*input_dimensions)])
        training_output = self.meta_model.training_outputs[self.output_select.value].flatten()

        # Index of input/output variables
        x_index = self.x_input_select.options.index(self.x_input_select.value)
        y_index = self.y_input_select.options.index(self.y_input_select.value)

        # Vertically stack the x/y inputs and then transpose them
        infos = np.vstack((x_training[:, x_index], x_training[:, y_index])).transpose()
        if not compute_distance:
            return infos

        points = x_training.copy()

        # Normalize so each dimension spans [0, 1]
        points = np.divide(points, self.limit_range)
        self.dist_limit = np.linalg.norm(self.dist_range * self.limit_range)
        scaled_x0 = np.divide(self.input_point_list, self.limit_range)
        # Query the nearest neighbors tree for the closest points to the scaled x0 array
        # Nearest points to x slice

        if x_training.shape[1] < 3:
            x_tree, x_idx = self._two_dimension_input(scaled_x0, points, source=source)
        else:
            x_tree, x_idx = self._multidimension_input(scaled_x0, points, source=source)

        # format for 'data'
        # [x_value, y_value, ND-distance_(x or y), func_value]

        n = len(x_tree)
        data = np.zeros((n, 4))
        for dist_index, j in enumerate(x_idx):
            data[dist_index, 0:2] = infos[j, :]
            data[dist_index, 2] = x_tree[dist_index]
            data[dist_index, 3] = training_output[j]

        return data

    def _two_dimension_input(self, scaled_points, training_points, source='bottom'):
        """
        Calculate the distance of training points to the surrogate line.

        Parameters
        ----------
        scaled_points : array
            Array of normalized slider positions.
        training_points : array
            Array of input training data.
        source : str
            Which subplot the method is being called from.

        Returns
        -------
        x_tree : array
            One-dimensional array of distances for the points within the dist range.
        idxs : array
            Indices of the closest points that are within the dist range.
        """
        # Column of the input
        if source == 'right':
            col_idx = self.y_input_select.options.index(self.y_input_select.value)
        else:
            col_idx = self.x_input_select.options.index(self.x_input_select.value)

        # Delete the axis of input from source to predicted 1D distance
        x = np.delete(scaled_points, col_idx, axis=0)
        x_training_points = np.delete(training_points, col_idx, axis=1).flatten()

        # Tree of point distances
        x_tree = np.abs(x - x_training_points)

        # Only return points that are within our distance-viewing parameter.
        idx = np.where(x_tree <= self.dist_range)
        x_tree = x_tree[idx]
        return x_tree, idx[0]

    def _multidimension_input(self, scaled_points, training_points, source='bottom'):
        """
        Calculate the distance of training points to the surrogate line.

        Parameters
        ----------
        scaled_points : array
            Array of normalized slider positions.
        training_points : array
            Array of input training data.
        source : str
            Which subplot the method is being called from.

        Returns
        -------
        dists : array
            Distances of the closest points that are within the dist range.
        idxs : array
            Indices of the closest points that are within the dist range.
        """
        # Column of the input
        if source == 'right':
            col_idx = self.y_input_select.options.index(self.y_input_select.value)

        else:
            col_idx = self.x_input_select.options.index(self.x_input_select.value)

        # Delete the axis of input from source to predicted distance
        x = np.delete(scaled_points, col_idx, axis=0)
        x_training_points = np.delete(training_points, col_idx, axis=1)

        # Tree of point distances
        x_tree = cKDTree(x_training_points)

        # Query the nearest neighbors tree for the closest points to the scaled array
        dists, idx = x_tree.query(x, k=len(x_training_points),
                                  distance_upper_bound=self.dist_range)

        # kdtree query always returns requested k even if there are not enough valid points
        idx_finite = np.where(np.isfinite(dists))
        dists_finite = dists[idx_finite]
        idx = idx[idx_finite]
        return dists_finite, idx

    # Event handler functions
    def _update_all_plots(self):
        self.doc_layout.children[0] = self._contour_data()
        self.doc_layout.children[1] = self._right_plot()
        self.doc_layout2.children[0] = self._bottom_plot()

    def _update_subplots(self):
        self.doc_layout.children[1] = self._right_plot()
        self.doc_layout2.children[0] = self._bottom_plot()

    def _update(self, attr, old, new):
        self._update_all_plots()

    def _scatter_plots_update(self, attr, old, new):
        self._update_subplots()

    def _scatter_input(self, attr, old, new):
        # Text input update function of dist range value
        self.dist_range = float(new)
        self._update_all_plots()

    def _x_input_update(self, attr, old, new):
        # Checks that x and y inputs are not equal to each other
        if new == self.y_input_select.value:
            raise ValueError("Inputs should not equal each other")
        else:
            self.x_input_select.value = new
            self._update_all_plots()

    def _y_input_update(self, attr, old, new):
        # Checks that x and y inputs are not equal to each other
        if new == self.x_input_select.value:
            raise ValueError("Inputs should not equal each other")
        else:
            self.y_input_select.value = new
            self._update_all_plots()

    def _output_value_update(self, attr, old, new):
        self.output_variable = self.output_names.index(new)
        self._update_all_plots()