import gzip
import pickle

import numpy as np
from pymks import MKSHomogenizationModel, PrimitiveBasis
from pymks.tools import draw_component_variance
from sklearn.decomposition import PCA

import sp  # project-local helper module (load_data, compute_correlations, write_pca_to_csv)


def plot_component_variance(x, y):
    # Fit an MKS homogenization model and plot how much variance each
    # PCA component of the 2-point statistics captures.
    prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    model = MKSHomogenizationModel(basis=prim_basis)
    model.n_components = 20
    model.fit(x, y, periodic_axes=[0, 1])
    # Draw the plot containing the PCA variance accumulation
    draw_component_variance(model.dimension_reducer.explained_variance_ratio_)
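

# Hypothetical usage sketch (shapes and values are assumptions, not from the
# original source): `x` as integer-valued microstructures with states
# {0, 1, 2} to match PrimitiveBasis(n_states=3, domain=[0, 2]), and `y` as
# one scalar property per sample.
def _demo_plot_component_variance():
    x_demo = np.random.randint(3, size=(30, 21, 21))  # 30 three-state microstructures
    y_demo = np.random.random(30)                     # dummy scalar property per sample
    plot_component_variance(x_demo, y_demo)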


def extract_transient_pca(metadata, end_of_transience):
    # `metadata` lists one record per data file; `end_of_transience` is the
    # number of leading (transient) frames to analyse from each file.
    samples = len(metadata)
    y = np.ndarray(shape=(samples * end_of_transience,
                          metadata[0]['x'] * metadata[0]['y'] * 2))
    y_ind = 0
    for metadatum in metadata:
        # Load data frames
        print("--->Loading: " + metadatum['filename'])
        al_chunk = sp.load_data('data/test/' + metadatum['filename'])
        corrs = sp.compute_correlations(al_chunk[0:end_of_transience, :, :],
                                        correlations=[(0, 0), (1, 1)],
                                        compute_flat=False)
        for row in corrs:
            y[y_ind] = row.flatten()
            y_ind += 1
    # Get some PCA components!
    pca = PCA(n_components=12)
    y_pca = pca.fit_transform(y)
    draw_component_variance(pca.explained_variance_ratio_)
    # Zip and pickle PCA scores
    with gzip.GzipFile('data/pca_scores_transient.pgz', 'wb') as f:
        pickle.dump(y_pca, f)
    sp.write_pca_to_csv(y_pca, '_transients')
    return y_pca
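

# Convenience sketch (added, not in the original source): mirror of the dump
# above for reading the pickled PCA scores back.
def load_transient_pca(path='data/pca_scores_transient.pgz'):
    with gzip.GzipFile(path, 'rb') as f:
        return pickle.load(f)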
 

def evaluate_transient_model(x, y_pca, end_of_transience):
    # Reshape the first PCA score into one time series per sample; y_pca has
    # end_of_transience consecutive rows per sample, so the sample count can
    # be recovered from its length.
    samples = y_pca.shape[0] // end_of_transience
    time_y = np.ndarray((samples, end_of_transience))
    row_ind = 0
    for i in range(0, y_pca.shape[0], end_of_transience):
        time_y[row_ind, :] = y_pca[i:(i + end_of_transience), 0]
        row_ind += 1

    mse = leave_one_out(x, time_y)
    print("MSE AVE: " + str(np.mean(mse)))


def plot_pca_component_variance(y):
    # Plain-PCA counterpart of plot_component_variance above; fits a PCA
    # directly on the flattened correlations.
    pca = PCA(n_components=12)
    y_pca = pca.fit_transform(y)
    # Draw the plot containing the PCA variance accumulation
    draw_component_variance(pca.explained_variance_ratio_)
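

# A numeric companion to the plot above (an added sketch, not in the original
# source): the cumulative explained-variance ratio tells how many components
# are needed to reach a given variance target, e.g. 99%.
def n_components_for_variance(y, target=0.99):
    pca = PCA(n_components=12).fit(y)
    cumulative = np.cumsum(pca.explained_variance_ratio_)
    return int(np.searchsorted(cumulative, target) + 1)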


def extract_transient_features(metadata, end_of_transience):
    # Build the design matrices: x holds two process parameters per sample
    # ('ag' and 'sv' from the metadata), y the flattened 2-point correlations
    # of each transient frame.
    samples = len(metadata)
    x = np.ndarray(shape=(samples, 2))
    y = np.ndarray(shape=(samples * end_of_transience,
                          metadata[0]['x'] * metadata[0]['y'] * 2))
    y_ind = 0
    x_ind = 0

    for metadatum in metadata:
        # Load data frames
        print("--->Loading: " + metadatum['filename'])
        al_chunk = sp.load_data('data/test/' + metadatum['filename'])
        corrs = sp.compute_correlations(
            al_chunk[0:end_of_transience, :, :],
            correlations=[(0, 0), (1, 1)],
            compute_flat=False)
        for row in corrs:
            y[y_ind] = row.flatten()
            y_ind += 1
        x[x_ind, 0] = metadatum['ag']
        x[x_ind, 1] = metadatum['sv']
        x_ind += 1

    # Get some PCA components!
    pca = PCA(n_components=12)
    y_pca = pca.fit_transform(y)
    draw_component_variance(pca.explained_variance_ratio_)
    # Zip and pickle PCA scores
    with gzip.GzipFile('data/pca_scores_transient.pgz', 'wb') as f:
        pickle.dump(y_pca, f)
    sp.write_pca_to_csv(y_pca, '_transients')
    return x, y_pca
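

# Hypothetical driver (an assumption about how the pieces fit together, not
# from the original source). Metadata loading is project-specific, so the
# records are taken as an argument; each is assumed to carry the keys used
# above: 'filename', 'x', 'y', 'ag' and 'sv'.
def run_transient_analysis(metadata, end_of_transience):
    x, y_pca = extract_transient_features(metadata, end_of_transience)
    evaluate_transient_model(x, y_pca, end_of_transience)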