def partial_corrconn(activity_matrix, target_ts=None):
    """
    Compute partial-correlation-style connectivity via inverse covariance.

    activity_matrix:    Activity matrix should be nodes X time
    target_ts:          Optional, used when only a single target time series
                        (returns 1 X nnodes matrix of regression betas)
    Output: connectivity_mat, formatted targets X sources

    Raises:
        Exception: if there are more nodes (regressors) than timepoints,
            in which case the empirical covariance / OLS fit is ill-posed
            and regularized regression should be used instead.
    """
    nnodes, timepoints = activity_matrix.shape
    if nnodes > timepoints:
        print('activity_matrix shape: ', np.shape(activity_matrix))
        raise Exception(
            'More nodes (regressors) than timepoints! Use regularized regression'
        )

    if target_ts is None:
        # The precision (inverse covariance) matrix is equivalent to partial
        # correlation up to sign and normalization.
        cov = EmpiricalCovariance().fit(activity_matrix.T)
        connectivity_mat = cov.get_precision()
    else:
        # Computing values for a single target node: regress the target time
        # series on all node time series.
        # Note: LinearRegression fits intercept by default (intercept beta
        # not included in coef_ output).
        X = activity_matrix.T
        reg = LinearRegression().fit(X, target_ts)
        connectivity_mat = reg.coef_

    return connectivity_mat
    def kNN_predict_mapper(self, sensor_date_tuple):
        """
        Build a k-nearest-neighbor estimate for one (sensor, date) pair and
        accumulate results into the instance's accumulator lists/dicts.

        Parameters:
            sensor_date_tuple: (sensor, temp_date) pair where `sensor` is a
                1-D array of per-station readings (NaN marks missing
                stations — TODO confirm) and `temp_date` is a datetime used
                to locate co-dated rasters on disk.

        Side effects:
            Appends to self.valid_sensor_idx, self.est_sensor,
            self.est_residual, and each key of self.est_dict and
            self.est_raw_dict. Reads raster files via gdal.
        """
        sensor = sensor_date_tuple[0]
        temp_date = sensor_date_tuple[1]
        # Indices of stations with finite (non-NaN/inf) readings.
        valid_sensor_idx = np.argwhere(np.isfinite(sensor)).T[0]
        self.valid_sensor_idx.append(valid_sensor_idx)

        # Construct dynamic kNN predictor
        # Mahalanobis distance uses the precision (inverse covariance) of the
        # reconstruction time series restricted to the valid station columns.
        emp_cov = EmpiricalCovariance().fit(self.recon_ts[:, valid_sensor_idx])
        emp_cov_matrix = emp_cov.get_precision()
        dist_metric = DistanceMetric.get_metric('mahalanobis', V=emp_cov_matrix)
        kNN = BallTree(self.recon_ts[:, valid_sensor_idx], metric=dist_metric)

        # Load DEM for this basin
        dem = gdal.Open("ASO_Lidar/" + dem_name[self.site_name] + "_500m_DEM.tif").ReadAsArray()
        # Query with only the valid station readings (same columns the tree
        # was built on); idx[0] are the k nearest reconstruction rows.
        dist, idx = kNN.query(np.array([sensor[valid_sensor_idx]]), k=self.k)
        temp_fn_list = [self.recon_fn[i] for i in idx[0]]
        self.est_sensor.append(np.nanmean(self.recon_ts[idx[0]][:, valid_sensor_idx], axis=0))
        self.est_residual.append(sensor[valid_sensor_idx] - self.est_sensor[-1])

        # Compute the sum of all k-nearest neighbors
        kNN_map_sum = 0.
        for temp_fn in temp_fn_list:
            kNN_map_sum += gdal.Open(temp_fn).ReadAsArray()

        # Compute the avg of the k-nearest neighbors
        kNN_map_avg = kNN_map_sum / float(self.k)

        # Load lidar, reconstruction, snodas at the same date
        # NOTE(review): density_factor presumably converts depth to SWE —
        # confirm units against the lidar product.
        lidar_map = gdal.Open("ASO_Lidar/"+product_name_abbr[self.site_name].upper() + \
                              temp_date.strftime("%Y%m%d") + "_500m.tif").ReadAsArray() * density_factor[self.site_name]

        # Reconstruction rasters appear to exist only through 2014, hence the
        # year guard here and on the matching stores below.
        if self.year <= 2014:
            recon_map = gdal.Open(product_name_abbr[self.site_name].upper() + \
                                  "_recon/"+temp_date.strftime("%d%b%Y").upper()+".tif").ReadAsArray()

        # The /1000. suggests SNODAS is stored in mm and converted to m —
        # TODO confirm.
        snodas_map = gdal.Open("SNODAS/" + product_name_abbr[self.site_name].upper() + "_" + \
                               temp_date.strftime("%Y%m%d") + ".tif").ReadAsArray() / 1000.

        # Filter these data by lidar value and store them in the est_dict
        # (negative lidar values presumably mark no-data cells — verify).
        self.est_dict['kNN'].append(kNN_map_avg[lidar_map >= 0.])
        self.est_dict['snodas'].append(snodas_map[lidar_map >= 0.])
        self.est_dict['lidar'].append(lidar_map[lidar_map >= 0.])
        self.est_dict['elev'].append(dem[lidar_map >= 0.])
        # Placeholder value; kNN_GP is presumably filled elsewhere — confirm.
        self.est_dict['kNN_GP'].append(0)
        if self.year <= 2014:
            self.est_dict['recon'].append(recon_map[lidar_map >= 0.])

        # Do not filter these data and store them in the est_raw_dict
        self.est_raw_dict['kNN'].append(kNN_map_avg)
        self.est_raw_dict['snodas'].append(snodas_map)
        self.est_raw_dict['lidar'].append(lidar_map)
        self.est_raw_dict['elev'].append(dem)
        self.est_raw_dict['kNN_GP'].append(0)
        if self.year <= 2014:
            self.est_raw_dict['recon'].append(recon_map)
    def _kNN_predict_custom_k_rmse(self, k, sensor, temp_date):
        """
        Compute the RMSE between a k-nearest-neighbor estimate built with a
        caller-supplied k and the co-dated lidar raster.

        Parameters:
            k: number of nearest reconstruction days to average.
            sensor: 1-D array of per-station readings (NaN marks missing
                stations — TODO confirm).
            temp_date: datetime used to locate the lidar raster on disk.

        Returns:
            float: RMSE over pixels where the lidar map is >= 0.
        """
        valid_sensor_idx = np.argwhere(np.isfinite(sensor)).T[0]

        # Construct dynamic kNN predictor over the valid station columns only.
        emp_cov = EmpiricalCovariance().fit(self.recon_ts[:, valid_sensor_idx])
        emp_cov_matrix = emp_cov.get_precision()
        dist_metric = DistanceMetric.get_metric('mahalanobis', V=emp_cov_matrix)
        kNN = BallTree(self.recon_ts[:, valid_sensor_idx], metric=dist_metric)
        # BUG FIX: the tree is built on the valid-station columns, so the
        # query vector must be restricted to the same columns (matching
        # kNN_predict_mapper). Querying with the full `sensor` mismatches the
        # feature dimension whenever any station reading is non-finite.
        dist, idx = kNN.query(np.array([sensor[valid_sensor_idx]]), k=k)
        temp_fn_list = [self.recon_fn[i] for i in idx[0]]

        # Average the k nearest reconstruction maps.
        kNN_map_sum = 0.
        for temp_fn in temp_fn_list:
            kNN_map_sum += gdal.Open(temp_fn).ReadAsArray()
        kNN_map_avg = kNN_map_sum / float(k)

        # NOTE(review): this path uses site_name_abbr and no "_500m" suffix,
        # unlike kNN_predict_mapper (product_name_abbr + "_500m.tif"), and no
        # density_factor scaling is applied — confirm both against the actual
        # file layout before relying on this RMSE.
        lidar_map = gdal.Open("ASO_Lidar/" + site_name_abbr[self.site_name].upper() + \
                              temp_date.strftime("%Y%m%d").upper()+".tif").ReadAsArray()
        # Score only valid (>= 0) lidar pixels.
        kNN_map_avg = kNN_map_avg[lidar_map >= 0]
        lidar_map = lidar_map[lidar_map >= 0]
        return np.sqrt(mse(kNN_map_avg, lidar_map))