Code Example #1
def predict(self,
            coord,
            fits_info,
            learning_rate=0.01,
            max_steps=2000,
            hidden1=36,
            hidden2=144,
            hidden3=576,
            batch_size=100):
    """Predict PSFs at the given coordinates with the trained PSF-wise network."""
    # Checkpoint written at the last training step.
    network_model_dir = 'assets/log/{}_{}/l3_lr{}_ms{}_h1.{}_h2.{}_h3.{}_bs{}/model.ckpt-{}'.format(
        self.region, self.exp_num, learning_rate, max_steps, hidden1, hidden2,
        hidden3, batch_size, max_steps - 1)
    with tf.Graph().as_default():
        num_coord = len(coord)
        coord_placeholder, psf_labels_placeholder = placeholder_inputs(
            num_coord)
        psf_pred = inference(coord_placeholder, hidden1, hidden2, hidden3)
        new_saver = tf.train.Saver()
        with tf.Session() as sess:
            new_saver.restore(sess, network_model_dir)
            feed_dict = {coord_placeholder: coord}
            psf_predictions = sess.run(psf_pred, feed_dict=feed_dict)
        # do_preprocess is a module-level flag: when the training data had the
        # average training PSF subtracted, add it back to the predictions.
        if do_preprocess:
            psf_predictions += self.chip_avg_train_data.ravel()
        result_dir = 'assets/predictions/{}_{}/tf_psfwise/l3_lr{}_ms{}_h1.{}_h2.{}_h3.{}_bs{}/'.format(
            self.region, self.exp_num, learning_rate, max_steps, hidden1,
            hidden2, hidden3, batch_size)
        utils.write_predictions(result_dir,
                                psf_predictions,
                                fits_info,
                                method='tf_psfwise')
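The helpers placeholder_inputs and inference used above (and in the next two examples) are defined elsewhere. Below is a minimal TF1-style sketch of what they might look like for the three-hidden-layer variant; the output size NUM_PSF_PIXELS, the ReLU activations, and the use of tf.layers.dense are assumptions, not the original implementation.

import tensorflow as tf

NUM_PSF_PIXELS = 2304  # assumed flattened PSF stamp size (hypothetical)

def placeholder_inputs(num_coord):
    # One (x, y) coordinate per PSF, plus a label placeholder used during training.
    coord_placeholder = tf.placeholder(tf.float32, shape=(num_coord, 2))
    psf_labels_placeholder = tf.placeholder(tf.float32,
                                            shape=(num_coord, NUM_PSF_PIXELS))
    return coord_placeholder, psf_labels_placeholder

def inference(coords, hidden1_units, hidden2_units, hidden3_units):
    # Three fully connected hidden layers followed by a linear output layer.
    h1 = tf.layers.dense(coords, hidden1_units, activation=tf.nn.relu)
    h2 = tf.layers.dense(h1, hidden2_units, activation=tf.nn.relu)
    h3 = tf.layers.dense(h2, hidden3_units, activation=tf.nn.relu)
    return tf.layers.dense(h3, NUM_PSF_PIXELS)
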
Code Example #2
def predict(self,
            learning_rate=0.01,
            max_steps=2000,
            hidden1=36,
            hidden2=144,
            hidden3=576,
            batch_size=100,
            tag='validate'):
    """Predict PSFs chip by chip, restoring one trained model per chip."""
    fits_info = []

    for chip_num in range(36):
        chip_data_name = 'chip_{}_data'.format(tag)
        chip_psf_data = self.psf_data[chip_num]
        # Collect the FITS metadata and the coordinates of every PSF on this chip.
        fits_info += [data[0:4] for data in chip_psf_data[chip_data_name]]
        coord = [data[0:2] for data in chip_psf_data[chip_data_name]]
        coord = np.array(coord)

        # Checkpoint written at the last training step for this chip.
        network_model_dir = 'assets/log/tf_chipwise/{}_{}/l3_lr{}_ms{}_h1.{}_h2.{}_h3.{}_bs{}/chn{}/model.ckpt-{}'.format(
            self.region, self.exp_num, learning_rate, max_steps, hidden1,
            hidden2, hidden3, batch_size, chip_num, max_steps - 1)
        with tf.Graph().as_default():
            num_coord = len(coord)
            coord_placeholder, psf_labels_placeholder = placeholder_inputs(
                num_coord)
            psf_pred = inference(coord_placeholder, hidden1, hidden2, hidden3)
            new_saver = tf.train.Saver()
            with tf.Session() as sess:
                new_saver.restore(sess, network_model_dir)
                feed_dict = {coord_placeholder: coord}
                # Append this chip's predictions to the running array.
                if chip_num == 0:
                    psf_predictions = sess.run(psf_pred, feed_dict=feed_dict)
                else:
                    chip_predictions = sess.run(psf_pred, feed_dict=feed_dict)
                    psf_predictions = np.concatenate(
                        (psf_predictions, chip_predictions), axis=0)

    # Add the average training PSF back if it was subtracted during preprocessing.
    if do_preprocess:
        psf_predictions += self.chip_avg_train_data.ravel()
    result_dir = 'assets/predictions/{}_{}/tf_psfwise_chip/l3_lr{}_ms{}_h1.{}_h2.{}_h3.{}_bs{}/'.format(
        self.region, self.exp_num, learning_rate, max_steps, hidden1,
        hidden2, hidden3, batch_size)
    utils.write_predictions(result_dir,
                            psf_predictions,
                            fits_info,
                            method='tf_psfwise')
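The loop above grows psf_predictions with np.concatenate on every chip. An equivalent pattern collects each chip's output in a list and concatenates once after the loop; in the sketch below, predict_chip, NUM_PIXELS, and all shapes are hypothetical stand-ins for the graph/session code in the method.

import numpy as np

NUM_PIXELS = 2304  # hypothetical flattened-PSF size

def predict_chip(chip_num, coord):
    # Stand-in for building the chip's graph, restoring its checkpoint and
    # running the session, as done inside the loop above.
    return np.zeros((len(coord), NUM_PIXELS))

coords_per_chip = [np.zeros((10, 2)) for _ in range(36)]
chip_outputs = [predict_chip(n, c) for n, c in enumerate(coords_per_chip)]
psf_predictions = np.concatenate(chip_outputs, axis=0)  # shape (360, NUM_PIXELS)
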
Code Example #3
def predict(self,
            coord,
            fits_info,
            learning_rate=0.01,
            max_steps=1000,
            hidden1=3,
            hidden2=6,
            batch_size=100):
    """Predict PSFs pixel by pixel, restoring one trained model per pixel."""
    pixel_predictions = []
    cache_path = 'assets/cache/{}_{}/l2_lr{}_ms{}_h1.{}_h2.{}_bs{}_pixel_predictions.p'.format(
        self.region, self.exp_num, learning_rate, max_steps, hidden1, hidden2,
        batch_size)
    try:
        # Reuse cached predictions when they exist.
        with open(cache_path, 'rb') as pickle_file:
            pixel_predictions = pickle.load(pickle_file)['pixel_predictions']

    except FileNotFoundError:
        # Cache miss: run inference with one restored model per output pixel.
        for pixel_num in range(2304):
            network_model_dir = 'assets/log/pixel_wise/{}_{}/l2_lr{}_ms{}_h1.{}_h2.{}_bs{}/pn{}/model.ckpt-{}'.format(
                self.region, self.exp_num, learning_rate, max_steps, hidden1,
                hidden2, batch_size, pixel_num, max_steps - 1)
            with tf.Graph().as_default():
                num_coord = len(coord)
                coord_placeholder, pixel_labels_placeholder = placeholder_inputs(
                    num_coord)
                pixel_pred = inference(coord_placeholder, hidden1, hidden2)
                new_saver = tf.train.Saver()
                with tf.Session() as sess:
                    new_saver.restore(sess, network_model_dir)
                    feed_dict = {coord_placeholder: coord}
                    sub_pixel_predictions = sess.run(pixel_pred,
                                                     feed_dict=feed_dict)
                pixel_predictions.append(
                    sub_pixel_predictions.reshape(
                        sub_pixel_predictions.shape[0]))
            # TODO: Design pixelwise saving directory
            # TODO: Or just assemble the information together
        pixel_predictions = np.array(pixel_predictions)
        # Cache the assembled predictions for later runs.
        with open(cache_path, 'wb') as pickle_file:
            pickle.dump({'pixel_predictions': pixel_predictions}, pickle_file)

    # Transpose to one row per PSF before writing.
    pixel_predictions = pixel_predictions.T.copy()
    if do_preprocess:
        pixel_predictions += self.chip_avg_train_data.ravel()
    result_dir = 'assets/predictions/{}_{}/tf_pixelwise/l2_lr{}_ms{}_h1.{}_h2.{}_bs{}/'.format(
        self.region, self.exp_num, learning_rate, max_steps, hidden1, hidden2,
        batch_size)
    utils.write_predictions(result_dir,
                            pixel_predictions,
                            fits_info,
                            method='tf_pixelwise')
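In the pixel-wise variant, pixel_predictions is assembled with one row per pixel model (2304 rows, one column per PSF), so the final transpose converts it to the one-row-per-PSF layout used by the other examples. A small shape-only illustration (the number of PSFs is arbitrary):

import numpy as np

num_psfs = 10
pixel_predictions = np.array([np.zeros(num_psfs) for _ in range(2304)])
print(pixel_predictions.shape)           # (2304, 10): one row per pixel model
print(pixel_predictions.T.copy().shape)  # (10, 2304): one row per PSF
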
Code Example #4
def predict(self, coord, fits_info, order):
    """Predict PSFs with a 2-D polynomial fit of the given order."""
    poly_name = 'poly_{}'.format(str(order))
    # Fit the polynomial coefficients once and cache them in cal_info.
    if poly_name not in self.cal_info:
        poly_interpolation(self, order)
    coeffs = self.cal_info[poly_name]
    # Evaluate the fitted polynomial at every requested coordinate.
    psf_predictions = np.array([
        utils.poly_val_all(the_coord[0], the_coord[1], coeffs, order)
        for the_coord in coord
    ])
    if do_preprocess:
        psf_predictions += self.chip_avg_train_data.ravel()
    result_dir = 'assets/predictions/{}_{}/poly/{}/'.format(
        self.region, self.exp_num, poly_name)
    utils.write_predictions(result_dir, psf_predictions, fits_info,
                            method=poly_name)
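utils.poly_val_all is external to this snippet. Below is a plausible sketch of its behaviour, assuming it evaluates a 2-D polynomial of the given order at (x, y) with one coefficient column per PSF pixel; the coefficient layout and term ordering are assumptions, not the original implementation.

import numpy as np

def poly_val_all(x, y, coeffs, order):
    # coeffs: assumed shape (num_terms, num_pixels), one column per PSF pixel,
    # with terms ordered as x**i * y**j for i + j <= order.
    terms = np.array([x**i * y**j
                      for i in range(order + 1)
                      for j in range(order + 1 - i)])
    return terms @ coeffs  # predicted flattened PSF, shape (num_pixels,)
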
Code Example #5
def predict(self, coord, fits_info):
    """Predict PSFs with a first-order (planar) fit per pixel."""
    # Fit the plane coefficients once and cache them in cal_info.
    if 'poly1' not in self.cal_info:
        poly1_interpolation(self)
    opt_A, opt_B, opt_C = self.cal_info['poly1']
    # Each PSF pixel is modelled as x * A + y * B + C.
    psf_predictions = np.array([
        the_coord[0] * opt_A + the_coord[1] * opt_B + opt_C
        for the_coord in coord
    ])
    if do_preprocess:
        psf_predictions += self.chip_avg_train_data.ravel()
    result_dir = 'assets/predictions/{}_{}/poly/poly1/'.format(
        self.region, self.exp_num)
    utils.write_predictions(result_dir, psf_predictions, fits_info, 'poly1')
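poly1_interpolation is also defined elsewhere; given the linear model above (x * opt_A + y * opt_B + opt_C per pixel), it presumably fits one plane per PSF pixel. The least-squares sketch below is hypothetical; the function signature, data layout, and variable names are assumptions.

import numpy as np

def poly1_interpolation(coords, psfs):
    # coords: (N, 2) star positions; psfs: (N, num_pixels) flattened PSFs.
    # Solve psfs ~= x * A + y * B + C for per-pixel coefficients A, B, C.
    design = np.column_stack([coords[:, 0], coords[:, 1], np.ones(len(coords))])
    solution, _, _, _ = np.linalg.lstsq(design, psfs, rcond=None)
    opt_A, opt_B, opt_C = solution  # each has shape (num_pixels,)
    return opt_A, opt_B, opt_C
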
Code Example #6
def collect_origin_data(self, tag='train'):
    """Collect the original PSF stamps and FITS info for the given data split."""
    origin_psf = []
    fits_info = []
    part_name = 'chip_{}_data'.format(tag)
    for chip_psf_data in self.psf_data:
        # If preprocessing subtracted the average training PSF, add it back
        # so the written stamps match the original data.
        origin_psf += [
            data[4].ravel() for data in chip_psf_data[part_name]
        ] if not do_preprocess else [
            data[4].ravel() + self.chip_avg_train_data.ravel()
            for data in chip_psf_data[part_name]
        ]
        fits_info += [data[0:4] for data in chip_psf_data[part_name]]
    origin_psf = np.array(origin_psf)
    result_dir = 'assets/predictions/{}_{}/origin/{}/'.format(
        self.region, self.exp_num, tag)
    utils.write_predictions(result_dir, origin_psf, fits_info, method=tag)
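All of the examples share the do_preprocess convention: when the flag is set, the stored PSF stamps have had the average training PSF (chip_avg_train_data) subtracted, so each predictor adds it back before writing results, just as collect_origin_data does above. A minimal round-trip illustration (the stamp size here is arbitrary):

import numpy as np

chip_avg_train_data = np.random.rand(48, 48)  # average training PSF stamp
raw_psf = np.random.rand(48, 48)

stored = raw_psf.ravel() - chip_avg_train_data.ravel()   # preprocessing step (assumed)
restored = stored + chip_avg_train_data.ravel()          # what the predictors do
assert np.allclose(restored, raw_psf.ravel())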