Example #1
def main():
    data_path = "../../data/train/before/LINE_100_dbdt.dat"

    with fluid.dygraph.guard():
        model = AlexNet()
        # Load the trained state dict
        min_dict, _ = fluid.load_dygraph(model_path='min_polyfit')
        # print(min_dict)
        model.set_dict(stat_dict=min_dict)

        model.eval()

        data_file = SingleFile(data_path)
        one_point = data_file.get_one_point()

        data = one_point.get_data()

        data = np.array(data, 'float32').reshape(1, 2, 1, 100)
        # teacher.res
        data = fluid.dygraph.to_variable(data)

        logits = model(data)

        result = logits.numpy()

        result = back_change(result)

    x_data = one_point.x
    print("RESULT: \n", result)
    one_point.plot(show=False, label='origin')
    plt.plot(x_data, [exponenial_func(x, *result[0]) for x in x_data], label='predict')
    plt.show()
Example #2
def read_data(data_path, placeholder, max_input_length=500):
    logger.info("Loading data {}...".format(placeholder))

    sf = SingleFile(data_path)
    data = []
    all_point_number = 500
    # all_point_number = sf.point_number
    with tqdm(total=all_point_number) as pbar:
        pbar.set_description("Reading {}".format(placeholder))
        for i in range(all_point_number):
            point = sf.get_one_point()
            x_data, y_data = point.get_data()
            output = data_handle(x_data, y_data, max_input_length)
            data.append(output.tolist())
            pbar.update(1)

    return K.cast_to_floatx(np.array(data))
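For orientation, here is a minimal sketch of what the data_handle helper used above might do. The real implementation lives in the project's training module, so the stack/pad/tile behaviour shown here is an assumption based on the inline preprocessing in Example #5.

import numpy as np

def data_handle(x_data, y_data, max_input_length):
    # Hypothetical sketch: stack the x/y lists into (1, length, 2) pairs.
    pairs = np.stack([np.asarray(x_data), np.asarray(y_data)],
                     axis=-1).reshape(1, -1, 2)
    length = pairs.shape[1]
    # Zero-pad the length axis up to max_input_length.
    padded = np.pad(pairs,
                    pad_width=((0, 0), (0, max_input_length - length), (0, 0)),
                    constant_values=0)
    # Repeat three times along the leading axis, as Example #5 does.
    return np.tile(padded, reps=[3, 1, 1])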
Example #3
def generateAllPictures(data_dir):
    father_dir = os.path.dirname(data_dir)

    before_data_dir = os.path.join(data_dir, 'before')
    after_data_dir = os.path.join(data_dir, 'after')
    batch_size = 10

    figures_dir = os.path.join(father_dir, 'figures')

    if not os.path.exists(figures_dir):
        os.mkdir(figures_dir)

    for path_dir in [before_data_dir, after_data_dir]:
        for filename in os.listdir(path_dir):
            filepath = os.path.join(path_dir, filename)
            datafile = SingleFile(filepath=filepath)
            data_reader = datafile.get_reader(batch_size=batch_size)

            # 'before' and 'after' figures go to matching sub-directories.
            sub_dir = 'before' if path_dir == before_data_dir else 'after'
            single_file_path = os.path.join(figures_dir, sub_dir,
                                            datafile.filename)

            if not os.path.exists(single_file_path):
                os.makedirs(single_file_path)

            for batch_id, points in enumerate(data_reader()):
                for point_id, point in enumerate(points):
                    point_id = batch_id * batch_size + point_id
                    print(f'[INFO] drawing {filename}--point_{point_id}...')
                    plt.figure()
                    # plt configs
                    plt.ylim(0, 30 * 1e-8)

                    point.plot(show=False)

                    plt.savefig(
                        os.path.join(single_file_path,
                                     f'point_{point_id}.jpg'))
                    plt.close()
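A minimal call, assuming the same data/origin layout (with before/ and after/ sub-directories) used in the other examples; the exact path is an assumption:

generateAllPictures('../data/origin')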
Example #4
    def reader():
        points = []
        labels = []
        for file_name in os.listdir(data_dir):

            if debug:
                print('[INFO] getting teacher from {}'.format(file_name))

            file_path = os.path.join(data_dir, file_name)
            file_name, ext = os.path.splitext(file_name)
            if debug:
                print("[INFO] Reading file {}".format(file_name))
            try:
                file = SingleFile(file_path)
            except Exception as e:
                print(e)
                continue

            point_reader = file.get_reader(batch_size=1)
            label_df = pd.read_csv(os.path.join(csv_dir,
                                                'new_' + file_name + '.csv'),
                                   index_col=0)
            # print(label_df)
            # print(label_df.iloc[0, :])
            for point_id, point in enumerate(point_reader()):
                point = point[0]
                label_series = label_df.iloc[point_id, :]
                points.append(point.get_data())
                labels.append(label_series.to_list())
                if len(points) == batch_size:
                    points = np.array(points,
                                      dtype).reshape(len(points), 2, 1, 100)
                    labels = np.array(labels, dtype).reshape(len(labels), -1)
                    yield points, labels
                    points = []
                    labels = []

        if len(labels):
            points = np.array(points, dtype).reshape(len(points), 2, 1, 100)
            labels = np.array(labels, dtype).reshape(len(labels), -1)
            yield points, labels
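A short sketch of how the returned reader could be consumed; only the (points, labels) batch shapes come from the generator above, and it assumes the enclosing factory returns this reader function.

for batch_id, (points, labels) in enumerate(reader()):
    # points: (batch, 2, 1, 100) array, labels: (batch, n_label_columns)
    print(batch_id, points.shape, labels.shape)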
Example #5
def read_data(data_path, placeholder, max_input_length=500):
    logger.info("Loading data {}...".format(placeholder))

    sf = SingleFile(data_path)
    data = []
    all_point_number = 50
    # all_point_number = sf.point_number
    with tqdm(total=all_point_number) as pbar:
        pbar.set_description("Reading {}".format(placeholder))
        for i in range(all_point_number):
            point = sf.get_one_point()
            one_data = point.get_data()
            one_data = np.array(one_data).reshape(1, -1, 2)
            one_data = data_handle(one_data)
            length = point.size
            output = np.pad(one_data,
                            pad_width=((0, 0), (0, max_input_length - length), (0, 0)),
                            constant_values=(0, 0),
                            )
            output = np.tile(output, reps=[3, 1, 1])
            data.append(output.tolist())
            pbar.update(1)

    return K.cast_to_floatx(np.array(data))
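After padding and tiling, each point becomes a (3, max_input_length, 2) block, so the returned array has shape (all_point_number, 3, max_input_length, 2). A quick shape check, reusing the data path from Example #1 (the path itself is an assumption here):

train_data = read_data("../../data/train/before/LINE_100_dbdt.dat", "train")
print(train_data.shape)  # expected: (50, 3, 500, 2)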
Example #6
            if len(points):
                yield points

        return reader

    def get_one_point(self) -> SinglePoint:
        point = next(self.point_reader())
        if point is None:
            raise ValueError('No more point in this file')
        return point

    def __describe(self):
        # TODO: add more of the file's basic description here
        return (f"""
============================{self.filename}============================
point_number: {self._point_number}
============================{'=' * len(self.filename)}============================
""")


if __name__ == '__main__':
    from ele_common.units import SingleFile

    singlefile = SingleFile(
        filepath='../../data/origin/before/LINE_120_dbdt.dat')
    print(singlefile._date)
    print(singlefile.filename)
    point = singlefile.get_one_point()
    point.plot()
Example #7
from ele_common.units import SingleFile
from ele_common.functions import fit_point

before_filepath = "../data/origin/before/LINE_100_dbdt.dat"
after_filepath = "../data/origin/after/new_LINE_100_dbdt.dat"

before_file = SingleFile(before_filepath)
after_file = SingleFile(after_filepath)

fit_point(before_file.get_one_point(), show=True)
fit_point(after_file.get_one_point(), show=True)
Example #8
import sys
sys.path.append("../..")

import numpy as np
from tensorflow.keras.models import load_model
from solutions.cnn_coder.train import data_handle
from ele_common.units import SingleFile
import matplotlib.pyplot as plt
from constants import *
from tensorflow.keras import backend as K

model = load_model("ed_model")
max_input_length = 500

tf = SingleFile("../../data/generate/concat/teacher_result.dat")  # teacher curves
df = SingleFile("../../data/generate/concat/data_result.dat")     # input data curves

for i in range(10):
    tpoint = tf.get_one_point()
    point = df.get_one_point()
    tdata = data_handle(tpoint.x, tpoint.y, MAX_INPUT_LENGTH)
    ddata = data_handle(point.x, point.y, MAX_INPUT_LENGTH)

    tx = [t[0] for t in tdata[0]]
    ty = [t[1] for t in tdata[0]]

    dx = [t[0] for t in ddata[0]]
    dy = [t[1] for t in ddata[0]]
    x_data, y_data = point.get_data()

    output = data_handle(x_data, y_data, MAX_INPUT_LENGTH)
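    # The original example is truncated at this point. The lines below are an
    # illustrative continuation only (not part of the source): they overlay the
    # prepared data curve against its teacher curve for the current point i.
    plt.figure()
    plt.plot(dx, dy, label='data')
    plt.plot(tx, ty, label='teacher')
    plt.legend()
    plt.title(f'point {i}')
    plt.show()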