Example #1
 def registerFractals(self):
     self.fractalList = []
     self.fractalList.append(SyntaxFractal())
     #self.fractalList.append(BlankFractal())
     self.fractalList.append(IFSFractal())
     self.fractalList.append(JuliaSet())
     self.fractalList.append(Mandelbrot())
     self.fractalList.append(DLA())
     return
Example #2
    def plot__N_R(self):
        """Plot an N-R_g graph to calculate the DLA cluster's fractal dimension."""
        self.N = int(self.sp.entry[0].get())
        self.dla = DLA(self.N)
        self.lattice = self.dla.grow_cluster()
        self.center = self.dla.center
        self.Narr = np.array([2**x for x in range(1, int(np.log2(self.N))+1)])
        self.R_g = np.array([self.dla.R_g[n-1] for n in self.Narr])

        # plot
        fig = plt.figure("Fractal Dimension")
        self.ax = fig.add_subplot(111)
        self.ax.loglog(self.R_g, self.Narr, '-o')
        self.ax.set_xlabel(r'$R_{g}$', fontsize=16)
        self.ax.set_ylabel(r'$N$', fontsize=16)
        self.ax.set_ymargin(0.05)
        fig.tight_layout()
        plt.show()
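
Since N ~ R_g^D for a DLA cluster, the fractal dimension D is simply the slope of this log-log plot. A minimal standalone sketch of reading it off with np.polyfit; the R_g/Narr values below are made-up placeholders for the arrays built above:

import numpy as np

# Hypothetical data standing in for self.R_g and self.Narr from the snippet above.
R_g = np.array([1.5, 2.9, 5.6, 10.8, 20.7])
Narr = np.array([2, 4, 8, 16, 32])

# Slope of log N against log R_g estimates the fractal dimension D (about 1.7 for 2-D DLA).
D, log_c = np.polyfit(np.log(R_g), np.log(Narr), 1)
print("estimated D =", D)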
Example #3
 def time_sequential_then_bunch(self):
     d = DLA.main_single(1, gotosize=[1e4])
Example #4
 def time_sequential(self):
     d = DLA.main_single(1, gotosize=[])
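
The time_* methods above look like benchmark hooks (e.g. for an airspeed-velocity style suite); the gotosize argument apparently switches between purely sequential growth and growth in bunches once the cluster reaches 1e4 particles. A rough standalone timing sketch under that assumption, reusing the call signatures shown:

import time
from DLA import DLA  # assumes the same DLA module as in the examples above

start = time.perf_counter()
DLA.main_single(1, gotosize=[1e4])   # sequential, then bunched growth
print("sequential then bunch: %.2f s" % (time.perf_counter() - start))

start = time.perf_counter()
DLA.main_single(1, gotosize=[])      # purely sequential growth
print("sequential:            %.2f s" % (time.perf_counter() - start))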
Example #5
    def __init__(self, sess):
        self.sess = sess
        self.dataset_root = '/home/lthpc/zhangshichang/VOCdevkit/'

        if Detection_or_Classifier == 'classifier':
            self.train_data = DataLoader(root=self.dataset_root +
                                         'SmallNORB/trainImages',
                                         batch=batch_size)
            self.test_data = DataLoader(root=self.dataset_root +
                                        'SmallNORB/testImages',
                                        batch=batch_size)

            self.labels = ['1', '2', '3', '4', '5']

            print("Building the model...")
            self.model = DLA(
                num_classes=len(self.labels),
                num_anchors=5,
                batch_size=batch_size,
                max_box_per_image=max_box_per_image,
                max_grid=[max_input_size, max_input_size],
            )
            print("Model is built successfully\n\n")

        elif Detection_or_Classifier == 'detection':
            train_ints, valid_ints, self.labels = create_training_instances(
                self.dataset_root + 'VOC2012/Annotations/',
                self.dataset_root + 'VOC2012/JPEGImages/', 'data.pkl', '', '',
                '', [
                    'person', 'head', 'hand', 'foot', 'aeroplane', 'tvmonitor',
                    'train', 'boat', 'dog', 'chair', 'bird', 'bicycle',
                    'bottle', 'sheep', 'diningtable', 'horse', 'motorbike',
                    'sofa', 'cow', 'car', 'cat', 'bus', 'pottedplant'
                ])
            self.train_data = BatchGenerator(
                instances=train_ints,
                anchors=anchors,
                labels=self.labels,
                downsample=32,  # ratio between the network input size and the network output size; 32 for YOLOv3
                max_box_per_image=max_box_per_image,
                batch_size=batch_size,
                min_net_size=min_input_size,
                max_net_size=max_input_size,
                shuffle=True,
                jitter=0.3,
                norm=normalize)
            self.test_data = BatchGenerator(
                instances=valid_ints,
                anchors=anchors,
                labels=self.labels,
                downsample=32,  # ratio between the network input size and the network output size; 32 for YOLOv3
                max_box_per_image=max_box_per_image,
                batch_size=batch_size,
                min_net_size=min_input_size,
                max_net_size=max_input_size,
                shuffle=True,
                jitter=0.0,
                norm=normalize)

            print("Building the model...")
            self.model = DLA(
                num_classes=len(self.labels),
                num_anchors=5,
                batch_size=batch_size,
                max_box_per_image=max_box_per_image,
                max_grid=[max_input_size, max_input_size],
            )
            print("Model is built successfully\n\n")

        #tf.profiler.profile(tf.get_default_graph(),options=tf.profiler.ProfileOptionBuilder.trainable_variables_parameter(), cmd='scope')

        num_params = get_num_params()
        print('all params:{}'.format(num_params))

        var = tf.global_variables()
        var_list = [val for val in var]
        if Detection_or_Classifier == 'detection' and False:
            #var_list = [val for val in var if (('zsc_preprocessing' in val.name) or ('zsc_feature' in val.name) or ('zsc_attention' in val.name) or ('zsc_detection' in val.name)) and ('SE' not in val.name)]
            var_list = [val for val in var if ('SE' not in val.name)]

        self.saver = tf.train.Saver(var_list=var_list,
                                    max_to_keep=max_to_keep,
                                    keep_checkpoint_every_n_hours=10)

        self.save_checkpoints_path = os.path.join(os.getcwd(), 'checkpoints',
                                                  Detection_or_Classifier)
        if not os.path.exists(self.save_checkpoints_path):
            os.makedirs(self.save_checkpoints_path)

        # Initializing the model
        self.init = None
        self.__init_model()

        # Loading the model checkpoint if exists
        self.__load_model()

        summary_dir = os.path.join(os.getcwd(), 'logs',
                                   Detection_or_Classifier)
        if not os.path.exists(summary_dir):
            os.makedirs(summary_dir)
        summary_dir_train = os.path.join(summary_dir, 'train')
        if not os.path.exists(summary_dir_train):
            os.makedirs(summary_dir_train)
        summary_dir_test = os.path.join(summary_dir, 'test')
        if not os.path.exists(summary_dir_test):
            os.makedirs(summary_dir_test)
        self.train_writer = tf.summary.FileWriter(summary_dir_train,
                                                  sess.graph)
        self.test_writer = tf.summary.FileWriter(summary_dir_test)
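
The class that owns this __init__ is not shown in the snippet; assuming it is a trainer-style wrapper (called Trainer here purely for illustration, with an assumed train() method), it would typically be driven roughly like this:

import tensorflow as tf

# Hypothetical driver; Trainer stands in for the unnamed class whose __init__
# is shown above, and train() is an assumed companion method.
with tf.Session() as sess:
    trainer = Trainer(sess)
    trainer.train()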
Example #6
from ConstantStickDistribution import ConstantStickDistribution
from DLA import DLA

stickDist = ConstantStickDistribution(proba=1.0)
dla = DLA(radius_limit=200, numParticles=5000, stickDistribution=stickDist)

dla.simulate()
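
With proba=1.0 every wandering particle sticks on first contact, giving the classic sparse DLA morphology. If ConstantStickDistribution accepts probabilities below 1 (only 1.0 appears in the source, so this is an assumption), lowering it should yield denser clusters; the same run with partial sticking would look like:

from ConstantStickDistribution import ConstantStickDistribution
from DLA import DLA

# proba < 1.0 is assumed to be supported; only proba=1.0 is shown in the original example.
stickDist = ConstantStickDistribution(proba=0.3)
dla = DLA(radius_limit=200, numParticles=5000, stickDistribution=stickDist)
dla.simulate()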
Example #7
 def grow_cluster(self):
     """Create a DLA cluster with N particles using the dla.grow_cluster method."""
     self.N = int(self.sp.entry[0].get())
     self.dla = DLA(self.N)
     self.lattice = self.dla.grow_cluster()
     self.center = self.dla.center
Example #8
class Main(object):

    def __init__(self):
        import sys
        self.sp = SetParameter()
        self.N = None
        self.dla = None

        self.sp.show_setting_window(
            [
                {'N': 200}
            ],
            [
                {'start': self.grow_cluster},
                {'plot graph': self.plot__N_R},
                {'calculate D': self.fit_to_powerlow},
                {'save': self.save_to_file},
                {'quit': sys.exit}
            ]
        )

    def grow_cluster(self):
        """Create a DLA cluster with N particles by dla.grow_cluster method."""
        self.N = int(self.sp.entry[0].get())
        self.dla = DLA(self.N)
        self.lattice = self.dla.grow_cluster()
        self.center = self.dla.center

    def plot__N_R(self):
        """Plot a N-R_g graph to calcurate DLA cluster's fractal dimension."""
        self.N = int(self.sp.entry[0].get())
        self.dla = DLA(self.N)
        self.lattice = self.dla.grow_cluster()
        self.center = self.dla.center
        self.Narr = np.array([2**x for x in range(1, int(np.log2(self.N))+1)])
        self.R_g = np.array([self.dla.R_g[n-1] for n in self.Narr])

        # plot
        fig = plt.figure("Fractal Dimension")
        self.ax = fig.add_subplot(111)
        self.ax.loglog(self.R_g, self.Narr, '-o')
        self.ax.set_xlabel(r'$R_{g}$', fontsize=16)
        self.ax.set_ylabel(r'$N$', fontsize=16)
        self.ax.set_ymargin(0.05)
        fig.tight_layout()
        plt.show()

    def fit_to_powerlow(self):
        """Fitting method to calcurate the fractal dimension of DLA cluster."""

        def fit_func(parameter0, R_g, Narr):
            """Fitting function: Narr ~ R_{g}^{D}"""
            log = np.log
            c1 = parameter0[0]
            c2 = parameter0[1]
            residual = log(Narr) - c1 - c2*log(R_g)
            return residual

        def fitted(R, c1, D):
            return np.exp(c1)*(R**D)

        fitting(self.R_g, self.Narr,
                fit_func, [0.1, 1.7], fitted,
                xlabel=r'$R_{g}$', ylabel=r'$N$',
                param_to_show={'D': 1}
                )

    def save_to_file(self):
        """Save the figure of the DLA cluster with eps format."""
        import tkFileDialog
        import os

        if self.dla is None:
            print "No figure exists."
            return

        ftype = [('eps file', '*.eps'), ('all files', '*')]
        filename = tkFileDialog.asksaveasfilename(
            filetypes=ftype,
            initialdir=os.getcwd(),
            initialfile="figure_1.eps"
        )
        if filename is None:
            return
        self.dla.canvas.postscript(file=filename)
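
fit_to_powerlow delegates to an external fitting() helper, but the residual it defines is an ordinary least-squares fit of log N = c1 + D * log R_g. A self-contained sketch of the same fit with scipy.optimize.leastsq (the data arrays are hypothetical placeholders):

import numpy as np
from scipy.optimize import leastsq

# Placeholder data standing in for self.R_g and self.Narr.
R_g = np.array([1.5, 2.9, 5.6, 10.8, 20.7])
Narr = np.array([2.0, 4.0, 8.0, 16.0, 32.0])

def fit_func(p, R_g, Narr):
    """Residual of log(Narr) = c1 + D*log(R_g), mirroring fit_func above."""
    c1, D = p
    return np.log(Narr) - c1 - D * np.log(R_g)

(c1, D), ier = leastsq(fit_func, [0.1, 1.7], args=(R_g, Narr))
print("fractal dimension D =", D)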
Example #9
from DLA import DLA

N = 16
l = 51
g = 300
a = 0
for k in range(N):
    if a == 0:
        dla1 = DLA(l, 601)   # first pass: start from a fresh lattice
    else:
        dla1 = appo          # later passes: continue on the enlarged lattice
    for i in range(g):
        dla1.OnePiece()      # OnePiece() appears to add one particle per call
    a += 5

    # copy the current cluster into a lattice 10 cells larger before the next pass
    appo = DLA(l+10, 601)
    appo.t = dla1.t
    for i in range(l):
        for j in range(l):
            appo.x[l + 15 + i][l + 15 + j] = dla1.x[l+i][l+j]

    l += 10

dla1.PrintGrid()
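
Each pass grows the cluster for g steps and then re-embeds the occupancy grid into a grid 10 cells larger before continuing. With numpy the re-embedding step looks roughly like this (x is assumed here to be a 2-D array, whereas DLA.x above appears to use nested lists):

import numpy as np

def embed_in_larger_grid(x, pad):
    """Copy an occupancy grid into the centre of a grid `pad` cells larger on each side."""
    n = x.shape[0]
    bigger = np.zeros((n + 2 * pad, n + 2 * pad), dtype=x.dtype)
    bigger[pad:pad + n, pad:pad + n] = x
    return bigger

grid = np.zeros((51, 51), dtype=int)
grid[25, 25] = 1                       # seed particle in the centre
grid = embed_in_larger_grid(grid, 5)   # now 61 x 61, seed still centred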