Example #1
 def _my_exit_callback(self, child):
     """Child threads call this when they die"""
     if self._running:
         self._children_lock.acquire()
         # if child failed to init, it won't be in self._children
         utils.safe_remove(self._children, child)
         self._children_lock.release()
Example #2
def initialize(output_dir, reset=True):
    all_path = os.path.join(output_dir, 'all.log')
    error_path = os.path.join(output_dir, 'error.log')

    if reset:
        utils.safe_remove(all_path)
        utils.safe_remove(error_path)

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    # create console handler and set level to info
    handler = logging.StreamHandler()
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter("%(asctime)s %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # create error file handler and set level to error
    handler = logging.FileHandler(error_path, "w", encoding=None, delay=True)
    handler.setLevel(logging.ERROR)
    formatter = logging.Formatter("%(asctime)s %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # create debug file handler and set level to debug
    handler = logging.FileHandler(all_path, "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
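A minimal usage sketch for this setup (hypothetical; it assumes the output directory already exists, since the file handlers write into it):

import logging

initialize('logs', reset=True)   # 'logs' is a hypothetical, pre-existing directory
logging.info("shown on the console and written to all.log")
logging.error("additionally written to error.log")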
Example #3
def run(model, filename=None, train_filename=None, predict_filename=None, line_function=None, train_line_function=None, predict_line_function=None, evaluate_function=None, split=0.8, header=True):
    if train_line_function is None and line_function is not None:
        train_line_function = line_function
    if predict_line_function is None and line_function is not None:
        predict_line_function = line_function
    if train_filename is None and filename is not None:
        train_filename = filename
    if predict_filename is None and filename is not None:
        predict_filename = filename
    num_cores = len(model) if isinstance(model, collections.Sequence) else 1
    if num_cores > 1:
        os.system("spanning_tree")
        if header:
            num_lines = sum(1 for line in open(train_filename))
            os.system('tail -n {} {} > {}'.format(num_lines - 1, train_filename, train_filename + '_'))
            if predict_filename != train_filename:
                num_lines = sum(1 for line in open(predict_filename))
                os.system('tail -n {} {} > {}'.format(num_lines - 1, predict_filename, predict_filename + '_'))
            train_filename = train_filename + '_'
            predict_filename = predict_filename + '_'
            header = False
        split_file(train_filename, num_cores)
        if predict_filename != train_filename:
            split_file(predict_filename, num_cores)
        pool = Pool(num_cores)
        train_filenames = [train_filename + (str(n) if n >= 10 else '0' + str(n)) for n in range(num_cores)]
        predict_filenames = [predict_filename + (str(n) if n >= 10 else '0' + str(n)) for n in range(num_cores)]
        args = []
        for i in range(num_cores):
            args.append({'model': model[i],
                         'train_filename': train_filenames[i],
                         'predict_filename': predict_filenames[i],
                         'train_line_function': train_line_function,
                         'predict_line_function': predict_line_function,
                         'evaluate_function': evaluate_function,
                         'split': split,
                         'quiet': model[i].params.get('quiet'),
                         'multicore': True,
                         'header': header})
        results = sum(pool.map(run_model, args), [])
        if evaluate_function:
            print(evaluate_function(results))
        for f in train_filenames + predict_filenames:
            safe_remove(f)
        os.system('killall spanning_tree')
        return results
    else:
        return run_(model,
                    train_filename=train_filename,
                    predict_filename=predict_filename,
                    train_line_function=train_line_function,
                    predict_line_function=predict_line_function,
                    evaluate_function=evaluate_function,
                    split=split,
                    quiet=model.params.get('quiet'),
                    multicore=False,
                    header=header)
Example #4
def test_safe_remove():
    with open("temp.txt", "w") as file:
        pass

    assert Path("temp.txt").exists(), "Failed pre-test file existence check"
    assert not Path("temp.xml").exists(), "Failed pre-test file existence check"

    utils.safe_remove("temp.txt", "temp.xml")

    assert not Path("temp.txt").exists(), "File wasn't deleted"
    assert not Path("temp.xml").exists(), "File somehow appeared???"
Example #5
    async def latex(self, ctx, *, latex):
        """Allows the compilation of LaTeX expressions into a PNG. The expressions are automatically wrapped and """\
            """interpreted as math blocks., though if you include a $ on each end then you can escape into and out """\
            """of 'normal' mode. If text display is bothering you, Talos will accept latex inside a code block. """\
            """(```latex```)"""
        # Strip code block
        latex = latex.strip("`")

        # Generate document code
        doc = f"""
\\documentclass[
    12pt,
    border=2pt
]{{standalone}}

\\usepackage{{tex/talos}}

\\begin{{document}}
    ${latex}$
\\end{{document}}
"""

        # Compile into PNG
        filename = ""
        try:
            filename = str(dt.datetime.now().timestamp()) + str(random.randint(0, 100))
            tex = f"{filename}.tex"
            with open(tex, "w") as file:
                file.write(doc)

            sp.check_output(["pdflatex", "-interaction=nonstopmode", tex])
            if os.name == 'nt':
                gs = "gswin64c"
            else:
                gs = "gs"
            sp.call([gs, "-sDEVICE=pngalpha", "-dNOPAUSE", "-dBATCH", "-r300", f"-sOutputFile={filename}.png",
                     f"{filename}.pdf"])

            await ctx.send(file=discord.File(f"{filename}.png"))
        except Exception as e:
            if isinstance(e, sp.CalledProcessError):
                out = "Error while parsing LaTeX:\n"
                lines = e.stdout.decode().split("\n")
                log.debug('\n'.join(lines))
                for line in lines:
                    if line.startswith("! "):
                        out += line[2:] + "\n"
                await ctx.send(out)
            else:
                await ctx.send("Unknown error while attempting to parse LaTeX")
        finally:
            log.debug("Cleaning up LaTeX files")
            utils.safe_remove(*(f"{filename}." + x for x in ["tex", "aux", "pdf", "png", "log"]))
Example #6
def test_train_split(filename, train_pct=0.8, header=True):
    num_lines = sum(1 for line in open(filename)) - 1
    train_lines = int(math.ceil(num_lines * train_pct))
    test_lines = int(math.floor(num_lines * (1 - train_pct)))
    filename = shuffle_file(filename, header=header)
    train_file = filename + 'train'
    test_file = filename + 'test'
    os.system('tail -n {} {} > {}'.format(num_lines, filename, filename + '_'))
    os.system('head -n {} {} > {}'.format(train_lines, filename + '_', train_file))
    os.system('tail -n {} {} > {}'.format(test_lines, filename + '_', test_file))
    safe_remove(filename + '_')
    safe_remove(filename)
    return (train_file, test_file)
Example #7
    def start_training(self):
        cache_file = self.get_cache_file()
        model_file = self.get_model_file()

        # Remove the old cache and model files
        if not self.params.get('incremental'):
            safe_remove(cache_file)
            safe_remove(model_file)

        # Run the actual training
        self.vw_process = self.make_subprocess(self.vw_train_command(cache_file, model_file))

        # set the instance pusher
        self.push_instance = self.push_instance_stdin
Example #8
def main():
    # check command line arguments
    assert len(sys.argv) >= 2,\
        '\n[Usage] python3 "%s" <URL to download>' \
        '[directory to save images]' % __file__

    # get the URL to download from using command line argument, and download the page
    # exit if failed to download
    source_url = sys.argv[1]
    source_html_filename = download_from_url(source_url,
                                             output_doc='index.html',
                                             exit_on_error=True)

    # get images URL and their descriptions
    images_info = get_images_info(source_html_filename)
    num_total_images = len(images_info)
    print('%s image(s) to download: ' % num_total_images)
    index = 0
    for image_info in images_info:
        index += 1
        print('[%s] %s' % (index, vars(image_info)))

    # remove the downloaded html
    safe_remove(source_html_filename)

    # download images
    target_dir = str(sys.argv[2]) if len(sys.argv) >= 3 else '.'
    safe_mkdir(target_dir)
    index = 0
    num_success = 0
    num_failure = 0
    for image_info in images_info:
        index += 1
        print('Downloading image %s of %s' % (index, num_total_images))
        downloaded_filename = download_from_url(image_info.src,
                                                target_dir=target_dir)
        if downloaded_filename is not None:
            num_success += 1
            print('[Download success: %s / %s]\n%s' %
                  (num_success, num_total_images, downloaded_filename))
        else:
            num_failure += 1
            print('[Download failed: %s / %s]\n%s' %
                  (num_failure, num_total_images, image_info.src))

    # print final results
    print('[Download results]')
    print('Success: %s / %s' % (num_success, num_total_images))
    print('Failure: %s / %s' % (num_failure, num_total_images))
Example #9
 def train_on(self, filename, line_function, evaluate_function=None, header=True):
     hyperparams = [k for (k, p) in self.params.iteritems() if is_list(p) and k not in ['quadratic', 'cubic']]
     if len(hyperparams):
         if evaluate_function is None:
             raise ValueError("evaluate_function must be defined in order to hypersearch.")
         num_lines = sum(1 for line in open(filename))
         train = int(math.ceil(num_lines * 0.8))
         test = int(math.floor(num_lines * 0.2))
         train_file = filename + '_vp_hypersearch_train'
         test_file = filename + '_vp_hypersearch_validate'
         filename = shuffle_file(filename, header=header)
         os.system('head -n {} {} > {}'.format(train, filename, train_file))
         os.system('tail -n {} {} > {}'.format(test, filename, test_file))
         pos = 0
         for hyperparam in hyperparams:
             pos += 1
             if len(self.params[hyperparam]) == 2:
                 hypermin, hypermax = self.params[hyperparam]
                 if hypermax / float(hypermin) > 100:
                     param_range = [10 ** x for x in range(int(math.log10(hypermin)), int(math.log10(hypermax)) + 1)]
                 else:
                     param_range = range(int(hypermin), int(hypermax) + 1)
             else:
                 param_range = self.params[hyperparam]
             best_value = None
             best_metric = None
             model = deepcopy(self)
             model.params['quiet'] = True
             model.params['debug'] = False
             for other_hyperparam in hyperparams[pos:]:
                 average = (model.params[other_hyperparam][0] + model.params[other_hyperparam][1]) / 2.0
                 model.params[other_hyperparam] = average
             for value in param_range:
                 print('Trying {} as value for {}...'.format(value, hyperparam))
                 model.params[hyperparam] = value
                 model = model._run_train(train_file, line_function=line_function, evaluate_function=None, header=header)
                 results = model.predict_on(test_file)
                 eval_metric = evaluate_function(results)
                 print('...{}'.format(eval_metric))
                 if best_metric is None or eval_metric < best_metric:  #TODO: >
                     best_metric = eval_metric
                     best_value = value
             print('Best value for {} was {}!'.format(hyperparam, best_value))
             self.params[hyperparam] = best_value
         self.line_function = line_function
         self.evaluate_function = evaluate_function
         self.header = header
         safe_remove(train_file)
         safe_remove(test_file)
         safe_remove(filename)
         return self
     else:
         return self._run_train(filename, line_function, evaluate_function, header)
Example #10
def run_(model, train_filename=None, predict_filename=None, train_line_function=None, predict_line_function=None, evaluate_function=None, split=0.8, header=True, quiet=False, multicore=False):
    if is_list(model):
        model = model[0]
    if train_filename == predict_filename:
        train_filename, predict_filename = test_train_split(train_filename, train_pct=split, header=header)
    results = (model.train_on(train_filename,
                              line_function=train_line_function,
                              evaluate_function=evaluate_function,
                              header=header)
                     .predict_on(predict_filename,
                                 line_function=predict_line_function,
                                 header=header))
    if not quiet and multicore:
        print('Shuffling...')
    if train_filename == predict_filename:
        safe_remove(train_filename)
        safe_remove(predict_filename)
    safe_remove(model.get_cache_file())
    safe_remove(model.get_model_file())
    return results
Example #11
 def _my_exit_callback(self, child):
     """Child threads call this when they die"""
     utils.safe_remove(self._children, child)
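Examples #1 and #11 pass a collection and an element rather than file paths, so that project's safe_remove evidently removes an item from a container and tolerates it being absent. A sketch of that variant, again an assumption rather than the project's real helper:

def safe_remove(collection, item):
    # Remove item from the collection if present; do nothing if it is absent.
    try:
        collection.remove(item)
    except ValueError:
        pass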
Example #12
def preprocess_zeta(source_dir, output_dir, include_path, metadata_filename):
    source_dir = pt.normpath(source_dir)
    output_dir = pt.normpath(output_dir)

    wavs_dir = pt.join(output_dir, "wavs")
    metadata_csv_path = pt.join(output_dir, metadata_filename)

    utils.safe_remove(output_dir)
    utils.ensure_dirs(source_dir, output_dir, wavs_dir)
    audio_paths = []
    for ext in AUDIO_EXTS:
        audio_paths.extend(glob(pt.join(source_dir, "*" + ext)))
    rows = []
    for ap in audio_paths:
        apb = pt.basename(ap)
        stripped = utils.strip_audio_ext(ap)
        stripped_apb = utils.strip_audio_ext(apb)
        big_wav_path = stripped + ".wav"

        sub_path = stripped + SUBTITLE_EXT
        cut_path = stripped + IGNORE_EXT

        if not pt.exists(sub_path):
            continue
        idx = 1
        cuts = process_cuts(cut_path)
        if not pt.exists(big_wav_path):
            subprocess.run(["ffmpeg", "-i", ap, big_wav_path])
            subprocess.run([
                "sox",
                big_wav_path,
                "-b",
                "16",
                "tmp.wav",
                "rate",
                "22050",
                "channels",
                "1",
            ])
            utils.safe_remove(big_wav_path)
            os.rename("tmp.wav", big_wav_path)

        print(sub_path)
        for (line, start, end) in gen_subs(sub_path):
            in_cut = False
            for (cut_start, cut_end) in cuts:
                if cut_start < end < cut_end or cut_start < start < cut_end:
                    in_cut = True
                    break

            if not in_cut and 7000 > (end - start) > 100:
                if "[" in line and "]" in line:
                    continue
                p = pt.join(wavs_dir, stripped_apb + "_" + str(idx) + ".wav")
                subprocess.run([
                    "sox",
                    big_wav_path,
                    p,
                    "trim",
                    str(start / 1000),
                    "=" + str(end / 1000),
                    "silence",
                    "1",
                    "0.1",
                    "0.5%",
                    "reverse",
                    "silence",
                    "1",
                    "0.1",
                    "0.5%",
                    "reverse",
                ])
                if not pt.exists(p):
                    raise Exception("Clip wasn't made??")
                sound = AudioSegment.from_file(p)
                if len(sound) < 2000:
                    utils.safe_remove(p)
                    continue
                rows.append([
                    p if include_path else pt.splitext(pt.basename(p))[0],
                    line, line
                ])
                idx += 1
                print("Finished " + p)
        utils.safe_remove(big_wav_path)  # clean up the intermediate wav once this file's clips are extracted
    with open(metadata_csv_path, "w", newline="") as csvfile:
        writer = csv.writer(csvfile, delimiter="|", quoting=csv.QUOTE_MINIMAL)
        writer.writerows(rows)
Example #13
def compute_coeff(airfoil, reynolds=500000, mach=0, alpha=3, n_iter=200, tmp_dir='./tmp'):
    
    create_dir(tmp_dir)
    
    gc.collect()
    safe_remove('{}/airfoil.log'.format(tmp_dir))
    fname = '{}/airfoil.dat'.format(tmp_dir)
    with open(fname, 'wb') as f:
        np.savetxt(f, airfoil)
    
    try:
        # Has error: Floating point exception (core dumped)
        # This is the "empty input file: 'tmp/airfoil.log'" warning in other approaches
        child = pexpect.spawn('xfoil')
        timeout = 10
        
        child.expect('XFOIL   c> ', timeout)
#        child.sendline('PLOP')
#        child.expect('Option, Value   (or <Return>)    c>  ', timeout)
#        child.sendline('G F')
#        child.expect('Option, Value   (or <Return>)    c>  ', timeout)
#        child.sendline()
#        child.expect('XFOIL   c> ', timeout)
        child.sendline('load {}/airfoil.dat'.format(tmp_dir))
        child.expect('Enter airfoil name   s> ', timeout)
        child.sendline('af')
        child.expect('XFOIL   c> ', timeout)
        child.sendline('OPER')
        child.expect('.OPERi   c> ', timeout)
        child.sendline('VISC {}'.format(reynolds))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('ITER {}'.format(n_iter))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('MACH {}'.format(mach))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('PACC')
        child.expect('Enter  polar save filename  OR  <return> for no file   s> ', timeout)
        child.sendline('{}/airfoil.log'.format(tmp_dir))
        child.expect('Enter  polar dump filename  OR  <return> for no file   s> ', timeout)
        child.sendline()
        child.expect('.OPERva   c> ', timeout)
        child.sendline('ALFA {}'.format(alpha))
        child.expect('.OPERva   c> ', timeout)
        child.sendline()
        child.expect('XFOIL   c> ', timeout)
        child.sendline('quit')
        
        child.expect(pexpect.EOF)
        child.close()
    
        res = np.loadtxt('{}/airfoil.log'.format(tmp_dir), skiprows=12)
        if len(res) == 9 and res[2] >= 0.003:
            CL = res[1]
            CD = res[2]
        else:
            CL = -np.inf
            CD = np.inf
            
    except Exception as ex:
#        print(ex)
        print('XFoil error!')
        CL = -np.inf
        CD = np.inf
        
    safe_remove(':00.bl')
    
    return CL, CD
Example #14
def compute_coeff(airfoil, reynolds=500000, mach=0, alpha=3, n_iter=200):

    gc.collect()
    safe_remove('tmp/airfoil.log')
    fname = 'tmp/airfoil.dat'
    with open(fname, 'wb') as f:
        np.savetxt(f, airfoil)

    try:
        # Has error: Floating point exception (core dumped)
        # This is the "empty input file: 'tmp/airfoil.log'" warning in other approaches
        child = pexpect.spawn('xfoil')
        timeout = 10

        child.expect('XFOIL   c> ', timeout)
        child.sendline('load tmp/airfoil.dat')
        child.expect('Enter airfoil name   s> ', timeout)
        child.sendline('af')
        child.expect('XFOIL   c> ', timeout)
        child.sendline('OPER')
        child.expect('.OPERi   c> ', timeout)
        child.sendline('VISC {}'.format(reynolds))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('ITER {}'.format(n_iter))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('MACH {}'.format(mach))
        child.expect('.OPERv   c> ', timeout)
        child.sendline('PACC')
        child.expect(
            'Enter  polar save filename  OR  <return> for no file   s> ',
            timeout)
        child.sendline('tmp/airfoil.log')
        child.expect(
            'Enter  polar dump filename  OR  <return> for no file   s> ',
            timeout)
        child.sendline()
        child.expect('.OPERva   c> ', timeout)
        child.sendline('ALFA {}'.format(alpha))
        child.expect('.OPERva   c> ', timeout)
        child.sendline()
        child.expect('XFOIL   c> ', timeout)
        child.sendline('quit')

        child.expect(pexpect.EOF)
        child.close()

        # Has the dead lock issue
        #        with open('tmp/control.in', 'w') as text_file:
        #            text_file.write('load tmp/airfoil.dat\n' +
        #                            'af\n' +
        #                            'OPER\n' +
        #                            'VISC {}\n'.format(reynolds) +
        #                            'ITER {}\n'.format(n_iter) +
        #                            'MACH {}\n'.format(mach) +
        #                            'PACC\n' +
        #                            'tmp/airfoil.log\n' +
        #                            '\n' +
        #                            'ALFA {}\n'.format(alpha) +
        #                            '\n' +
        #                            'quit\n')
        #        os.system('xfoil <tmp/control.in> tmp/airfoil.out')

        # Has the dead lock issue
        # Has memory issue
        #        ps = sp.Popen(['xfoil'], stdin=sp.PIPE, stderr=sp.PIPE, stdout=sp.PIPE)
        #
        #        # Use communicate() rather than .stdin.write, .stdout.read or .stderr.read
        #        # to avoid deadlocks due to any of the other OS pipe buffers filling up and
        #        # blocking the child process.
        #        out, err = ps.communicate('load tmp/airfoil.dat\n' +
        #                                  'af\n' +
        #                                  'OPER\n' +
        #                                  'VISC {}\n'.format(reynolds) +
        #                                  'ITER {}\n'.format(n_iter) +
        #                                  'MACH {}\n'.format(mach) +
        #                                  'PACC\n' +
        #                                  'tmp/airfoil.log\n' +
        #                                  '\n' +
        #                                  'ALFA {}\n'.format(alpha) +
        #                                  '\n' +
        #                                  'quit\n')

        res = np.loadtxt('tmp/airfoil.log', skiprows=12)

        if len(res) in [7, 9]:
            CL = res[1]
            CD = res[2]
        else:
            CL = -np.inf
            CD = np.inf

    except Exception as ex:
        #        print(ex)
        print('XFoil error!')
        CL = -np.inf
        CD = np.inf

    safe_remove(':00.bl')

    return CL, CD
Example #15
    def train(self, data_obj, func_obj, val_scale, train_steps=10000, batch_size=32, 
              disc_lr=2e-4, gen_lr=2e-4, save_interval=0, save_dir='.'):
        
        safe_remove('{}/logs'.format(save_dir))
        
        # Inputs
        self.x = tf.placeholder(tf.float32, shape=[None, self.data_dim], name='data')
        self.z = tf.placeholder(tf.float32, shape=[None, self.noise_dim], name='noise')
        
        # Outputs
        d_real = self.discriminator(self.x)
        self.x_fake = self.generator(self.z)
        d_fake = self.discriminator(self.x_fake)
        
        # Losses
        # Cross entropy losses for D
        d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=d_real, labels=tf.ones_like(d_real)))
        d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=d_fake, labels=tf.zeros_like(d_fake)))
        # Cross entropy losses for G
        g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=d_fake, labels=tf.ones_like(d_fake)))
        dpp_loss, D, S, Q, L, y = self.compute_diversity_loss(self.x_fake, func_obj.equation, val_scale)
        mean_y = tf.reduce_mean(y)
        g_dpp_loss = g_loss + self.lambda1 * dpp_loss
        
        # Optimizers
        d_optimizer = tf.train.AdamOptimizer(learning_rate=disc_lr, beta1=0.5)
        g_optimizer = tf.train.AdamOptimizer(learning_rate=gen_lr, beta1=0.5)
        
        # Generator variables
        gen_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='Generator')
        # Discriminator variables
        dis_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='Discriminator')
        
        # Training operations
        d_train = d_optimizer.minimize(d_loss_real+d_loss_fake, var_list=dis_vars)
        g_train = g_optimizer.minimize(g_dpp_loss, var_list=gen_vars)
        
#        d_grads_real = d_optimizer.compute_gradients(d_loss_real, dis_vars)
#        d_grads_fake = d_optimizer.compute_gradients(d_loss_fake, dis_vars)
#        g_grads = g_optimizer.compute_gradients(g_loss, gen_vars)
#        dpp_grads = g_optimizer.compute_gradients(dpp_loss, gen_vars)
        
#        def clip_gradient(optimizer, loss, var_list):
#            grads_and_vars = optimizer.compute_gradients(loss, var_list)
#            clipped_grads_and_vars = [(grad, var) if grad is None else 
#                                      (tf.clip_by_value(grad, -1., 1.), var) for grad, var in grads_and_vars]
#            train_op = optimizer.apply_gradients(clipped_grads_and_vars)
#            return train_op
#        
#        d_train = clip_gradient(d_optimizer, d_loss_real+d_loss_fake, dis_vars)
#        g_train = clip_gradient(g_optimizer, g_dpp_loss, gen_vars)
        
        # Initialize the variables (i.e. assign their default value)
        init = tf.global_variables_initializer()
        
        # Create summaries to monitor losses
        tf.summary.scalar('D_loss_for_real', d_loss_real)
        tf.summary.scalar('D_loss_for_fake', d_loss_fake)
        tf.summary.scalar('G_loss', g_loss)
        tf.summary.scalar('DPP_loss', dpp_loss)
        # Merge all summaries into a single op
        merged_summary_op = tf.summary.merge_all()
        
        # Add ops to save and restore all the variables.
        saver = tf.train.Saver()
        
        # Start training
        self.sess = tf.Session()
        
        # Run the initializer
        self.sess.run(init)
        # op to write logs to Tensorboard
        summary_writer = tf.summary.FileWriter('{}/logs'.format(save_dir), graph=self.sess.graph)
        
        data = data_obj.data
    
        for t in range(train_steps):
    
#            print('#################################### D_vars ####################################')
#            for var, val in zip(dis_vars, self.sess.run(dis_vars)):
#                print('D_vars before update: '+var.name, val)
                
            ind = np.random.choice(data.shape[0], size=batch_size, replace=False)
            X_real = data[ind]
            noise = np.random.normal(scale=0.5, size=(batch_size, self.noise_dim))
            _, dlr, dlf = self.sess.run([d_train, d_loss_real, d_loss_fake], feed_dict={self.x: X_real, self.z: noise})
            
#            print('#################################### D_grads_real ####################################')
#            for var, val in zip(dis_vars, self.sess.run(d_grads_real, feed_dict={self.x: X_real})):
#                print('D_grads_real: '+var.name, val)
#            print('#################################### D_grads_fake ####################################')
#            for var, val in zip(dis_vars, self.sess.run(d_grads_fake, feed_dict={self.z: noise})):
#                print('D_grads_fake: '+var.name, val)
#            print('#################################### D_vars ####################################')
#            for var, val in zip(dis_vars, self.sess.run(dis_vars)):
#                print('D_vars after update: '+var.name, val)
            
#            X_fake = self.sess.run(self.x_fake, feed_dict={self.z: noise})
#            print('************************************ X_fake ************************************')
#            print('Before G update:', X_fake)
            
#            D_batch, S_batch, Q_batch, L_batch = self.sess.run([D, S, Q, L], feed_dict={self.z: noise})
#            print('************************************ SQL ************************************')
#            print('D before G update:', D_batch)
#            print(np.min(D_batch), np.max(D_batch))
#            print('S before G update:', S_batch)
#            print('Q before G update:', Q_batch)
#            print('L before G update:', L_batch)
#            print(np.min(L_batch), np.max(L_batch))
#            print('Singular values:', np.linalg.svd(L_batch, compute_uv=False))
                
#            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% G_vars %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
#            for var, val in zip(gen_vars, self.sess.run(gen_vars)):
#                print('G_vars before update: '+var.name, val)
                
            noise = np.random.normal(scale=0.5, size=(batch_size, self.noise_dim))
            _, gl, dppl, my = self.sess.run([g_train, g_loss, dpp_loss, mean_y], feed_dict={self.z: noise})
            
#            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% G_grads %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
#            for var, val in zip(gen_vars, self.sess.run(g_grads, feed_dict={self.z: noise})):
#                print(var.name, val)
#            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% DPP_grads %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
#            for var, val in zip(gen_vars, self.sess.run(dpp_grads, feed_dict={self.z: noise})):
#                print(var.name, val)
#            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% G_vars %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
#            for var, val in zip(gen_vars, self.sess.run(gen_vars)):
#                print('G_vars after update: '+var.name, val)
            
#            X_fake = self.sess.run(self.x_fake, feed_dict={self.z: noise})
#            print('************************************ X_fake ************************************')
#            print('After G update:', X_fake)
            
#            D_batch, S_batch, Q_batch, L_batch = self.sess.run([D, S, Q, L], feed_dict={self.z: noise})
#            print('************************************ SQL ************************************')
#            print('D after G update:', D_batch)
#            print(np.min(D_batch), np.max(D_batch))
#            print('S after G update:', S_batch)
#            print('Q after G update:', Q_batch)
#            print('L after G update:', L_batch)
#            print(np.min(L_batch), np.max(L_batch))
#            print('Singular values:', np.linalg.svd(L_batch, compute_uv=False))
            
            summary_str = self.sess.run(merged_summary_op, feed_dict={self.x: X_real, self.z: noise})
            summary_writer.add_summary(summary_str, t+1)
            
            # Show messages
            log_mesg = "%d: [D] real %f fake %f" % (t+1, dlr, dlf)
            log_mesg = "%s  [G] fake %f dpp %f y %f" % (log_mesg, gl, dppl, my)
            print(log_mesg)
            
            assert not (np.isnan(dlr) or np.isnan(dlf) or np.isnan(gl) or np.isnan(dppl))
            
            if save_interval>0 and (t+1)%save_interval==0 or t+1==train_steps:
                # Save the variables to disk.
                save_path = saver.save(self.sess, '{}/model'.format(save_dir))
                print('Model saved in path: %s' % save_path)
                print('Plotting results ...')
                gen_data = self.synthesize(1000)
                visualize_2d(data, func=func_obj.evaluate, gen_data=gen_data, save_path='{}/{}_synthesized.svg'.format(save_dir, t+1))
Example #16
    assert N == Y.shape[0]
    
    # Prepare save directory
    create_dir('./trained_gan')
    save_dir = './trained_gan/{}_{}'.format(lambda0, lambda1)
    create_dir(save_dir)
    
#    print('Plotting training samples ...')
#    samples = X[np.random.choice(N, size=36, replace=False)]
#    plot_samples(None, samples, scale=1.0, scatter=False, lw=1.2, alpha=.7, c='k', fname='{}/samples'.format(save_dir))
    
    # Train
    surrogate_dir = './surrogate/trained_surrogate'
    model = BezierGAN(latent_dim, noise_dim, X.shape[1], bezier_degree, bounds, lambda0, lambda1)
    if args.mode == 'train':
        safe_remove(save_dir)
        timer = ElapsedTimer()
        model.train(X, batch_size=batch_size, train_steps=train_steps, disc_lr=disc_lr, gen_lr=gen_lr, 
                    save_interval=save_interval, directory=save_dir, surrogate_dir=surrogate_dir)
        elapsed_time = timer.elapsed_time()
        runtime_mesg = 'Wall clock time for training: %s' % elapsed_time
        print(runtime_mesg)
        runtime_file = open('{}/runtime.txt'.format(save_dir), 'w')
        runtime_file.write('%s\n' % runtime_mesg)
        runtime_file.close()
    else:
        model.restore(directory=save_dir)
    
    print('Plotting synthesized shapes ...')
    airfoils = model.synthesize(36)
    plot_samples(None, airfoils, scale=1.0, scatter=False, lw=1.2, alpha=.7, c='k', fname='{}/synthesized'.format(save_dir))
Example #17
 def read_predictions_(self):
     for x in open(self.prediction_file):
         yield self.parse_prediction(x)
     safe_remove(self.prediction_file)