Example #1
                    self.std_batch_z = self.get_noise_batches(batch_size=10 * 10)
                    self.std_test_data = True

                loss_G, loss_D, D1, D2, batch_fx = self.sess.run(
                    [self.loss_G, self.loss_D, self.D1, self.D2, self.G],
                    feed_dict={
                        self.x: self.std_batch_x,
                        self.y: self.std_batch_y,
                        self.z: self.std_batch_z,
                        self.keep_prob: 1.0
                    })
                self.plot_losses(epoch=i, loss_G=loss_G, loss_D=loss_D)
                self.plot_fake_data(epoch=i, batch_fx=batch_fx)
                time_diff = time.time() - start_time
                start_time = time.time()
                logger.info(
                    "Epoch: {:3d} - L_G: {:0.3f} - L_D: {:0.3f} - D1: {:0.3f} - D2: {:0.3f} - Time: {:0.1f}"
                    .format(i, loss_G, loss_D, D1[0][0], D2[0][0], time_diff))


if __name__ == "__main__":
    setup_logger(log_directory=get_project_directory("mnist_cnn", "logs"),
                 file_handler_type=HandlerType.TIME_ROTATING_FILE_HANDLER,
                 allow_console_logging=True,
                 allow_file_logging=True,
                 max_file_size_bytes=10000,
                 change_log_level=None)
    mnist_gan_mlp = GAN_CNN(z_dim=10, batch_size=100)
    mnist_gan_mlp.run(epochs=1000, batch_size=100, summary_epochs=1)
Example #2
"""Entry point for the backend application."""

from pylogging import HandlerType, setup_logger

from flask_app import server

if __name__ == '__main__':
    setup_logger(log_directory='./logs',
                 file_handler_type=HandlerType.TIME_ROTATING_FILE_HANDLER,
                 allow_console_logging=True,
                 allow_file_logging=False,
                 backup_count=100,
                 max_file_size_bytes=100000,
                 when_to_rotate='D',
                 change_log_level=None)
    server.main()


Example #3
# -*- coding: utf-8 -*-
import logging
import pickle
import time
from datetime import date, datetime
from datetime import timedelta

import feedparser
from pylogging import HandlerType, setup_logger
import json
from pymongo import MongoClient

logger = logging.getLogger(__name__)
setup_logger(log_directory='./logs',
             file_handler_type=HandlerType.ROTATING_FILE_HANDLER,
             allow_console_logging=True,
             console_log_level=logging.DEBUG,
             max_file_size_bytes=1000000)
from os import listdir
from os.path import isfile, join

client = MongoClient("localhost", 27017)
mydb = client["amivmat"]
mydb['links'].drop()


def insert(dictObject, db):
    """Insert a given menu object into the menus database.

    If an object with the same id, date, mensaName and lang already exists, it will be updated.
    """
    """ res = db["menus"].update_one(
        {"id": dictObject["id"],
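
The function body above is cut off; as a rough sketch only (not the original author's code), the upsert the docstring describes could be written with pymongo's update_one and upsert=True, keyed on the fields the docstring names:

def insert(dictObject, db):
    """Sketch: update the matching menu document, or insert it if none exists."""
    db["menus"].update_one(
        {
            "id": dictObject["id"],
            "date": dictObject["date"],
            "mensaName": dictObject["mensaName"],
            "lang": dictObject["lang"],
        },
        {"$set": dictObject},  # overwrite/extend the stored fields
        upsert=True,           # insert when no document matches the filter
    )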
Example #4
"""Entry point for the backend application."""

from pylogging import HandlerType, setup_logger

from flask_app import server

if __name__ == '__main__':
    setup_logger(log_directory='./logs',
                 file_handler_type=HandlerType.TIME_ROTATING_FILE_HANDLER,
                 allow_console_logging=True,
                 allow_file_logging=False,
                 backup_count=100,
                 max_file_size_bytes=100000,
                 when_to_rotate='D',
                 change_log_level=None)
    server.main()
Example #5
        if it.heartbeat:
            logger.info(it.itr_message())
            logger.info('\tVisitationError:' +
                        str(it.pop_mean('VisitationInfNormError')))
    return reward_fn, q_rew


if __name__ == "__main__":
    # test IRL
    # from q_iteration import q_iteration
    from simple_env import random_env
    # np.set_printoptions(suppress=True)

    setup_logger(log_directory='./logs',
                 file_handler_type=HandlerType.ROTATING_FILE_HANDLER,
                 allow_console_logging=True,
                 console_log_level="INFO")

    # Environment parameters
    env = random_env(16, 4, seed=1, terminate=False, t_sparsity=0.8)
    dS = env.spec.observation_space.flat_dim
    dU = env.spec.action_space.flat_dim
    dO = 8
    ent_wt = 1.0
    discount = 0.9
    obs_matrix = np.random.randn(dS, dO)

    # Compute optimal policy for double checking
    true_q = q_iteration(env, K=150, ent_wt=ent_wt, gamma=discount)
    true_sa_visits = compute_visitation(env,
                                        true_q,
Example #6
from graypy import GELFUDPHandler

from pylogging import HandlerType, setup_logger

logger = logging.getLogger(__name__)

# If you want to add extra fields (see the LoggerAdapter sketch after this example).
# logger = logging.LoggerAdapter(logger, {"app_name": "test-service"})
if __name__ == '__main__':
    gelf_handler = GELFUDPHandler(host="elk.recogizer.net",
                                  port=12201,
                                  level_names=True,
                                  debugging_fields=False)

    setup_logger(
        log_directory='./logs',
        file_handler_type=HandlerType.TIME_ROTATING_FILE_HANDLER,
        allow_console_logging=True,
        allow_file_logging=True,
        backup_count=100,
        max_file_size_bytes=100000,
        when_to_rotate='D',
        change_log_level=None,
        gelf_handler=gelf_handler,
        log_tags={"app_name": "Test-App"},
    )

    logger.error("Error logs")
    logger.debug("Debug logs")
    logger.info("Info logs")
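
The commented-out LoggerAdapter line above hints at another way to attach extra fields to every record; a minimal stand-alone sketch using only the standard library (the app_name field is simply the one from that comment):

import logging

logging.basicConfig(format="%(asctime)s %(app_name)s %(levelname)s %(message)s",
                    level=logging.INFO)
logger = logging.LoggerAdapter(logging.getLogger(__name__),
                               {"app_name": "test-service"})
logger.info("Info logs")  # the adapter injects app_name so the formatter can print it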
Example #7
import logging

from pylogging import HandlerType, setup_logger
from rllab.misc import logger as rllog
# from rllab.algos.ppo import PPO
from sandbox.rocky.tf.envs.base import TfEnv
from sandbox.rocky.tf.policies.gaussian_mlp_policy import GaussianMLPPolicy

log = logging.getLogger(__name__)
rllog.print = log.debug
rlc.print = log.debug
register_custom_envs()
setup_logger(log_directory='./logs',
             file_handler_type=HandlerType.ROTATING_FILE_HANDLER,
             allow_console_logging=True,
             console_log_level="DEBUG",
             change_log_level={
                 'tensorflow': 'error',
                 'matplotlib': 'error',
                 'GP': 'error',
                 'gpirl': 'info',
                 'gpirl.utils2': 'error',
                 __name__: 'info',
                 'gym': 'error'
             })

data_path = 'data/lunarlander_demo/'
env_name = 'LunarLanderContinuous-v3'
state_var_names = 'state_0,state_1,state_2,state_3,state_4,state_5,state_6,state_7'
nstate_var_names = state_var_names.replace('state', 'next_state')
action_names = 'action_0,action_1'
log.debug("Column names : {},{},{}".format(state_var_names, action_names, nstate_var_names))
trajectories = []
experts = []