help='RAY Init num_cpus parameter.')
    parser.add_argument('--ray-gpus',
                        default=0,
                        type=int,
                        help='RAY Init num_gpus parameter.')
    ######## Profiling
    parser.add_argument('--profiler',
                        dest='profiler',
                        action='store_true',
                        help='Enables cProfile.')
    parser.set_defaults(profiler=False)
    return parser.parse_args()


# Parse the command-line options once at import time and configure the
# module-wide logger for this training entry point.
ARGS = argument_parser()
logger = set_logging('runPPOtraining')

####################################################################################################


class NPEncoder(NumpyEncoder):
    """JSON encoder that never fails: falls back to ``str()`` for values
    the numpy-aware base encoder cannot serialize.

    Any value the base class rejects is logged at DEBUG level and emitted
    as its string representation, so ``json.dump`` completes even for
    exotic objects.
    """

    def default(self, obj):
        try:
            # Let the numpy-aware base encoder handle the value first.
            return super().default(obj)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; `Exception` keeps the lossy-string
            # fallback without trapping interpreter-exit signals.
            logger.debug('%s ---> %s', str(type(obj)), str(obj))
            return str(obj)


def load_json_file(json_file):
# --- Example #2 (boundary marker from the scraped source page; the stray
# "0" was the example's vote count, not code) ---
    args = vars(parser.parse_args())

    ######################################
    # Check Input
    ######################################

    if not os.path.isfile(args['season_captures_csv']):
        raise FileNotFoundError("season_captures_csv: {} not found".format(
            args['season_captures_csv']))

    if not os.path.isfile(args['predictions_csv']):
        raise FileNotFoundError("predictions_csv: {} not found".format(
            args['predictions_csv']))

    # logging
    set_logging(args['log_dir'], args['log_filename'])
    logger = logging.getLogger(__name__)

    for k, v in args.items():
        logger.info("Argument {}: {}".format(k, v))

    ######################################
    # Read Data
    ######################################

    # read captures data
    season_data_df = read_cleaned_season_file_df(args['season_captures_csv'])
    n_images_in_season_data = season_data_df.shape[0]
    logger.info("Read {} records from {}".format(n_images_in_season_data,
                                                 args['season_captures_csv']))
from utils.logger import set_logging

# Import the SUMO Python bindings. SUMO ships traci/libsumo inside
# $SUMO_HOME/tools, which is not on sys.path by default, so the path must
# be appended before the imports can succeed.
if 'SUMO_HOME' in os.environ:
    sys.path.append(os.path.join(os.environ['SUMO_HOME'], 'tools'))
    import traci
    import libsumo
    import traci.constants as tc
else:
    # Without SUMO_HOME the bindings cannot be located; abort immediately.
    sys.exit("please declare environment variable 'SUMO_HOME'")

####################################################################################################

# Module-level debug switch (semantics defined by its users elsewhere in
# the file — TODO confirm) and the project-configured logger.
DEBUGGER = True
logger = set_logging(__name__)

####################################################################################################


def env_creator(config):
    """Environment factory used when registering the environment.

    Builds and returns a fresh PrefChoiceRefCSPersuasiveDeepMARLEnv
    instance from the given *config*.
    """
    environment_cls = PrefChoiceRefCSPersuasiveDeepMARLEnv
    logger.debug(
        '[env_creator] Environment creation: PrefChoiceRefCSPersuasiveDeepMARLEnv'
    )
    return environment_cls(config)


####################################################################################################

# --- Example #4 (boundary marker from the scraped source page; the stray
# "0" was the example's vote count, not code) ---
        '--action-distr',
        type=float,
        nargs='+',
        help=
        "Probability distribution for the epsilon action. Required with PDEGQLET."
    )
    parser.add_argument('--profiler',
                        dest='profiler',
                        action='store_true',
                        help='Enables cProfile.')
    parser.set_defaults(profiler=False)
    return parser.parse_args()


# Parse the command-line options once at import time and configure the
# module-wide logger for this training entry point.
ARGS = argument_parser()
logger = set_logging('runA3Ctraining')

####################################################################################################


def load_json_file(json_file):
    """Load and return the parsed content of a JSON file.

    Args:
        json_file: Path of the JSON file to read.

    Returns:
        The deserialized object (typically a dict or list).

    Raises:
        FileNotFoundError: if *json_file* does not exist.
        json.JSONDecodeError: if the file is not valid JSON.
    """
    logger.debug('Loading %s.', json_file)
    # Context manager guarantees the descriptor is closed even on error;
    # the original `json.load(open(json_file))` leaked the file handle.
    with open(json_file) as fd:
        return json.load(fd)


####################################################################################################

CHECKPOINT_METRICS = [
    # 'max_episode_reward_mean', # this is cumulative value
    'min_policy_loss',
# --- Example #5 (boundary marker from the scraped source page; the stray
# "0" was the example's vote count, not code) ---
        '--action-distr',
        type=float,
        nargs='+',
        help=
        "Probability distribution for the epsilon action. Required with PDEGQLET."
    )
    parser.add_argument('--profiler',
                        dest='profiler',
                        action='store_true',
                        help='Enables cProfile.')
    parser.set_defaults(profiler=False)
    return parser.parse_args()


# Parse the command-line options once at import time and configure the
# module-wide logger for this training entry point.
ARGS = argument_parser()
logger = set_logging('runDQNtraining')

####################################################################################################


class NPEncoder(NumpyEncoder):
    """JSON encoder that never fails: falls back to ``str()`` for values
    the numpy-aware base encoder cannot serialize.

    Any value the base class rejects is logged at DEBUG level and emitted
    as its string representation, so ``json.dump`` completes even for
    exotic objects.
    """

    def default(self, obj):
        try:
            # Let the numpy-aware base encoder handle the value first.
            return super().default(obj)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; `Exception` keeps the lossy-string
            # fallback without trapping interpreter-exit signals.
            logger.debug('%s ---> %s', str(type(obj)), str(obj))
            return str(obj)


def load_json_file(json_file):
    parser.add_argument("--images_to_match_path", type=str, required=True)
    parser.add_argument("--output_csv", type=str, default=None)
    args = vars(parser.parse_args())

    # Check Input
    if not os.path.isfile(args['captures']):
        raise FileNotFoundError("captures: {} not found".format(
            args['captures']))

    if not os.path.isdir(args['images_to_match_path']):
        raise FileNotFoundError(
            "images_to_match_path: {} must be a directory".format(
                args['images_to_match_path']))

    # logging
    set_logging()

    # find all images
    images_to_find = list_pictures(args['images_to_match_path'],
                                   ext=('jpg', 'jpeg'))

    logger.info("Found {} images in {}".format(len(images_to_find),
                                               args['images_to_match_path']))

    captures = read_image_inventory(args['captures_csv'],
                                    unique_id='image_path')

    logger.info("Read {} with {} images".format(args['captures_csv'],
                                                len(captures.keys())))

    images_to_search_in = list(captures.keys())