Example 1
def train(config_path, cleanup, short_mode):
    config = toml.load(config_path)
    # base_result_dir = config['result_dir'].format(now_str=now_str(str_format='%Y%m%d_%H%M%S'))
    base_result_dir = os.path.join(config['result_dir'], 'train')
    with tempfile.TemporaryDirectory() as tmp_dir:
        if cleanup:
            print('cleanup mode')
            print('temporary directory: {}'.format(tmp_dir))
            base_result_dir = tmp_dir
        else:
            os.makedirs(base_result_dir, exist_ok=True)
        config['result_dir'] = base_result_dir

        # writing_config
        with open(os.path.join(config['result_dir'], 'config.toml'), 'a') as f:
            toml.dump(config, f)
        # building_logger
        logger_file_path = os.path.join(base_result_dir, 'log.txt')
        logger = build_logger(logger_file_path)
        # set_train_config
        preprocess_dir_name = config['preprocess_params'][
            'preprocessed_dir_name']
        n_splits = config['preprocess_params']['n_splits']
        preprocess_base_dir_path = os.path.join('../data/preprocessed_data/',
                                                preprocess_dir_name)

        # there is one directory per fold, so loop over all folds
        for fold in range(n_splits):
            fold_dir_name = '{}cv_{}'.format(n_splits, fold + 1)
            logger.info('{}_start'.format(fold_dir_name))
            preprocess_dir_path = os.path.join(preprocess_base_dir_path,
                                               fold_dir_name)
            result_dir = os.path.join(base_result_dir, fold_dir_name)
            os.makedirs(result_dir, exist_ok=True)
            _train(config,
                   preprocess_dir_path,
                   result_dir,
                   logger,
                   short_mode=short_mode)
Example 2
def score_basic(protocol_class, args):
    verbosity_threshold = 1 if args.verbose else 0
    log = build_logger(verbosity_threshold)
    log(1, "[Info] Command: {}".format(" ".join(sys.argv)))

    if not args.validation_only:
        # Check for now-required arguments
        if args.reference_file is None:
            err_quit("Missing required REFERENCE_FILE argument (-r, --reference-file).  Aborting!")

        if args.output_dir is None:
            err_quit("Missing required OUTPUT_DIR argument (-o, --output-dir).  Aborting!")

    activity_index = load_activity_index(log, args.activity_index)
    file_index = load_file_index(log, args.file_index)
    input_scoring_parameters = load_scoring_parameters(log, args.scoring_parameters_file) if args.scoring_parameters_file else {}
    protocol = protocol_class(input_scoring_parameters, file_index, activity_index, " ".join(sys.argv))
    protocol.pn = args.processes_number
    protocol.minmax = None
    system_output_schema = load_schema_for_protocol(log, protocol)

    log(1, "[Info] Loading activities and references")
    if args.prune_system_output:
        system_output, minmax = prune(args.system_output_file, args.prune_system_output, file_index, log)
        protocol.minmax = minmax
    else:
        system_output = load_system_output(log, args.system_output_file)

    validate_input(log, system_output, system_output_schema)
    check_file_index_congruence(log, system_output, file_index, args.ignore_extraneous_files, args.ignore_missing_files)
    log(1, "[Info] Validation successful")

    if args.validation_only:
        sys.exit(0)

    system_activities = parse_activities(system_output, file_index, protocol_class.requires_object_localization, args.ignore_extraneous_files, args.ignore_missing_files)
    reference = load_reference(log, args.reference_file)
    reference_activities = parse_activities(reference, file_index, protocol_class.requires_object_localization, args.ignore_extraneous_files, args.ignore_missing_files)

    log(1, "[Info] Computing alignments ..")
    alignment = protocol.compute_alignment(system_activities, reference_activities)
    log(1, '[Info] Scoring ..')
    results = protocol.compute_results(alignment, args.det_point_resolution)

    mkdir_p(args.output_dir)
    log(1, "[Info] Saving results to directory '{}'".format(args.output_dir))
    audc_by_activity = []
    mean_audc = []
    if not args.disable_plotting:
        export_records(log, results.get("det_point_records", {}), results.get("tfa_det_point_records", {}), args.output_dir, args.no_ppf)
        audc_by_activity, mean_audc = protocol.compute_auc(args.output_dir)

    write_out_scoring_params(args.output_dir, protocol.scoring_parameters)
    write_records_as_csv("{}/alignment.csv".format(args.output_dir), ["activity", "alignment", "ref", "sys", "sys_presenceconf_score", "kernel_similarity", "kernel_components"], results.get("output_alignment_records", []))
    write_records_as_csv("{}/pair_metrics.csv".format(args.output_dir), ["activity", "ref", "sys", "metric_name", "metric_value"], results.get("pair_metrics", []))
    write_records_as_csv("{}/scores_by_activity.csv".format(args.output_dir), ["activity", "metric_name", "metric_value"], results.get("scores_by_activity", []) + audc_by_activity)
    write_records_as_csv("{}/scores_aggregated.csv".format(args.output_dir), [ "metric_name", "metric_value" ], results.get("scores_aggregated", []) + mean_audc)
    write_records_as_csv("{}/scores_by_activity_and_threshold.csv".format(args.output_dir), [ "activity", "score_threshold", "metric_name", "metric_value" ], results.get("scores_by_activity_and_threshold", []))

    if vars(args).get("dump_object_alignment_records", False):
        write_records_as_csv("{}/object_alignment.csv".format(args.output_dir), ["activity", "ref_activity", "sys_activity", "frame", "ref_object_type", "sys_object_type", "mapped_ref_object_type", "mapped_sys_object_type", "alignment", "ref_object", "sys_object", "sys_presenceconf_score", "kernel_similarity", "kernel_components"], results.get("object_frame_alignment_records", []))
Example 3
import logger
import constants


logger = logger.build_logger('deciphering')


def decipher_with_private_keys(cipher_text, multiplicative_inverse, multiplication_of_key_primes):
    """
    :param cipher_text: list of encrypted integer values
    :param multiplicative_inverse: private exponent used for decryption
    :param multiplication_of_key_primes: modulus (the product of the key primes)
    :return: list of deciphered integer values
    """
    deciphered_items = list()
    for cipher_item in cipher_text:
        # modular exponentiation: cipher_item ** multiplicative_inverse mod modulus
        deciphered_item = pow(cipher_item, multiplicative_inverse, multiplication_of_key_primes)
        deciphered_items.append(deciphered_item)

    return deciphered_items
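
# Illustrative sanity check (textbook toy RSA numbers, not from this project):
# with primes p = 61 and q = 53 the modulus is 3233 and the private exponent is
# 2753, so decipher_with_private_keys([2790], 2753, 3233) returns [65].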


def calculate_modular_inverse(alg, n, m):
    """
    :param alg: algorithm type
    :param n: value whose modular inverse is sought
    :param m: modulo
    :return: modular multiplicative inverse computed by the given algorithm
    """
    if alg == constants.algorithm_brute_force:
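
# [Illustrative sketch, not from the original project] A brute-force modular
# inverse simply tries every candidate x until (n * x) % m == 1:
def brute_force_modular_inverse(n, m):
    for x in range(1, m):
        if (n * x) % m == 1:
            return x
    return None  # no inverse exists when gcd(n, m) != 1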
Example 4
            "-c",
            "--config",
            action="store",
            dest="config",
            type="str",
            help="Config file, default %default",
            default="./config.ini")

        options, args = parser.parse_args()

        if not os.path.isfile(options.config):
            raise RuntimeError('Given config file was not found')

        config = Config()
        config.read(options.config)
        logger = build_logger(config.fetch('logs_dir'), logging.DEBUG)

        app = Tracker(config, logger)
        app.track()

    except KeyboardInterrupt:
        if logger is not None:
            logger.info('Tracker stopped by user. PID: {0}'.format(os.getpid()))
        else:
            print('Tracker stopped by user. PID: {0}'.format(os.getpid()))
    except Exception as e:
        if logger is not None:
            logger.critical(str(e), exc_info=True)
        else:
            print(traceback.format_exc())
Example 5
import constants
import logger

logger = logger.build_logger('knapsack_solver')


def solve_knapsack(alg, private_key_vector, deciphered_item, best_response):
    """
    :param alg: algorithm type
    :param private_key_vector: super-increasing private key sequence
    :param deciphered_item: target sum to decompose over the key vector
    :param best_response: holder for the best solution found so far
    :return: knapsack solution by algorithm
    """
    if alg == constants.algorithm_back_tracking:
        return back_tracking_solution(private_key_vector, deciphered_item,
                                      best_response)
    else:
        return ""


def back_tracking_solution(private_key_vector, deciphered_item, best_response):
    """
    :param private_key_vector: super-increasing private key sequence
    :param deciphered_item: target sum to decompose
    :param best_response: holder for the best solution found so far
    :return: knapsack solution by recursive algorithm
    """
    reference_index = 0
    editable_deciphered_item = deciphered_item
    if editable_deciphered_item == 0:
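
# [Illustrative sketch, not from the original project] For a super-increasing
# key vector, the subset can also be recovered greedily by scanning from the
# largest element: an element is in the subset iff it fits in the remaining sum.
def greedy_super_increasing_solution(private_key_vector, target_sum):
    bits = []
    for element in reversed(private_key_vector):
        if element <= target_sum:
            bits.append(1)
            target_sum -= element
        else:
            bits.append(0)
    return list(reversed(bits)) if target_sum == 0 else None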
Example 6
        # append obj id, last position detection time, extrapolation line parameters (p0, pn, v), intersection point coords and obj ids, spatial position;
        # keep the detection when not_street holds with a valid depth, or when it is farther than 5 m from the camera
        if (not_street and (x_depth != 0 or y_depth != 0) and z_depth != 0) or z_depth > 5:
            object_list.append([object_id, p_time, ([0, 0, 0], [0, 0, 0], [0, 0, 0]), [], (x_center, y_center, x_depth, y_depth, z_depth)])

    return object_list, object_id



'''
    Performs inference on the RGB camera and retrieves spatial location coordinates (x, y, z) relative to the center of the depth map.
'''

if __name__ == "__main__":
    log = build_logger()
    log.debug("Test of log to file")
    if len(sys.argv) > 1:
        nnBlobPath = sys.argv[1]
    
    # Start defining a pipeline
    pipeline = dai.Pipeline()
    
    # Define a source - color camera
    colorCam = pipeline.createColorCamera()
    spatialDetectionNetwork = pipeline.createMobileNetSpatialDetectionNetwork()
    monoLeft = pipeline.createMonoCamera()
    monoRight = pipeline.createMonoCamera()
    stereo = pipeline.createStereoDepth()

    xoutRgb = pipeline.createXLinkOut()
Example 7
import sys
import constants
import liblll
import utility
import ciphering
import deciphering
import attacking
import logger
import time
import os

logger = logger.build_logger("tester")


def main():
    validation_message = utility.validate_initial_parameters()
    if validation_message != "":
        print(validation_message)
        sys.exit()
    print(constants.terminal_delimiter)
    print("\n" + "The application started")
    # the generation of the super-increasing sequence (private key)
    private_key_vector = utility.generate_super_increasing_vector()
    # generation of the modulo
    modulo = utility.determine_modulo_acc_to_random_key_vector(
        private_key_vector)
    multiplicative_to_mask = utility.determine_element_to_mask(modulo)

    print("\n private key: " + str(private_key_vector))
    # store the private key
    write_in_file("keys/private_key/private_key.txt", private_key_vector)
Example 8
# coding: utf-8

import json
import logging
import falcon
from gensim.models import Word2Vec
from config import logpath
from parse import SimilarWords, Similarity
from utils import get_model_path
from logger import build_logger


logger = build_logger(__name__, logpath)
models = {}
VALID_LANGUAGE_MODEL = ('tw', 'cn')


def get_model(lang):
    if lang not in models:
        path = get_model_path(lang)
        models[lang] = Word2Vec.load(path)
    return models[lang]

def make_response(res, data, status):
    res.body = json.dumps(data, indent=2, ensure_ascii=False)
    res.status = status
    res.append_header('Access-Control-Allow-Origin', '*')

def build_log_msg(service, *args):
    lang = args[0]
    params = ', '.join(args[1:])
Example 9
from random import choice
import random
import sys
import string
import re
import math
import configparser
import constants
import logger


logger = logger.build_logger("utility")


def read_property_key(key, structure_type, section, file_name):
    """
    :param key:
    :param structure_type:
    :param section:
    :param file_name:
    :return: read key with desired type of structure from property file
    """
    try:
        config = configparser.RawConfigParser()
        config.optionxform = str
        config.read(file_name)
        if structure_type == constants.structure_type_int:
            return config.getint(section, key)
        elif structure_type == constants.structure_type_float:
            return config.getfloat(section, key)
        elif structure_type == constants.structure_type_boolean:
Example 10
  --actions VAL            [default: sgsoft]
  --dropout VAL            [default: 1]
  --l2reg VAL              [default: 0]
  --episodes VAL           [default: 5000]
  --episodeSteps VAL       [default: 50]
  --agentEta VAL           [default: 0.1]
  --seed SEED              [default: 1]
  --experts VAL            [default: uni,lp]
  --globalEval VAL         [default: tderror]
  
"""

if __name__ == '__main__':

    args = docopt(help)
    log_dir = build_logger(args)
    loggerTB = Logger(dir=log_dir, format_strs=['TB'])
    loggerStdoutJSON = Logger(dir=log_dir, format_strs=['json', 'stdout'])

    env, wrapper = make(args['--env'], args)
    env.set_objects()

    # model = Predictor(wrapper, layers=np.array([int(l) for l in args['--layers'].split(',')]),
    #                   dropout=float(args['--dropout']), l2reg=float(args['--l2reg']))
    agent = Agent(args, env, wrapper, [loggerTB, loggerStdoutJSON])

    agent.model = Controller(
        agent,
        nstep=1,
        _gamma=0.99,
        _lambda=0,
Example 11
    def get_logger(self):
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        log_path = '{}_train_{}.log'.format(self.save_prefix, timestamp)
        # log_path = os.path.expanduser(log_file)
        return build_logger(log_path)
Example 12

if __name__ == '__main__':

    parser = argparse.ArgumentParser(
        description='Training GlamPoints detector')
    parser.add_argument('--path_ymlfile',
                        type=str,
                        default='configs/glampoints_training.yml',
                        help='Path to yaml file.')

    opt = parser.parse_args()

    with open(opt.path_ymlfile, 'r') as ymlfile:
        cfg = yaml.safe_load(ymlfile)

    _device = settings.initialize_cuda_and_logging(cfg)

    train_loader, val_loader = make_data_loader(cfg)

    model = build_model(cfg)
    model.to(_device)

    optimizer = build_optimizer(cfg, model)

    loss_func = build_loss(cfg)

    logger, tb_logger = build_logger(cfg)

    do_train(cfg, model, train_loader, val_loader, optimizer, loss_func,
             logger, tb_logger, _device)
Example 13
import sys
import constants
import utility
import ciphering
import deciphering
import logger
import time

"""
For test purposes. You can also use this file as the main file to check the whole application.
"""
logger = logger.build_logger("tester")


def main():
    validation_message = utility.validate_initial_parameters()
    if validation_message != "":
        print(validation_message)
        sys.exit()

    print(constants.terminal_delimiter)
    print("\n" + constants.foreground_colorant_yellow + "The application started" + constants.attribute_default)
    private_key_primes_holder = list()
    generated_key_found = False
    t1 = time.process_time()
    print("\nPublic keys are generating...")
    while not generated_key_found:
        private_key_primes_holder.append(utility.generate_large_prime(utility.private_key_primes_bit_length))
        private_key_primes_holder.append(utility.generate_large_prime(utility.private_key_primes_bit_length))
        if private_key_primes_holder[0] == private_key_primes_holder[1]:
            private_key_primes_holder = list()