import os
from os.path import basename, splitext
from os.path import join as dir_join

# Project-specific helpers (dir_path, read_results, extract_scene_data,
# save_json, create_approval_file, create_reject_note_files,
# extract_scene_stats) are assumed to be defined elsewhere in this repo.


def process_amt_results(filename, output_dir, overwrite, gen_apprv_cmnt):

    input_file = basename(filename)
    input_file_base, input_file_ext = splitext(input_file)

    output_name = '{0}.{1}'.format(input_file_base, 'json')
    output_dir = dir_path(output_dir)
    output_file = dir_join(output_dir, output_name)

    results = read_results(filename)

    scene_data = extract_scene_data(results)
    save_json(scene_data, output_file, saveNonMin=False)

    assignment_ids = {scene['assignmentId'] for scene in scene_data}

    create_approval_file(assignment_ids, filename, gen_apprv_cmnt)
    create_reject_note_files(filename)

    counter = 0
    filename_base, filename_ext = splitext(output_name)
    indv_output_dir = dir_path(dir_join(output_dir, filename_base + '_indv'))
    for scene_data_single in scene_data:
        new_fn = '{0}_{1:02d}{2}'.format(scene_data_single['assignmentId'],
                                         scene_data_single['hitIdx'],
                                         filename_ext)
        indv_output_file = dir_join(indv_output_dir, new_fn)
        # Write unless the file already exists and overwrite was not requested
        if not os.path.isfile(indv_output_file) or overwrite:
            save_json(scene_data_single, indv_output_file, saveNonMin=False)
        counter += 1

    no_scene_data_name = '{0}_{1}{2}'.format(filename_base, 'noSceneData',
                                             filename_ext)
    no_scene_data_fn = dir_join(output_dir, no_scene_data_name)
    for scene_datum in scene_data:
        scene_datum['counts'] = extract_scene_stats(scene_datum['scene'])
        scene_datum['sceneType'] = scene_datum['scene']['sceneType']
        del scene_datum['scene']

    save_json(scene_data, no_scene_data_fn, saveNonMin=True)

    no_scene_data_name = '{0}_{1}{2}'.format(filename_base,
                                             'noSceneNoCountsData',
                                             filename_ext)
    no_scene_data_fn = dir_join(output_dir, no_scene_data_name)
    for scene_datum in scene_data:
        del scene_datum['counts']

    save_json(scene_data, no_scene_data_fn, saveNonMin=True)
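

# Hedged usage sketch (not part of the original module): one way this function
# might be driven from the command line. The flag names and defaults below are
# illustrative assumptions, not the repo's actual CLI.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='Process AMT result files.')
    parser.add_argument('results_file',
                        help='results file downloaded from AMT')
    parser.add_argument('--output-dir', default='output',
                        help='directory for the generated JSON files')
    parser.add_argument('--overwrite', action='store_true',
                        help='overwrite existing per-assignment files')
    parser.add_argument('--gen-apprv-cmnt', action='store_true',
                        help='generate approval comments')
    args = parser.parse_args()

    process_amt_results(args.results_file, args.output_dir,
                        args.overwrite, args.gen_apprv_cmnt)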
from os.path import abspath, dirname
from os.path import join as dir_join
import tensorflow as tf
from configparser import ConfigParser

# File hierarchy
_python_dir      = dirname(abspath(__file__))
_project_dir     = dirname(_python_dir)
_config_dir      = dir_join(_project_dir, 'config')
_log_dir_root    = dir_join(_project_dir, 'log')
_data_dir        = dir_join(_project_dir, 'data')

# Configuration parser
cfg_parser = ConfigParser()
cfg_parser.read(dir_join(_config_dir, 'cfg_model.ini'))

# Private variables
_initializer    = cfg_parser.get(section='train', option='initializer')
_normalization  = cfg_parser.get(section='train', option='normalization')
_batch_size     = cfg_parser.getint(section='train', option='batch_size')
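
# For reference, a minimal sketch of what config/cfg_model.ini is assumed to
# look like for the reads above (the section and option names come from this
# file; the values shown are illustrative, not the project's real settings):
#
#   [train]
#   initializer   = xavier
#   normalization = batch_norm
#   batch_size    = 32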

def safe_div(numerator, denominator, name='safe_div'):
    """Divides two values, returning 0 if the denominator is <= 0.

    Args:
      numerator: A real `Tensor`.
      denominator: A real `Tensor`, with dtype matching `numerator`.
      name: Name for the returned op.

    Returns:
      0 if `denominator` <= 0, else `numerator` / `denominator`.
    """
    # Body reconstructed as a minimal sketch of the docstring's contract:
    # guard the denominator, then select the quotient or zero element-wise.
    positive = tf.greater(denominator, 0)
    safe_denominator = tf.where(positive, denominator, tf.ones_like(denominator))
    return tf.where(positive,
                    tf.math.divide(numerator, safe_denominator),
                    tf.zeros_like(numerator),
                    name=name)
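

# Hedged usage example (illustrative values; assumes TF 2.x eager execution):
# division is element-wise, and zero denominators yield zeros.
if __name__ == '__main__':
    num = tf.constant([1.0, 2.0, 3.0])
    den = tf.constant([2.0, 0.0, 4.0])
    print(safe_div(num, den))  # -> [0.5, 0.0, 0.75]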
# Django settings for {{ project_name }} project.
from os.path import join as dir_join
import sys


PROJECT_ROOT = '{{ project_directory }}'

sys.path.append(dir_join(PROJECT_ROOT, 'apps'))

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'dev.{{ project_name }}.db',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}
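
# Illustrative alternative (not a project default): the same block configured
# for PostgreSQL via psycopg2, as hinted by the ENGINE comment above.
# Names and credentials here are placeholders.
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql_psycopg2',
#         'NAME': '{{ project_name }}',
#         'USER': 'postgres',
#         'PASSWORD': '',
#         'HOST': 'localhost',
#         'PORT': '5432',
#     }
# }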

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name