Example 1
def run_cppcheck(throw=True):
    """ Runs cppcheck. """
    basedir = util.get_base_path()
    cmd = [
        'cppcheck',
        '--enable=all',
#        '--error-exitcode=2', # Uncomment when cppcheck issues are fixed
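        # Undefine these macros for the analysis.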
        '-UGD_LOG_LEVEL', '-UGD_DISABLE_LOG_COLORS',
        '--suppressions-list=%s' % (path.join('tools', 'cppcheck-suppr-list')),
        '--includes-file=%s' % (path.join('tools', 'cppcheck-incl-list')),
        'src',
        'examples',
    ]

    print('')
    print("Running cppcheck...")

    try:
        chdir(basedir)
        return util.call(cmd, throw)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise OSError("Cppcheck is not installed.")
        else:
            raise
Example 2
def build_dependencies(backends, force=False):
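    """ Builds the third-party dependencies for the requested backends. """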
    source_path = os.path.join(util.get_base_path(), "thirdparty")
    build_path = os.path.join(source_path, "build")

    print('Building dependencies.')

    if 'all' in backends:
        backends = ['gles2', 'vulkan', 'software']

    if force:
        needs_build = backends
    else:
        needs_build = []
        for backend in backends:
            stamp_path = os.path.join(build_path, "stamps", "%s.stamp" % backend)
            if not check_stamp(stamp_path):
                needs_build.append(backend)

    if not needs_build:
        print('Nothing to do.')
        return

    build.configure(source_path, build_path)
    build.build_targets(build_path, needs_build)
    print('Done.')
Example 3
    def get(self):
        """Build a set of template strings as Javascript."""
        templates = {}
        base = os.path.join(util.get_base_path(), 'templates/js')
        files = os.listdir(base)
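        # Key each template by its filename: extension stripped, dashes turned into underscores, uppercased.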
        for i in files:
            if os.path.isfile(os.path.join(base, i)):
                templates[
                    os.path.splitext(i)[0].replace('-', '_').upper()
                ] = open(os.path.join(base, i)).read()

        template_type = self.get_argument('type', '')
        if template_type and os.path.isdir(os.path.join(base, template_type)):
            files = os.listdir(os.path.join(base, template_type))
            for i in files:
                if os.path.isfile(os.path.join(base, template_type, i)):
                    templates['.'.join([
                        template_type,
                        os.path.splitext(i)[0].replace('-', '_').upper()
                    ])] = open(os.path.join(base, template_type, i)).read()

        self.set_header('Content-Type', 'text/javascript')
        self.write(self.render_string(
            'templates.js',
            template_type = template_type,
            templates = templates
        ).replace('\n', ''))
Example 4
def configure(arguments):
    """ Configures the build based on the supplied arguments. """
    build_path = util.get_build_path(arguments)

    try:
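        # makedirs raises OSError if the directory already exists; the error is ignored.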
        makedirs(build_path)
    except OSError:
        pass

    util.call(['cmake', '-B' + build_path, '-H' + util.get_base_path()] + create_options(arguments))
Example 5
def configure(arguments):
    """ Configures the build based on the supplied arguments. """
    build_path = util.get_build_path(arguments)

    try:
        makedirs(build_path)
    except OSError:
        pass

    util.call(['cmake', '-B' + build_path, '-H' + util.get_base_path()] +
              create_options(arguments))
Example 6
def main():
    arguments = get_args()

    try:
        dependencies.build_dependencies([arguments.backend],
                                        arguments.rebuild_deps)
        build_path = util.get_build_path(arguments)
        configure(util.get_base_path(), build_path, arguments)
        build_targets(build_path, arguments.targets)
    except util.CommandError as e:
        print("Build failed: %s" % e)
        sys.exit(e.code)

    print_success()
Example 7
def run_unittest(throw=True):
    """ Runs unit-tests. """
    args = lambda: None
    args.build_dir = "build/unittest"
    args.build_type = "debug"
    args.backend = "software"
    args.targets = ["unittest"]
    build_path = util.get_build_path(args)

    print('')
    print("Building unit-tests...")
    dependencies.build_dependencies([args.backend])
    build.configure(util.get_base_path(), build_path, args)
    build.build_targets(build_path, args.targets)

    print('')
    print("Running unit-tests...")
    return util.call([path.join(build_path, 'bin', 'unittest')], throw)
Example 8
        'Check dependent libraries pysftp, paramiko and pycrypto.'
    ]))

SETTINGS = {
    'autoescape': None,
    'cookie_secret': util.get_configuration_value(
        'cookieSecret',
        'aW5zZWN1cmVTZWNyZXQ='
    ),
    'dropbox_consumer_key': util.get_configuration_value('dropboxKey', ''),
    'dropbox_consumer_secret': util.get_configuration_value(
        'dropboxSecret',
        ''
    ),
    'login_url': '/',
    'static_path': os.path.join(util.get_base_path(), 'static'),
    'template_path': os.path.join(util.get_base_path(), 'templates'),
}

URLS = [
    (r'/', handlers.IndexHandler),
    (r'/ws/?', handlers.EditorWebSocketHandler),
    (r'/templates.js', handlers.TemplateHandler)
]

if util.get_configuration_value('dropboxKey', '') != '':
    if util.get_configuration_value('dropboxSecret', '') != '':
        URLS = URLS + [
            (r'/auth/dropbox/?', handlers.auth.dropbox.DropboxHandler),
            (r'/dropbox/create-folder/?', handlers.dropbox.CreateFolderHandler),
            (r'/dropbox/download/?', handlers.dropbox.DownloadHandler),
Example 9
def train_classifier(feature_name, train_batch_num, base_npz_dir,
                     test_batches):
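    """ Trains a classifier batch by batch and returns the mean test accuracy. """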
    test_acc = []
    base_path = util.get_base_path()
    categories = util.get_categories()
    train_batches = range(0, train_batch_num)
    #test_batches = range(train_batch_num,train_batch_num+1) JC edit
    set_name = 'setb50k'
    label_set_name = set_name
    subset = ''  #'_pca1'
    classifier_paramstring = ''
    if do_norm: classifier_paramstring += 'N'
    if props['C'] != 0:
        classifier_paramstring += 'C%d' % props['C']
    out_fn = os.path.join(
        base_npz_dir, feature_name, '%s%s_%s%s_%d-%d.pickle' %
        (classifier_type, classifier_paramstring, set_name, subset,
         train_batches[0], train_batches[-1]))
    if do_norm:
        out_fn_norm = os.path.join(
            base_npz_dir, feature_name,
            'norm_%s%s_%d.pickle' % (set_name, subset, train_batches[0]))
    print('Training %s...' % out_fn)

    if classifier_type == 'sgd_svm':
        is_incremental = True
    else:
        is_incremental = False

    norm = dict()
    clf = None

    for i_batch, train_batch in enumerate(list(train_batches) + list(test_batches)):
        fn = os.path.join(base_npz_dir, feature_name,
                          '%s_%05d%s.npz' % (set_name, train_batch, subset))
        print('Processing feature file %s.' % fn)
        print(fn)
        with np.load(fn) as file_contents:

            data = file_contents['data']

        true_labels, _ = util.load_labels(label_set_name, train_batch)

        if do_norm:
            if i_batch == 0:
                # Initial batch to determine mean and variance for normalization
                norm['mean'] = np.expand_dims(data.mean(axis=0), 0)
                norm['std'] = np.expand_dims(data.std(axis=0), 0)
                norm['std'] = np.maximum(norm['std'], 0.01)
                with open(out_fn_norm, 'wb') as fid:
                    pickle.dump(norm, fid)

            data -= norm['mean']
            data /= norm['std']
            print('Data after normalization: Mean %f, Std %f' % (data.mean(
                axis=0).mean(axis=0), data.std(axis=0).mean(axis=0)))

        if is_incremental:
            # Incremental: Do training every training iteration
            # Do testing not just on test but also during training before feeding the new training data
            do_train = (i_batch < len(train_batches))
            do_test = (i_batch > 0)
            use_data = data
            use_true_labels = true_labels
        else:
            # Non-incremental: Train once when all training batches have been collected
            do_train = (i_batch == len(train_batches) - 1)
            do_test = (i_batch >= len(train_batches))
            # data collection phase
            if not do_test:
                if i_batch == 0:
                    data_all = data
                    all_true_labels = true_labels
                else:
                    data_all = np.concatenate((data_all, data), axis=0)
                    all_true_labels = np.concatenate(
                        (all_true_labels, true_labels), axis=0)
            use_data = data_all
            use_true_labels = all_true_labels

        print('  use data %s.' % str(use_data.shape))
        print('  use labels %s' % str(use_true_labels.shape))

        if do_test:
            # After some batch training has been done, predict performance
            pred_labels = clf.predict(data)
            acc = float(sum(pred_labels == true_labels)) / true_labels.size
            test_acc.append(acc)
            print('  Batch accuracy: %.1f%%' % (acc * 100))

        if do_train:
            if classifier_type == 'sgd_svm':
                clf = train_sgd(clf, 'hinge', use_data, use_true_labels)
            elif classifier_type == 'svm':
                clf = train_svm(clf, use_data, use_true_labels, props)
            pred_labels = clf.predict(use_data)
            acc = float(
                sum(pred_labels == use_true_labels)) / use_true_labels.size
            print('  Train accuracy: %.1f%%' % (acc * 100))
            # Dump classifier data at every iteration
            with open(out_fn, 'wb') as fid:
                pickle.dump(clf, fid)
    return np.mean(test_acc)
Example 10

import fnmatch
import os
import re
import subprocess
import util

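# Paths for the glslang checkout and build used to compile the Vulkan GLSL shaders to SPIR-V.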
glslang_path = os.path.join(util.get_base_path(), 'thirdparty', 'glslang')
glslang_path_build = os.path.join(glslang_path, 'build', 'vulkan')
glslang_git_url = 'https://github.com/KhronosGroup/glslang.git'

shader_source_directory = os.path.join(util.get_base_path(), 'src', 'engines',
                                       'vulkan', 'shaders')

c_file_name = 'gepard-vulkan-spirv-binaries'
generated_warning = '/* This file was auto-generated by {0}! */\n\n'.format(
    __file__)

header_begin = '''
#ifndef {0}_H
#define {0}_H

#include <stdint.h>