Example No. 1
def analyze_song(fname_song):
    """
    Helper function.
    """

    path_work = os.path.dirname(fname_song)
    path_analysis = os.path.join(path_work, 'Audio Analysis')
    if not os.path.isdir(path_analysis):
        os.mkdir(path_analysis)

    b, e = os.path.splitext(os.path.basename(fname_song))
    fname_beats = b + '.beats.npz'
    fname_segments = b + '.segments.npz'

    fa = os.path.join(path_analysis, fname_beats)
    fb = os.path.join(path_analysis, fname_segments)
    if os.path.isfile(fa) and os.path.isfile(fb):
        print('Loading analysis')
        beats, meta = data_io.read(fa)
        segments, meta = data_io.read(fb)
    else:
        print('Analyze song')
        analysis = echo_nest_analysis(fname_song)

        print('Caching analysis results')
        beats, segments = parse_analysis(analysis)

        beats = np.asarray(beats)
        segments = np.asarray(segments)

        data_io.write(fa, beats)
        data_io.write(fb, segments)

    # Normalize levels.
    v = segments[:, 2]

    v0, v1 = np.percentile(v, [10.0, 90.0])

    b0, b1 = 0.1, 1.4

    g = (b1 - b0) / (v1 - v0)

    v -= np.median(v)

    v *= g
    v += 1.

    #v = v / np.median(v)
    #lo, hi = np.percentile(v, [20., 80.])
    #v = (v - lo) / (hi - lo)
    #v = np.clip(v, 0, 1)

    segments[:, 2] = v

    # Done.
    return beats, segments
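A hypothetical use of the helper above; the song path is a placeholder, and the analysis is fetched and cached automatically on the first call:

beats, segments = analyze_song('/music/example_song.mp3')

# Column 2 of segments now holds loudness levels rescaled around 1.0
# (the 10th-90th percentile span is mapped onto the 0.1-1.4 gain span).
print(beats.shape, segments.shape)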
Example No. 2
def load_reference_tiles(path_base=None):
    """
    Load reference tile images.
    """

    ix_name_label = 10  # length of the 'tile_grid_' / 'tile_rack_' filename prefix
    pattern_grid_tiles = 'tile_grid_*.png'
    pattern_rack_tiles = 'tile_rack_*.png'

    if path_base is None:
        path_base = os.path.dirname(os.path.abspath(__file__))
    
    folder_tiles = os.path.join('data', 'tiles')
    path_tiles = os.path.join(path_base, folder_tiles)

    # Load reference grid tiles and rack tiles.
    p = os.path.join(path_tiles, pattern_grid_tiles)
    files_grid = glob.glob(p)

    p = os.path.join(path_tiles, pattern_rack_tiles)
    files_rack = glob.glob(p)

    info_reference_grid = {}
    for f in files_grid:
        # Load tile from file.
        tile, meta = io.read(f)

        # Extract label.
        name, ext = os.path.splitext(os.path.basename(f))
        label = name[ix_name_label:]

        # Store.
        info_reference_grid[label] = tile

    info_reference_rack = {}
    for f in files_rack:
        # Load tile from file.
        tile, meta = io.read(f)

        # Extract label.
        name, ext = os.path.splitext(os.path.basename(f))
        label = name[ix_name_label:]

        # Store.
        info_reference_rack[label] = tile

    # Done.
    return info_reference_grid, info_reference_rack
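A hypothetical usage; the function resolves data/tiles relative to its own module, so no argument is needed in the default layout:

info_grid, info_rack = load_reference_tiles()

print('grid tile labels:', sorted(info_grid))
print('rack tile labels:', sorted(info_rack))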
Example No. 3
    def test_encode_uint16(self):
        data, meta = io.read(self.fname16)

        data_comp = jls.encode(data)

        msg = 'oops size={:d}'.format(data_comp.size)
        self.assertTrue(data_comp.size < 2740000, msg)
Example No. 4
    def test_encode_uint16(self):
        data, meta = io.read(self.fname16)

        data_comp = jls.encode(data)

        msg = 'oops size={:d}'.format(data_comp.size)
        self.assertTrue(data_comp.size < 2740000, msg)
Example No. 5
    def test_encode_band_resid(self):
        data, meta = io.read(self.fname_resid)

        data = data.squeeze()
        data_comp = jls.encode(data)

        msg = 'oops size={:d}'.format(data_comp.size)
        self.assertTrue(data_comp.size < 24000, msg)
Example No. 6
    def test_encode_band_resid(self):
        data, meta = io.read(self.fname_resid)

        data = data.squeeze()
        data_comp = jls.encode(data)

        msg = 'oops size={:d}'.format(data_comp.size)
        self.assertTrue(data_comp.size < 24000, msg)
Example No. 7
    def test_encode_to_file(self):
        data, meta = io.read(self.fname)

        fname_temp = os.path.join(self.path_module, 'data_temp.jls')
        jls.write(fname_temp, data)

        file_size = os.path.getsize(fname_temp)
        msg = 'oops file_size={:d}'.format(file_size)
        self.assertTrue(file_size < 2090000, msg)
Example No. 8
    def test_encode_to_file(self):
        data, meta = io.read(self.fname)

        fname_temp = os.path.join(self.path_module, 'data_temp.jls')
        jls.write(fname_temp, data)

        file_size = os.path.getsize(fname_temp)
        msg = 'oops file_size={:d}'.format(file_size)
        self.assertTrue(file_size < 2090000, msg)
Example No. 9
    def test_encode_decode_compare_uint8(self):
        data, meta = io.read(self.fname)

        # Compress, decompress.
        data_comp = jls.encode(data)

        data_image = jls.decode(data_comp)

        diff = np.sum((data.squeeze().astype(int) - data_image.astype(int))**2)
        self.assertTrue(diff == 0)
Example No. 10
    def test_encode_decode_compare_uint16(self):
        data, meta = io.read(self.fname16)

        # Compress, decompress.
        data_comp = jls.encode(data)

        data_image = jls.decode(data_comp)

        diff = np.sum(
            (data.squeeze().astype(int) - data_image.astype(int))**2)
        self.assertTrue(diff == 0)
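A hedged round trip combining the calls exercised by the tests above; 'image.png' is a placeholder, and io.read is assumed to return an (array, metadata) pair as in the other examples:

data, meta = io.read('image.png')

data_comp = jls.encode(data)
header = jls.CharLS._CharLS.read_header(data_comp)
print('{} samples -> {} compressed bytes'.format(data.size, data_comp.size))
print('bits per sample:', header['bitspersample'])

data_back = jls.decode(data_comp)
assert (data.squeeze() == data_back).all()  # default JPEG-LS mode is lossless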
Example No. 11
    def test_read_header(self):
        data, meta = io.read(self.fname)
        data_comp = jls.encode(data)

        header = jls.CharLS._CharLS.read_header(data_comp)

        self.assertTrue(header['width'] == 2592)
        self.assertTrue(header['height'] == 1944)
        self.assertTrue(header['bitspersample'] == 8)
        self.assertTrue(header['bytesperline'] == 2592)
        self.assertTrue(header['components'] == 1)
        self.assertTrue(header['allowedlossyerror'] == 0)
        self.assertTrue(header['ilv'] == 0)
Example No. 12
    def test_read_header(self):
        data, meta = io.read(self.fname)
        data_comp = jls.encode(data)

        header = jls.CharLS._CharLS.read_header(data_comp)

        self.assertTrue(header['width'] == 2592)
        self.assertTrue(header['height'] == 1944)
        self.assertTrue(header['bitspersample'] == 8)
        self.assertTrue(header['bytesperline'] == 2592)
        self.assertTrue(header['components'] == 1)
        self.assertTrue(header['allowedlossyerror'] == 0)
        self.assertTrue(header['ilv'] == 0)
Example No. 13
def testNN2(runs=1,
            width=3,
            data="iris.txt",
            iters=20000,
            std=True,
            trainPct=0.666):
    if data == 'gauss':
        X, y = multimodalData(numModes=4, numPerMode=30)
        # X, y = sklearn.datasets.make_classification()
        XY = np.asarray(np.hstack([X, y.reshape((X.shape[0], 1))]))
    else:
        XY = data_io.read(data)
    nclass = len(set(XY[:, -1]))  # number of classes

    # y has nclass classes (0, ..., nclass-1)
    def unary(yi):
        return [(1 if i == yi else 0) for i in range(nclass)]

    # build a network
    u = width
    nn = modules.Sequential([
        modules.Linear(XY.shape[1] - 1, u),
        modules.Tanh(),
        modules.Linear(u, u),
        modules.Tanh(),
        modules.Linear(u, nclass),
        modules.SoftMax()
    ])
    results = {False: [], True: []}
    for run in range(runs):
        Xtrain, ytrain, Xtest, ytest = splitByClass(XY, trainPct)
        # Map into n softmax outputs
        Ytrain = np.array([unary(yi) for yi in ytrain])
        for rms in (False, True):
            # train the network.
            nn.clean()
            nn.train2(np.asarray(Xtrain),
                      np.asarray(Ytrain),
                      batchsize=1,
                      iters=iters,
                      lrate_decay_step=1000,
                      rms=rms,
                      momentum=(0.9 if rms else None))
            errors = predictionErrors(nn, Xtest, ytest)
            accuracy = 1.0 - (float(errors) / Xtest.shape[0])
            print('RMS', rms, 'Prediction accuracy', accuracy)
            results[rms].append(accuracy)
    print('Results', results)
    print('Average accuracy', 'rms=False', sum(results[False]) / runs,
          'rms=True', sum(results[True]) / runs)
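A hypothetical invocation of the function above; the values are illustrative, and the 'gauss' branch is chosen so no data file is required:

testNN2(runs=3, width=5, data='gauss', iters=5000)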
Example No. 14
def build_config():
    """
    This is a helper function to assemble required config data.
    """
    fname = 'config_data.yml'

    # parser is here to play nice with Google's stuff using the flags variable.
    parser = argparse.ArgumentParser(description="authorize",
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     parents=[oauth2client.tools.argparser])
    flags = parser.parse_args()

    info, meta = data_io.read(fname)

    return info, flags
Example No. 15
def build_config():
    """
    This is a helper function to assemble required config data.
    """
    fname = 'config_data.yml'

    # parser is here to play nice with Google's stuff using the flags variable.
    parser = argparse.ArgumentParser(
        description="authorize",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        parents=[oauth2client.tools.argparser])
    flags = parser.parse_args()

    info, meta = data_io.read(fname)

    return info, flags
Example No. 16
def run_test():

    small_test()
    path = os.path.dirname(__file__) + r'/shangwubu/small_result.csv'

    df = read(path)
    df = news_df(df)

    lsi = plsa_model(df)

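    # Sample query: a Chinese-language news item (a MOFCOM anti-dumping
    # ruling on halogenated butyl rubber), kept verbatim since the topic
    # model operates on Chinese text.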
    new_news = u'2018年4月19日,商务部发布2018年第39号公告,公布对原产于美国、欧盟和新加坡的进口卤化丁基橡胶(也称卤代丁基橡胶)反倾销调查的初裁裁定。商务部初步裁定原产于美国、欧盟和新加坡的进口卤化丁基橡胶存在倾销,国内卤化丁基橡胶产业受到了实质损害,且倾销与实质损害之间存在因果关系,并决定对原产于美国、欧盟和新加坡的进口卤化丁基橡胶产品实施保证金形式的临时反倾销措施。根据裁定,自2018年4月20日起,进口经营者在进口原产于美国、欧盟和新加坡的卤化丁基橡胶时,应依据裁定所确定的各公司倾销幅度(26.0%-66.5%)向中华人民共和国海关提供相应的保证金。应国内卤化丁基橡胶产业申请,商务部于2017年8月30日发布公告,决定对原产于美国、欧盟和新加坡的进口卤化丁基橡胶进行反倾销立案调查。该产品归在《中华人民共和国进出口税则》:40023910和40023990税号项下。'
    lst = new_news_topic(lsi, new_news)
    print(lst)

    counts = futures_count(lst)
    print(counts)
Example No. 17
    def __init__(self, fname_definition):
        """
        Create a new Bag instance.
        Definition file includes letter frequency and points.
        """

        # Load definition.
        definition = io.read(fname_definition)

        self.letters_inside = collections.Counter()
        self.letters_removed = collections.Counter()
        self.letter_points = {}

        for L in alphabet:
            self.letters_inside[L] = definition["frequency"][L]
            self.letters_removed[L] = 0
            # self.letter_points[L] = definition['points'][L]

        self._count_total = sum(self.letters_inside.values())
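The definition file itself is not shown; judging from the keys accessed above, io.read presumably yields a mapping of this shape (letters abbreviated, values hypothetical):

definition = {
    'frequency': {'A': 9, 'B': 2, 'Z': 1},   # tile count per letter in a full bag
    'points':    {'A': 1, 'B': 3, 'Z': 10},  # score value per letter
}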
Example No. 18
def testNN2(runs=1, width=3, data="iris.txt",
            iters=20000, std=True, trainPct = 0.666):
    if data == 'gauss':
        X, y = multimodalData(numModes = 4, numPerMode=30)
        # X, y = sklearn.datasets.make_classification()
        XY = np.asarray(np.hstack([X,y.reshape((X.shape[0],1))]))
    elif data == 'digits':
        digits = sklearn.datasets.load_digits()
        X = digits.data
        y = digits.target
        XY = np.asarray(np.hstack([X,y.reshape((X.shape[0],1))]))
    else:
        XY = data_io.read(data)
    nclass = len(set(XY[:, -1]))    # number of classes
    # y has nclass classes (0, ..., nclass-1)
    def unary(yi): return [(1 if i == yi else 0) for i in range(nclass)]
    # build a network
    u = width
    nn = modules.Sequential([modules.Linear(XY.shape[1]-1,u),
                             modules.Tanh(),
                             modules.Linear(u,u),
                             modules.Tanh(),
                             modules.Linear(u,nclass),
                             modules.SoftMax()])
    results = {False: [], True: []}
    for run in range(runs):
        Xtrain, ytrain, Xtest, ytest = splitByClass(XY, trainPct)
        # Map into n softmax outputs
        Ytrain = np.array([unary(yi) for yi in ytrain])
        for rms in (False, True):
            # train the network.
            nn.clean()
            nn.train2(np.asarray(Xtrain),np.asarray(Ytrain),
                      batchsize = 1, iters = iters,
                      lrate_decay_step = 1000,
                      rms=rms,
                      momentum=(0.9 if rms else None))
            errors = predictionErrors(nn, Xtest, ytest)
            accuracy = 1.0 - (float(errors)/Xtest.shape[0])
            print('RMS', rms, 'Prediction accuracy', accuracy)
            results[rms].append(accuracy)
    print('Results', results)
    print('Average accuracy', 'rms=False', sum(results[False]) / runs,
          'rms=True', sum(results[True]) / runs)
Example No. 19
import importlib.util as imp
import numpy
import numpy as np
if imp.find_spec("cupy"):  # use cupy for GPU support if available
    import cupy
    import cupy as np
na = np.newaxis

import model_io
import data_io
import render

#load a neural network, as well as the MNIST test data and some labels
nn = model_io.read(
    '../models/MNIST/long-tanh.nn')  # 99.16% prediction accuracy
nn.drop_softmax_output_layer()  # drop softmax output layer for analyses

X = data_io.read('../data/MNIST/test_images.npy')
Y = data_io.read('../data/MNIST/test_labels.npy')

# transfer pixel values from [0 255] to [-1 1] to satisfy the expected input / training paradigm of the model
X = X / 127.5 - 1

# transform numeric class labels to vector indicator for uniformity. assume presence of all classes within the label set
I = Y[:, 0].astype(int)
Y = np.zeros([X.shape[0], np.unique(Y).size])
Y[np.arange(Y.shape[0]), I] = 1

acc = np.mean(np.argmax(nn.forward(X), axis=1) == np.argmax(Y, axis=1))
if np is not numpy:  # np is cupy here
    acc = np.asnumpy(acc)
print('model test accuracy is: {:0.4f}'.format(acc))
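The label conversion above relies on numpy integer fancy indexing to build one-hot rows; a small self-contained illustration of that step:

import numpy as np

labels = np.array([2, 0, 1])          # numeric class labels
onehot = np.zeros((3, 3))
onehot[np.arange(3), labels] = 1      # row i gets a 1 in column labels[i]
# onehot == [[0, 0, 1], [1, 0, 0], [0, 1, 0]]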
Example No. 20
def main():
    """
    This is the entry point for the application.
    """

    #
    # Build and query the parser.
    #
    parser = argparse.ArgumentParser()
    # parser.add_argument('-U', '--upload', default=False, action='store_true',
                        # help='Upload sensor data to my Fusion Table.')
    # parser.add_argument('-R', '--record', default=False, action='store_true',
                        # help='Record data from DHT22 sensors.')
    parser.add_argument('-C', '--config_file', default=None,
                        help='Config file name.')

    # Parse command line input, do the work.
    args = parser.parse_args()

    # Config file.
    if not args.config_file:
        args.config_file = 'config_data.yml'

    f = os.path.join(path_to_module(), args.config_file)
    info_master, meta = io.read(f)

    power_cycle_interval = 15*60  # seconds

    #############################################
    # Do it.
    channels = None
    info_config = None
    try:
        # Get config data from master table.
        print('Fetch master table config data')
        info_config = master_table.get(info_master)

        # Convert some string values to integers.
        pins_data = info_config['pins_data']
        pins_data = pins_data.split(',')
        pins_data = [int(pin) for pin in pins_data]

        info_config['pins_data'] = pins_data

        info_config['pin_ok'] = int(info_config['pin_ok'])
        info_config['pin_err'] = int(info_config['pin_error'])
        info_config['pin_power'] = int(info_config['pin_power'])

        # Initialize stuff.
        print('Initialize sensors')
        channels, queue = initialize_sensors(info_config)

        print('Initialize upload data API')
        service, tableId = initialize_upload(info_config)

        # Start recording data.
        print('Begin recording: %s' % info_config['pins_data'])
        record_data(channels, queue, service, tableId, info_config, power_cycle_interval)

    except KeyboardInterrupt:
        # Stop it all when user hits ctrl-C.
        print()
        print('Main: User stop!')

    except Exception as e:
        print('Main: Something went horribly wrong! Shutting down.')
        print(e)
        print(type(e))

    # Finish.
    print('Stop recording')
    finalize(channels, info_config)

    # Done.
    print('Done.')
Example No. 21
def roar_kar(keep, random=False, train_only=False):

    logdir = 'tf_logs/standard/'

    def get_savedir():

        savedir = logdir.replace('tf_logs', 'KAR' if keep else 'ROAR')

        if not os.path.exists(savedir):

            os.makedirs(savedir)

        return savedir


#     ratio = 0.1

    percentiles = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
    attribution_methods = ['normal', 'LRP', 'proposed_method']

    if not train_only:
        DNN = model_io.read('../models/MNIST/LeNet-5.nn')
        for v in attribution_methods:
            batch_size = 128
            print("{} Step is start".format(v))
            if random:
                print("{} Random Remove".format(v))
                occlude_dataset(DNN=DNN,
                                attribution=v,
                                percentiles=percentiles,
                                random=True,
                                keep=keep,
                                batch_size=batch_size,
                                savedir=get_savedir())
            else:
                print("{} percentile Remove".format(v))
                occlude_dataset(DNN=DNN,
                                attribution=v,
                                percentiles=percentiles,
                                random=False,
                                keep=keep,
                                batch_size=batch_size,
                                savedir=get_savedir())
            print("{} : occlude step is done".format(v))
        print("ress record")
    ress = {k: [] for k in attribution_methods}

    for _ in range(3):

        for v in attribution_methods:

            res = []

            for p in percentiles:

                occdir = get_savedir() + '{}_{}_{}.pickle'.format('{}', v, p)
                occdir_y = get_savedir() + '{}_{}_{}_{}.pickle'.format(
                    '{}', v, p, 'label')

                data_train = unpickle(occdir.format('train'))
                #                 data_test = unpickle(occdir.format('test'))
                Xtrain = np.array(data_train)
                Ytrain = unpickle(occdir_y.format('train'))
                Ytrain = np.array(Ytrain)
                Xtest = data_io.read('../data/MNIST/test_images.npy')
                Ytest = data_io.read('../data/MNIST/test_labels.npy')
                print("check : {}".format(Ytrain.shape))

                Xtest = scale(Xtest)
                Xtest = np.reshape(Xtest, [Xtest.shape[0], 28, 28, 1])
                Xtest = np.pad(Xtest, ((0, 0), (2, 2), (2, 2), (0, 0)),
                               'constant',
                               constant_values=(-1., ))
                Ix = Ytest[:, 0].astype(int)
                Ytest = np.zeros([Xtest.shape[0], np.unique(Ytest).size])
                Ytest[np.arange(Ytest.shape[0]), Ix] = 1
                print(occdir)

                #                 DNN = model_io.read('../models/MNIST/LeNet-5.nn')

                DNN = modules.Sequential([
                    modules.Convolution(filtersize=(5, 5, 1, 10), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(5, 5, 10, 25), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(4, 4, 25, 100), stride=(1, 1)),
                    modules.Rect(),
                    modules.SumPool(pool=(2, 2), stride=(2, 2)),
                    modules.Convolution(filtersize=(1, 1, 100, 10), stride=(1, 1)),
                    modules.Flatten()
                ])
                print("training...")
                DNN.train(X=Xtrain,
                          Y=Ytrain,
                          Xval=Xtest,
                          Yval=Ytest,
                          iters=10**5,
                          lrate=0.0001,
                          # status=2,
                          batchsize=128)
                #                 ypred = DNN.forward(Xtest)

                acc = np.mean(
                    np.argmax(DNN.forward(Xtest), axis=1) == np.argmax(Ytest,
                                                                       axis=1))
                del DNN
                print('metric model test accuracy is: {:0.4f}'.format(acc))

                res.append(acc)
            print("End of {}:training, accuracy...".format(_))

            ress[v].append(res)
    print("metric...")
    res_mean = {k: np.mean(v, axis=0) for k, v in ress.items()}

    print(res_mean)

    return res_mean
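Hypothetical invocations, assuming the model file and occlusion pickles referenced above already exist on disk:

# KAR experiment (keep the most relevant inputs), random baseline removal.
res_kar = roar_kar(keep=True, random=True)

# ROAR experiment (remove the most relevant inputs), attribution-ordered.
res_roar = roar_kar(keep=False)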
Example No. 22
                    np.argmax(DNN.forward(Xtest), axis=1) == np.argmax(Ytest,
                                                                       axis=1))
                del DNN
                print('metric model test accuracy is: {:0.4f}'.format(acc))

                res.append(acc)
            print("End of {}:training, accuracy...".format(_))

            ress[v].append(res)
    print("metric...")
    res_mean = {k: np.mean(v, axis=0) for k, v in ress.items()}

    print(res_mean)

    return res_mean
Xtrain = data_io.read('../data/MNIST/train_images.npy')
Ytrain = data_io.read('../data/MNIST/train_labels.npy')

Xtest = data_io.read('../data/MNIST/test_images.npy')
Ytest = data_io.read('../data/MNIST/test_labels.npy')

Xtrain = scale(Xtrain)
Xtest = scale(Xtest)

Xtrain = np.reshape(Xtrain, [Xtrain.shape[0], 28, 28, 1])
Xtest = np.reshape(Xtest, [Xtest.shape[0], 28, 28, 1])
Xtrain = np.pad(Xtrain, ((0, 0), (2, 2), (2, 2), (0, 0)),
                'constant',
                constant_values=(-1., ))
Xtest = np.pad(Xtest, ((0, 0), (2, 2), (2, 2), (0, 0)),
               'constant',
Example No. 23
from os.path import join
from six import iterkeys
from sys import exit
import configparser

from data_io import is_prop_user, read
from geo import calc_dist, deg2rad, is_prop_latlon

if __name__ == '__main__':
    """Start of program execution."""

    # Read configuration file...
    config = configparser.ConfigParser()
    config.read('config.ini')

    # Read customer records from file...
    cust_records = read(
        join(config['DEFAULT']['DATA_DIR'], config['DEFAULT']['GIST_FILE']))
    # Convert intercom office coordinates from degrees to radians...
    off_lat_deg = config['INPUTS']['OFFICE_LAT']
    if is_prop_latlon(off_lat_deg, True):
        off_lat = deg2rad(float(off_lat_deg))
    else:
        print("Intercom Office latitude is not in a proper float format.")
        print("Exiting application.")
        exit()

    off_lon_deg = config['INPUTS']['OFFICE_LON']
    if is_prop_latlon(off_lon_deg, False):
        off_lon = deg2rad(float(off_lon_deg))
    else:
        print("Intercom Office longitude is not in a proper float format.")
        print("Exiting application.")
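The script expects a config.ini next to it; a hypothetical file with the key names read above (all values are placeholders) could look like:

[DEFAULT]
DATA_DIR = data
GIST_FILE = customers.txt

[INPUTS]
OFFICE_LAT = 53.339428
OFFICE_LON = -6.257664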
Example No. 24
def main():
    """
    This is the entry point for the application.
    """

    #
    # Build and query the parser.
    #
    parser = argparse.ArgumentParser()
    # parser.add_argument('-U', '--upload', default=False, action='store_true',
    # help='Upload sensor data to my Fusion Table.')
    # parser.add_argument('-R', '--record', default=False, action='store_true',
    # help='Record data from DHT22 sensors.')
    parser.add_argument('-C',
                        '--config_file',
                        default=None,
                        help='Config file name.')

    # Parse command line input, do the work.
    args = parser.parse_args()

    # Config file.
    if not args.config_file:
        args.config_file = 'config_data.yml'

    f = os.path.join(path_to_module(), args.config_file)
    info_master, meta = io.read(f)

    power_cycle_interval = 15 * 60  # seconds

    #############################################
    # Do it.
    channels = None
    info_config = None
    try:
        # Get config data from master table.
        print('Fetch master table config data')
        info_config = master_table.get(info_master)

        # Convert some string values to integers.
        pins_data = info_config['pins_data']
        pins_data = pins_data.split(',')
        pins_data = [int(pin) for pin in pins_data]

        info_config['pins_data'] = pins_data

        info_config['pin_ok'] = int(info_config['pin_ok'])
        info_config['pin_err'] = int(info_config['pin_error'])
        info_config['pin_power'] = int(info_config['pin_power'])

        # Initialize stuff.
        print('Initialize sensors')
        channels, queue = initialize_sensors(info_config)

        print('Initialize upload data API')
        service, tableId = initialize_upload(info_config)

        # Start recording data.
        print('Begin recording: %s' % info_config['pins_data'])
        record_data(channels, queue, service, tableId, info_config,
                    power_cycle_interval)

    except KeyboardInterrupt:
        # Stop it all when user hits ctrl-C.
        print()
        print('Main: User stop!')

    except Exception as e:
        print('Main: Something went horribly wrong! Shutting down.')
        print(e)
        print(type(e))

    # Finish.
    print('Stop recording')
    finalize(channels, info_config)

    # Done.
    print('Done.')
Example No. 25
# imports
import model_io
import data_io
import render

import importlib.util as imp
import numpy
import numpy as np
if imp.find_spec("cupy"):  #use cupy for GPU support if available
    import cupy
    import cupy as np
na = np.newaxis
# end of imports

nn = model_io.read('../models/MNIST/LeNet-5.nn')  # read model
X = data_io.read('../data/MNIST/test_images.npy')[
    na, 0, :]  # load first MNIST test image
X = X / 127.5 - 1  # normalized data to range [-1 1]

Ypred = nn.forward(X)  # forward pass through network
R = nn.lrp(Ypred)  # lrp to explain prediction of X

if not np == numpy:  # np=cupy
    X = np.asnumpy(X)
    R = np.asnumpy(R)

# render rgb images and save as image
digit = render.digit_to_rgb(X)
hm = render.hm_to_rgb(R, X)  # render heatmap R, use X as outline
render.save_image([digit, hm], '../2nd_py.png')
Example No. 26
    # # data prep.
    # path = os.path.dirname(os.path.abspath(__file__))
    # fname = 'IMG_20120129_120644.jpg'

    # f = os.path.join(path, fname)
    # data, meta = io.read(f)

    # data = np.mean(data, axis=2)

    # data -= data.min()
    # data /= data.max()

    # data = (data * 255).astype(np.uint8)

    # data_io.write('gray_raw.dat', data)
    # data_io.write('gray_raw.png', data)

    path = os.path.dirname(os.path.abspath(__file__))
    fname = "gray_raw.dat"

    f = os.path.join(path, fname)
    image_gray, meta = data_io.read(f)

    image_gray = image_gray.squeeze()

    buff = encode(image_gray)

    with open("file.jls", "wb") as fo:
        fo.write(buff.tobytes())
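A hedged sanity check on the stream just written, assuming a decode() counterpart to encode() with the same behavior as jls.decode in the earlier examples:

with open("file.jls", "rb") as fi:
    buff_in = np.frombuffer(fi.read(), dtype=np.uint8)

image_back = decode(buff_in)  # hypothetical counterpart to encode()
assert (image_back == image_gray).all()  # JPEG-LS default mode is lossless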
Example No. 27
import tiles

#########
# Setup.
folder_data = 'data'
fname_config = 'config.yml'

#####################################
# Do it.
path_module = os.path.dirname(os.path.abspath(__file__))
path_data = os.path.join(path_module, folder_data)

# Read config file.
f = os.path.join(path_data, fname_config)
info = io.read(f)

#
# Loop over reference images, carve out selected grid and rack tiles.
#
for fname_img, info_img in info['reference']['grid'].items():

    f = os.path.join(path_data, 'reference', fname_img)
    img, meta = io.read(f)

    tiles_grid, tiles_rack = tiles.carve_tiles(img, info)

    print(fname_img)
    
    # Save specified tiles to files.
    for label, ij in info_img.items():
Example No. 28
@maintainer: Sebastian Lapuschkin
@contact: [email protected], [email protected]
@date: 30.11.2016
@version: 1.2+
@copyright: Copyright (c)  2015-2017, Sebastian Lapuschkin, Alexander Binder, Gregoire Montavon, Klaus-Robert Mueller, Wojciech Samek
@license : BSD-2-Clause
'''

import modules
import model_io
import data_io

import numpy as np ; na = np.newaxis

#load the mnist data
Xtrain = data_io.read('../data/MNIST/train_images.npy')
Ytrain = data_io.read('../data/MNIST/train_labels.npy')

Xtest = data_io.read('../data/MNIST/test_images.npy')
Ytest = data_io.read('../data/MNIST/test_labels.npy')

#transfer the pixel values from [0 255] to [-1 1]
Xtrain = Xtrain / 127.5 -1
Xtest = Xtest / 127.5 -1

#reshape the vector representations of the mnist data back to image format. extend the image vertically and horizontally by 4 pixels each.
Xtrain = np.reshape(Xtrain,[Xtrain.shape[0],28,28,1])
Xtrain = np.pad(Xtrain,((0,0),(2,2),(2,2),(0,0)), 'constant', constant_values = (-1.,))

Xtest = np.reshape(Xtest,[Xtest.shape[0],28,28,1])
Xtest = np.pad(Xtest,((0,0),(2,2),(2,2),(0,0)), 'constant', constant_values = (-1.,))
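A quick sanity check on the reshape-and-pad step above: each 28x28 MNIST vector becomes a 32x32 single-channel image with a -1 (background) border:

assert Xtrain.shape[1:] == (32, 32, 1)
assert Xtest.shape[1:] == (32, 32, 1)
assert (Xtrain[:, 0, :, :] == -1.).all()  # padded border row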
Example No. 29
##########################################
# Do it.
path_module = os.path.dirname(os.path.abspath(__file__))

path_data = os.path.join(path_module, folder_data)
path_games = os.path.join(path_data, folder_games)
path_dictionary = os.path.join(path_data, folder_dictionary)

#
# Load data.
#

# Read config file.
f = os.path.join(path_data, fname_config)
info_config = io.read(f)

# Load dictionary into trie.
f = os.path.join(path_dictionary, fname_dictionary)
daggad = trie_manager.load_daggad_dictionary(f)

# Reference tiles.
info_reference_grid, info_reference_rack = tiles.load_reference_tiles()

# Load game image.
f = os.path.join(path_games, fname_game)
img_game, meta = io.read(f)

# Parse game image to game letters.
letters_game, letters_rack = tiles.parse_game_letters(img_game,
                                                      info_reference_grid,
Example No. 30
# Setup.
fname_game = '2012-05-08 20.36.00.png'

fname_config = 'config.yml'
folder_data = 'data'
folder_games = 'games'

#####################################
# Do it.
path_module = os.path.dirname(os.path.abspath(__file__))
path_data = os.path.join(path_module, folder_data)
path_games = os.path.join(path_module, folder_data, folder_games)

# Read config file.
f = os.path.join(path_data, fname_config)
info = io.read(f)

# Load reference tiles.
reference_grid = tile_utilities.load_reference_tiles()

# Load game and parse letters.
f = os.path.join(path_games, fname_game)
img_game, meta = io.read(f)

letters_game = tile_utilities.parse_game_grid(img_game, reference_grid, info)

# Make a board.
b = board.Board()
b.place_starting_letters(letters_game)

print(repr(b))
Example No. 31
'''
An implementation of Differentially Private Layer-wise Relevance Propagation (dpLRP) for MNIST dataset.
Author: Hai Phan, CCS, NJIT.
'''

import matplotlib.pyplot as plt
import numpy as np ; na = np.newaxis
import pickle
import model_io
import data_io
import os
#import render

#load a neural network, as well as the MNIST test data and some labels
nn = model_io.read(os.getcwd() + '/models/MNIST/LeNet-5.txt') # 99.23% prediction accuracy
X = data_io.read(os.getcwd() + '/data/MNIST/train_images.npy')
Y = data_io.read(os.getcwd() + '/data/MNIST/train_labels.npy')

#print(Y);
# transfer pixel values from [0 255] to [-1 1] to satisfy the expected input / training paradigm of the model
X =  X / 127.5 - 1.

#reshape the vector representations in X to match the requirements of the CNN input
X = np.reshape(X,[X.shape[0],28,28,1])
X = np.pad(X,((0,0),(2,2),(2,2),(0,0)), 'constant', constant_values = (-1.,))

# transform numeric class labels to vector indicator for uniformity. assume presence of all classes within the label set
I = Y[:,0].astype(int)
Y = np.zeros([X.shape[0],np.unique(Y).size])
Y[np.arange(Y.shape[0]),I] = 1
print(Y)
Example No. 32
finally, the resulting heatmap is rendered as an image and (over)written out to disk and displayed.
'''


import matplotlib.pyplot as plt
import numpy as np
na = np.newaxis

import model_io 
import data_io 
import render

#load a neural network, as well as the MNIST test data and some labels
nn = model_io.read('../models/MNIST/long-rect.nn')
X = data_io.read('../data/MNIST/test_images.npy')
Y = data_io.read('../data/MNIST/test_labels.npy')

# transfer pixel values from [0 255] to [-1 1] to satisfy the expected input / training paradigm of the model
X =  X / 127.5 - 1

# transform numeric class labels to vector indicator for uniformity. assume presence of all classes within the label set
I = Y[:,0].astype(int)
Y = np.zeros([X.shape[0],np.unique(Y).size])
Y[np.arange(Y.shape[0]),I] = 1

#permute data order for demonstration. or not. your choice.
I = np.arange(X.shape[0])
#I = np.random.permutation(I) 

#predict and perform LRP for the 10 first samples
Example No. 33
def echo_nest_analysis(fname_song, fname_config=None):
    """
    Get track details via Echo Nest API.
    """
    if not fname_config:
        fname_config = 'audio_config.yml'

    fname_config = os.path.abspath(fname_config)
    path_work = os.path.dirname(fname_config)

    path_analysis = os.path.join(path_work, 'Audio Analysis')
    if not os.path.isdir(path_analysis):
        os.mkdir(path_analysis)

    fname_song = os.path.basename(fname_song)
    b, e = os.path.splitext(fname_song)
    #if not (e == '.mp3' or e == '.m4a'):
    #    fname_song = b + '.mp3'

    fname_analysis = b + '.full.yml'

    f = os.path.join(path_analysis, fname_analysis)
    if os.path.isfile(f):
        print('Load existing analysis')
        analysis, meta = data_io.read(f)
    else:
        # Read config.
        info, meta = data_io.read(fname_config)

        # Ensure the 'songs' section exists (check membership before truthiness).
        if 'songs' not in info or not info['songs']:
            info['songs'] = {}

        # Configure Echo Nest API key.
        pyechonest.config.ECHO_NEST_API_KEY = info['api_key']

        # Load track details.
        if fname_song not in info['songs']:
            print('Upload new song to Echo Nest: %s' % fname_song)

            info['songs'][fname_song] = {}

            track = pyechonest.track.track_from_filename(fname_song)

            info['songs'][fname_song]['id'] = track.id
            info['songs'][fname_song]['analysis_url'] = track.analysis_url

            # Save updated config.
            data_io.write(fname_config, info)

        else:
            print('Download song analysis from Echo Nest: %s' % fname_song)
            track = pyechonest.track.track_from_id(info['songs'][fname_song]['id'])

        print('Retrieve full analysis from url')
        r = requests.get(track.analysis_url)
        analysis = r.json()

        print('Save analysis to cache folder')
        f = os.path.join(path_analysis, fname_analysis)
        data_io.write(f, analysis)

    # Done.
    return analysis
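A hypothetical end-to-end use mirroring Example No. 1: fetch (or load the cached) analysis, then split it with the parse_analysis helper assumed to be defined alongside:

analysis = echo_nest_analysis('example_song.mp3')
beats, segments = parse_analysis(analysis)
print('beats:', len(beats), 'segments:', len(segments))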
Example No. 34
\begin{Verbatim}[frame=single, fontsize=\small]
# imports
import model_io
import data_io
import render

import numpy as np
na = np.newaxis
# end of imports

# read model and first MNIST test image
nn = model_io.read(<model_path>) 
X = data_io.read(<data_path>)[na,0,:]
# normalized data to range [-1 1]
X = X / 127.5 - 1

# forward pass through network
Ypred = nn.forward(X)
# lrp to explain prediction of X
R = nn.lrp(Ypred)

# render rgb images and save as image
digit = render.digit_to_rgb(X)
# render heatmap R, use X as outline
hm = render.hm_to_rgb(R,X) 
render.save_image([digit,hm],<i_path>)
\end{Verbatim}
Example No. 35
@author: Sebastian Bach
@maintainer: Sebastian Bach
@contact: [email protected]
@date: 21.09.2015
@version: 1.0
@copyright: Copyright (c)  2015, Sebastian Bach, Alexander Binder, Gregoire Montavon, Klaus-Robert Mueller
@license : BSD-2-Clause
'''


# imports
import model_io
import data_io
import render

import numpy as np
na = np.newaxis
# end of imports

nn = model_io.read('../models/MNIST/long-rect.nn') # read model
X = data_io.read('../data/MNIST/test_images.npy')[na,0,:] # load first MNIST test image
X = X / 127.5 - 1 # normalized data to range [-1 1]

Ypred = nn.forward(X) # forward pass through network
R = nn.lrp(Ypred) # lrp to explain prediction of X

# render rgb images and save as image
digit = render.digit_to_rgb(X)
hm = render.hm_to_rgb(R, X) # render heatmap R, use X as outline
render.save_image([digit, hm], './hm_py.png')