Example No. 1
import random, sys, os
sys.path.append(".")

import tensorflow as tf
from TFLibraries.Layer import Layers
from TFLibraries.Train import Training
from TFLibraries.Sparse import SparseFiles
from TFLibraries.Embeddings import Embedding
Layer = Layers()

random.seed(20160408)

indices = []


def generate_batch(size, data, labels):
    global indices
    # Refill the index pool once fewer than `size` unused examples remain
    if len(indices) < size:
        indices.extend(range(data.shape[0]))
    # Randomly sample a batch of indices without replacement
    r = random.sample(indices, size)
    # Drop the sampled indices so each example is seen once per pass
    # (a list comprehension keeps `indices` a list under Python 3)
    indices = [a for a in indices if a not in r]
    return data[r], labels[r]


## Read Training/Dev/Test data
os.chdir('/home/ybisk/GroundedLanguage')
print("Running from ", os.getcwd())
maxlength = 80
offset = 3
labelspace = 9
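A note on usage (not part of the original listing): the helper above samples each training example once per pass; a minimal, hypothetical driver loop is sketched below. The array names `train_data`/`train_labels`, the batch size, and the commented-out session call are assumptions for illustration, not code from the repository.

# Illustrative driver for generate_batch; array shapes and names are made up.
import numpy as np

batch_size = 128
train_data = np.zeros((1000, maxlength), dtype=np.int32)   # placeholder data
train_labels = np.zeros((1000,), dtype=np.int32)           # placeholder labels

steps_per_epoch = train_data.shape[0] // batch_size
for epoch in range(5):
    for step in range(steps_per_epoch):
        batch_x, batch_y = generate_batch(batch_size, train_data, train_labels)
        # Feed the batch to the TensorFlow graph here, e.g.:
        # sess.run(train_op, feed_dict={inputs: batch_x, labels_ph: batch_y})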
Example No. 2
import os, random, sys
sys.path.append(".")

import tensorflow as tf
import numpy as np
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected by newer NumPy
from TFLibraries.Layer import Layers
from TFLibraries.Train import Training
from TFLibraries.Sparse import SparseFiles
from TFLibraries.Embeddings import Embedding
Layer = Layers()

random.seed(20160408)

indices = []
def generate_batch(size, data, labels, lengths):
  global indices
  # Refill the index pool once fewer than `size` unused examples remain
  if len(indices) < size:
    indices.extend(range(data.shape[0]))
  # Random indices, sampled without replacement
  r = random.sample(indices, size)
  # Drop the sampled indices so each example is seen once per pass
  indices = [a for a in indices if a not in r]
  return data[r], labels[r], lengths[r]

## Read Training/Dev/Test data
os.chdir('/home/ybisk/GroundedLanguage')
print("Running from ", os.getcwd())
maxlength = 80
offset = 3
labelspace = 9
Sparse = SparseFiles(maxlength, offset, labelspace=labelspace, prediction=2)
Example No. 3
import os, random, sys
sys.path.append(".")

## Model Imports
import tensorflow as tf
tf.set_random_seed(20160905)
import numpy as np
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected by newer NumPy
from TFLibraries.Layer import Layers
from TFLibraries.Train import Training
from TFLibraries.Sparse import SparseFiles
from TFLibraries.Embeddings import Embedding
Layer = Layers()

## Server Code
import json
import time
from flask import Flask
from flask import request
from flask import jsonify

random.seed(20160408)

indices = []
def generate_batch(size, data, labels, lengths):
  global indices
  # Refill the index pool once fewer than `size` unused examples remain
  if len(indices) < size:
    indices.extend(range(data.shape[0]))
  # Random indices, sampled without replacement
  r = random.sample(indices, size)
  # Drop the sampled indices so each example is seen once per pass
  indices = [a for a in indices if a not in r]
  return data[r], labels[r], lengths[r]
Example No. 4
import os, random, sys, gzip
sys.path.append(".")

from TFLibraries.Embeddings import Embedding
from TFLibraries.Layer import Layers
from TFLibraries.Ops import *
from Priors.Evaluation import Eval
import tensorflow as tf
import numpy as np
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected by newer NumPy
Layer = Layers()

""" Parameters """
random.seed(20160408)
batch_size = 512
maxlength = 40
filters = int(sys.argv[1])
hiddendim = 100
num_epochs = 12

rep_dim = 32
offset = rep_dim // 2 - 1  # integer offset (Python 2 floor-divided here)
block_size = 0.1528
space_size = 3.0
unit_size = space_size / rep_dim

Directory = '/home/ybisk/GroundedLanguage'
TrainData = 'Priors/Train.%d.L1.LangAndBlank.20.npz' % rep_dim
EvalData = 'Priors/Dev.%d.L1.Lang.20.npz' % rep_dim
RawEval = 'Priors/WithText/Dev.mat.gz'
#EvalData = 'Priors/Test.Lang.20.npz'
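For orientation (an interpretive sketch, not from the repository): the parameters above appear to discretize a 3.0-unit world into a rep_dim-cell grid, with `unit_size` world units per cell and `offset` centring the grid. The function below, including its name `to_cell`, is purely illustrative under that assumption.

# Illustrative only: map a continuous coordinate onto one of rep_dim cells
# using the unit_size/offset arithmetic defined above.
def to_cell(coord, rep_dim=32, space_size=3.0):
    unit_size = space_size / rep_dim       # 3.0 / 32 = 0.09375 world units per cell
    offset = rep_dim // 2 - 1              # 15: shifts the origin toward the grid centre
    cell = int(round(coord / unit_size)) + offset
    return min(max(cell, 0), rep_dim - 1)  # clamp to a valid cell index

print(to_cell(0.0))   # the world origin falls in cell 15 of 32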
Example No. 5
import os, random, sys
sys.path.append(".")

## Model Imports
import tensorflow as tf
tf.set_random_seed(20160905)
import numpy as np
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected by newer NumPy
from TFLibraries.Layer import Layers
from TFLibraries.Train import Training
from TFLibraries.Sparse import SparseFiles
from TFLibraries.Embeddings import Embedding
Layer = Layers()

## Server Code
import json
import time
from flask import Flask
from flask import request
from flask import jsonify

random.seed(20160408)

indices = []


def generate_batch(size, data, labels, lengths):
    global indices
    if len(indices) < size:
        indices.extend(range(data.shape[0]))
    # Random indices, sampled without replacement
    r = random.sample(indices, size)
    # Drop the sampled indices so each example is seen once per pass
    indices = [a for a in indices if a not in r]
    return data[r], labels[r], lengths[r]
Example No. 6
import random, sys, os
sys.path.append(".")

import tensorflow as tf
from TFLibraries.Layer import Layers
from TFLibraries.Train import Training
from TFLibraries.Sparse import SparseFiles
from TFLibraries.Embeddings import Embedding
Layer = Layers()

random.seed(20160408)

indices = []
def generate_batch(size, data, labels):
  global indices
  # Refill the index pool once fewer than `size` unused examples remain
  if len(indices) < size:
    indices.extend(range(data.shape[0]))
  # Randomly sample a batch of indices without replacement
  r = random.sample(indices, size)
  # Drop the sampled indices so each example is seen once per pass
  indices = [a for a in indices if a not in r]
  return data[r], labels[r]

## Read Training/Dev/Test data
os.chdir('/home/ybisk/GroundedLanguage')
print("Running from ", os.getcwd())
maxlength = 80
offset = 3
labelspace = 9
Sparse = SparseFiles(maxlength, offset, labelspace=labelspace, prediction=2)
train, _, vocabsize = Sparse.read("JSONReader/data/2016-NAACL/SRD/Train.mat")
dev, _, _           = Sparse.read("JSONReader/data/2016-NAACL/SRD/Dev.mat")
Example No. 7
import os, random, sys, gzip
sys.path.append(".")

from TFLibraries.Embeddings import Embedding
from TFLibraries.Layer import Layers
from TFLibraries.Ops import *
from Priors.Evaluation import Eval
import tensorflow as tf
import numpy as np
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected by newer NumPy
Layer = Layers()
""" Parameters """
random.seed(20160408)
batch_size = 512
maxlength = 40
filters = int(sys.argv[1])
hiddendim = 100
num_epochs = 12

rep_dim = 32
offset = rep_dim // 2 - 1  # integer offset (Python 2 floor-divided here)
block_size = 0.1528
space_size = 3.0
unit_size = space_size / rep_dim

Directory = '/home/ybisk/GroundedLanguage'
TrainData = 'Priors/Train.%d.L1.LangAndBlank.20.npz' % rep_dim
EvalData = 'Priors/Dev.%d.L1.Lang.20.npz' % rep_dim
RawEval = 'Priors/WithText/Dev.mat.gz'
#EvalData = 'Priors/Test.Lang.20.npz'
#RawEval = 'Priors/WithText/Test.mat.gz'