Example #1
    def test_Param(self):
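        # Exercise the constructor forms used by this project's util.Param:
        # a dict with name/val/sep keys, and one to three positional arguments.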
        print util.Param({'name': '--c', 'val': 'bar', 'sep': '@'})

        x = util.Param('--a')
        print json.dumps(x)
        print util.Param('--b', 'foo')
        print util.Param('--d', 'baz', '#')
Example #2
 def setUpClass(cls):
     #cls is a class object here
     cls.testENV = util.TestENV(
         util.Param(
             TMP_DIR="./tmp/TestUMsgQueryIter",
             BHTTPD_MAIN=False,
             BHTTPD_BAK=True,
         ))
     cls.svc = abhttp.Service(cfg_stream=StringIO(cls.CONFIG_STR))
     hosts = [x for x in cls.svc.uhost_iter()]
     for (_id, _text) in hosts:
         m = re.match("node([0-9]+)", _text)
         hid = int(m.group(1))
         cls.svc.uhost_assign(hid, _text)
     ptns = [x for x in cls.svc.uptn_iter()]
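     # Each pattern spells out a number in words (e.g. "One Two"); decode it
     # into an integer ID and assign it back through the service.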
     ptn = re.compile(
         "^.*pattern " +
         "(((Zero|One|Two|Three|Four|Five|Six|Seven|Eight|Nine) *)+):")
     for p in ptns:
         m = re.match(ptn, p.text)
         t = m.group(1)
         n = 0
         for x in t.split(' '):
             n *= 10
             n += util.num[x]
         logger.debug("assigning id: %d, ptn: '%s'" % (n, p.text))
         cls.svc.uptn_assign(n, p.text)
         if re.match("This is pattern Zero: .*", p.text):
             cls.ptn_zero = n
     assert cls.ptn_zero is not None
     for p in cls.svc.uptn_iter():
         logger.debug(p)
Example #3
 def setUpClass(cls):
     #cls is a class object here
     cls.testENV = util.TestENV(
         util.Param(
             TMP_DIR='./tmp/TestService',
             BHTTPD_MAIN=False,
             BHTTPD_BAK=True,
         ))
     cls.svc = abhttp.Service(cfg_stream=StringIO(cls.CONFIG_STR))
     cls.numeric_assign_host()
     cls.numeric_assign_ptn()
Example #4
    def getParameters(self, hidden=True):
        """Get the current parameter list; hidden parameters are excluded when `hidden` is False."""
        # Members of the plist array are dictionary items
        plist = self._trch_getparameterlist()
        if not hidden:
            plist = [x for x in plist if not self.isHiddenParameter(x.name)]

        if not self.param_order:
            return plist

        # Give them the same order in which they appear in the XML file
        pdict = util.iDict(plist)
        order = [ util.Param(pname, pdict.pop(pname) or "") 
                      for pname in self.param_order
                      if pname in pdict ]

        return order + list(pdict.items())
Example #5
import logging
import unittest
import abhttp
import util

logger = logging.getLogger(__name__)

testENV = util.TestENV(util.Param(
    TMP_DIR='./tmp/test_conn',
    N_DAEMONS=1,
))


class TestConnBase(object):
    def setUp(self):
        """ subclass must provide `self.conn` """
        raise NotImplementedError("Override me!")

    def test_get_fetch(self):
        self.conn.get_ptn()
        ptns = self.conn.fetch_ptn()
        logger.info("number of patterns: %d", len(ptns))

    def test_img2(self):
        self.conn.get_img2(img_store="3600-1",
Example #6
def main():
	
	mode = sys.argv[1]

	try:
		resume_pickle = sys.argv[2]
		resume = True
	except IndexError:
		resume = False

	print("resume = " + str(resume))


	for i in [0]:
		chainer.backends.cuda.get_device_from_id(i).use()


	model = Model()
	model.to_gpu()

	#if mode == 'A':
	if False:
		options = {'zero_pos':4}
	else:
		options = {'zero_pos':None}

	ncand = 4
		
	#normal = util2.Param(mode=mode, shape=(6,ncand), max_iter=(max_epoch-freeze)*(50000/batch_size), options=options)
	normal = util.Param(mode=mode, shape=(6,ncand), max_iter=(max_epoch-freeze)*(25000/batch_size))
	normal.to_gpu()

	normal.optimizer.beta1 = 0.5
	normal.optimizer.add_hook(chainer.optimizer.WeightDecay(0.003))
	normal.optimizer.add_hook(chainer.optimizer.GradientClipping(1.0))

	
	loss_hist = []
	train_acc = []
	advs = []
	totals = []

	
	(train, val, test) = cifar.get_cifar10()
	
	train = Preprocess(train, with_cutout=False, test=False)
	val = Preprocess(val, with_cutout=False, test=False)

	train_iter = chainer.iterators.SerialIterator(train, batch_size)
	val_iter = chainer.iterators.SerialIterator(val, batch_size)

	
	if resume:
		f = open(resume_pickle,"rb")
		normal = pickle.load(f)

		loss_hist = pickle.load(f)
		train_acc = pickle.load(f)
		model = pickle.load(f)
		advs = pickle.load(f)
		totals = pickle.load(f)
		f.close()

		restart_epoch = int(resume_pickle.split("/")[-1].split(".")[0])
		train_iter.epoch = restart_epoch - 1


	#optimizer1 = chainer.optimizers.MomentumSGD(momentum=0.9)


	optimizer1 = chainer.optimizers.NesterovAG(momentum=0.9)
	optimizer1.setup(model)
	optimizer1.add_hook(chainer.optimizer.WeightDecay(0.0005))
	optimizer1.add_hook(chainer.optimizer.GradientClipping(1.0))


	baseline = EMA(coef=0.05)


	# SNAP SHOT
	start = time.time()
	os.mkdir('./snapshot/'+str(start))
	iter = 0



	best_arc = normal.deter()
	best_val = 999999.


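	# Main loop: after the warm-up ("freeze") epochs, alternate between updating
	# the architecture parameters (update_a) and the network weights (update_w).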
	while train_iter.epoch < max_epoch:

		if train_iter.epoch >= freeze and iter % 2 == 0:
			update_a = True
			update_w = False
		else:
			update_a = False
			update_w = True

		print("train, ", train_iter.epoch, iter, update_w)
		print("val, ", val_iter.epoch, iter, update_a)


		# SNAP SHOT
		if (iter+1) % 70 == 0:
		#try:
			# output log
			log = str(train_iter.epoch)+','+str(loss.data)+','+str((time.time()-start)/60.0)+',mode='+mode+'\r\n'
			f = open('./snapshot/'+str(start)+'/log.txt','a')
			f.write(log)
			f.close()

			print(log)


			# dump architecture
			arc = normal.deter()
			dump_arc(arc, train_iter.epoch, start, "mode")
			dump_arc(best_arc, train_iter.epoch, start, "best", best_val)


			# dump
			f = open('./snapshot/'+str(start)+'/'+str(train_iter.epoch)+'.pickle', 'wb')
			pickle.dump(normal, f)
			pickle.dump(loss_hist, f)
			pickle.dump(train_acc, f)
			pickle.dump(model, f)
			pickle.dump(optimizer1, f)

			#pickle.dump(train_iter, f)
			f.close()


		# anneal down
		epoch = train_iter.epoch
		lrcoef = (math.cos(epoch*math.pi/max_epoch)+1.0)/2.0
		lrcoef2 = (math.cos((epoch-freeze)*math.pi/(max_epoch-freeze))+1.0)/2.0
		optimizer1.lr = 0.1 * lrcoef
		normal.optimizer.alpha = 0.0003 * lrcoef2



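		# Architecture updates are driven by validation batches; weight updates by training batches.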
		if update_a:
			batch = val_iter.next()
		else:
			batch = train_iter.next()


		x_array, t_array = convert.concat_examples(batch, device=gpu)
		
		## 
		#util2.plot_batch(x_array)

		x = chainer.Variable(x_array)
		t = chainer.Variable(t_array)


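		# Before architecture updates begin (epoch < freeze), mode "N" draws with nasp_random=True.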
		if mode == "N" and train_iter.epoch < freeze:
			a = normal.draw(nasp_random=True)
		else:
			a = normal.draw()

		if mode == "N" and update_w:
			is_nasp_upd_w = True
		else:
			is_nasp_upd_w = False



		y = model(x, a, mode, is_nasp_upd_w)
		model.cleargrads()



		loss = F.softmax_cross_entropy(y,t)
		# tolerance: the commented-out line below clips the loss at 4
		#loss = -(F.relu(-loss+4)-4)

		loss_hist.append(float(baseline.get()))

		acc = F.accuracy(y,t)
		train_acc.append(float(acc.data))

		loss.backward(retain_grad=True)

		baseline(float(loss.data)) # update baseline

		


		# update
		if update_a:

			if mode == "R":
				normal.update([float(loss.data)-baseline.get()])
			elif mode == "PA":
				normal.update([float(loss.data)])
			else:
				normal.update()

			# stock the best performance arc
			if float(loss.data) < best_val:
				best_val = float(loss.data)
				best_arc = chainer.Variable(a.data.copy())
			

		if update_w:

			print(loss)
			optimizer1.update()




		iter += 1
Example #7
def main(tryc):

    try:
        os.mkdir("./output/" + sys.argv[2])
    except OSError:
        print("mkdir error")

    try:
        os.mkdir("./output/" + sys.argv[2] + "/" + str(tryc))
    except OSError:
        print("mkdir error")

    operations = []
    for i in range(npos):
        layer = L.Convolution2D(1, ncand, ksize=7, stride=2)  #,nobias=True)
        layer.to_gpu()
        layer.disable_update()
        #layer.W.data += 0.9
        operations.append(layer)

    second = []
    for i in range(npos):
        layer1 = L.Convolution2D(1, 1, ksize=4, stride=1, nobias=True)
        #layer0 = L.BatchNormalization(1)
        #layer0.to_gpu()
        #layer0.disable_update()

        #layer1 = L.Linear(16,1)
        layer1.to_gpu()
        layer1.disable_update()
        second.append([layer1])

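    # Target ("teacher") distribution: softmax(a / ZERO); with a small ZERO this
    # is essentially a one-hot random target per position.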
    a = chainer.Parameter(
        xp.random.rand(npos * ncand).reshape((npos, ncand)).astype(xp.float32))
    teacher = F.softmax(a / ZERO, axis=1)

    f = open("./output/" + sys.argv[2] + "/" + str(tryc) + "/teacher", "wb")
    pickle.dump(teacher, f)
    pickle.dump(operations, f)
    pickle.dump(second, f)
    f.close()

    for q in range(len(modes)):
        mode = modes[q]
        mode2 = modes2[q]
        start = None

        losses = []
        timer = []
        orders = []

        if BINOMIAL:
            options = {'zero_pos': None, 'prior': 'unimodal'}
        else:
            options = {'zero_pos': None, 'prior': 'uniform'}

        param = util.Param(mode=mode,
                           shape=(npos, ncand),
                           max_iter=niter,
                           options=options,
                           mode2=mode2)

        ##############
        """
		# set prior
		if mode == "BI":
			ps = []
			for k in range(ncand):
				n = ncand-1
				s = float(F.sigmoid(param.param[0]).data)
				p = (math.factorial(n)/(math.factorial(k)*math.factorial(n-k)))
				p *= s**k
				p *= (1-s) ** (n-k)
				ps.append(p)
		else:
			param.param.data = set_binomial_prior(param.param.data)
		
			ps = F.softmax(param.param, axis=1).data[0]
			ps = cuda.to_cpu(ps).tolist()
			#ps.extend([0,0,0,0,0,0,0,0,0,0])
			
		print(len(ps))
		plt.bar(np.arange(ncand), ps, alpha=0.5)
		plt.show()
		exit()
		"""

        baseline = util.EMA(coef=0.05)
        #acct = EMA(coef=0.2)
        #acct = MA(steps=10)

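        # Search loop: every 10 iterations evaluate the deterministic architecture
        # (param.deter()) against the teacher, then draw a sample and update param.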
        for i in range(niter):

            if i % 10 == 0:

                #print(param.param)

                if start is None:
                    timer.append(0.)
                else:
                    timer.append(time.time() - start)

                x = batch_sample(100)
                t = forward(x, teacher, operations, second, mode)
                a = param.deter()
                #order = xp.argmax(a.data)
                #print(order)
                #orders.append(order)
                y = forward(x, a, operations, second, mode)

                #loss = float(F.mean_squared_error(y,t).data) * scale
                loss = float(toy_lossfunc(y, t).data)
                losses.append(loss)
                start = time.time()
                print(tryc, mode, i, losses[-1], timer[-1])
                #print(param.param)

            lrcoef = (math.cos(i * math.pi / niter) + 1.0) / 2.0
            param.optimizer.alpha = 0.001  # * lrcoef
            param.optimizer.beta1 = 0.9

            x = batch_sample(batch_size)
            t = forward(x, teacher, operations, second, mode)
            a = param.draw()
            y = forward(x, a, operations, second, mode)
            #loss = F.mean_squared_error(y,t)	* scale
            loss = toy_lossfunc(y, t)

            loss.backward(retain_grad=True)
            baseline(float(loss.data))
            #hist.append(baseline.get())

            if mode == "R":
                param.update([float(loss.data) - baseline.get()])
            elif mode == "PA":
                param.update([float(loss.data)])
            else:
                param.update([float(loss.data) - baseline.get()])

        f = open(
            "./output/" + sys.argv[2] + "/" + str(tryc) + '/' + mode + "_" +
            mode2, 'wb')
        pickle.dump(losses, f)
        pickle.dump(timer, f)
        pickle.dump(orders, f)
        pickle.dump(param, f)
        f.close()
Example #8
 def get_itemlist(self):
     """Get all of the items in the session"""
     return [util.Param(item.get_name(), item) for item in self.items]
Example #9
 def get_info(self):
     return (util.Param(HISTORY_STR, self.history),
             util.Param(CONTRACT_STR, self.contract))