def getPrototypeDistribution(i):
    dist = []
    prototype = prototypes[i]
    for state in range(maxState):
        tempPrototype = Prototype(numActions, stateDimension)
        tempPrototype.setFixed([state / float(maxState)], 0)
        tempFeatureDiff = tempPrototype.calculateDiff(prototype)
        membershipGrade = float(exp(-(tempFeatureDiff * tempFeatureDiff) / 2 * prototype.getFeatureWidth()))
        dist.append(membershipGrade * prototype.getTheta())
    return dist
def getV(self, state):
    tempPrototype = Prototype(1, self.stateDimension)
    tempPrototype.setFixed([state / float(self.maxState + 1)], 0)
    thetaSum = 0
    pIndex = [0] * self.numGroups
    pGrade = self.getPrototypeIndex(state, pIndex)
    for i in range(len(pIndex)):
        thetaSum += self.prototypes[pIndex[i]].getTheta() * pGrade[i]
    return thetaSum
def getQ(state, action):
    tempPrototype = Prototype(numActions, stateDimension)
    tempPrototype.setFixed([state / float(maxState)], action)
    thetaSum = 0
    for prototype in prototypes:
        tempFeatureDiff = tempPrototype.calculateDiff(prototype)
        membershipGrade = float(exp(-(tempFeatureDiff * tempFeatureDiff) / 2 * prototype.getFeatureWidth()))
        # print('state: ' + str(tempPrototype.state) + ' prototype.state: ' + str(prototype.state)
        #       + ' membershipGrade: ' + str(membershipGrade) + ' theta: ' + str(prototype.getTheta()))
        # raw_input("Press Enter to continue...")
        thetaSum += prototype.getTheta() * membershipGrade
        # print('thetaSum: ' + str(thetaSum))
        # raw_input("Press Enter to continue...")
    return thetaSum
def getPrototypeIndex(self, state, prototypeIndex):
    tempPrototype = Prototype(1, self.stateDimension)
    tempPrototype.setFixed([state / float(self.maxState + 1)], 0)
    pGrade = [0] * self.numGroups
    for i in range(len(self.prototypes)):
        prototype = self.prototypes[i]
        tempFeatureDiff = tempPrototype.calculateDiff(prototype)
        # Gaussian-style membership grade of the current state w.r.t. prototype i
        membershipGrade = float(exp(-(tempFeatureDiff * tempFeatureDiff) / 2 * prototype.getFeatureWidth()))  # /sqrt(2*prototype.getFeatureWidth()*pi)
        # Keep, for each group, the index of the prototype with the highest grade
        if pGrade[prototype.group] < membershipGrade:
            prototypeIndex[prototype.group] = i
            pGrade[prototype.group] = membershipGrade
    # Normalize the winning grades so they sum to one
    return [p / sum(pGrade) for p in pGrade]
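# The fuzzy/RL snippets above and below call a Prototype feature class (setFixed,
# setRandomly, isDifferent, calculateDiff, getTheta/setTheta, getFeatureWidth, group)
# that is not included here. The class below is only a hypothetical minimal sketch
# consistent with how those methods are used; the real implementation may differ
# (for example, calculateDiff may also account for the action).
from math import sqrt
from random import random


class Prototype(object):
    def __init__(self, numActions, stateDimension, group=0):
        self.numActions = numActions
        self.stateDimension = stateDimension
        self.group = group
        self.state = [0.0] * stateDimension   # normalized state features
        self.action = 0
        self.theta = 0.0                      # learned weight
        self.featureWidth = 1.0               # width of the membership kernel

    def setFixed(self, state, action):
        self.state, self.action = list(state), action

    def setRandomly(self):
        self.state = [random() for _ in range(self.stateDimension)]

    def isDifferent(self, other):
        return self.state != other.state or self.action != other.action

    def calculateDiff(self, other):
        # Euclidean distance between the normalized state vectors
        return sqrt(sum((a - b) ** 2 for a, b in zip(self.state, other.state)))

    def getTheta(self):
        return self.theta

    def setTheta(self, theta):
        self.theta = theta

    def getFeatureWidth(self):
        return self.featureWidth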
def train_sst(conf):
    if conf.model_type == 'attention':
        probe_net = AttentionNet(attention_stages=conf.attention_stages, dim=conf.feat_dim)
        gallery_net = AttentionNet(attention_stages=conf.attention_stages, dim=conf.feat_dim)
    elif conf.model_type == 'mobilefacenet':
        probe_net = MobileFaceNet(conf.feat_dim)
        gallery_net = MobileFaceNet(conf.feat_dim)
    # Initialize the gallery (moving-average) network from the probe network's weights
    moving_average(probe_net, gallery_net, 0)
    prototype = Prototype(conf.feat_dim, conf.queue_size, conf.scale, conf.margin, conf.loss_type).cuda()
    criterion = torch.nn.CrossEntropyLoss().cuda()
    optimizer = optim.SGD(probe_net.parameters(), lr=conf.lr, momentum=conf.momentum, weight_decay=5e-4)
    lr_schedule = optim.lr_scheduler.MultiStepLR(optimizer, milestones=conf.lr_decay_epochs, gamma=0.1)
    probe_net = torch.nn.DataParallel(probe_net).cuda()
    gallery_net = torch.nn.DataParallel(gallery_net).cuda()
    train_dict = trainlist_to_dict(conf.source_file)
    for epoch in range(1, conf.epochs + 1):
        # Re-sample the training identities each epoch, carrying over the previous id list
        if epoch == 1:
            curr_train_list, curr_id_list = train_sample(train_dict, conf.class_num, conf.queue_size)
        else:
            curr_train_list, curr_id_list = train_sample(train_dict, conf.class_num, conf.queue_size, curr_id_list)
        data_loader = DataLoader(lmdb_utils.SingleLMDBDataset(conf.source_lmdb, curr_train_list, conf.key),
                                 conf.batch_size, shuffle=False, num_workers=4, drop_last=True)
        train_one_epoch(data_loader, probe_net, gallery_net, prototype, optimizer, criterion, epoch, conf)
        lr_schedule.step()
class BinaryChunk:
    def __init__(self, path):
        self.binary_reader = BinaryReader(path)
        self.header = BinaryChunkHeader(self.binary_reader)
        self.size_upvalues = self.binary_reader.read_uint8()
        self.main_func = Prototype(self.binary_reader, '')

    def print_header(self):
        self.header.dump()

    def check_header(self):
        self.header.check()

    def print_main_func(self):
        self.main_func.dump()

    def get_main_func(self):
        return self.main_func
def generatePrototypes():
    if isRandom:
        for i in range(numPrototypes):
            numDifferent = 0
            prototypes.append(Prototype(numActions, stateDimension))
            # Re-randomize the new prototype until it differs from every earlier one
            while numDifferent < i:
                if prototypes[i].isDifferent(prototypes[numDifferent]):
                    numDifferent += 1
                else:
                    prototypes[i].setRandomly()
                    numDifferent = 0
    else:
        for i in range(numPrototypes):
            p = Prototype(numActions, stateDimension)
            p.setFixed([i / float(numPrototypes + 1)], 0)
            prototypes.append(p)
def generatePrototypes(self, isRandom):
    if isRandom:
        for i in range(self.numPrototypes):
            numDifferent = 0
            self.prototypes.append(Prototype(1, self.stateDimension))
            while numDifferent < i:
                if self.prototypes[i].isDifferent(self.prototypes[numDifferent]):
                    numDifferent += 1
                else:
                    self.prototypes[i].setRandomly()
                    numDifferent = 0
    else:
        for i in range(self.numPrototypes):
            groupNum = i % self.numGroups
            p = Prototype(1, self.stateDimension, groupNum)
            p.setFixed([i / float(self.maxState + 1)], 0)
            self.prototypes.append(p)
def learn(state1, action1, reward, state2):
    # maxQ = -float('inf')
    # for a in range(numActions):
    #     tempQ = getQ(state2, a)
    #     if maxQ < tempQ:
    #         maxQ = tempQ
    ### sarsa
    maxQ = getQ(state2, 0)
    preQ = getQ(state1, action1)
    delta = reward + gamma * maxQ - preQ
    tempPrototype = Prototype(numActions, stateDimension)
    tempPrototype.setFixed([state1 / float(maxState)], action1)
    # Update every prototype's weight in proportion to its membership grade
    for prototype in prototypes:
        tempFeatureDiff = tempPrototype.calculateDiff(prototype)
        membershipGrade = float(exp(-(tempFeatureDiff * tempFeatureDiff) / 2 * prototype.getFeatureWidth()))
        prototype.setTheta(prototype.getTheta() + alpha * delta * membershipGrade / numPrototypes)
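# learn(), getQ(), and the module-level generatePrototypes() above rely on globals
# (prototypes, numPrototypes, numActions, stateDimension, maxState, alpha, gamma,
# isRandom) that are not part of these snippets. The driver below is only a
# hypothetical sketch of how they might be wired together; ChainEnv and every
# constant here are illustrative assumptions, not the original training setup.
from math import exp

numPrototypes, numActions, stateDimension = 10, 1, 1
maxState, alpha, gamma, isRandom = 20, 0.1, 0.95, False
prototypes = []


class ChainEnv(object):
    """Toy 1-D chain environment used only to exercise the functions above."""

    def __init__(self, length):
        self.length = length
        self.state = 0

    def reset(self):
        self.state = 0
        return self.state

    def step(self, action):
        self.state = min(self.state + 1, self.length - 1)
        done = self.state == self.length - 1
        return self.state, 1.0 if done else 0.0, done


generatePrototypes()            # fixed, evenly spaced prototypes (isRandom is False)
env = ChainEnv(maxState)
state = env.reset()
for _ in range(1000):
    action = 0                  # the SARSA variant above only ever evaluates action 0
    next_state, reward, done = env.step(action)
    learn(state, action, reward, next_state)
    state = env.reset() if done else next_state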
class BinaryChunk:
    def __init__(self, chunk):
        self.binary_reader = BinaryReader(chunk)
        self.header = None
        self.size_upvalues = None
        self.main_func = None

    @staticmethod
    def is_binary_chunk(data):
        # A pre-compiled Lua chunk starts with the 4-byte LUA_SIGNATURE
        if data is None or len(data) < 4:
            return False
        for i in range(4):
            if data[i] != BinaryChunkHeader.LUA_SIGNATURE[i]:
                return False
        return True

    def print_header(self):
        self.header.dump()

    def check_header(self):
        self.header.check()

    def print_main_func(self):
        self.main_func.dump()

    def get_main_func(self):
        return self.main_func

    def undump(self):
        # Parse and validate the header, then read the main function prototype
        self.header = BinaryChunkHeader(self.binary_reader)
        self.check_header()
        self.size_upvalues = self.binary_reader.read_uint8()
        self.main_func = Prototype()
        self.main_func.init_from_br(self.binary_reader, '')
        return self.main_func
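# Hypothetical usage sketch for the BinaryChunk class above. The import path, the
# "luac.out" file name, and the assumption that BinaryReader accepts raw bytes are
# illustrative guesses, not part of the original snippet.
from binary_chunk import BinaryChunk  # assumed module path

with open('luac.out', 'rb') as f:
    data = f.read()

if BinaryChunk.is_binary_chunk(data):
    chunk = BinaryChunk(data)
    main_proto = chunk.undump()   # header and size_upvalues are only set after undump()
    chunk.print_header()
    chunk.print_main_func()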
def add_random_creature(self):
    position = choice(self.available_tiles)
    creature = Creature(position, self, Prototype.random_prototype(), DEFAULT_ENERGY)
    creature.environment = self
    self.add(creature)
def add_prototype(
    dbh,
    tradeStrategy,
    tradeStyle,
    data_series,
    session_id=None,
    data_set=None,
    data_block=None,
    template_version="prototype_v4",
    jcl_version="prototype_v3",
    **params,
):
    Day = 0
    Swing = 1
    opt_params = set_opt_params(dbh, tradeStrategy, Day, data_series, params)
    symbol = data_series[0][0]
    logger.debug(f"symbol={symbol} data_series={data_series}")
    market_id = querySecId(dbh, symbol)
    if session_id is None:
        session_id = queryDefaultSessionId(dbh, symbol)
        logger.debug(f"lookup session for {symbol}: {session_id}")
    sess = querySession(dbh, session_id)
    session_start = sess.session_start
    session_end = sess.session_end
    logger.debug(f"session: {session_start} - {session_end}")
    if not data_set or not (data_set >= firstDataBlock() and data_set <= lastDataBlock()):
        data_set = lastDataBlock()
    if not data_block or not (data_block > 0 and data_block < 11):
        data_block = randint(1, 10)
    day_swing = Day
    fitness_function = "TSI"
    max_bars_back = 200
    trades_per_day = 1
    chart_series = format_chart_series(data_series)
    time_frames = [x[2] for x in (x.split(":") for x in chart_series.split(","))]
    logger.debug(f"chart_series={chart_series} time_frames={time_frames}")
    dates = setDates(
        session_start, session_end, max_bars_back, data_set, data_block, time_frames
    )
    new_rec = Prototype(
        market_id=market_id,
        market_session_id=session_id,
        template_version=template_version,
        jcl_version=jcl_version,
        data_set=data_set,
        data_block=data_block,
        symbol=symbol,
        chart_series=chart_series,
        fitness_function=fitness_function,
        max_days_back=max_bars_back,
        trades_per_day=trades_per_day,
        day_swing=day_swing,
        prestart_dt=dates["pre_start_dt"],
        start_dt=dates["start_dt"],
        end_dt=dates["end_dt"],
        bt_start_dt=dates["bt_start_dt"],
        bt_end_dt=dates["bt_end_dt"],
        status="new",
        status_state="pending",
    )
    dbh.add(new_rec)
    dbh.commit()
    dbh.flush()
    proto_id = new_rec.id
    updates = {
        "in_sample_file": f"proto_{proto_id}_is.csv",
        "out_of_sample_file": f"proto_{proto_id}_oos.csv",
    }
    dbUpdatePrototype(dbh, proto_id, updates)
    for p in opt_params:
        new_rec = PrototypeParameter(
            proto_id=proto_id,
            name=p["name"],
            input_type=p["input_type"],
            data_type=p["data_type"],
            value=p["value"],
            re_optimize=p["re_optimize"],
        )
        dbh.add(new_rec)
    dbh.commit()
    return proto_id
def test_clone(self):
    object1 = Prototype("object")
    object2 = object1.clone()
    self.assertEqual(object1.name, object2.name)
    self.assertNotEqual(id(object1), id(object2))
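# The test above does not include the class under test. Below is a hypothetical
# minimal Prototype that would satisfy it; the 'name' attribute handling and the
# copy.deepcopy-based clone() are assumptions made for illustration.
import copy


class Prototype(object):
    def __init__(self, name):
        self.name = name

    def clone(self):
        # Return an independent copy so the clone's id differs from the original's
        return copy.deepcopy(self)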
#!/usr/bin/env python
# -*- coding: utf8 -*-
from prototype import Prototype


class A(object):
    CLONED = False

    def __init__(self, *args):
        self.args = args

    def is_cloned(self):
        return self.CLONED


class B(A):
    pass


registered_objs = Prototype()
registered_objs.reg_obj("A", A)
b = registered_objs.clone("A")

print A().is_cloned()
print B().is_cloned()
print B.__name__
print B().ARG
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from session import Session
from prototype import Prototype
from base import Base

DATABASE_URL = "postgres://*****:*****@ec2-54-83-60-13.compute-1.amazonaws.com:5432/d617hc9tos6o76"

engine = create_engine(DATABASE_URL)
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()

new_prototype = Prototype(id=100, template='')
'''
new_prototype = Prototype(id=55, template="DP_1TF_120m_sess9",
                          data_set=1,
                          data_block=8,
                          status="CandGen",
                          status_state="Done",
                          cand_prefix="[email protected]_120_na",
                          symbol="@RTY.D",
                          timeframe_1=120,
                          timeframe_1_unit="min",
                          fitness_function="TSI",
                          max_days_back=200,
                          session_id=9,
                          prestart_dt="12/28/14",
def to_proto(self):
    proto = Prototype()
    proto.num_params = self.num_params
    proto.max_stack_size = self.max_regs
    proto.code = self.insts
    proto.constants = self.get_constants()
    proto.upvalues = self.get_upvalues()
    proto.protos = []
    proto.line_infos = []
    proto.local_vars = self.local_vars
    proto.upvalue_names = self.get_upvalue_names()
    # Recursively convert nested function infos into sub-prototypes
    for fi in self.sub_funcs:
        proto.protos.append(fi.to_proto())
    if proto.get_max_stack_size() < 2:
        proto.max_stack_size = 2
    if self.is_vararg:
        proto.is_vararg = 1
    return proto