def __init__(self, codes, startDateTime, endDateTime):
    self.codes = codes
    self.startDateTime = startDateTime
    self.endDateTime = endDateTime
    performanceDBHelper = PerformanceDBHelper()
    self.dataCenter = DataCenter()

def load_node_info(self, node: DataCenter) -> None:
    sql = """SELECT * FROM DataCenters WHERE id={}""".format(node.id)
    dc: tuple = self.exec_sql(sql)
    if not dc:
        return
    node.leftmem = dc[3]
    node.leftcpu = dc[4]
    node.current_income = dc[5]

def get_node_cost(self, node: DataCenter):
    node.cpu = 0
    node.cost = 0
    if len(node.requests) == 0:
        return
    for r in node.requests:
        r: Request
        node.cpu += r.process_source
    node.cost = node.cpu * node.unitCpuPrice
    gain = self.node_gain(node)
    node.cpu *= (1 - gain / node.cost)
    node.cost -= gain

def get_node_cost(self, node: DataCenter):
    node.cpu = 0
    node.cost = 0
    if len(node.requests) == 0:
        return
    for r in node.requests:
        r: Request
        node.cpu += r.process_source
        # print("node {} deploy req {} needs cpu {}".format(node.id, r.id, r.process_source[node.id]))
    node.cost = node.cpu * node.unitCpuPrice
    gain = self.node_gain(node)
    node.gain = gain
    node.cpu *= (1 - gain / node.cost)
    node.cost -= gain
    node.cost *= 0.9

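# A tiny numeric walk-through of the adjustment in this variant of get_node_cost.
# The concrete values (cpu=10, unitCpuPrice=2, gain=5) are made up for
# illustration; only the arithmetic mirrors the method above.
cpu = 10.0
unitCpuPrice = 2.0
cost = cpu * unitCpuPrice     # 20.0
gain = 5.0                    # stand-in for self.node_gain(node)
cpu *= (1 - gain / cost)      # 10 * (1 - 0.25) = 7.5
cost -= gain                  # 15.0
cost *= 0.9                   # 13.5, the final discounted cost
print(cpu, cost)              # 7.5 13.5
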
def node_weight(self, req: Request, node: DataCenter):
    old_seq = [0 for _ in range(10)]
    for r in node.requests:
        r: Request
        for i in range(10):
            old_seq[i] += r.bandSeq[i]
    node.weight = (1 - self.correlation(old_seq, req.bandSeq)) / node.unitCpuPrice

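# self.correlation is not shown in the source. A plausible sketch is a Pearson
# correlation over the two bandwidth sequences; that choice is an assumption,
# and any similarity score in [-1, 1] would fit the weight formula above.
from statistics import mean


def correlation(self, seq_a, seq_b):
    ma, mb = mean(seq_a), mean(seq_b)
    cov = sum((a - ma) * (b - mb) for a, b in zip(seq_a, seq_b))
    norm_a = sum((a - ma) ** 2 for a in seq_a) ** 0.5
    norm_b = sum((b - mb) ** 2 for b in seq_b) ** 0.5
    if norm_a == 0 or norm_b == 0:
        return 0.0  # treat a flat sequence as uncorrelated
    return cov / (norm_a * norm_b)
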
class App:
    def __init__(self):
        self.parser = configparser.ConfigParser()
        self.parser.read("config.INI")
        self.dataCenter = DataCenter(self.parser)
        self.neuralNetworks = NeuralNetworks(self.parser)

    def train(self):
        self.neuralNetworks.train(*self.dataCenter.process_data())

    def predict(self, data):
        """
        API to predict the label of an incoming question from user input
        :param data:
        :return:
        """
        res = self.neuralNetworks.inference(
            self.dataCenter.process_inference_data(data))
        print(res)

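# A short usage sketch for App; the question string is made up, and the
# config.INI keys consumed by DataCenter and NeuralNetworks are not shown here.
if __name__ == "__main__":
    app = App()
    app.train()                          # fit on DataCenter's processed data
    app.predict("What is my balance?")   # print the predicted label for one input
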
def load_nodes(self) -> None:
    sql = """SELECT * FROM DataCenters"""
    nodes = Manager().exec_sql(sql)
    for i in range(len(nodes)):
        node = DataCenter(nodes[i][0], nodes[i][1], nodes[i][2],
                          nodes[i][3], nodes[i][5], nodes[i][6])
        # DataCenter's neighbors attribute stores the ids of adjacent nodes
        for item in self.graph[i]:
            if item != 0:
                node.neighbors.append(item)
        self.nodes.append(node)

class Quote(object):
    def __init__(self, codemap):
        self.codemap = codemap
        self.codeset = set(self.codemap.keys())
        self.dc = DataCenter()
        self.__load_history__()

    def __load_history__(self):
        self.Data = self.dc.getBarData(self.codemap)
        # loading of today's missing data is not yet implemented

    def __getitem__(self, index):
        return self.Data[index]

    def update(self, tick):
        if tick.acsycode in self.codeset:
            self.Data[tick.acsycode].update(tick)

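# A hedged usage sketch for Quote; the codemap contents and the incoming tick
# object are made up, only the code-keyed access and update() mirror the class.
codemap = {"600000": "bar_daily", "000001": "bar_daily"}  # hypothetical mapping
quote = Quote(codemap)
bars = quote["600000"]   # history loaded through DataCenter.getBarData
# on each live tick, push it into the matching series:
# quote.update(tick)     # tick.acsycode must be one of the codemap keys
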
def __init__(self):
    self.parser = configparser.ConfigParser()
    self.parser.read("config.INI")
    self.dataCenter = DataCenter(self.parser)
    self.neuralNetworks = NeuralNetworks(self.parser)

from DataCenter import DataCenter
from NeuralNetworks import NeuralNetworks
from helper import logger

if __name__ == "__main__":
    logger.info("Start Job...")
    inputs, targets = DataCenter().run()
    neural_network = NeuralNetworks(inputs, targets, split_fraction=0.8,
                                    embed_size=300, lstm_size=256)
    neural_network.train()
    neural_network.test()
    logger.info("Job is done!")

def load_nodes_info(self):
    for i in range(14):
        node = DataCenter(i, config.DataCenters[i], config.DataCenters[i])
        self.nodes[i] = node

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='CapitalOneAIEngine')
    parser.add_argument('--task', dest='task', type=str,
                        help='Predict or Train from the model')
    args = parser.parse_args()
    logger.info("Task is :{}".format(args.task))
    logger.info("Job started!")
    save_model_path = './Model/AlertTransactionModel'
    neural_network = NeuralNetworks(save_model_path)
    task = args.task
    if task == "predict":
        new_transaction, _ = DataCenter().run(task)
        predict = neural_network.sample(new_transaction)
        print("*Predict result:{}".format(predict))
        with open('prediction.txt', 'w') as outfile:
            json.dump({'predict': predict}, outfile)
    elif task == "train":
        inputs, targets = DataCenter().run(task)
        neural_network.train(inputs, targets)
    else:
        logger.fatal("!No task assigned, check the input arg '--task'")
    logger.info("Job finished!")

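# Example invocations for the script above; the script name main.py is assumed,
# everything else follows from the argparse setup and file writes shown:
#   python main.py --task train     # trains on DataCenter().run("train") output
#   python main.py --task predict   # writes {"predict": ...} to prediction.txt
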
def __init__(self, codes, startDateTime, endDateTime):
    self.codes = codes
    self.startDateTime = startDateTime
    self.endDateTime = endDateTime
    self.dataCenter = DataCenter()
    self.config = Config()

def __init__(self, codemap):
    self.codemap = codemap
    self.codeset = set(self.codemap.keys())
    self.dc = DataCenter()
    self.__load_history__()

from constants import seed
from customer import Customer
from product import Product
from seller import Seller
from utils import plot, regression, Save
from DataCenter import DataCenter
from constants import ticks
import random
import warnings
import utils

warnings.filterwarnings('ignore')
random.seed(seed)

# create auctioneer 'dataCenter'
dataCenter = DataCenter('dataCenter')

# Create some Consumers
customers = [Customer(name='consumer_' + str(i),
                      customer_id=i,
                      wallet=7000,
                      dataCenter=dataCenter,
                      crisp_sets=(0.3 + 0.2 * (random.random() - 0.5),
                                  0.7 + 0.2 * (random.random() - 0.5)),
                      price_tolerance=0.5 + 0.4 * random.random(),
                      quality_tolerance=0.5 + 0.4 * random.random())
             for i in range(700)]

# Create some products
iphone7 = Product(name='iphone7', product_id=0, price=300, quality=0.9,
                  prob_map={'galaxy': [(1, 0.6)]})
galaxy = Product(name='galaxy', product_id=1, price=200, quality=0.7,
                 prob_map={'iphone7': [(0, 0.6)]})
iphone5 = Product(name='iphone5', product_id=2, price=220, quality=0.85,
                  prob_map={'iphone7': [(0, 0.7)], 'galaxy': [(1, 0.8)]})
note = Product(name='note', product_id=3, price=240, quality=0.88,
               prob_map={'iphone7': [(0, 0.7)], 'iphone5': [(2, 0.5)],
                         'galaxy': [(1, 0.6)]})
headphone = Product(name='headphone', product_id=4, price=80, quality=0.85,
                    prob_map={'iphone7': [(0, 0.9)], 'galaxy': [(1, 0.9)],
                              'iphone5': [(2, 0.9)], 'note': [(3, 0.9)]})

from DataCenter import DataCenter
from helper import logger
from NeuralNetworks import NeuralNetworks

if __name__ == "__main__":
    logger.info("Job started!")
    data = DataCenter().run()
    neural_network = NeuralNetworks(data)
    neural_network.train()
    # new_text = neural_network.sample(1000, prime='Far')
    # print(new_text)
    logger.info("Job finished!")

def init_nodes(self, path: tuple):
    for v in path:
        node = DataCenter(v, "", 40001, 0, 0, 0)
        Manager().load_node_info(node)
        self.nodes.append(node)

class StockNeuworkClassifier:
    def __init__(self, codes, startDateTime, endDateTime):
        self.codes = codes
        self.startDateTime = startDateTime
        self.endDateTime = endDateTime
        performanceDBHelper = PerformanceDBHelper()
        self.dataCenter = DataCenter()

    def createModel(self, perfModel):
        model = Sequential()
        model.add(layers.Dense(config.hidden_layer_1_unit,
                               name='hidden_layer_1',
                               activation=config.activation,
                               input_dim=dataDim - 1))
        model.add(layers.Dropout(0.2))
        model.add(layers.Dense(100, name='hidden_layer_2',
                               activation=config.activation))
        # model.add(layers.Dense(8, name='hidden_layer_3', activation='tanh'))
        # model.add(layers.Dropout(0.2))
        model.add(layers.Dense(output_dim, name='output_layer',
                               activation='softmax'))
        adam = optimizers.Adam(lr=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
        sgd = optimizers.SGD(lr=0.01, momentum=0.0, decay=0, nesterov=False)
        adagrad = optimizers.Adagrad(lr=0.01, epsilon=1e-06)
        loss = 'categorical_crossentropy'
        model.compile(loss=loss, optimizer=sgd, metrics=['accuracy'])
        perfModel.setModelFields(config.hidden_layer_1_unit, config.epochs,
                                 config.activation, sgd, loss, config.kFold)
        return model

    def drawTrainData(self, x, label='', plot=True, color='red'):
        fig = plt.figure(figsize=(30, 8))
        if plot is True:
            plt.plot(range(len(x)), x, color=color)
        plt.scatter(range(len(x)), x)
        plt.ylabel('Label:' + str(label))

    def tryNetwork(self, X, Y, evaluate_x, evaluate_y):
        perfModel = PerformanceModel(str(X.shape))
        perfModel.setDataFields(self.codes, self.startDateTime,
                                self.endDateTime, config.backWindowLength,
                                config.futureWindow, config.skipStep,
                                config.minSizeSamples)
        model = self.createModel(perfModel)
        model.fit(X, Y, epochs=config.epochs)
        print("#####evaluate####")
        lose, accuracy = model.evaluate(evaluate_x, evaluate_y)
        print("\n\nAccuracy:" + str(accuracy * 100))
        # return ,accuracy * 100

    def tryCV(self, X, Y):
        estimator = KerasClassifier(build_fn=self.createModel,
                                    epochs=config.epochs,
                                    batch_size=32, verbose=1)
        kfold = KFold(n_splits=config.kFold, shuffle=True, random_state=seed)
        result = cross_val_score(estimator, X, Y, cv=kfold)
        print('baseLine: %.2f%% (%.2f%%)' %
              (result.mean() * 100, result.std() * 100))

    def trainAndTest(self):
        X, Y, evaluate_x, evaluate_y = self.dataCenter.loadData(
            self.codes, self.startDateTime, self.endDateTime)
        # self.tryCV(X, Y)
        if config.debugForPrepareData is False:
            self.tryNetwork(X, Y, evaluate_x, evaluate_y)

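# A minimal driver for the classifier above; the stock codes and the date range
# are made up, and the config values are assumed to come from the imported
# config module used throughout the class.
if __name__ == "__main__":
    clf = StockNeuworkClassifier(codes=["600000", "000001"],
                                 startDateTime="2018-01-01",
                                 endDateTime="2018-12-31")
    clf.trainAndTest()  # loads data via DataCenter.loadData, then trains and evaluates
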
def node_weight(self, v: DataCenter):
    # v.weight = v.gain / (self.node_discount(v) * v.unitCpuPrice * v.unitCpuPrice * v.cpu)
    v.weight = 1 / v.unitCpuPrice