class NManager():
    def __init__(self, playerStats):
        self.status = Status("NManager")
        self.playerStats = playerStats
        self.normalstats = self.normalize()

    def normalize(self):
        self.status.message(1, "normalize(self)")
        stats = PlayerStats()
        stats.state.race = self.bounds(self.playerStats[0], 0, 1)
        stats.buildings.built = self.bounds(self.playerStats[1], 0, 300)
        stats.buildings.destroyed = self.bounds(self.playerStats[2], 0, 50)
        stats.armyUnits.spawned = self.bounds(self.playerStats[3], 0, 1000)
        stats.armyUnits.dead = self.bounds(self.playerStats[4], 0, 500)
        stats.workerUnits.spawned = self.bounds(self.playerStats[5], 0, 1000)
        stats.workerUnits.dead = self.bounds(self.playerStats[6], 0, 500)
        self.status.message(0, "normalize(self)")
        return stats

    def bounds(self, value, lower, upper):
        # self.status.message(1, "bounds(self, value, lower, upper)")
        diff = upper - lower
        result = int((diff * value) + 1)
        # self.status.message(0, "bounds(self, value, lower, upper)")
        return result
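# Usage sketch (illustrative, not part of the original module; assumes the project's
# Status and PlayerStats imports are in place). NManager expects a 7-element sequence
# of generated values in the order: race, buildings built/destroyed, army spawned/dead,
# worker spawned/dead, and maps each back into its stat range via bounds().
#
# generated = [0.5, 0.3, 0.1, 0.8, 0.2, 0.6, 0.4]  # hypothetical generator output
# normalizer = NManager(generated)
# print(normalizer.normalstats)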
def __init__(self): self.status = Status("GAN") self.cols = 7 self.img_shape = (self.cols,) optimizer = Adam(0.0002, 0.5) # Build and compile the discriminator self.discriminator = self.build_discriminator() self.discriminator.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy']) # Build and compile the generator self.generator = self.build_generator() self.generator.compile(loss='binary_crossentropy', optimizer=optimizer) # The generator takes noise as input and generated imgs z = Input(shape=(7,)) img = self.generator(z) # For the combined model we will only train the generator self.discriminator.trainable = False # The valid takes generated images as input and determines validity valid = self.discriminator(img) # The combined model (stacked generator and discriminator) takes # noise as input => generates images => determines validity self.combined = Model(z, valid) self.combined.compile(loss='binary_crossentropy', optimizer=optimizer)
class NNManager():
    def __init__(self):
        self.status = Status("NNManager")
        self.datapath = None

    def buildGAN(self, trainset, epochs=30000, batch_size=25, sample_interval=200):
        self.status.message(
            1, "buildGAN(self, trainset, epochs, batch_size, sample_interval)")
        gan = GAN()
        gan.train(trainset, epochs, batch_size, sample_interval)
        self.status.message(
            0, "buildGAN(self, trainset, epochs, batch_size, sample_interval)")
        return True
import json


def loadJSON(path):
    status = Status("Extraction")
    status.message(1, "loadJSON(path)")
    with open(path) as f:
        datadict = json.load(f)
    listings = []
    for match in datadict:
        for player in datadict[match]:
            listings.append(datadict[match][player])
    status.message(0, "loadJSON(path)")
    return listings
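# Usage sketch (illustrative): loadJSON expects a JSON object keyed by match, each match
# keyed by player, holding the per-player stat dicts that PlayerStats.populate() later reads.
# The file name below is hypothetical.
#
# {
#   "match1": {
#     "player1": {"result": "Win", "race": "Terran",
#                 "buildingBuilt": 42, "buildingKilled": 3,
#                 "armyBuilt": 120, "armyKilled": 60,
#                 "workerBuilt": 70, "workerKilled": 12}
#   }
# }
#
# listings = loadJSON("jsondump/output.json")
# print(len(listings), "player entries")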
import socket


class FManager():
    def __init__(self):
        self.status = Status("FManager")
        self.sitestatus = False

    def isRunning(self):
        self.status.message(1, "isRunning(self)")
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        result = sock.connect_ex(('0.0.0.0', 3000))
        sock.close()
        if result == 0:
            self.sitestatus = True
            self.status.message(9)
        else:
            self.sitestatus = False
            self.status.message(8)
        self.status.message(0, "isRunning(self)")
        return self.sitestatus
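# Usage sketch (illustrative): isRunning() probes TCP port 3000 on the local machine and
# caches the result in sitestatus.
#
# fm = FManager()
# if fm.isRunning():
#     print("frontend reachable on port 3000")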
import numpy as np
import sc2reader


class EManager:
    def __init__(self, jsonpath):
        self.status = Status("Extraction")
        self.playerList = self.extractJSON(jsonpath)
        self.examples = self.numpifyPlayers()

    def extractReplayDir(self, dirname):
        # Loads replays from a hardcoded test directory; dirname is not yet used
        # and the loaded replays are not returned.
        replays = sc2reader.load_replays("sc2reader/test_replays/2.1.4")

    def extractJSON(self, jsonpath):
        self.status.message(1, "extractJSON(self, jsonpath)")
        listings = decodeJSON.loadJSON(jsonpath)
        playerList = []
        for i in listings:
            player = PlayerStats()
            player.populate(i)
            playerList.append(player)
        self.status.message(7)
        self.status.message(0, "extractJSON(self, jsonpath)")
        return playerList

    def numpifyPlayers(self):
        self.status.message(1, "numpifyPlayers(self)")
        examples = []
        for player in self.playerList:
            npyplayer = np.array([player.state.race,
                                  player.buildings.built,
                                  player.buildings.destroyed,
                                  player.armyUnits.spawned,
                                  player.armyUnits.dead,
                                  player.workerUnits.spawned,
                                  player.workerUnits.dead])
            examples.append(npyplayer)
        return np.asarray(examples)
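# Usage sketch (illustrative; the path is the one used by the Admin entry point below):
#
# extraction = EManager('/home/service/analyticsBot-sc2/modules/Extraction/jsondump/output.json')
# print(extraction.examples.shape)  # (num_players, 7) training array for the GAN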
def __init__(self): self.status = Status("FManager") self.sitestatus = False
def __init__(self): self.status = Status("Admin") self.extraction = EManager('/home/service/analyticsBot-sc2/modules/Extraction/jsondump/output.json') self.neuralnet = NNManager() self.neuralnet.buildGAN(self.extraction.examples)
import numpy as np
from keras.layers import Input, Dense, Reshape, BatchNormalization
from keras.layers.advanced_activations import LeakyReLU
from keras.models import Sequential, Model
from keras.optimizers import Adam


class GAN():
    def __init__(self):
        self.status = Status("GAN")
        self.cols = 7
        self.img_shape = (self.cols,)

        optimizer = Adam(0.0002, 0.5)

        # Build and compile the discriminator
        self.discriminator = self.build_discriminator()
        self.discriminator.compile(loss='binary_crossentropy',
                                   optimizer=optimizer,
                                   metrics=['accuracy'])

        # Build and compile the generator
        self.generator = self.build_generator()
        self.generator.compile(loss='binary_crossentropy', optimizer=optimizer)

        # The generator takes noise as input and generates imgs
        z = Input(shape=(7,))
        img = self.generator(z)

        # For the combined model we will only train the generator
        self.discriminator.trainable = False

        # The discriminator takes generated images as input and determines validity
        valid = self.discriminator(img)

        # The combined model (stacked generator and discriminator) takes
        # noise as input => generates images => determines validity
        self.combined = Model(z, valid)
        self.combined.compile(loss='binary_crossentropy', optimizer=optimizer)

    def build_generator(self):
        self.status.message(1, "build_generator(self)")
        noise_shape = (7,)

        model = Sequential()
        model.add(Dense(7, input_shape=noise_shape))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Dense(7))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Dense(7))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Dense(np.prod(self.img_shape), activation='tanh'))
        model.add(Reshape(self.img_shape))
        # model.summary()

        noise = Input(shape=noise_shape)
        img = model(noise)

        self.status.message(0, "build_generator(self)")
        return Model(noise, img)

    def build_discriminator(self):
        self.status.message(1, "build_discriminator(self)")
        shape = (self.cols,)

        model = Sequential()
        # model.add(Flatten(input_shape=shape))
        model.add(Dense(7))
        model.add(LeakyReLU(alpha=0.2))
        model.add(Dense(1, activation='sigmoid'))
        # model.summary()

        example = Input(shape=shape)
        validity = model(example)

        self.status.message(0, "build_discriminator(self)")
        return Model(example, validity)

    def train(self, trainset, epochs, batch_size=128, save_interval=50):
        # self.status.message(1, "train(self, trainset, epochs, batch_size, save_interval)")

        # Load the dataset
        # (X_train, _), (_, _) = mnist.load_data()
        X_train = trainset

        # Rescale -1 to 1
        X_train = (X_train.astype(np.float32) - 127.5) / 127.5
        # X_train = np.expand_dims(X_train, axis=3)

        half_batch = int(batch_size / 2)

        for epoch in range(epochs):

            # ---------------------
            #  Train Discriminator
            # ---------------------

            # Select a random half batch of images
            idx = np.random.randint(0, X_train.shape[0], half_batch)
            imgs = X_train[idx]

            noise = np.random.normal(0, 1, (half_batch, 7))

            # Generate a half batch of new images
            gen_imgs = self.generator.predict(noise)

            # Train the discriminator
            d_loss_real = self.discriminator.train_on_batch(imgs, np.ones((half_batch, 1)))
            d_loss_fake = self.discriminator.train_on_batch(gen_imgs, np.zeros((half_batch, 1)))
            d_loss = 0.5 * np.add(d_loss_real, d_loss_fake)

            # ---------------------
            #  Train Generator
            # ---------------------

            noise = np.random.normal(0, 1, (batch_size, 7))

            # The generator wants the discriminator to label the generated samples
            # as valid (ones)
            valid_y = np.array([1] * batch_size)

            # Train the generator
            g_loss = self.combined.train_on_batch(noise, valid_y)

            # Plot the progress
            if epoch % 10 == 0:
                print("Epoch: %d [Discriminator loss: %f, acc.: %.2f%%] [Generator loss: %f]" %
                      (epoch, d_loss[0], 100 * d_loss[1], g_loss))

            # If at save interval => save generated image samples
            if epoch % save_interval == 0:
                self.save_imgs(epoch)

        # self.status.message(0, "train(self, trainset, epochs, batch_size, save_interval)")
        # return

    def save_imgs(self, epoch):
        self.status.message(1, "save_imgs(self, epoch)")
        rows = 10
        noise = np.random.normal(0, 1, (rows, 7))
        gen_imgs = self.generator.predict(noise)

        # Rescale images 0 - 1
        gen_imgs = 0.5 * gen_imgs + 0.5

        cnt = 0
        for i in gen_imgs:
            normalizer = NManager(i)
            print(normalizer.normalstats)

        self.status.message(0, "save_imgs(self, epoch)")
        return
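# Usage sketch (illustrative): train on the (num_players, 7) array produced by
# EManager.numpifyPlayers(); NNManager.buildGAN wraps exactly this call with its defaults.
#
# gan = GAN()
# gan.train(extraction.examples, epochs=30000, batch_size=25, save_interval=200)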
def __init__(self): self.status = Status("NNManager") self.datapath = None
def __init__(self): self.status = Status("PlayerStats") self.state = State() self.buildings = BuildingUnits() self.armyUnits = ArmyUnits() self.workerUnits = WorkerUnits()
class PlayerStats():
    def __init__(self):
        self.status = Status("PlayerStats")
        self.state = State()
        self.buildings = BuildingUnits()
        self.armyUnits = ArmyUnits()
        self.workerUnits = WorkerUnits()

    def __str__(self):
        print("+++++ Optimal Settings +++++")
        return str("%s |\n%s |\n%s |\n%s |\n" %
                   (self.state, self.buildings, self.armyUnits, self.workerUnits))

    def getBuildingUnits(self, playerDict):
        self.status.message(1, "getBuildingUnits(self, playerDict)")
        self.status.message(0, "getBuildingUnits(self, playerDict)")
        return BuildingUnits(playerDict["buildingBuilt"], playerDict["buildingKilled"])

    def getArmyUnits(self, playerDict):
        self.status.message(1, "getArmyUnits(self, playerDict)")
        self.status.message(0, "getArmyUnits(self, playerDict)")
        return ArmyUnits(playerDict["armyBuilt"], playerDict["armyKilled"])

    def getWorkerUnits(self, playerDict):
        self.status.message(1, "getWorkerUnits(self, playerDict)")
        self.status.message(0, "getWorkerUnits(self, playerDict)")
        return WorkerUnits(playerDict["workerBuilt"], playerDict["workerKilled"])

    def getState(self, playerDict):
        self.status.message(1, "getState(self, playerDict)")
        # print(playerDict)
        try:
            if playerDict["result"] == "Loss":
                won = False
            else:
                won = True
            if playerDict["race"] == "Zerg":
                race = 0
            elif playerDict["race"] == "Protoss":
                race = 1
            elif playerDict["race"] == "Terran":
                race = 2
            else:
                race = 0
        except KeyError:
            print("Key error occurred. Using defaults.")
            won = False
            race = 0
        self.status.message(0, "getState(self, playerDict)")
        return State(won, race)

    def populate(self, playerDict):
        self.status.message(1, "populate(self, playerDict)")
        self.state = self.getState(playerDict)
        self.buildings = self.getBuildingUnits(playerDict)
        self.armyUnits = self.getArmyUnits(playerDict)
        self.workerUnits = self.getWorkerUnits(playerDict)
        self.status.message(0, "populate(self, playerDict)")
        return True
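# Usage sketch (illustrative; assumes the project's State/BuildingUnits/ArmyUnits/WorkerUnits
# imports are in place): populate() fills a PlayerStats from one per-player dict using the keys
# read by the getters above; a missing "result" or "race" key falls back to defaults.
#
# player = PlayerStats()
# player.populate({"result": "Win", "race": "Protoss",
#                  "buildingBuilt": 35, "buildingKilled": 4,
#                  "armyBuilt": 90, "armyKilled": 40,
#                  "workerBuilt": 60, "workerKilled": 8})
# print(player)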