def iota2Formatting(invector, classes, outvector=""):
    '''
    python simplification/ZonalStats.py -wd ~/tmp/
    -inr /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/Classif_Seed_0.tif
         /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/Confidence_Seed_0.tif
         /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/PixelsValidity.tif
    -shape /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/simplification/vectors/dept_1.shp
    -output /work/OT/theia/oso/vincent/outstats_oso.sqlite
    -params 1:rate 2:statsmaj 3:statsmaj
    -classes simplification/nomenclature17.cfg
    -iota2
    '''
    def Sort(sub_li):
        sub_li.sort(key=lambda x: x[0])
        return sub_li

    nomenc = nomenclature.Iota2Nomenclature(classes, 'cfg')
    desclasses = nomenc.HierarchicalNomenclature.get_level_values(
        int(nomenc.getLevelNumber() - 1))
    cols = [[x, str(z)] for x, y, w, z in desclasses]
    sortalias = [x[1] for x in Sort(cols)]

    exp = ""
    for name in sortalias:
        exp += "CAST(%s AS NUMERIC(6,2)) AS %s, " % (name, name)

    if outvector == "":
        layerout = os.path.splitext(os.path.basename(invector))[0]
        outvector = os.path.splitext(invector)[0] + '_tmp.shp'
    else:
        layerout = os.path.splitext(os.path.basename(outvector))[0]

    command = "ogr2ogr -lco ENCODING=UTF-8 -overwrite -q -f 'ESRI Shapefile' -nln %s -sql "\
              "'SELECT CAST(cat AS INTEGER(4)) AS Classe, "\
              "CAST(meanmajb3 AS INTEGER(4)) AS Validmean, "\
              "CAST(stdmajb3 AS NUMERIC(6,2)) AS Validstd, "\
              "CAST(meanmajb2 AS INTEGER(4)) AS Confidence, %s"\
              "CAST(area AS NUMERIC(10,2)) AS Aire "\
              "FROM %s' "\
              "%s %s" % (layerout, exp, layerout, outvector, invector)

    Utils.run(command)
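# A minimal sketch of the CAST/alias fragment iota2Formatting builds for the -sql clause.
# The alias names below ("Culture", "Foret") are made up and do not come from
# nomenclature17.cfg; only the string-building pattern is the same.
def build_cast_expression(aliases):
    # One "CAST(<alias> AS NUMERIC(6,2)) AS <alias>, " fragment per class alias
    return "".join("CAST(%s AS NUMERIC(6,2)) AS %s, " % (name, name) for name in aliases)

print(build_cast_expression(["Culture", "Foret"]))
# CAST(Culture AS NUMERIC(6,2)) AS Culture, CAST(Foret AS NUMERIC(6,2)) AS Foret,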
async def award_take(self, message, is_award): server, channel, author = message.guild, message.channel, message.author split_message = message.content.split(" ") mode_text, mode_change = ("awarded", 1) if is_award else ("deducted", -1) if len(split_message) > 2: amount = split_message[1] if amount.isdigit(): amount = int(amount) user = Utils.get_user(server, split_message[2]) if user is not None: EconomyUtils.set_cash( server.id, user.id, EconomyUtils.get_cash(server.id, user.id) + amount * mode_change) await Utils.simple_embed_reply( channel, "[%s]" % str(author), "User `%s` was %s %d%s." % (str(user), mode_text, amount, EconomyUtils.currency)) else: given_role = Utils.get_role(server, ''.join(split_message[2])) users = [] if given_role is not None: for user in server.members: if given_role in user.roles: EconomyUtils.set_cash( server.id, user.id, EconomyUtils.get_cash(server.id, user.id) + amount * mode_change) users.append(user) if len(users) > 0: await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Users with the role `%s` were %s %d%s." % (str(given_role), mode_text, amount, EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[Error]", "No users are equipped with that role.") else: await Utils.simple_embed_reply( channel, "[Error]", "Invalid user or role supplied.") else: await Utils.simple_embed_reply( channel, "[Error]", "Amount parameter is incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.")
def __init__(self, mod_name, embed_color): # Config var init self.config = DataManager.JSON("Mods/Waifu/WaifuConfig.json") # Make sure all gifts follow a proper naming convention for gift_name in self.config.get_data("Gifts"): if not re.fullmatch(r"[A-Za-z0-9 ]*", gift_name): raise Exception( "Waifu gift name \"%s\" can only contain spaces, A-Z, a-z, 0-9" % gift_name) # Build command objects self.commands = Utils.parse_command_config( self, mod_name, self.config.get_data('Commands')) # Init DBs self.waifus_db = DataManager.add_manager( "shop_database", "Mods/Waifu/Waifus.db", file_type=DataManager.FileType.SQL) self.gifts_db = DataManager.add_manager( "shop_database", "Mods/Waifu/Gifts.db", file_type=DataManager.FileType.SQL) # Generate and Update DB self.generate_db() # Super... super().__init__(mod_name, self.config.get_data("Mod Description"), self.commands, embed_color)
def __init__(self, mod_name, embed_color): # Config var init self.config = DataManager.JSON("Mods/Shops/ShopsConfig.json") self.delete_delay = self.config.get_data("Message Delete Delay") # Init shop DB self.shop_info_database = DataManager.add_manager( "shop_info_database", "Mods/Shops/ShopsInfo.db", file_type=DataManager.FileType.SQL) self.shops_database = DataManager.add_manager( "shops_database", "Mods/Shops/Shops.db", file_type=DataManager.FileType.SQL) # Create a shop table withing the DB if there isn't one self.shop_info_database.execute( "CREATE TABLE IF NOT EXISTS shops(channel_id TEXT UNIQUE, shop_name TEXT, is_purge BIT)" ) self.shop_info_database.execute( "CREATE TABLE IF NOT EXISTS messages(shop_name TEXT UNIQUE, message_id TEXT, channel_id TEXT)" ) # Verify DB - Check for deleted channels that shops exist in self.verify_db() # Build command objects self.commands = Utils.parse_command_config( self, mod_name, self.config.get_data('Commands')) # Init the super with all the info from this mod super().__init__(mod_name, self.config.get_data('Mod Description'), self.commands, embed_color)
def get_lab_case_info(self):
    uihelper = Utils.UIDisplayHelper()
    steps = []
    result = []
    if self.step_name.upper() == 'SILVER':
        steps = ['Daily', 'Silver']
    elif len(self.step_name) == 0:
        pass
    else:
        steps.append(self.step_name)
    for step in steps:
        if len(self.week_name) != 0 and len(self.step_name) != 0:
            path = r'/%s/%s/%s' % (self.project_name, self.week_name, step)
        else:
            path = r'/%s/' % (self.project_name)
        print('path:\t%s' % path)
        cases = self.query.enumerate_test_set_folder(path, self.session)
        if cases is None:
            return None
        for item in cases:
            item.extend([self.query, result, uihelper])
        pool = threadpool.ThreadPool(40)
        requests = threadpool.makeRequests(self.enumerate_plan, cases)
        [pool.putRequest(req) for req in requests]
        pool.wait()
        pool.dismissWorkers(40)
    print('result:\t%s' % result)
    return result
async def roll_income(self, message, command_name): server, channel, author = message.guild, message.channel, message.author command_config = self.config.get_data(key="Commands")[command_name] user_cash = EconomyUtils.get_cash(server.id, author.id) # Pick success or failure win_mode, change_mode, balance_change = ( "Success", "Payout", 1) if roll( int( self.config.get_data(key="Commands")[command_name] ["Default Success Rate"])) else ("Failure", "Deduction", -1) balance_change_range = command_config[win_mode][change_mode] cash_change = random.randint( balance_change_range["Min"], balance_change_range["Max"]) * balance_change messages = command_config[win_mode]["Messages"] if len(messages) > 0: reply = messages[rng(len(messages) - 1)] EconomyUtils.set_cash(server.id, author.id, user_cash + cash_change) for section in reply.split(" "): if re.fullmatch(r"{[0-9]{18}}", section) is not None: reply = reply.replace( section, Utils.get_user_by_id(server, section[1:-1]).mention) await Utils.simple_embed_reply( channel, "[" + str(author) + "]", reply.replace("{amount}", str(abs(cash_change)) + EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[" + str(author) + "]", str(cash_change) + EconomyUtils.currency)
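# Standalone sketch of the "{<18-digit user id>}" placeholder substitution used in
# roll_income, with a stub resolver in place of Utils.get_user_by_id (assumed to return
# a member whose .mention attribute is the Discord mention string).
import re

def substitute_mentions(reply, resolve_mention):
    for section in reply.split(" "):
        if re.fullmatch(r"{[0-9]{18}}", section) is not None:
            reply = reply.replace(section, resolve_mention(section[1:-1]))
    return reply

print(substitute_mentions("You were paid by {123456789012345678} {amount}",
                          lambda user_id: "<@%s>" % user_id))
# You were paid by <@123456789012345678> {amount}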
def __init__(self, mod_name, embed_color): super().__init__(mod_name, "Just an example mod.", {}, embed_color) # Config var init self.config = DataManager.JSON("Mods/Gamble/GambleConfig.json") # Build command objects self.commands = Utils.parse_command_config(self, mod_name, self.config.get_data('Commands')) # Init the super with all the info from this mod super().__init__(mod_name, self.config.get_data('Mod Description'), self.commands, embed_color)
async def error_cool_down(self, message, command):
    seconds_left = command.cool_down_seconds - (
        time.time() - command.last_called(message.author.id))
    time_left_text = Utils.seconds_format(seconds_left)
    await Utils.simple_embed_reply(
        message.channel, str(message.author),
        "You can call " + command.name + " again in " + time_left_text + ".",
        self.embed_color)
def dataframeExport(geodataframe, output, schema):
    """Export a GeoPandas DataFrame as a vector file (shapefile, sqlite and geojson)

    Parameters
    ----------
    geodataframe : GeoPandas DataFrame
        GeoPandas DataFrame
    output : string
        output vector file
    schema : dict / Fiona schema
        schema giving columns name and format
    """
    # TODO Export format depending on columns number (shapefile, sqlite, geojson)
    #      Check Issue on framagit
    convert = False
    outformat = os.path.splitext(output)[1]
    if outformat == ".shp":
        driver = "ESRI Shapefile"
    elif outformat == ".geojson":
        driver = "GeoJSON"
    elif outformat == ".sqlite":
        driver = "ESRI Shapefile"
        convert = True
    else:
        raise Exception("The output format '%s' is not handled" % (outformat[1:]))

    if not convert:
        geodataframe.to_file(output, driver=driver, schema=schema, encoding='utf-8')
    else:
        outputinter = os.path.splitext(output)[0] + '.shp'
        geodataframe.to_file(outputinter, driver=driver, schema=schema, encoding='utf-8')
        output = os.path.splitext(output)[0] + '.sqlite'
        Utils.run('ogr2ogr -f SQLite %s %s' % (output, outputinter))
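# Hedged usage sketch for dataframeExport: a one-feature GeoDataFrame plus a matching
# Fiona schema written out as a shapefile. The path and column name are illustrative,
# and geopandas/shapely must be installed for this to run.
import geopandas as gpd
from shapely.geometry import Point

gdf = gpd.GeoDataFrame({"meanb1": [12.5]},
                       geometry=[Point(650000.0, 6800000.0)],
                       crs="EPSG:2154")
schema = {"geometry": "Point", "properties": {"meanb1": "float:10.2"}}
dataframeExport(gdf, "/tmp/stats_example.shp", schema)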
async def create_role(self, server, color):
    role = await Utils.client.create_role(
        server, name=color, color=Utils.get_color(color),
        permissions=discord.Permissions(permissions=0))
    self.roles[server.id][role.id] = []
    # Move it to top priority (so other roles' colors get over-written)
    await self.role_max_shift(server, role)
    return role
def __init__(self, mod_name, embed_color): # Config var init self.config = DataManager.JSON( "Mods/DailyEconomy/DailyEconomyConfig.json") # Build command objects self.commands = Utils.parse_command_config( self, mod_name, self.config.get_data('Commands')) # Init the super with all the info from this mod super().__init__(mod_name, self.config.get_data('Mod Description'), self.commands, embed_color)
def parseLine(line):
    pos = [m.start() for m in re.finditer("\t", line)]
    if len(pos) < 4:
        Utils.error("len(pos)<4")
    url = line[0:pos[0]]
    category = line[pos[0] + 1:pos[1]]
    subcategory = line[pos[1] + 1:pos[2]]
    title = line[pos[2] + 1:pos[3]]
    if len(pos) > 4:
        content = line[pos[3] + 1:pos[4]]
    else:
        content = line[pos[3] + 1:]
    # url = url.strip()
    category = category.strip()
    title = title.strip()
    content = content.strip()
    subcategory = subcategory.strip()
    return category, url, title, content, subcategory
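# Quick usage sketch for parseLine on a synthetic tab-separated record
# (url <TAB> category <TAB> subcategory <TAB> title <TAB> content); values are made up.
sample = "http://example.com/a\tNews\tTech\tSome title\tBody text"
category, url, title, content, subcategory = parseLine(sample)
print(category, subcategory, title, content)
# News Tech Some title Body text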
def __init__(self, mod_name, embed_color): super().__init__(mod_name, "Just an example mod.", {}, embed_color) # Config var init self.config = DataManager.JSON("Mods/Gacha/GachaConfig.json") # Init DBs self.gacha_database = DataManager.add_manager("gacha_database", "Mods/Gacha/GachaDatabase.db", file_type=DataManager.FileType.SQL) self.generate_db() # Build command objects self.commands = Utils.parse_command_config(self, mod_name, self.config.get_data('Commands')) # Init the super with all the info from this mod super().__init__(mod_name, self.config.get_data('Mod Description'), self.commands, embed_color)
def OSORegularization(classif, umc1, core, path, output, ram = "10000", noSeaVector = None, rssize = None, umc2 = None, logger = logger): if not os.path.exists(output): # OTB Number of threads os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(core) # first regularization regulClassif, time_regul1 = AdaptRegul23.regularisation(classif, umc1, core, path, ram) logger.info(" ".join([" : ".join(["First regularization", str(time_regul1)]), "seconds"])) # second regularization if umc2 != None : if rssize != None : if os.path.exists(os.path.join(path, "reechantillonnee.tif")): os.remove(os.path.join(path, "reechantillonnee.tif")) command = "gdalwarp -q -multi -wo NUM_THREADS=%s -r mode -tr %s %s %s %s/reechantillonnee.tif" %(core, \ rssize, \ rssize, \ regulClassif, \ path) Utils.run(command) logger.info(" ".join([" : ".join(["Resample", str(time.time() - time_regul1)]), "seconds"])) regulClassif, time_regul2 = AdaptRegul23.regularisation(os.path.join(path, "reechantillonnee.tif"), umc2, core, path, ram) os.remove(os.path.join(path, "reechantillonnee.tif")) logger.info(" ".join([" : ".join(["Second regularization", str(time_regul2)]), "seconds"])) if noSeaVector is not None: outfilename = os.path.basename(output) rastToVectRecode(path, regulClassif, noSeaVector, os.path.join(path, outfilename), ram, "uint8") else: outfilename = regulClassif shutil.copyfile(os.path.join(path, outfilename), output) os.remove(os.path.join(path, outfilename)) else: logger.info("One regularised file '%s' already exists for this classification"%(output))
def generate_db(self):
    # Create a local DB based on live info (fresh DB)
    for server in Utils.client.guilds:
        # Create a user database for each server
        self.users[server.id], self.roles[server.id] = {}, {}
        for user in server.members:
            # Create a role database for each user
            self.users[server.id][user.id] = None
            for role in user.roles:
                # If a user's role is a color -> Save it
                if Utils.is_hex(role.name):
                    self.users[server.id][user.id] = role.id
                    if role.id in self.roles[server.id].keys():
                        self.roles[server.id][role.id].append(user.id)
                    else:
                        self.roles[server.id][role.id] = [user.id]
async def get_shop_embed(self, shop_name, server):
    roles = self.shops_database.execute(
        "SELECT role_id, price FROM '%s' ORDER BY price DESC" % shop_name)
    # Convert [id1, cost1, id2, cost2, ...] to [[id1, cost1], [id2, cost2], ...]
    roles = [[roles[i * 2], str(roles[i * 2 + 1])]
             for i in range(len(roles) // 2)]
    embed = discord.Embed(title="[%s]" % shop_name,
                          color=Utils.default_hex_color)
    if len(roles) > 0:
        for role_info in roles:
            role = Utils.get_role_by_id(server, role_info[0])
            embed.add_field(name=str(role),
                            value=role_info[1] + EconomyUtils.currency,
                            inline=True)
    else:
        embed.description = "No roles currently available."
    return embed
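# The shops DB helper returns role rows as one flat list [id1, cost1, id2, cost2, ...];
# this standalone snippet isolates the pairing trick used in get_shop_embed.
flat = ["111111111111111111", 50, "222222222222222222", 120]
pairs = [[flat[i * 2], str(flat[i * 2 + 1])] for i in range(len(flat) // 2)]
print(pairs)
# [['111111111111111111', '50'], ['222222222222222222', '120']]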
def __init__(self, mod_name, embed_color):
    # General var init
    self.name = mod_name
    self.embed_color = embed_color
    self.applied_interest = False
    # Config var init
    self.config = DataManager.JSON("Mods/Economy/EconomyConfig.json")
    # Set the currency for mods to use
    EconomyUtils.currency = self.config.get_data("Currency")
    # Build command objects
    self.commands = Utils.parse_command_config(
        self, mod_name, self.config.get_data('Commands'))
    # Generate, update and init DBs
    EconomyUtils.init_database()
    self.generate_db()
    # Init the super with all the info from this mod
    super().__init__(mod_name, self.config.get_data('Mod Description'),
                     self.commands, embed_color)
def __init__(self, mod_name, embed_color):
    # General var init
    self.users = {}
    self.roles = {}
    self.name = mod_name
    self.embed_color = embed_color
    # Config var init
    self.config = json.loads("".join(
        open("Mods/ColoredRoles/ColoredRolesConfig.json",
             encoding="utf-8").readlines()))
    # Build command objects
    self.commands = Utils.parse_command_config(self, mod_name,
                                               self.config['Commands'])
    # Generate a fresh DB
    self.generate_db()
    # Init the super with all the info from this mod
    super().__init__(mod_name, self.config['Mod Description'],
                     self.commands, embed_color)
async def give_role(self, server, user, role):
    old_role_id = self.users[server.id][user.id]
    # If the user has an old role -> Delete old role
    if old_role_id is not None:
        old_role = Utils.get_role_by_id(server, old_role_id)
        # If the role isn't what's needed -> Delete old role
        if old_role.name != role.name:
            await self.remove_role(server, user, old_role)
    # Give role to user
    await Utils.client.add_roles(user, role)
    # Save new user role to user's data
    self.users[server.id][user.id] = role.id
    # Save user to the color's data
    # Color data exists -> Append user id
    # Color data doesn't exist -> Create and append it
    if role.id in self.roles[server.id].keys():
        self.roles[server.id][role.id].append(user.id)
    else:
        self.roles[server.id][role.id] = [user.id]
async def set_income_reply(self, message, is_success): server, channel, author = message.guild, message.channel, message.author split_message = message.content.split(" ") if len(split_message) > 2: income_command = split_message[1].lower() reply = message.content[len(split_message[0]) + len(split_message[1]) + 2:] if len(reply) < 1500: if income_command in ("s**t", "work", "crime"): income_command = get_income_command(income_command) economy_config = self.config.get_data() reply_type = "Success" if is_success else "Failure" # Check to make sure that and {user_id}s supplied are valid for section in reply.split(" "): if re.fullmatch(r"{[0-9]{18}}", section) is not None: if Utils.get_user_by_id(server, section[1:-1]) is None: return await Utils.simple_embed_reply( channel, "[Error]", "User ID `" + section[1:-1] + "` not found.") economy_config["Commands"][income_command][reply_type][ "Messages"].append(reply) self.config.write_data(economy_config) await Utils.simple_embed_reply( message.channel, "[Success]", "Added `" + reply + "` to `" + income_command + "`'s replies.") else: await Utils.simple_embed_reply( message.channel, "[Error]", "Income command parameter is incorrect.") else: await Utils.simple_embed_reply( message.channel, "[Error]", "Reply message is too long - it must be < 1500 characters") else: await Utils.simple_embed_reply( message.channel, "[Error]", "Insufficient parameters supplied.")
def insertDailyData(self):
    """
    Routine for collecting and inserting daily data from the YahooApi. All data is for the
    previously closed trading day.
    :return: None
    """
    Logger.logApp("Collecting and inserting daily data...")
    # Chunk the tickers into a manageable size, retrieve data for each chunk, and then insert each chunk.
    # Chunking allows us to insert periodically through the data collection process and ensures our
    # YahooApi request doesn't return a 414 response code (URI too long).
    aTickers = self.getQuandlTickers(AppVars.DATA_DAILY_TICKERS)
    aTickerChunks = Utils.chunk(aTickers, AppVars.CHUNK_TICKERS)
    for iCurChunk in range(0, len(aTickerChunks)):
        oData = Api.getData(aTickerChunks[iCurChunk], AppVars.DATA_DAILY_DIMENSIONS)
        if oData:
            TradingData.insert(self.oDB, TradingData.S_DAILY_DATA, oData)
            self.oDB.commit()
            Logger.logApp("Inserting data for chunk " + str(iCurChunk + 1) + " of " + str(len(aTickerChunks)))
        else:
            Logger.logError('There was an error retrieving data for chunk ' + str(iCurChunk + 1))
        del oData
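# Utils.chunk is assumed to split a ticker list into fixed-size slices so each YahooApi
# request stays below the URI length limit; a minimal stand-in with that behaviour:
def chunk(items, size):
    # Slice the list into consecutive pieces of at most `size` elements
    return [items[i:i + size] for i in range(0, len(items), size)]

print(chunk(["AAPL", "MSFT", "GOOG", "AMZN", "TSLA"], 2))
# [['AAPL', 'MSFT'], ['GOOG', 'AMZN'], ['TSLA']]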
def __init__(self, mod_name, description="No description", commands=None,
             embed_color=Utils.default_hex_color):
    # Check if parameters are valid
    assert ' ' not in mod_name, "Mod name \"" + mod_name + "\" contains a space"
    assert Utils.is_hex(
        hex(embed_color)
    ), "Embed Color \"" + hex(embed_color) + "\" is not a valid hex color"
    # Fall back to an empty command dict before validating
    # (the default is None, which would fail the type check below)
    commands = {} if commands is None else commands
    assert type(commands) is dict, "Mod command list is not of type dict"
    for command_name in commands:
        assert type(
            commands[command_name]
        ) is Command, "Mod commands are not of type \"Command\""
    # Var init
    self.commands = commands
    self.name = mod_name
    self.embed_color = embed_color
    self.description = description
    self.command_aliases = [
        alias for command_name in self.commands
        for alias in self.commands[command_name]
    ]
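# The alias flattening at the end of Mod.__init__ collects every alias of every command.
# This sketch reproduces it with plain lists standing in for the real Command objects
# (an assumption: iterating a Command yields its aliases).
commands = {"pay": ["pay", "give"], "balance": ["balance", "bal", "cash"]}
command_aliases = [alias for command_name in commands for alias in commands[command_name]]
print(command_aliases)
# ['pay', 'give', 'balance', 'bal', 'cash']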
def solver(mydata, config): #output dir timestamp = time.strftime('%Y-%m-%d-%Hh-%Mm-%Ss') out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp)) print("Writing to {}\n".format(out_dir)) #get RCNN rcnn = TextRCNN(sequence_length=config["sequence_length"], num_classes=mydata.getClasses(), vocab_size=mydata.vocabSize, word_embedding_size=config["word_embedding_size"], context_embedding_size=config["context_embedding_size"], cell_type=config["cell_type"], hidden_size=config["hidden_size"], l2_reg_lambda=config["l2_reg_lambda"], W_text_trainable=config["W_text_trainable"], out_dir=out_dir) ## summary sess = rcnn.sess # Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints")) checkpoint_prefix = os.path.join(checkpoint_dir, "model") if not os.path.exists(checkpoint_dir): os.makedirs(checkpoint_dir) #save vocab/config/category mydata.saveCategory2Index(os.path.join(out_dir, "category_index")) mydata.vocabproc.save(os.path.join(out_dir, "text.vocab")) Utils.showAndSaveConfig(config, os.path.join(out_dir, "config.txt")) print("[*]parameter number: %s" % (getParameterNumbers())) saver = tf.train.Saver(tf.global_variables(), max_to_keep=10) # Initialize all variables sess.run(tf.global_variables_initializer()) restore_from = config["restore_from"] if restore_from != None: saver.restore(sess, restore_from) print("[*]restore success") # Pre-trained word2vec wordInit = {} if config["LoadGoogleModel"] and restore_from == None: print("[*]Loading Google Pre-trained Model") # initial matrix with random uniform initW = np.random.uniform( -0.25, 0.25, (mydata.vocabSize, config["word_embedding_size"])) # load any vectors from the word2vec word2vec = config["Word2Vec"] print(" [*]Load word2vec file {0}".format(word2vec)) cnt_word_in_word2vec = 0 with open(word2vec, "rb") as f: header = f.readline() vocab_size, layer1_size = map(int, header.split()) print(" [*]Google:vocab_size:%s" % (vocab_size)) binary_len = np.dtype('float32').itemsize * layer1_size for line in range(vocab_size): word = [] while True: ch = f.read(1).decode('latin-1') if ch == ' ': word = ''.join(word) break if ch != '\n': word.append(ch) idx = mydata.vocabproc.vocabulary_.get(word.lower()) if idx != 0: if idx not in wordInit: wordInit[idx] = word initW[idx] = np.fromstring(f.read(binary_len), dtype='float32') cnt_word_in_word2vec += 1 elif word == word.lower(): wordInit[idx] = word initW[idx] = np.fromstring(f.read(binary_len), dtype='float32') else: f.read(binary_len) print( " [*]Load Google Model success: word in Word2Vec :%s total word:%s" % (cnt_word_in_word2vec, mydata.vocabSize)) sess.run(rcnn.W_text.assign(initW)) print("[*]Success to load pre-trained word2vec model!\n") # start traning # step && learning rate stlr = STLR(1e-3, 1e-2, 200, 600) step = 0 while True: batch = mydata.nextBatch(config["BatchSize"]) learning_r = stlr.getLearningRate(step) feed_dict = { rcnn.input_text: batch[0], rcnn.input_y: batch[1], rcnn.dropout_keep_prob: config["droupout"], rcnn.learning_rate: learning_r } _, step, summaries, loss, accuracy = sess.run([ rcnn.train_op, rcnn.global_step, rcnn.train_summary_op, rcnn.loss, rcnn.accuracy ], feed_dict) rcnn.summary_writer.add_summary(summaries, step) # Training log display if step % config["TraingLogEverySteps"] == 0: time_str = datetime.datetime.now().isoformat() print(" [*] step %s; loss %s; acc %s; lr %.6f " % (step, loss, accuracy, learning_r)) # Evaluation if step % 
config["TestEverySteps"] == 0: test_data = mydata.getTestData() test_size = len(test_data[0]) correct_predict_count = 0 dev_loss = 0 for i in range(0, test_size, 500): x_test = test_data[0][i:i + 500] y_test = test_data[1][i:i + 500] feed_dict_dev = { rcnn.input_text: x_test, rcnn.input_y: y_test, rcnn.dropout_keep_prob: 1.0 } summaries_dev, loss, accuracy = sess.run( [rcnn.dev_summary_op, rcnn.loss, rcnn.accuracy], feed_dict_dev) #rcnn.summary_writer.add_summary(summaries_dev, step) # correct_predict_count += int(0.5 + accuracy * len(x_test)) dev_loss += loss * len(x_test) / test_size #dev summary dev_accuracy = correct_predict_count / test_size rcnn.summary_writer.add_summary( tf.Summary(value=[ tf.Summary.Value(tag="dev_loss", simple_value=dev_loss) ]), step) rcnn.summary_writer.add_summary( tf.Summary(value=[ tf.Summary.Value(tag="dev_accu", simple_value=dev_accuracy) ]), step) time_str = datetime.datetime.now().isoformat() print("\n[*]Test:%s step %s, loss %.6f, acc %.6f " % (time_str, step, dev_loss, dev_accuracy)) # Model checkpoint if step % 1000 == 0: path = saver.save(sess, checkpoint_prefix, global_step=step) print("Saved model checkpoint to {}\n".format(path))
def run(self): """ Main daemon process invoked by DataDaemon. This method is a infinite loop that has logic in it's body to execute commands at specific times of day. More specifically, this process is responsible for creating, running, and closing each trading day. This process will get killed when the daemon stops. :return: """ # service variables bTrading = False while True: # Get the current EST time and date oNow = datetime.datetime.now(timezone(Conf.MARKET_TIMEZONE)) oNowDate = datetime.datetime(oNow.year, oNow.month, oNow.day) # Market is only open on week days from 9:30AM EST to 4:00PM EST bIsWeekDay = not(oNow.strftime('%A') == 'sunday' or oNow.strftime('%A') == 'saturday') bIsMarketHours = datetime.time(Conf.MARKET_OPEN_HOUR, Conf.MARKET_OPEN_MINUTE) <= datetime.time(oNow.hour, oNow.minute) \ and datetime.time(oNow.hour, oNow.minute) < datetime.time(Conf.MARKET_CLOSE_HOUR, Conf.MARKET_CLOSE_MINUTE) bIsOpen = bIsWeekDay and bIsMarketHours # it's after 5:00AM EST on a week day, let's collect the previous days data and get everything set up if (bIsWeekDay and not bTrading and oNow.hour >= 5) or Conf.DAEMON_IS_DEBUG: # insert daily data from yesterday if Conf.DAEMON_INSERT_DAILY: self.insertDailyData() # market vars, must be deleted at EOD aTickers = self.getQuandlTickers(AppVars.DATA_RT_TICKERS) aTickerChunks = Utils.chunk(aTickers, AppVars.CHUNK_TICKERS) del aTickers oPortfolioCollection = PortfolioCollection() # OK to stop trading bTrading = True # the market is open! start collecting data and trading if (bTrading and bIsOpen and aTickerChunks) or Conf.DAEMON_IS_DEBUG: Logger.logApp("Starting a trading cycle...") # get current pricing data for all tickers and create a data map where keys are tickers and values are # the location of the ticker's value in the data list aDataList = [] oDataMap = {} for iCurChunk in range(0, len(aTickerChunks)): aChunkData = Api.getData(aTickerChunks[iCurChunk], AppVars.DATA_RT_DIMENSIONS) for iDataIndex in range(len(aDataList), len(aDataList) + len(aChunkData)): oDataMap[aChunkData[iDataIndex - len(aDataList)][Company.SYMBOL]] = iDataIndex aDataList += aChunkData del aChunkData del iCurChunk del iDataIndex # broadcast new data to all portfolios for oPortfolio in oPortfolioCollection.iteritems(): oAlgorithm = oPortfolio['algorithm'] oAlgorithm.run(oDataMap) # insert new data if aDataList: TradingData.insert(self.oDB, TradingData.S_RT_DATA, aDataList) self.oDB.commit() else: Logger.logError('There was an error inserting real time data') del oDataMap Logger.logApp("Finished a trading cycle") # it's after 4:30PM EST on a week day let's close the trading day and go to sleep if (bIsWeekDay and bTrading and oNow.hour >= 16 and oNow.minute > 30) or Conf.DAEMON_IS_DEBUG: # insert portfolio data for oPortfolio in oPortfolioCollection.iteritems(): oAlgorithm = oPortfolio['algorithm'] oAlgorithm.insert() # clean up market vars del aTickerChunks del oPortfolioCollection # OK to start trading bTrading = False time.sleep(Conf.DAEMON_SLEEP)
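# Self-contained sketch of the week-day / market-hours gate used in run(). Note that
# strftime('%A') returns capitalized names ('Saturday'), so a lowercase comparison never
# matches; weekday() < 5 is the usual check. The 9:30-16:00 window stands in for the
# Conf.MARKET_* constants and is an assumption, not a value read from Conf.
import datetime

def market_is_open(now):
    is_weekday = now.weekday() < 5                      # Monday=0 ... Friday=4
    is_market_hours = datetime.time(9, 30) <= now.time() < datetime.time(16, 0)
    return is_weekday and is_market_hours

print(market_is_open(datetime.datetime(2020, 1, 6, 10, 0)))   # True (a Monday morning)
print(market_is_open(datetime.datetime(2020, 1, 4, 10, 0)))   # False (a Saturday)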
async def command_called(self, message, command): split_message = message.content.split(" ") server, channel, author = message.guild, message.channel, message.author if command is self.commands["Set Shop Command"]: if len(split_message) > 1: shop_name = split_message[1] if is_valid_shop_name(shop_name): # Drop old shop table if needed self.delete_shop_by_channel_id(channel.id) # Create new tables and rows self.shops_database.execute( """CREATE TABLE IF NOT EXISTS '%s'(role_id TEXT UNIQUE, price NUMERIC, time_added REAL, duration REAL)""" % shop_name) self.shop_info_database.execute( "REPLACE INTO shops VALUES('%s', '%s', 0)" % (channel.id, shop_name)) await Utils.simple_embed_reply( channel, "[Shop Created]", "`%s` has been assigned `%s.`" % (str(channel), shop_name)) else: await Utils.simple_embed_reply( channel, "[Error]", "Shop parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["List Shops Command"]: shop_names = [] info = self.shop_info_database.execute( "SELECT channel_id, shop_name FROM shops") known_channels = [channel.id for channel in server.channels] for i in range(0, len(info), 2): if info[i] in known_channels: shop_names.append(info[i + 1]) if len(shop_names) > 0: shop_text = ''.join( [shop_name + "\n" for shop_name in shop_names])[:-1] await Utils.simple_embed_reply(channel, "[Shops]", shop_text) else: await Utils.simple_embed_reply(channel, "[Shops]", "No shops exist.") elif command is self.commands["Delete Shop Command"]: if len(split_message) > 1: shop_name = split_message[1] if is_valid_shop_name(shop_name): shop_names = self.shop_info_database.execute( "SELECT shop_name FROM shops") if len(shop_names) > 0: if shop_name in shop_names: self.shops_database.execute( "DROP TABLE IF EXISTS '%s'" % shop_name) self.shop_info_database.execute( "DELETE FROM shops WHERE shop_name='%s'" % shop_name) self.shop_info_database.execute( "DELETE FROM messages WHERE shop_name='%s'" % shop_name) await Utils.simple_embed_reply( channel, "[Shops]", "Shop `%s` was deleted." % shop_name) else: await Utils.simple_embed_reply( channel, "[Error]", "That shop doesn't exist.") else: await Utils.simple_embed_reply(channel, "[Error]", "No shops exist.") else: await Utils.simple_embed_reply( channel, "[Error]", "Shop parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Set Shop Role Command"]: if len(split_message) > 4: shop_name, price, duration, role_text = split_message[1], split_message[2].lower(), split_message[3], \ split_message[4] if is_valid_shop_name(shop_name): if self.shop_exists(shop_name): if Utils.isfloat(duration) or duration == "permanent": duration = -1 if duration == "permanent" or 0 else float( duration) if price.isdigit(): price = int(price) role = Utils.get_role(server, role_text) if role is not None: role_ids = [ name.lower() for name in self.shops_database. execute("SELECT role_id FROM '%s'" % shop_name) ] role_names = [ str( Utils.get_role_by_id( server, role_id)) for role_id in role_ids ] if str(role).lower() not in role_names: self.shops_database.execute( "INSERT OR IGNORE INTO '%s' VALUES('%s', '%d', '%s', '%s')" % (shop_name, role.id, int(price), time.time(), duration)) await Utils.simple_embed_reply( channel, "[Shops]", "`%s` has been assigned to `%s` at the price of `%s` for `%s` hours." 
% (str(role), shop_name, str(price), str("infinite" if duration == -1 else duration))) await self.update_messages() else: if role.id in role_ids: self.shops_database.execute( "REPLACE INTO '%s' VALUES('%s', '%d', '%s', '%s')" % (shop_name, role.id, int(price), time.time(), duration)) await Utils.simple_embed_reply( channel, "[Shops]", "The role `%s` within `%s` now has a price of `%s` for `%s` hours." % (str(role), shop_name, str(price), str("infinite" if duration == -1 else duration))) await self.update_messages() else: await Utils.simple_embed_reply( channel, "[Error]", "Duplicate role names not allowed. (lowercase-checked)" ) else: await Utils.simple_embed_reply( channel, "[Error]", "That role doesn't exist.") else: await Utils.simple_embed_reply( channel, "[Error]", "Price parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Duration parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "That shop doesn't exist.") else: await Utils.simple_embed_reply( channel, "[Error]", "Shop parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Toggle Shop Autodelete Command"]: if len(split_message) > 1: shop_name = split_message[1] if is_valid_shop_name(shop_name): if self.shop_exists(shop_name): new_value = int( self.shop_info_database.execute( "SELECT is_purge FROM shops WHERE shop_name='%s'" % shop_name)[0]) ^ 1 self.shop_info_database.execute( "UPDATE shops SET is_purge='%d' WHERE shop_name='%s'" % (new_value, shop_name)) await Utils.simple_embed_reply( channel, "[Shops]", "`%s`'s delete mode set to `%r`" % (shop_name, bool(new_value))) else: await Utils.simple_embed_reply( channel, "[Error]", "That shop doesn't exist.") else: await Utils.simple_embed_reply( channel, "[Error]", "Shop parameter incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Shop Command"]: if len(split_message) > 1: shop_name = split_message[1] if self.shop_exists(shop_name): embed = await self.get_shop_embed(shop_name, server) shop_message = await channel.send(embed=embed) self.shop_info_database.execute( "REPLACE INTO messages VALUES('%s', '%s', '%s')" % (shop_name, shop_message.id, channel.id)) else: await Utils.simple_embed_reply(channel, "[Error]", "That shop doesn't exist.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Buy Command"]: if len(split_message) > 1: given_name = ' '.join(split_message[1:]).lower() shop = self.shop_info_database.execute( "SELECT shop_name FROM shops where channel_id='%s'" % message.channel.id) if len(shop) > 0: shop = shop[0] role_ids = self.shops_database.execute( "SELECT role_id FROM '%s'" % shop) role_costs = self.shops_database.execute( "SELECT price FROM '%s'" % shop) if len(role_ids) > 0: role_names = [ str(Utils.get_role_by_id(server, role_id)) for role_id in role_ids ] if given_name in [ str(name).lower() for name in role_names ]: for i in range(len(role_ids)): role_id, role_name, role_cost = role_ids[ i], role_names[i], role_costs[i] if role_name.lower() == given_name: user_cash = EconomyUtils.get_cash( server.id, author.id) if user_cash >= role_cost: EconomyUtils.set_cash( server.id, author.id, user_cash - role_cost) role = Utils.get_role_by_id( server, role_id) if role not in author.roles: await Utils.client.add_roles( author, role) await Utils.simple_embed_reply( 
channel, "[%s]" % shop, "You have purchased `%s`." % role_name) else: await Utils.simple_embed_reply( channel, "[Error]", "You already have that role.") else: await Utils.simple_embed_reply( channel, "[Error]", "You don't have enough cash to do that." ) else: await Utils.simple_embed_reply( channel, "[Error]", "Role not found.") else: await Utils.simple_embed_reply(channel, "[Error]", "No roles found.") else: await Utils.simple_embed_reply( channel, "[Error]", "Shop not found for this channel.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Delete Shop Role Command"]: if len(split_message) > 1: given_name = ' '.join(split_message[1:]).lower() shop = self.shop_info_database.execute( "SELECT shop_name FROM shops where channel_id='%s'" % channel.id) if len(shop) > 0: shop = shop[0] role_ids = self.shops_database.execute( "SELECT role_id FROM '%s'" % shop) if len(role_ids) > 0: role_names = [ str(Utils.get_role_by_id(server, role_id)) for role_id in role_ids ] if given_name in [ str(name).lower() for name in role_names ]: for i in range(len(role_ids)): self.shops_database.execute( "DELETE FROM '%s' WHERE role_name='%s'" % (shop, role_names[i])) await Utils.simple_embed_reply( channel, "[%s]" % shop, "Role `%s` has been deleted from `%s`." % (role_names[i], shop)) else: await Utils.simple_embed_reply( channel, "[Error]", "Role not found.") else: await Utils.simple_embed_reply(channel, "[Error]", "No roles found.") else: await Utils.simple_embed_reply( channel, "[Error]", "Shop not found for this channel.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.")
def zonalstats(path, rasters, params, output, paramstats, classes="", bufferDist=None, gdalpath="", write_ouput=False, gdalcachemax="9000"): """Compute zonal statistitics (descriptive and categorical) on multi-band raster or multi-rasters based on Point (buffered or not) or Polygon zonal vector Parameters ---------- path : string working directory rasters : list list of rasters to analyse params : list list of fid list and vector file output : vector file (sqlite, shapefile and geojson) vector file to store statistitics paramstats : list list of statistics to compute (e.g. {1:'stats', 2:'rate'}) - paramstats = {1:"rate", 2:"statsmaj", 3:"statsmaj", 4:"stats", 2:stats_cl} - stats : mean_b, std_b, max_b, min_b - statsmaj : meanmaj, stdmaj, maxmaj, minmaj of majority class - rate : rate of each pixel value (classe names) - stats_cl : mean_cl, std_cl, max_cl, min_cl of one class - val : value of corresponding pixel (only for Point geometry and without other stats) classes : nomenclature file nomenclature bufferDist : int in case of point zonal vector : buffer size gdalpath : string path of gdal binaries (for system execution) write_ouput : boolean if True, wrapped raster are stored in working dir gdalcachemax : string gdal cache for wrapping operation (in Mb) """ # Features and vector file to intersect vector, idvals = params # Raster resolution # TODO : Check if all rasters have same extent and resolution res = abs(fut.getRasterResolution(rasters[0])[0]) # if no vector subsetting (all features) if not idvals: idvals = getFidList(vector) # vector open and iterate features and/or buffer geom vectorname = os.path.splitext(os.path.basename(vector))[0] vectorgeomtype = vf.getGeomType(vector) vectorbuff = None # Read statistics parameters if isinstance(paramstats, list): paramstats = dict([(x.split(':')[0], x.split(':')[1]) for x in paramstats]) # Value extraction if not bufferDist and vectorgeomtype in (1, 4, 1001, 1004): if 'val' in paramstats.values(): if vectorgeomtype == 1: schema = {'geometry': 'Point', 'properties': {}} elif vectorgeomtype == 4: schema = {'geometry': 'MultiPoint', 'properties': {}} else: raise Exception("Only pixel value extraction available "\ "when Point geometry without buffer distance is provided") # Stats extraction else: # Point geometry if vectorgeomtype in (1, 4, 1001, 1004): if vectorgeomtype == 1: schema = {'geometry': 'Point', 'properties': {}} elif vectorgeomtype == 4: schema = {'geometry': 'MultiPoint', 'properties': {}} vectorbuff = vectorname + "buff.shp" _ = bfo.bufferPoly(vector, vectorbuff, bufferDist=bufferDist) # Polygon geometry elif vectorgeomtype in (3, 6, 1003, 1006): if vectorgeomtype == 3: schema = {'geometry': 'Polygon', 'properties': {}} elif vectorgeomtype == 6: schema = {'geometry': 'MultiPolygon', 'properties': {}} else: raise Exception("Geometry type of vector file not handled") # Vector reading dataset = vf.openToRead(vector) lyr = dataset.GetLayer() spatialref = lyr.GetSpatialRef().ExportToProj4() # Prepare stats DataFrame stats = definePandasDf(idvals, paramstats, classes) # Iterate vector's features (FID) for idval in idvals: lyr.SetAttributeFilter("FID=" + str(idval)) feat = lyr.GetNextFeature() geom = feat.GetGeometryRef() if geom: # Insert geometry in DataFrame geomdf = pad.DataFrame(index=[idval], \ columns=["geometry"], \ data=[str(geom.ExportToWkt())]) # Get Point coordinates (pixel value case) if vectorgeomtype in (1, 4, 1001, 1004) and 'val' in paramstats.values(): xpt, ypt, _ = geom.GetPoint() stats.update(geomdf) if vectorbuff: 
vector = vectorbuff # creation of wrapped rasters if gdalpath != "" and gdalpath is not None: gdalpath = gdalpath + "/" else: gdalpath = "" bands = [] success = True for idx, raster in enumerate(rasters): # Value extraction if 'val' in paramstats.values(): if vectorgeomtype not in (1, 4, 1001, 1004): raise Exception("Type of input vector %s must be "\ "'Point' for pixel value extraction"%(vector)) else: bands.append(raster) tmpfile = raster # Stats Extraction else: tmpfile = os.path.join( path, 'rast_%s_%s_%s' % (vectorname, str(idval), idx)) try: # TODO : test gdal version : >= 2.2.4 if write_ouput: cmd = '%sgdalwarp -tr %s %s -tap -q -overwrite -cutline %s '\ '-crop_to_cutline --config GDAL_CACHEMAX %s -wm %s '\ '-wo "NUM_THREADS=ALL_CPUS" -wo "CUTLINE_ALL_TOUCHED=YES" "\ "-cwhere "FID=%s" %s %s -ot Float32' %(gdalpath, \ res, \ res, \ vector, \ gdalcachemax, \ gdalcachemax, \ idval, \ raster, \ tmpfile) Utils.run(cmd) else: gdal.SetConfigOption("GDAL_CACHEMAX", gdalcachemax) tmpfile = gdal.Warp('', raster, xRes=res, \ yRes=res, targetAlignedPixels=True, \ cutlineDSName=vector, cropToCutline=True, \ cutlineWhere="FID=%s"%(idval), format='MEM', \ warpMemoryLimit=gdalcachemax, \ warpOptions=[["NUM_THREADS=ALL_CPUS"], ["CUTLINE_ALL_TOUCHED=YES"]]) bands.append(tmpfile) success = True except: success = False pass if success: for param in paramstats: # Multi-raster / Multi-band data preparation if len(rasters) != 1: band = bands[int(param) - 1] nbband = 1 else: band = tmpfile nbband = int(param) # Statistics extraction if band: methodstat = paramstats[param] if methodstat == 'rate': classStats, classmaj, posclassmaj = countPixelByClass( band, idval, nbband) stats.update(classStats) # Add columns when pixel values are not identified in nomenclature file if list(classStats.columns) != list(stats.columns): newcols = list( set(list(classStats.columns)).difference( set(list(stats.columns)))) pad.concat([stats, classStats[newcols]], axis=1) elif methodstat == 'stats': cols = ["meanb%s"%(int(param)), "stdb%s"%(int(param)), \ "maxb%s"%(int(param)), "minb%s"%(int(param))] stats.update(pad.DataFrame(data=[rasterStats(band, nbband)], \ index=[idval], \ columns=cols)) elif methodstat == 'statsmaj': if not classmaj: if "rate" in paramstats.values(): idxbdclasses = [ x for x in paramstats if paramstats[x] == "rate" ][0] if len(rasters) != 1: bandrate = bands[idxbdclasses - 1] nbbandrate = 0 else: bandrate = band nbbandrate = idxbdclasses - 1 else: raise Exception("No classification raster provided "\ "to check position of majority class") classStats, classmaj, posclassmaj = countPixelByClass( bandrate, idval, nbbandrate) classStats = None cols = ["meanmajb%s"%(int(param)), "stdmajb%s"%(int(param)), \ "maxmajb%s"%(int(param)), "minmajb%s"%(int(param))] stats.update(pad.DataFrame(data=[rasterStats(band, nbband, posclassmaj)], \ index=[idval], \ columns=cols)) elif "stats_" in methodstat: if "rate" in paramstats.values(): # get positions of class cl = paramstats[param].split('_')[1] idxbdclasses = [ x for x in paramstats if paramstats[x] == "rate" ][0] rastertmp = gdal.Open(bands[idxbdclasses - 1], 0) data = rastertmp.ReadAsArray() posclass = np.where(data == int(cl)) data = None else: raise Exception("No classification raster provided "\ "to check position of requested class") cols = ["meanb%sc%s"%(int(param), cl), "stdb%sc%s"%(int(param), cl), \ "maxb%sc%s"%(int(param), cl), "minb%sc%s"%(int(param), cl)] stats.update(pad.DataFrame(data=[rasterStats(band, nbband, posclass)], \ index=[idval], \ columns=cols)) 
elif "val" in methodstat: colpt, rowpt = fut.geoToPix(band, xpt, ypt) cols = "valb%s" % (param) stats.update(pad.DataFrame(data=[rasterStats(band, nbband, None, (colpt, rowpt))], \ index=[idval], \ columns=[cols])) else: print("The method %s is not implemented" % (paramstats[param])) band = None if write_ouput: os.remove(tmpfile) else: print( "gdalwarp problem for feature %s (geometry error, too small area, etc.)" % (idval)) # Prepare geometry and projection stats["geometry"] = stats["geometry"].apply(wkt.loads) statsfinal = gpad.GeoDataFrame(stats, geometry="geometry") statsfinal.fillna(0, inplace=True) statsfinal.crs = {'init': 'proj4:%s' % (spatialref)} # change column names if rate stats expected and nomenclature file is provided if "rate" in paramstats and classes != "": # get multi-level nomenclature # classes="/home/qt/thierionv/iota2/iota2/scripts/simplification/nomenclature17.cfg" nomenc = nomenclature.Iota2Nomenclature(classes, 'cfg') desclasses = nomenc.HierarchicalNomenclature.get_level_values( nomenc.getLevelNumber() - 1) cols = [(str(x), str(z)) for x, y, w, z in desclasses] # rename columns with alias for col in cols: statsfinal.rename(columns={col[0]: col[1].decode('utf8')}, inplace=True) # change columns type schema['properties'] = OrderedDict([(x, 'float:10.2') for x in list(statsfinal.columns) \ if x != 'geometry']) # exportation # TO TEST # TODO Export format depending on columns number (shapefile, sqlite, geojson) # Check Issue on framagit convert = False outformat = os.path.splitext(output)[1] if outformat == ".shp": driver = "ESRI Shapefile" elif outformat == ".geojson": driver = "GeoJSON" elif outformat == ".sqlite": driver = "ESRI Shapefile" convert = True else: raise Exception("The output format '%s' is not handled" % (outformat[1:])) if not convert: statsfinal.to_file(output, driver=driver, schema=schema, encoding='utf-8') else: outputinter = os.path.splitext(output)[0] + '.shp' statsfinal.to_file(outputinter, driver=driver, schema=schema, encoding='utf-8') output = os.path.splitext(output)[0] + '.sqlite' Utils.run('ogr2ogr -f SQLite %s %s' % (output, outputinter))
def regularisation(raster, threshold, nbcores, path, ram = "128"): filetodelete = [] # First regularisation in connection 8, second in connection 4 init_regul = time.time() # A mask for each regularization rule # Agricultuture bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==11 || im1b1==12)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_1.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_1.tif')) # Forest bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==31 || im1b1==32)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_2.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_2.tif')) # Urban bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==41 || im1b1==42 || im1b1==43)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_3.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_3.tif')) # Open natural areas bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==34 || im1b1==36 || im1b1==211)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_4.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_4.tif')) # Bare soil bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==45 || im1b1==46)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_5.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_5.tif')) # Perennial agriculture bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==221 || im1b1==222)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_6.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_6.tif')) # Road bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==44)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_7.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_7.tif')) # Water bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==51)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_8.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_8.tif')) # Snow and glacier bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster, "exp": '(im1b1==53)?im1b1:0', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_9.tif')}) bandMathAppli.ExecuteAndWriteOutput() filetodelete.append(os.path.join(path, 'mask_9.tif')) for i in range(9): command = "gdalwarp -q -multi -wo NUM_THREADS=%s -dstnodata 0 %s/mask_%s.tif %s/mask_nd_%s.tif"%(nbcores, \ path, \ str(i + 1), \ path, \ str(i + 1)) Utils.run(command) filetodelete.append("%s/mask_nd_%s.tif"%(path, str(i + 1))) masktime = time.time() print(" ".join([" : ".join(["Masks generation for adaptive rules", str(masktime - init_regul)]), "seconds"])) # Two successive regularisation (8 neighbors then 4 neighbors) for i in range(2): if i == 0: connexion = 8 else : connexion = 4 # Tiles number to treat in parralel pool = Pool(processes = 6) 
iterable = (np.arange(6)).tolist() function = partial(gdal_sieve, threshold, connexion, path) pool.map(function, iterable) pool.close() pool.join() for j in range(6): command = "gdalwarp -q -multi -wo NUM_THREADS=%s -dstnodata 0 %s/mask_%s_%s.tif %s/mask_nd_%s_%s.tif"%(nbcores, \ path, \ str(j + 1), \ str(connexion), \ path, \ str(j + 1), \ str(connexion)) Utils.run(command) for j in range(6): os.remove(path + "/mask_%s_%s.tif"%(str(j + 1),str(connexion))) for j in range(6): os.remove(path + "/mask_nd_%s_8.tif"%(str(j + 1))) adaptativetime = time.time() print(" ".join([" : ".join(["Adaptative regularizations", str(adaptativetime - masktime)]), "seconds"])) # Fusion of rule-based regularisation rastersList = [os.path.join(path, "mask_nd_1_4.tif"), os.path.join(path, "mask_nd_2_4.tif"), os.path.join(path, "mask_nd_3_4.tif"), \ os.path.join(path, "mask_nd_4_4.tif"), os.path.join(path, "mask_nd_5_4.tif"), os.path.join(path, "mask_nd_6_4.tif"), \ os.path.join(path, "mask_nd_7.tif"), os.path.join(path, "mask_nd_8.tif"), os.path.join(path, "mask_nd_9.tif")] bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": rastersList, "exp": 'im1b1+im2b1+\ im3b1+im4b1+\ im5b1+im6b1+\ im7b1+im8b1+\ im9b1', "ram": str(0.2 * float(ram)), "pixType": "uint8", "out": os.path.join(path, 'mask_regul_adapt.tif')}) bandMathAppli.ExecuteAndWriteOutput() for filemask in rastersList: os.remove(filemask) command = "gdalwarp -q -multi -wo NUM_THREADS=" command += "%s -dstnodata 0 %s/mask_regul_adapt.tif %s/mask_nd_regul_adapt.tif"%(nbcores, \ path, \ path) Utils.run(command) filetodelete.append("%s/mask_regul_adapt.tif"%(path)) # Regularisation based on majority voting # 8 neighbors command = "gdal_sieve.py -q -8 -st " command += "%s %s/mask_nd_regul_adapt.tif %s/mask_regul_adapt_0.tif" %(threshold, \ path, \ path) Utils.run(command) filetodelete.append("%s/mask_nd_regul_adapt.tif"%(path)) command = "gdalwarp -q -multi -wo NUM_THREADS=" command += "%s -dstnodata 0 %s/mask_regul_adapt_0.tif %s/mask_nd_regul_adapt_0.tif"%(nbcores, \ path, \ path) Utils.run(command) filetodelete.append("%s/mask_regul_adapt_0.tif"%(path)) # 4 neighbors command = "gdal_sieve.py -q -4 -st " command += "%s %s/mask_nd_regul_adapt_0.tif %s/regul_adapt_maj.tif" %(threshold, \ path, \ path) Utils.run(command) filetodelete.append("%s/mask_nd_regul_adapt_0.tif"%(path)) out_classif_sieve = "%s/regul_adapt_maj.tif"%(path) majoritytime = time.time() print(" ".join([" : ".join(["Majority voting regularization", str(majoritytime - adaptativetime)]), "seconds"])) for filetodel in filetodelete: if os.path.exists(filetodel): os.remove(filetodel) end_regul = time.time() - init_regul return out_classif_sieve, end_regul
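# Minimal sketch of the two-pass majority sieve that ends regularisation: the same
# gdal_sieve.py call, first with 8-connectivity then 4-connectivity. Paths are
# placeholders and `run` stands in for Utils.run (assumed to be a shell-command wrapper);
# the intermediate nodata gdalwarp step of the real function is omitted here.
def majority_sieve(threshold, src, dst_prefix, run):
    eight = "%s_sieve8.tif" % dst_prefix
    four = "%s_sieve4.tif" % dst_prefix
    run("gdal_sieve.py -q -8 -st %s %s %s" % (threshold, src, eight))
    run("gdal_sieve.py -q -4 -st %s %s %s" % (threshold, eight, four))
    return four

# Example: majority_sieve(10, "/tmp/mask_nd_regul_adapt.tif", "/tmp/regul", Utils.run)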
def extractRasterArray(rasters, paramstats, vector, vectorgeomtype, fid,
                       gdalpath="", gdalcachemax="9000", systemcall=True, path=""):
    """Clip rasters and store them in ndarrays

    Parameters
    ----------
    rasters : list
        list of rasters to analyse
    paramstats : dict
        list of statistics to compute (e.g. {1:'stats', 2:'rate'})
    vector : string
        vector file for cutline operation
    vectorgeomtype : int
        type of geometry of input/output vector
        (http://portal.opengeospatial.org/files/?artifact_id=25355)
    fid : integer
        FID value to clip raster (cwhere parameter of gdalwarp)
    gdalpath : string
        gdal binaries path
    gdalcachemax : string
        gdal cache for wrapping operation (in Mb)
    systemcall : boolean
        if True, use os system call to execute gdalwarp
        (useful to control gdal binaries version - gdalpath parameter)
    path : string
        temporary path to store temporary data if systemcall is True

    Return
    ----------
    boolean
        if True, wrap operation well terminated
    ndarray
        ndarrays
    """
    bands = []
    todel = []
    success = True
    ndbands = None

    # Get rasters resolution
    res = abs(fut.getRasterResolution(rasters[0])[0])
    print(fid)

    # Get vector name
    vectorname = os.path.splitext(os.path.basename(vector))[0]

    for idx, raster in enumerate(rasters):
        # Value extraction
        if 'val' in list(paramstats.values()):
            if vectorgeomtype not in (1, 4, 1001, 1004):
                raise Exception("Type of input vector %s must be "\
                                "'Point' for pixel value extraction"%(vector))
            else:
                bands.append(raster)
                todel = []
        # Stats Extraction
        else:
            try:
                # TODO : test gdal version : >= 2.2.4
                if systemcall:
                    tmpfile = os.path.join(
                        path, 'rast_%s_%s_%s' % (vectorname, str(fid), idx))
                    cmd = '%sgdalwarp -tr %s %s -tap -q -overwrite -cutline %s '\
                          '-crop_to_cutline --config GDAL_CACHEMAX %s -wm %s '\
                          '-wo "NUM_THREADS=ALL_CPUS" -wo "CUTLINE_ALL_TOUCHED=YES" '\
                          '-cwhere "FID=%s" %s %s -ot Float32'%(os.path.join(gdalpath, ''), \
                                                                res, \
                                                                res, \
                                                                vector, \
                                                                gdalcachemax, \
                                                                gdalcachemax, \
                                                                fid, \
                                                                raster, \
                                                                tmpfile)
                    Utils.run(cmd)
                    todel.append(tmpfile)
                else:
                    gdal.SetConfigOption("GDAL_CACHEMAX", gdalcachemax)
                    tmpfile = gdal.Warp('', raster, xRes=res, \
                                        yRes=res, targetAlignedPixels=True, \
                                        cutlineDSName=vector, cropToCutline=True, \
                                        cutlineWhere="FID=%s"%(fid), format='MEM', \
                                        warpMemoryLimit=gdalcachemax, \
                                        warpOptions=[["NUM_THREADS=ALL_CPUS"], ["CUTLINE_ALL_TOUCHED=YES"]])
                bands.append(tmpfile)
                # store rasters in ndarray
                ndbands = storeRasterInArray(bands)
            except:
                success = False

    # Remove tmp rasters
    for filtodel in todel:
        os.remove(filtodel)

    if not success:
        ndbands = None

    return success, ndbands
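# Hedged usage sketch for extractRasterArray: clip two rasters against one polygon FID
# (geometry type 3) and check the returned flag before using the arrays. The file names
# are placeholders, and systemcall=False keeps everything in memory via gdal.Warp.
rasters = ["/tmp/Classif_Seed_0.tif", "/tmp/Confidence_Seed_0.tif"]
paramstats = {1: "rate", 2: "statsmaj"}
ok, arrays = extractRasterArray(rasters, paramstats, "/tmp/dept_1.shp",
                                3, 42, systemcall=False)
if ok and arrays is not None:
    print(type(arrays))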
async def command_called(self, message, command): split_message = message.content.split(" ") server, channel, author = message.guild, message.channel, message.author if command is self.commands["Claim Command"]: if len(split_message) > 2: # Try and get a user from the passed arguments user = Utils.get_user(server, split_message[1]) # Check if a user was found (arguments were valid) if user is not None: # Check if the author is trying to claim themselves if user.id != author.id: amount = split_message[2].lower() if amount == "all": amount = str( EconomyUtils.get_cash(server.id, author.id)) # Check if the "Amount" parameter is a digit so it can be used correctly later if amount.isdigit(): amount = int(amount) user_cash = EconomyUtils.get_cash( server.id, author.id) # Calculate the waifu price -> DB Waifu Price + Claim Addition Amount waifu_price = int( self.waifus_db.execute( "SELECT price FROM '%s' WHERE user_id='%s'" % (server.id, user.id))[0]) + int( self.config.get_data( "Claim Addition Amount")) # Check if the user has enough cash if user_cash >= amount: # Check if the given argument is at least the minimum waifu price if amount >= waifu_price: # Set the user's (waifu) owner's ID in the DB self.waifus_db.execute( "UPDATE '%s' SET owner_id='%s', price='%d' WHERE user_id='%s'" % (server.id, author.id, amount, user.id)) # Deduct from the user's cash the amount they spent EconomyUtils.set_cash( server.id, user.id, user_cash - amount) # Tell them it was a successful waifu claim await Utils.simple_embed_reply( channel, "[Waifu]", "You claimed %s for %d%s." % (str(user), amount, EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[Error]", "You must pay at least %d to claim them!" % waifu_price) else: await Utils.simple_embed_reply( channel, "[Error]", "You don't have enough cash to do that.") else: await Utils.simple_embed_reply( channel, "[Error]", "Invalid amount supplied.") else: await Utils.simple_embed_reply( channel, "[Error]", "You cannot claim yourself.") else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Waifu Info Command"]: # Get the user this command is being called on - the author by default user = author if len(split_message) > 1: given_user = Utils.get_user(server, split_message[1]) if given_user is not None: user = given_user else: return await Utils.simple_embed_reply( channel, "[Error]", "Invalid user supplied.") # Start a new embed for the info to be displayed on embed = discord.Embed(title="[Waifu Info]", description="Waifu info for %s" % str(user), color=Utils.default_hex_color) # Create text of the user's waifus from the DB waifus = ''.join([ str(Utils.get_user(server, i)) + "\n" for i in self.waifus_db.execute( "SELECT user_id FROM '%s' WHERE owner_id='%s'" % (server.id, user.id)) ])[:-1] waifus = "None" if waifus == '' else waifus # Grab the price and owner of the user from the Db gifts = self.config.get_data("Gifts") # Get gift names from gifts DB (Table names) for gift and pocket gifts text db_gift_names = [gift_name for gift_name in gifts] # db_gift_names = self.gifts_db.execute("SELECT name FROM sqlite_master WHERE type='table'") # ---------- GIFT TEXT ---------- gifts_text = "" pocket_text = "" for gift_name in db_gift_names: # Get the number of gifts of that type the user has gift_amount, pocket_amount = tuple( self.gifts_db.execute( "SELECT amount, pocket_amount FROM '%s' WHERE 
server_id='%s' AND user_id='%s'" % (gift_name, server.id, user.id))) # If they have at least one of that gift, add it to the gift text if gift_amount > 0: gifts_text += "%s x%d" % (gifts[gift_name]["Symbol"], gift_amount) if pocket_amount > 0: pocket_text += "%s x%d" % (gifts[gift_name]["Symbol"], pocket_amount) # If the user doesn't have any gifts, set the text to "None" gifts_text = "None" if gifts_text == "" else gifts_text # If the user doesn't have any pocket gifts, set the text to "None" pocket_text = "None" if pocket_text == "" else pocket_text # Grab more info for the embed from the DB price, claimed_by, affinity, changes_of_heart, divorces = tuple( self.waifus_db.execute( "SELECT price, owner_id, affinity, changes_of_heart, divorces FROM '%s' WHERE user_id='%s'" % (server.id, user.id))) # Grab user info - it will be "None" if it doesn't apply claimed_by_user = Utils.get_user(server, str(claimed_by)) affinity_user = Utils.get_user(server, str(affinity)) # Generate the rest of the embed embed.add_field(name="Claimed By", value=str(claimed_by_user), inline=True) embed.add_field(name="Price", value=price, inline=True) embed.add_field(name="Divorces", value=divorces, inline=True) embed.add_field(name="Gifts", value=gifts_text, inline=False) embed.add_field(name="Pocket", value=pocket_text, inline=False) embed.add_field(name="Affinity", value=str(affinity_user), inline=True) embed.add_field(name="Changes of Heart", value=changes_of_heart, inline=True) embed.add_field(name="Waifus", value=waifus, inline=True) # Send the embed as the reply await channel.send(embed=embed) # TODO: Delete marriage role elif command is self.commands["Divorce Command"]: if len(split_message) > 1: # Try and get a user from the passed arguments user = Utils.get_user(server, split_message[1]) # Check if a user was found (arguments were valid) if user is not None: # Get the message author's waifus from DB waifus = self.waifus_db.execute( "SELECT user_id FROM '%s' WHERE owner_id='%s'" % (server.id, author.id)) # Check if the passed user is actually one of the author's waifus if user.id in waifus: # Calculate how much each user will get back from the divorce waifu_cost_split = round( self.waifus_db.execute( "SELECT price FROM '%s' WHERE user_id='%s' LIMIT 1" % (server.id, user.id))[0] / 2) # Grab cash values of each user and update the balance with the divorce money author_cash = EconomyUtils.get_cash( server.id, author.id) user_cash = EconomyUtils.get_cash(server.id, user.id) EconomyUtils.set_cash(server.id, user.id, user_cash + waifu_cost_split) EconomyUtils.set_cash(server.id, author.id, author_cash + waifu_cost_split) # Remove the owner from the user self.waifus_db.execute( "UPDATE '%s' SET owner_id=NULL WHERE user_id='%s'" % (server.id, user.id)) # Increase author divorce count self.waifus_db.execute( "UPDATE '%s' SET divorces=divorces+1 WHERE user_id='%s'" % (server.id, author.id)) # Let the user know it was a successful divorce and how much they got back await Utils.simple_embed_reply( channel, "[Divorce]", "You divorced %s and received %d back!" 
% (str(user), waifu_cost_split)) else: await Utils.simple_embed_reply( channel, "[Error]", "That is not one of your waifus.") command.reset_cool_down(author.id) else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") command.reset_cool_down(author.id) else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") command.reset_cool_down(author.id) elif command is self.commands["Gifts Command"]: # Config gifts_per_page = 6 # Default 1st page, or set it as the passed argument if it exists and is valid page = 1 if len(split_message) > 1: if split_message[1].isdigit(): page = int(split_message[1]) # There is no 0th page, but this is a valid digit - so error if it's 0 if page == 0: return await Utils.simple_embed_reply( channel, "[Error]", "Page parameter is incorrect.") else: return await Utils.simple_embed_reply( channel, "[Error]", "Page parameter is incorrect.") # Start the embed for returning embed = discord.Embed(title="[Waifu Gifts]", description="Gifts for your waifus!", color=Utils.default_hex_color) # Grab all the gifts from the config gifts = self.config.get_data("Gifts") # Calculate the page count based on gift count and gifts per page page_count = (len(gifts) + gifts_per_page - 1) // gifts_per_page # Check if the page for display exists if page <= page_count: # Put the gift names into a list from each gift gift_names = [gift for gift in gifts] # Loop through gifts_per_page or the remaining number of gifts times for gift_number in range( min(gifts_per_page, len(gifts) - (page - 1) * gifts_per_page)): # Get the gift name and then the gift itself gift_name = gift_names[gifts_per_page * (page - 1) + gift_number] gift = gifts[gift_name] # Add found info to the embed as a new field embed.add_field(name="%s - %s" % (gift["Symbol"], gift_name), value=gift["Cost"], inline=True) # Set page number info footer embed.set_footer(text="%d/%d" % (page, page_count)) # Send the created embed as a reply await channel.send(embed=embed) else: await Utils.simple_embed_reply(channel, "[Error]", "That page doesn't exist.") elif command is self.commands["Gift Command"]: if len(split_message) > 2: user = Utils.get_user(server, split_message[1]) if user is not None: if user is not author: # Build the given gift name (since it caN CONTAIN spaces) # ["A", "goOD", "gIfT"] -> "a good gift" given_gift_name = (' '.join(split_message[2:])).lower() raw_gifts = self.config.get_data("Gifts") if given_gift_name in [ gift_name.lower() for gift_name in raw_gifts ]: for gift_name in raw_gifts: # Check if the lowercase gift name is the same as the lowercase given gift name if gift_name.lower() == given_gift_name: gift = raw_gifts[gift_name] author_cash = EconomyUtils.get_cash( server.id, author.id) if author_cash >= gift["Cost"]: # Add one to gift counter in DB self.gifts_db.execute( "UPDATE '%s' SET amount=amount+1 WHERE server_id='%s' AND user_id='%s'" % (gift_name, server.id, user.id)) # Update author cash EconomyUtils.set_cash( server.id, author.id, author_cash - gift["Cost"]) # Let the author know the user got the gift await Utils.simple_embed_reply( channel, "[Gift]", "%s was gifted **%s**." % (str(user), "%s %s" % (gift_name, gift["Symbol"]))) break else: await Utils.simple_embed_reply( channel, "[Error]", "You don't have enough cash to do that." 
) else: await Utils.simple_embed_reply( channel, "[Error]", "Gift not found.") else: await Utils.simple_embed_reply( channel, "[Error]", "You cannot give yourself a gift.") else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") elif command is self.commands["Affinity Command"]: if len(split_message) > 1: # Try and get a user from what was passed user = Utils.get_user(server, split_message[1]) # Check if a valid user was given if user is not None: if user is not author: current_affinity = self.waifus_db.execute( "SELECT affinity FROM '%s' WHERE user_id='%s'" % (server.id, author.id))[0] if current_affinity != user.id: # If the current author affinity is not Null, then increase the changes of heart if current_affinity is not None: self.waifus_db.execute( "UPDATE '%s' SET changes_of_heart=changes_of_heart+1 WHERE user_id='%s'" % (server.id, author.id)) # Set the affinity in the DB self.waifus_db.execute( "UPDATE '%s' SET affinity='%s' WHERE user_id='%s'" % (server.id, user.id, author.id)) # Let the user know the affinity was set await Utils.simple_embed_reply( channel, "[Affinity]", "Your affinity is now set towards %s." % str(user)) else: await Utils.simple_embed_reply( channel, "[Error]", "You already have your affinity set towards %s." % str(user)) else: await Utils.simple_embed_reply( channel, "[Error]", "You cannot set your affinity to yourself.") else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") elif command is self.commands["Waifu Leaderboard Command"]: # Grab ALL user IDs user_ids = self.waifus_db.execute("SELECT user_id FROM '%s'" % server.id) # Get Owner IDs (basically 1 person from each couple) owner_ids = self.waifus_db.execute( "SELECT owner_id FROM '%s' WHERE owner_id IS NOT NULL" % server.id) # If there's at least one couple if len(owner_ids) > 0: gifts = self.config.get_data("Gifts") # User ID : Gift Value gift_values = {} # Populate the dict with user IDs for user_id in user_ids: gift_values[user_id] = 0 # Calculate gift values for each owner for gift_name in gifts: db_data = self.gifts_db.execute( "SELECT user_id, amount FROM '%s' WHERE server_id='%s' AND amount IS NOT 0" % (gift_name, server.id)) # If there was data found if db_data: db_data = [[db_data[i], db_data[i + 1]] for i in range(0, len(db_data), 2)] # Add up each gift price for each user for user_info in db_data: gift_values[user_info[ 0]] += gifts[gift_name]["Cost"] * user_info[1] # Owner ID : Total Value owner_total_values = {} # Add up all the parameters to calculate total value for owner_id in owner_ids: owner_affinity = tuple( self.waifus_db.execute( "SELECT affinity FROM '%s' WHERE user_id='%s'" % (server.id, owner_id))) # Get the other ID of the person in the couple, their value and affinity waifu_id, waifu_value, waifu_affinity = tuple( self.waifus_db.execute( "SELECT user_id, price, affinity FROM '%s' WHERE owner_id='%s'" % (server.id, owner_id))) total_value = waifu_value + gift_values[waifu_id] # Add 10% to the value if both users have their affinities set to each other if waifu_affinity == owner_id and owner_affinity == waifu_id: owner_total_values[owner_id] = int(total_value * 1.1) else: owner_total_values[owner_id] = total_value # Get the top ten values by sorting the keys in the dictionary, reversing it and grabbing 10 or the max top_ten = sorted(owner_total_values.items(), key=lambda x: x[1] )[::-1][:min(len(owner_total_values), 11)] embed = 
discord.Embed(title=" [Waifu Leaderboard]", color=Utils.default_hex_color) for i in range(len(top_ten)): # Get the info from the current spot in the top 10 owner_id, value = top_ten[i] # Grab the owner's affinity owner_affinity = self.waifus_db.execute( "SELECT affinity FROM '%s' WHERE user_id='%s'" % (server.id, owner_id))[0] # Get the other ID of the person in the couple and their affinity waifu_id, waifu_affinity = tuple( self.waifus_db.execute( "SELECT user_id, affinity FROM '%s' WHERE owner_id='%s'" % (server.id, owner_id))) owner_user = Utils.get_user_by_id(server, owner_id) waifu_user = Utils.get_user_by_id(server, waifu_id) desc = "**%s** - Claimed by **%s**\n" % (str(waifu_user), str(owner_user)) if waifu_affinity == owner_id and owner_affinity == waifu_id: desc += "... and %s likes %s too!" % (str(waifu_user), str(owner_user)) else: other_user = Utils.get_user_by_id( server, waifu_affinity) desc += "... but %s likes %s!" % (str(waifu_user), str(other_user)) embed.add_field(name="%s - %s%s" % (Utils.add_number_abbreviation(i + 1), value, EconomyUtils.currency), value=desc) await channel.send(embed=embed) else: await Utils.simple_embed_reply( channel, "[Waifu Leaderboard]", "No waifus are currently claimed!") elif command is self.commands["Delete Waifu Leaderboard Command"]: # Reset all waifu info self.waifus_db.execute( "UPDATE '%s' SET owner_id=NULL, affinity=NULL, divorces=0, changes_of_heart=0, price='%d'" % (server.id, self.config.get_data("Default Claim Amount"))) # Pocket all gifts for gift_name in self.config.get_data("Gifts"): self.gifts_db.execute( "UPDATE '%s' SET pocket_amount=pocket_amount+amount, amount=0" % gift_name) # Reply await Utils.simple_embed_reply( channel, "[Waifu Leaderboard]", "The leaderboard has been deleted.")
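# --- Valuation sketch (not part of the original mod) ---------------------------
# A compact restatement of the scoring rule used by the Waifu Leaderboard above,
# assuming the same inputs it reads from the databases: the claim price stored
# for the waifu, the summed cost of gifts received, and whether the owner's and
# waifu's affinities point at each other. The helper name and signature are
# hypothetical; it is only meant to make the ranking rule explicit.
def couple_value(claim_price, gift_value, owner_affinity, waifu_affinity, owner_id, waifu_id):
    # Base value: what the owner paid plus the total cost of gifts received
    total = claim_price + gift_value
    # 10% bonus when both affinities are set towards each other
    if owner_affinity == waifu_id and waifu_affinity == owner_id:
        total = int(total * 1.1)
    return total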
# structural breaks # to detect whether a strategy fails (then stop loss) # key word: sequential detection ###################################################################### def testCUSUM(s): return if __name__ == '__main__': # read data strFile1Min = '1MinExample.pickle' if os.path.exists(strFile1Min): df1Min = pd.read_pickle(strFile1Min) else: dictDataSpec = dict(Utils.dictDataSpecTemplate) dictDataSpec['Secu'] = 'j.dce' dictDataSpec['freq'] = '1min' df1Min = Utils.getTradingDataPoint_Commodity(dictDataSpec).replace( np.inf, np.nan) df1Min.to_pickle(strFile1Min) #dfBM5 = generateTimeClock(df1Min.copy(), 5) #dfBM10 = generateTimeClock(df1Min.copy(), 10) #dfVC = generateVolumeClock1Min(df1Min.copy()) #dfTB = generateTrippleBarrierBar(df1Min) dfBM = generateTimeClock(df1Min, 20) #dfVC = generateVolumeClock1Min(df1Min) dfTB = generateTrippleBarrierBar(df1Min) #dfFD = fractionalDifferentiate(df1Min)
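# --- CUSUM sketch (not part of the original script) ----------------------------
# testCUSUM above is left as a stub. The following is an illustrative, standard
# one-sided CUSUM on a return series (Page's test), not the author's intended
# implementation: it flags the first bar where the cumulative negative drift of
# the strategy's returns exceeds a threshold h, which is the kind of sequential
# detection the comments above refer to. Parameter names and defaults are
# hypothetical.
import numpy as np
import pandas as pd


def cusum_breakpoint(returns, drift=0.0, h=5.0):
    """Return the first index where the downward CUSUM statistic exceeds h,
    or None if no break is detected."""
    s_neg = 0.0
    sigma = returns.std(ddof=1)
    if not np.isfinite(sigma) or sigma == 0:
        return None
    for ts, r in returns.items():
        # Accumulate standardized shortfall relative to the allowed drift
        s_neg = min(0.0, s_neg + (r - drift) / sigma)
        if s_neg < -h:
            return ts
    return None


# Example on synthetic data: a strategy whose mean return turns negative halfway
rng = np.random.default_rng(0)
rets = pd.Series(np.r_[rng.normal(0.02, 1.0, 500), rng.normal(-0.2, 1.0, 500)])
print(cusum_breakpoint(rets, drift=0.0, h=10.0))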
async def command_called(self, message, command): split_message = message.content.split(" ") channel, author, server = message.channel, message.author, message.guild try: # Adding a role if command is self.commands['Add Color Command']: # Check command format if len(split_message) > 1: # If the first parameter is hex if Utils.is_hex(split_message[1]): hex_color = split_message[1].upper() # If role hasn't been created and max color count hasn't been reached -> Create Role if len(self.roles[ server.id]) < self.config['Max Colors']: if self.get_role_by_hex(server, hex_color) is None: new_color_role = await self.create_role( server, hex_color) # Role already exists -> Get it else: new_color_role = self.get_role_by_hex( server, hex_color) # Give the user their color await self.give_role(server, author, new_color_role) await Utils.simple_embed_reply( channel, "[Add Role]", "Added " + hex_color + " to your roles.", hex_color) else: await Utils.simple_embed_reply( channel, "[Added Color]", "Max role count reached.", hex_color=hex_color) # First parameter is not a valid hex value -> Error else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid hex value.", split_message[1]) # Hex parameter not supplied -> Error else: await Utils.simple_embed_reply(channel, "[Error]", "Missing color parameter.") # Removing a role elif command is self.commands["Remove Color Command"]: # Get current role info current_color_role_id = self.users[server.id][author.id] current_color_role = Utils.get_role_by_id( server, current_color_role_id) hex_color = current_color_role.name # Remove the role await self.remove_role(server, author, current_color_role) # Reply await Utils.simple_embed_reply(channel, "[Removed Color]", "Removed " + hex_color + " from your roles.", hex_color=hex_color) # Deleting a role elif command is self.commands["Delete Color Command"]: # If the hex color was supplied if len(split_message) > 1: if Utils.is_hex(split_message[1]): hex_color = split_message[1].upper() color_role = self.get_role_by_hex(server, hex_color) # If the role doesn't exist -> Error if color_role is None: await Utils.simple_embed_reply( channel, "[Error]", "Color not found.", hex_color) # Role found -> Delete it and let the user know else: await self.delete_role(server, color_role) # Reply await Utils.simple_embed_reply(channel, "[Deleted Color]", "Deleted " + hex_color + ".", hex_color=hex_color) # First parameter is not a valid hex value -> Error else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid hex value.", split_message[1]) # Hex parameter not supplied -> Error else: await Utils.simple_embed_reply(channel, "[Error]", "Missing color parameter.") # Listing roles elif command is self.commands["List Colors Command"]: roles_text = "" # If roles exist if len(self.roles[server.id]) > 0: # Build text from every role name for role in self.roles[server.id]: roles_text += Utils.get_role_by_id(server, role).name + "\n" # No roles exist -> state so else: roles_text = "No roles exist." 
            # Reply with the list
            await Utils.simple_embed_reply(channel, "[Color List]", roles_text)
        # Listing users equipped with a role
        elif command is self.commands["Equipped Users Command"]:
            # If the hex color was supplied
            if len(split_message) > 1:
                if Utils.is_hex(split_message[1]):
                    hex_color = split_message[1].upper()
                    role = self.get_role_by_hex(server, hex_color)
                    # If the role exists
                    if role is not None:
                        users_text = ""
                        # Check if users are equipped with this role
                        if len(self.roles[server.id][role.id]) > 0:
                            for user_id in self.roles[server.id][role.id]:
                                user = Utils.get_user_by_id(server, user_id)
                                users_text += user.name + "\n"
                        # No users are equipped -> State so
                        else:
                            users_text = "No users are equipped with this role."
                        # Reply with the equipped users
                        await Utils.simple_embed_reply(
                            channel, "[" + role.name + " Equipped List]",
                            users_text, hex_color)
                    # Hex parameter doesn't have an associated role -> Error
                    else:
                        await Utils.simple_embed_reply(
                            channel, "[Error]", "Color not found.", hex_color)
                # First parameter is not a valid hex value -> Error
                else:
                    await Utils.simple_embed_reply(channel, "[Error]",
                                                   "Invalid hex value.",
                                                   split_message[1])
            # Hex parameter not supplied -> Error
            else:
                await Utils.simple_embed_reply(channel, "[Error]",
                                               "Missing color parameter.")
        # List all info known by this mod for the current server
        elif command is self.commands["Color Info Command"]:
            # If roles exist
            if len(self.roles[server.id]) > 0:
                # Begin reply crafting
                embed = discord.Embed(title="[Info]",
                                      color=Utils.default_hex_color)
                # Cycle all the roles, creating a user list per role
                for role_id in self.roles[server.id]:
                    role = Utils.get_role_by_id(server, role_id)
                    users_text = ""
                    for user_id in self.roles[server.id][role_id]:
                        user = Utils.get_user_by_id(server, user_id)
                        users_text += user.name + "\n"
                    # Create an embed field per role
                    embed.add_field(name=role.name, value=users_text)
                # Reply
                await channel.send(embed=embed)
            # No used roles -> state so
            else:
                await Utils.simple_embed_reply(channel, "[Info]",
                                               "No colors exist.")
        # Purge all color roles
        elif command is self.commands["Clear Colors Command"]:
            for role_id in [role for role in self.roles[server.id]]:
                role = Utils.get_role_by_id(server, role_id)
                # Delete the role
                await self.delete_role(server, role)
            # Let the user know all colors were purged
            await Utils.simple_embed_reply(channel, "[Purged Color]",
                                           "Purged all colors.")
    # Bot isn't supplied with sufficient perms -> Error
    except discord.errors.Forbidden as e:
        await Utils.simple_embed_reply(channel, "[Error]",
                                       "Bot does not have enough perms.")
        logging.exception("An error occurred.")
    # Some error I don't know of occurred, PING ALIEN!
    except Exception as e:  # Leave as a general exception!
        await Utils.simple_embed_reply(
            channel, "[Error]", "Unknown error occurred (Ping Alien).")
        logging.exception("An error occurred.")
async def command_called(self, message, command): split_message = message.content.split(" ") server, channel, author = message.guild, message.channel, message.author if command is self.commands["Set Currency Command"]: if len(split_message) > 1: new_currency = split_message[1] self.config.write_data(new_currency, key="Currency") EconomyUtils.currency = new_currency await Utils.simple_embed_reply( channel, "[Set Currency]", "Currency has been set to `%s`." % new_currency) else: await Utils.simple_embed_reply( channel, "[Error]", "Currency parameter not supplied.") elif command is self.commands["Set Starting Balance Command"]: if len(split_message) > 1: starting_balance = split_message[1] if starting_balance.isdigit(): self.config.write_data(int(starting_balance), key="Starting Balance") await Utils.simple_embed_reply( channel, "[Success]", "Starting balance set to `%s`." % starting_balance) else: await Utils.simple_embed_reply( channel, "[Error]", "Starting balance command parameter is incorrect.") else: await Utils.simple_embed_reply( channel, "[Error]", "Starting balance command parameter not supplied.") elif command is self.commands["Balance Command"]: user = author if len(split_message) > 1: given_user = Utils.get_user(server, split_message[1]) if given_user is not None: user = given_user else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") user_cash = EconomyUtils.get_cash(server.id, user.id) user_bank = EconomyUtils.get_bank(server.id, user.id) user_rank = EconomyUtils.get_rank(server.id, user.id) rank_text = Utils.add_number_abbreviation(user_rank) user_worth = user_cash + user_bank embed = discord.Embed(title=" [%s]" % str(user), description="Server Rank: %s" % str(rank_text), color=Utils.default_hex_color) embed.add_field(name="Cash", value=str(user_cash) + EconomyUtils.currency, inline=True) embed.add_field(name="Bank", value=str(user_bank) + EconomyUtils.currency, inline=True) embed.add_field(name="Net Worth", value=str(user_worth) + EconomyUtils.currency, inline=True) await channel.send(embed=embed) elif command is self.commands["Deposit Command"]: if len(split_message) > 1: deposit_amount = split_message[1] user_cash = EconomyUtils.get_cash(server.id, author.id) user_bank = EconomyUtils.get_bank(server.id, author.id) if user_cash != 0: if deposit_amount.isdigit(): deposit_amount = int(deposit_amount) if user_cash >= deposit_amount: EconomyUtils.set_cash(server.id, author.id, user_cash - deposit_amount) EconomyUtils.set_bank(server.id, author.id, user_bank + deposit_amount) await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Deposited %s%s into your bank account." % (str(deposit_amount), EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Sorry, but you don't have enough money to do that." ) elif deposit_amount == "all": EconomyUtils.set_cash(server.id, author.id, 0) EconomyUtils.set_bank(server.id, author.id, user_bank + user_cash) await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Deposited %s%s into your bank account." 
% (str(user_cash), EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[Error]", "Amount parameter is incorrect.") else: await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Sorry, but you don't have any money to deposit.") else: await Utils.simple_embed_reply( channel, "[Error]", "Amount command parameter not supplied.") elif command is self.commands["Withdraw Command"]: if len(split_message) > 1: withdraw_amount = split_message[1] user_cash = EconomyUtils.get_cash(server.id, author.id) user_bank = EconomyUtils.get_bank(server.id, author.id) if user_bank != 0: if withdraw_amount.isdigit(): withdraw_amount = int(withdraw_amount) if user_bank >= withdraw_amount: EconomyUtils.set_cash(server.id, author.id, user_cash + withdraw_amount) EconomyUtils.set_bank(server.id, author.id, user_bank - withdraw_amount) await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Withdrew %s%s into cash." % (str(withdraw_amount), EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Sorry, but you don't have enough money to do that." ) elif withdraw_amount == "all": EconomyUtils.set_bank(server.id, author.id, 0) EconomyUtils.set_cash(server.id, author.id, user_cash + user_bank) await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Withdrew %s%s into cash." % (str(user_bank), EconomyUtils.currency)) else: await Utils.simple_embed_reply( channel, "[Error]", "Amount parameter is incorrect.") else: await Utils.simple_embed_reply( channel, "[%s]" % str(author), "Sorry, but you don't have any money to withdraw.") else: await Utils.simple_embed_reply( channel, "[Error]", "Amount command parameter not supplied.") elif command is self.commands["Give Command"]: if len(split_message) > 2: user = Utils.get_user(server, split_message[1]) if user is not None: author_cash = EconomyUtils.get_cash(server.id, author.id) user_cash = EconomyUtils.get_cash(server.id, user.id) if split_message[2].isdigit(): give_amount = int(split_message[2]) if author_cash < int(split_message[2]): return await Utils.simple_embed_reply( channel, "[Error]", "You don't have enough cash to do that.") elif split_message[2] == "all": give_amount = EconomyUtils.get_cash( server.id, author.id) else: return await Utils.simple_embed_reply( channel, "[Error]", "Amount parameter is incorrect.") EconomyUtils.set_cash(server.id, author.id, author_cash - give_amount) EconomyUtils.set_cash(server.id, user.id, user_cash + give_amount) await Utils.simple_embed_reply( channel, "[Success]", "You gave %s %s%s." % (str(user), str(give_amount), EconomyUtils.currency)) else: await Utils.simple_embed_reply(channel, "[Error]", "Invalid user supplied.") else: await Utils.simple_embed_reply( channel, "[Error]", "Insufficient parameters supplied.") # TODO: Optimize this? 
elif command is self.commands["Leaderboard Command"]: page = 1 if len(split_message) > 1: page = split_message[1] if page.isdigit(): page = int(page) else: return await Utils.simple_embed_reply( channel, "[Error]", "Page number parameter is incorrect.") user_rank_order = EconomyUtils.database_execute( "SELECT user FROM '%s' ORDER BY bank + cash DESC" % server.id) max_page = int((len(user_rank_order) + 9) // 10) if page <= max_page: if (len(user_rank_order) + 10) / 10 >= page: embed = discord.Embed(title="[%s Leaderboard]" % str(server), color=Utils.default_hex_color) for i in range(min(10, len(user_rank_order))): user_rank = (page - 1) * 10 + i rank_text = Utils.add_number_abbreviation(user_rank + 1) if len(user_rank_order) <= user_rank: break user_id = user_rank_order[user_rank] user = Utils.get_user_by_id(server, user_id) user_worth = EconomyUtils.get_bank( server.id, user_id) + EconomyUtils.get_cash( server.id, user_id) embed.add_field( name="%s : %s" % (str(user), rank_text), value=str(user_worth) + EconomyUtils.currency, inline=False) embed.set_footer(text="Page %d/%d" % (page, max_page)) await channel.send(embed=embed) else: await Utils.simple_embed_reply(channel, "[Error]", "Page number is too high.") else: await Utils.simple_embed_reply( channel, "[Error]", "You can only view a page between 1 and %d." % max_page) elif command is self.commands["Bank Command"]: embed = discord.Embed( description="**The bank never goes Bankrupt!**", color=Utils.default_hex_color) # Remove this if? if server.icon is None: embed.set_author(name="%s Bank" % str(server), icon_url=author.default_avatar_url) else: embed.set_author( name="%s Bank" % str(server), icon_url="https://media.discordapp.net/icons/%s/%s.jpg" % (server.id, server.icon)) embed.set_thumbnail(url=self.config.get_data("Bank Icon URL")) total_balance = int( EconomyUtils.database_execute( "SELECT SUM(bank + cash) FROM `%s`" % server.id)[0]) embed.add_field(name="Balance:", value=str(total_balance) + EconomyUtils.currency, inline=True) embed.add_field(name="Interest:", value=str(self.config.get_data("Interest Rate"))) embed.set_footer(text="%s" % str(time.strftime("%m-%d-%Y"))) await channel.send(embed=embed) elif command is self.commands["Award Command"]: await self.award_take(message, True) elif command is self.commands["Take Command"]: await self.award_take(message, False)