async def getMembers(ctx):
    """Return a CSV of info about members"""
    # Although data processing/searching is easier the way userList is,
    # the CSV file output looks better rotated 90 degrees (rows become columns)
    finalCSV = [[] for _ in range(len(userList[0]) + 1)]
    finalCSV[0].append("Discord Name")
    finalCSV[0].append("Ko-fi Username")
    finalCSV[0].append("Last Donation")
    finalCSV[0].append("Total Donated")
    finalCSV[0].append("Has Late Payment")
    for idx in range(0, len(userList[0])):
        usr = bot.get_user(int(userList[0][idx]))
        finalCSV[idx + 1].append(usr.name)
        finalCSV[idx + 1].append(userList[1][idx])
        timeStamp = float(userList[2][idx])
        timeString = datetime.fromtimestamp(timeStamp).strftime("%Y/%m/%d %H:%M:%S")
        finalCSV[idx + 1].append(timeString)
        finalCSV[idx + 1].append(userList[3][idx])
        finalCSV[idx + 1].append(userList[4][idx])
    CSVParser.writeNestedList("tempMemberList.csv", finalCSV, 'w')
    listFiles = [
        discord.File("tempMemberList.csv", "MemberList.csv"),
    ]
    dmChannel = await getDmChannel(ctx.author)
    if dmChannel is not None:
        await dmChannel.send("Here's all info on every supporter for ya!",
                             files=listFiles)
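# A minimal sketch of the "rotated 90 degrees" idea from the comment above:
# userList keeps one inner list per attribute (column-oriented), while the CSV
# is written one row per user. The toy data here is illustrative, not real bot data.
attributeColumns = [["111111111", "222222222"],        # Discord IDs
                    ["alice", "bob"],                   # Ko-fi usernames
                    ["1600000000.0", "1600100000.0"]]   # last donation timestamps

# zip(*columns) pairs the idx-th entry of every column, producing one row per user
rowsForCSV = [list(row) for row in zip(*attributeColumns)]
# rowsForCSV == [['111111111', 'alice', '1600000000.0'],
#                ['222222222', 'bob', '1600100000.0']]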
async def checkPaymentTime():
    await bot.wait_until_ready()
    while not bot.is_closed():
        gracePeriod = 32 + int(config["mocha_config"]["grace_period"])
        idx = 0
        while idx < len(userList[0]):
            if int(userList[0][idx]) == 0:
                idx += 1
                continue
            if isOlderThan(float(userList[2][idx]), 32):
                usr = bot.get_user(int(userList[0][idx]))
                mem = await getMember(usr)
                msgChannel = await findChannel(config["mocha_config"]["mod_channel"])
                if isOlderThan(float(userList[2][idx]), gracePeriod):
                    await msgChannel.send(mem.nick + " has not made a payment in 32 days"
                                          " plus grace period! Recommended action is to kick using: "
                                          "`m.kickUser " + mem.nick + "` or you can make an "
                                          "exception with `m.postpone " + mem.nick + " <days>` to "
                                          "delay the next warning")
                else:
                    if userList[4][idx] == '1':
                        idx += 1
                        continue
                    await msgChannel.send(mem.nick + " has not made a payment in 32 days"
                                          "! Recommended action is to warn using: `m.warnUser "
                                          + mem.nick + "` or you can make an exception with "
                                          "`m.postpone " + mem.nick + " <days>` to delay "
                                          "the next warning")
                    userList[4][idx] = '1'
                    CSVParser.writeNestedList(config["mocha_config"]["user_file"], userList, 'w')
            idx += 1
        await asyncio.sleep(int(config["mocha_config"]["payment_check_delay"]))
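# isOlderThan() is called above but not shown in this snippet. A plausible sketch,
# assuming it takes a Unix timestamp and a day count and reports whether that
# timestamp is at least that many days in the past (an assumption, not necessarily
# the bot's actual helper):
import time


def isOlderThan(timestamp, days):
    # Elapsed seconds since `timestamp` compared against `days` worth of seconds
    return (time.time() - timestamp) >= days * 24 * 60 * 60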
async def on_message(message):
    welcomeChannel = await findChannel(config["mocha_config"]["welcome_channel"])
    # Confirm this is in the welcome channel, by an unconfirmed user, has only
    # one word, is a ko-fi supporter, and is unattached to a Discord id
    if message.channel.id == welcomeChannel.id:
        if str(message.author.id) in userList[5]:
            if ' ' not in message.content and message.content != "":
                if message.content in userList[1]:
                    idx = userList[1].index(message.content)
                    if userList[0][idx] == '0' and message.content != "Someone":
                        if isOlderThan(float(userList[2][idx]), 32):
                            return await welcomeChannel.send(
                                config["message_strings"]["no_activity"])
                        userList[0][idx] = str(message.author.id)
                        idx = userList[5].index(str(message.author.id))
                        del userList[5][idx]
                        CSVParser.writeNestedList(config["mocha_config"]["user_file"],
                                                  userList, 'w')
                        newRole = await getRole(config["mocha_config"]["supporter_role"])
                        await message.author.add_roles(newRole)
                        await welcomeChannel.send(config["message_strings"]["accept"])
                    else:
                        await welcomeChannel.send(
                            config["message_strings"]["user_taken"])
                else:
                    await welcomeChannel.send(config["message_strings"]["no_activity"])
    await bot.process_commands(message)
def integrate_images(self):
    self.judge_user_input_or_not()
    image_type_list = list()
    # image_type_list.append(self.var_char1.get())
    # image_type_list.append(self.var_char2.get())
    # image_type_list.append(self.var_char3.get())
    # image_type_list.append(self.var_char4.get())
    csv_handler = CSVParser(self.csv_input_path.get())
    data_dict = csv_handler.get_dict_from_csv()
    integrate_handler = Preprocessor(self.image_input_path.get(),
                                     self.text_output_path.get())
    # group_number = integrate_handler.get_group_number_all()
    self.img_group_number.set(u"Total number: " + str(len(data_dict)) + u" groups")
    # keyword_dict = integrate_handler.get_keyword_dict()
    for k, v in data_dict.iteritems():
        return_message = integrate_handler.integrate_images(v)
        if len(return_message) > 0:
            self.t_show.insert(
                END, u"Ref no. " + return_message + u" has not been finished!\n")
async def on_member_remove(member):
    if str(member.id) in userList[0]:
        idx = userList[0].index(str(member.id))
        userList[0][idx] = '0'
    if str(member.id) in userList[5]:
        idx = userList[5].index(str(member.id))
        del userList[5][idx]
    CSVParser.writeNestedList(config["mocha_config"]["user_file"], userList, 'w')
def main():
    lexer = CSVLexer(FileStream("./t.csv"))
    stream = CommonTokenStream(lexer)
    parser = CSVParser(stream)
    tree = parser.csvFile()
    listener = CSVPrintListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    print(listener.rows)
def __init__(self, csv_file_name, json_file_name):
    """
    Initializes Utils

    @type json_file_name: str
    @param json_file_name: Name of json file.
    @type csv_file_name: str
    @param csv_file_name: Name of csv file.
    """
    self.csv_parser = CSVParser(csv_file_name)
    self.file_object = FileObject(json_file_name)
def main(args):
    input_stream = FileStream(args.filename)
    lexer = CSVLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = CSVParser(token_stream)
    tree = parser.csvfile()
    table = TableListener()
    walker = ParseTreeWalker()
    walker.walk(table, tree)
    for row in table.result():
        print(row)
class Utils:
    """
    Create Utils Object for using FileObject and CSVParser
    """

    def __init__(self, csv_file_name, json_file_name):
        """
        Initializes Utils

        @type json_file_name: str
        @param json_file_name: Name of json file.
        @type csv_file_name: str
        @param csv_file_name: Name of csv file.
        """
        self.csv_parser = CSVParser(csv_file_name)
        self.file_object = FileObject(json_file_name)

    def write_file(self, index):
        """
        Writes csv file by section using CSV parser object

        @type index: number
        @param index: Index
        """
        tup = self.file_object.get_tuples()[index]
        rows = self.csv_parser.get_parsed_csv_file()[tup[0]:tup[1]]
        self.csv_parser.write_csv_section(
            rows, "./output/%s-%s.csv" % (index, self.file_object.get_file_name(index)))

    def create_base_files(self):
        """
        Writes base csv files
        """
        for index in range(2):
            self.write_file(index)

    def create_action_files(self):
        """
        Writes action csv files
        """
        for index in range(2, len(self.file_object.get_tuples())):
            self.write_file(index)

    def create_updated_action_files(self, file_name):
        """
        Creates action files with values inserted.
        """
        self.csv_parser.insert_values(1, self.file_object.get_cutoff_index(), file_name)
        self.create_action_files()
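# A hypothetical usage sketch for the Utils class above; the file names are made
# up for illustration and an ./output directory is assumed to exist.
utils = Utils("report.csv", "sections.json")
utils.create_base_files()      # writes the first two sections as base csv files
utils.create_action_files()    # writes the remaining sections as action csv files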
async def postpone(ctx, *args):
    if len(args) != 2:
        await ctx.send("Whoops! Please specify a guild member and number of days!")
    else:
        usr = ctx.guild.get_member_named(args[0])
        if str(usr.id) in userList[0]:
            idx = userList[0].index(str(usr.id))
            timestamp = time.mktime((datetime.date.today()
                                     + datetime.timedelta(days=int(args[1]))).timetuple())
            userList[2][idx] = str(timestamp)
            userList[4][idx] = '0'
            CSVParser.writeNestedList(config["mocha_config"]["user_file"], userList, 'w')
            await ctx.send("Success! User's time has been extended!")
def __init__(self, studentFilePath, companyFilePath):
    self.studentRows = CSVParser.parseFile(studentFilePath)
    self.companyRows = CSVParser.parseFile(companyFilePath)
    self.studentQueue = collections.deque()
    self.studentRankingDict = {}
    self.jobsRankingDict = {}
    self.numPositionsDict = {}
    self.populateDataStructures()
    self.result = []
    self.students_with_offers = set()
    self.performAlgorithm()
    CSVParser.outputCSV(self.result)
async def checkKoFiQueue():
    await bot.wait_until_ready()
    while not bot.is_closed():
        while not koFiQueue.empty():
            koFiData = "uninitialized"
            try:
                koFiData = koFiQueue.get()
                logger.info("Ko-Fi data received")
                # needs testing to ensure this timestamp format is correct
                jsonTime = koFiData["timestamp"]
                jsonTime = jsonTime.split('.')[0]
                koFiTime = time.mktime(datetime.datetime.strptime(
                    jsonTime, "%Y-%m-%dT%H:%M:%S").timetuple())
                koFiUser = koFiData["from_name"]
                koFiAmount = float(koFiData["amount"])
                if koFiUser in userList[1]:
                    # Existing user, update last donation time and total donated
                    idx = userList[1].index(koFiUser)
                    userList[2][idx] = str(koFiTime)
                    userList[3][idx] = str(koFiAmount + float(userList[3][idx]))
                    userList[4][idx] = '0'
                    if userList[0][idx] != '0':
                        mem = await getMember(bot.get_user(int(userList[0][idx])))
                        warnRole = await getRole(config["mocha_config"]["warning_role"])
                        try:
                            await mem.remove_roles(warnRole,
                                                   reason=userList[1][idx] + " has made a payment")
                        except BaseException:
                            pass
                else:
                    # New user, add to userList
                    userList[0].append('0')
                    userList[1].append(koFiUser)
                    userList[2].append(str(koFiTime))
                    userList[3].append(str(koFiAmount))
                    userList[4].append('0')
                CSVParser.writeNestedList(config["mocha_config"]["user_file"], userList, 'w')
            except BaseException as e:
                logger.warning("Error parsing KoFi data\nOriginal Data:\n" + str(koFiData)
                               + "\nError Details:\n" + str(e))
        await asyncio.sleep(int(config["mocha_config"]["kofi_check_delay"]))
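# A small illustration of the timestamp handling flagged "needs testing" above:
# split('.')[0] drops a fractional-seconds suffix so the "%Y-%m-%dT%H:%M:%S"
# pattern matches. The sample string is made up, not a captured Ko-fi payload.
import datetime
import time

sample = "2021-06-01T18:30:05.123456"              # hypothetical timestamp value
trimmed = sample.split('.')[0]                     # "2021-06-01T18:30:05"
parsed = datetime.datetime.strptime(trimmed, "%Y-%m-%dT%H:%M:%S")
unixSeconds = time.mktime(parsed.timetuple())      # float seconds, local time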
def travel_rate(self, attendance_entry):
    if not self.workplaces:
        self.workplaces = CSVParser('workplaces.csv').all_as_dict()
    workplace = self.workplaces[attendance_entry['workplace_id']]
    distance = self.distance(attendance_entry['location'], workplace['location'])
    if distance >= 5:
        # Payment is per full km, 5.9 km is 5 complete kms.
        # Distance is doubled because payment is to/from work
        return int(distance) * RATES['travel_per_km_to/from'] * 2
    return 0
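# A worked example of the per-full-km rule described in the comment above, using
# a made-up rate: a 5.9 km one-way distance pays for 5 full km, doubled for the
# round trip.
RATES = {'travel_per_km_to/from': 0.50}    # hypothetical rate, not the real value
distance = 5.9
payment = int(distance) * RATES['travel_per_km_to/from'] * 2
# int(5.9) == 5, so payment == 5 * 0.50 * 2 == 5.0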
def readLines(self, lines, delimiter=',', allowComments=1, stripWhite=1):
    haveReadHeadings = 0
    parse = CSVParser(fieldSep=delimiter, allowComments=allowComments,
                      stripWhitespace=stripWhite).parse
    for line in lines:
        # process a row, either headings or data
        values = parse(line)
        if values:
            if haveReadHeadings:
                row = TableRecord(self, values)
                self._rows.append(row)
            else:
                self.setHeadings(values)
                self.createNameToIndexMap()
                haveReadHeadings = 1
    if values is None:
        raise DataTableError, "Unfinished multiline record."
    return self
def readLines(self, lines, delimiter=',', allowComments=True, stripWhite=True):
    if self._defaultType is None:
        self._defaultType = 'str'
    haveReadHeadings = False
    parse = CSVParser(fieldSep=delimiter, allowComments=allowComments,
                      stripWhitespace=stripWhite).parse
    for line in lines:
        # process a row, either headings or data
        values = parse(line)
        if values:
            if haveReadHeadings:
                row = TableRecord(self, values)
                self._rows.append(row)
            else:
                self.setHeadings(values)
                haveReadHeadings = True
    if values is None:
        raise DataTableError("Unfinished multiline record.")
    return self
def main():
    # stations
    parser = CSVParser("../data/stations.csv")
    _, parsedData = parser.parse()
    populate_table("stations",
                   ["stationID", "name", "seller", "capacity", "coordX", "coordY"],
                   parsedData)

    # bikes
    parser = CSVParser("../data/villos.csv")
    _, parsedData = parser.parse()
    populate_table("bicycles", ["bicycleID", "servicedate", "model", "state"], parsedData)

    # users
    parser = XMLParser("../data/users.xml")
    subscribers, temporary = parser.parseUsers()
    populate_table("users", ["userID", "password", "expiryDate", "card"],
                   map(lambda sub: [sub[0], sub[4], sub[-2], sub[-1]], subscribers))
    populate_table("subs",
                   ["userID", "RFID", "lastname", "firstname", "phone", "addresscity",
                    "addresscp", "addressstreet", "addressnumber", "subscribeDate"],
                   map(lambda sub: [sub[i] for i in range(0, 4)]
                       + [sub[i] for i in range(5, 11)], subscribers))
    populate_table("users", ["userID", "password", "expiryDate", "card"], temporary)
    populate_table("tempUsers", ["userID", "paymentDate"],
                   map(lambda sub: [sub[0],
                                    datetime.strptime(sub[2], "%Y-%m-%dT%H:%M:%S")
                                    - timedelta(days=7)], temporary))

    # trips
    parser = CSVParser("../data/trips.csv")
    _, parsedData = parser.parse()
    populate_trips(parsedData)

    create_admin()
def get_data(self, nometorrent):
    parser = CSVParser()
    return parser.do_query(nometorrent)
        self.current = []

    def exitRow(self, ctx):
        # getParent() method does not exist, use 'parentCtx' field here.
        if ctx.parentCtx.getRuleIndex() == CSVParser.RULE_hdr:
            return
        m = dict(zip(self.header, self.current))
        self.rows.append(m)


if __name__ == '__main__':
    if len(sys.argv) > 1:
        input_stream = FileStream(sys.argv[1])
    else:
        input_stream = InputStream(sys.stdin.read())
    lexer = CSVLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = CSVParser(token_stream)
    tree = parser.top()
    lisp_tree_str = tree.toStringTree(recog=parser)
    print(lisp_tree_str)

    # listener
    print("Start Walking...")
    listener = CsvLoader()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    print('result =', listener.rows)
import sys
import antlr3
from CSVLexer import CSVLexer
from CSVParser import CSVParser

cStream = antlr3.StringStream(sys.stdin.read())  # create char stream
lexer = CSVLexer(cStream)                        # lexer feeds off chars
tStream = antlr3.CommonTokenStream(lexer)        # buffer up tokens
parser = CSVParser(tStream)                      # parser feeds off tokens
parser.file()                                    # begin parse w/rule file
def run():
    csvParser = CSVParser()
    return csvParser.parse()
async def on_member_join(member):
    welcomeChannel = await findChannel(config["mocha_config"]["welcome_channel"])
    userList[5].append(str(member.id))
    CSVParser.writeNestedList(config["mocha_config"]["user_file"], userList, 'w')
    if welcomeChannel is not None:
        await welcomeChannel.send(config["message_strings"]["welcome"])
""" Module: AWD, Analysis, wissenschaftliches Rechnen und Datenvisualisierung Course: BSc INF 2015, ZH5-Mo, FS16, Schuler Josef A. This is my solution for the third of eight parts for the module at FFHS. This is an implemenation of a diagram plotter that can either draw curves or plot data according to a two-dimensional list. """ from diagram import Diagram from CSVParser import CSVParser # Demo for Bars csv_data_bars = [] csv_parser = CSVParser(strip_spaces=True, strip_inner_spaces=True, use_heading=True, use_labels=True) csv_parser.data_type = float csv_parser.load_from_csv('population.csv') csv_data = csv_parser.rows diagram_bars = Diagram(type='bars', data=csv_data) diagram_bars.show() # Demo for Piechart csv_parser = CSVParser(strip_spaces=True, strip_inner_spaces=True, use_heading=True, use_labels=True) csv_parser.data_type = float csv_parser.load_from_csv('population.csv') csv_data_piechart = csv_parser.rows diagram_piechart = Diagram(type='piechart', data=csv_data_piechart)
description="Hi! I'm Mocha V" + VERSION + "!\nUse \"m.<command>\"" " to tell me to do something!", case_insensitive=True) bot.remove_command("help") config = configparser.ConfigParser() config.read("MochaConfig.ini") # Format : [[discordIDs(int)], # [koFiName(str)], # [lastDonationTime(float)], # [totalDonated(float)], # [memberStatus(int)], # [unconfirmedUsers(int)]] (last entry not same length as others) userList = CSVParser.parseFile(config["mocha_config"]["user_file"]) koFiQueue = Queue() # Async utility function to slide into them DM's async def getDmChannel(usr): dm_chan = usr.dm_channel if dm_chan == None: await usr.create_dm() dm_chan = usr.dm_channel return dm_chan # Async utility to convert user to member (there is a difference) async def getMember(usr):
def __init__(self, profile=0, runTestSuite=1):
    self.parse = CSVParser().parse
    self._shouldProfile = profile
    self._shouldRunTestSuite = runTestSuite
    self._iters = 100
import sys

from antlr4 import *
from antlr4 import InputStream

from CSVLexer import CSVLexer
from CSVParser import CSVParser

if __name__ == '__main__':
    if len(sys.argv) > 1:
        input_stream = FileStream(sys.argv[1])
    else:
        input_stream = InputStream(sys.stdin.read())
    lexer = CSVLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = CSVParser(token_stream)
    parser.buildParseTrees = False
    parser.start()
def setUp(self):
    self.parse = CSVParser().parse
def main(argv):
    input_stream = FileStream(argv[1])
    lexer = CSVLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = CSVParser(stream)
    tree = parser.startRule()
# -*- encoding:utf8 -*-
"""
Module: AWD, Analysis, wissenschaftliches Rechnen und Datenvisualisierung
Course: BSc INF 2015, ZH5-Mo, FS16, Schuler Josef A.

This is my solution for the second of eight parts for the module at FFHS.
This is an implementation of a CSVParser class that loads csv-files and
stores their data in a two-dimensional list.
"""
from TablePrinter import TablePrinter
from CSVParser import CSVParser

parser = CSVParser(strip_spaces=True, start_row=2, start_col=0, end_col=10, end_row=20)
parser.load_from_csv('data.csv')
TablePrinter(parser.rows).print_as_table()