def __init__(self, value=None, **params):
    """Build a drop-down select widget.

    value: the initially selected value (stored via the ``value`` property
        assignment at the end, after ``self.values`` exists).
    **params: forwarded to table.Table.__init__; 'cls' defaults to 'select'.
    """
    params.setdefault('cls', 'select')
    table.Table.__init__(self, **params)
    # Top row: the currently-selected item plus the drop-down arrow button.
    self.top_selected = button.Button(cls=self.cls + ".selected")
    table.Table.add(self, self.top_selected)  # ,hexpand=1,vexpand=1)#,0,0)
    # Placeholder label until a real option is selected.
    self.top_selected.value = basic.Label(" ", cls=self.cls + ".option.label")
    self.top_arrow = button.Button(basic.Image(self.style.arrow), cls=self.cls + ".arrow")
    table.Table.add(self, self.top_arrow)  # ,hexpand=1,vexpand=1) #,1,0)
    # Pop-up options panel: spacer / options / spacer stacked in a table.
    # Clicking either spacer (or blurring the panel) closes the pop-up.
    self.options = table.Table()  # style={'border':3})
    self.options_first = None
    self.options.tr()
    self.spacer_top = basic.Spacer(0, 0)
    self.options.add(self.spacer_top)
    self.options.tr()
    self._options = table.Table(cls=self.cls + ".options")
    self.options.add(self._options)
    self.options.tr()
    self.spacer_bottom = basic.Spacer(0, 0)
    self.options.add(self.spacer_bottom)
    self.options.connect(BLUR, self._close, None)
    self.spacer_top.connect(CLICK, self._close, None)
    self.spacer_bottom.connect(CLICK, self._close, None)
    self.values = []
    # NOTE: this assignment presumably triggers a ``value`` property setter
    # defined elsewhere in the class — confirm before relying on it.
    self.value = value
def cycle(trainingEpochs, testEpochs, totalCycles, learningRate, discountFactor):
    """Alternate training and testing for totalCycles rounds, printing progress
    and an estimate of the remaining wall-clock time after each cycle."""
    # Training table and card deck.
    train_tbl = table.Table(16, learningRate, discountFactor)
    train_tbl.maximumEpoch = trainingEpochs * totalCycles  # total epochs over all cycles
    d = deck.Deck()
    # A separate table is kept for testing.
    test_tbl = table.Table(16, learningRate, discountFactor)
    # Baseline test before any training has happened.
    test_tbl = updateTestingTable(train_tbl, test_tbl)
    testing(test_tbl, d, testEpochs)
    for cycle_idx in range(totalCycles):
        print('Updating the training table')
        train_tbl = updateTrainingTable(train_tbl)
        print('Training...')
        training(train_tbl, d, trainingEpochs)
        print('Updating the test table')
        test_tbl = updateTestingTable(train_tbl, test_tbl)
        print('Testing...')
        testing(test_tbl, d, testEpochs)
        done = cycle_idx + 1
        print("Cycle " + str(done) + " out of " + str(totalCycles) + " finished\n")
        # 'start' is a module-level timestamp set before cycling begins.
        minutes_per_cycle = ((time.time() - start) / 60) / done
        remainingTime = minutes_per_cycle * ((totalCycles + 1) - done)
        print("Expected time needed remaining cycles: {:.4} min".format(
            remainingTime))
    printResults(test_tbl, trainingEpochs, testEpochs, totalCycles)
def CG3():
    """Build a multi-edge graph connecting commenting users to post owners."""
    posts = table.Table("data/posts.xml")        # ["Id","PostTypeId","OwnerUserId",...]
    comments = table.Table("data/comments.xml")  # ["PostId","UserId","Text",...]
    post_owners = select(posts, attributes=["Id", "OwnerUserId"])
    comment_users = select(comments, attributes=["PostId", "UserId"])
    commented = join(comment_users, post_owners, [("PostId", "Id")])
    g = graph.Graph("multiedge")
    g.addNodes(posts, "OwnerUserId")
    g.addNodes(comments, "UserId")
    g.addEdges(commented, "UserId", "OwnerUserId")
    return g
def CG9():
    """Build a directed badge-to-badge graph: for every answered question,
    link the answerer's badge to the asker's badge, weighted by count."""
    badges = table.Table("data/badges.xml")  # ["UserId","Name",...]
    posts = table.Table("data/posts.xml")    # ["Id","PostTypeId","OwnerUserId",...]
    badged_posts = join(badges, posts, ["UserId"], ["OwnerUserId"],
                        ["Name"], ["PostTypeId", "ParentId", "Id"])
    questions = select(badged_posts, Condition("PostTypeId", "==", 1))
    answers = select(badged_posts, Condition("PostTypeId", "==", 2))
    badge_pairs = join(answers, questions, ["ParentId"], ["Id"],
                       ["Name"], ["Name"], ["Id", "Badge1", "Badge2"])
    counted = group(badge_pairs, ["Badge1", "Badge2"], "Count", "cnt")
    g = graph.Graph("directed")
    g.addNodes(badges, "Name")
    g.addEdges(counted, "Badge1", "Badge2", ["Count"])
    return g
def CG2():
    """Build a directed voter-to-author graph from upvotes (VoteTypeId == 2),
    with edge weights giving the number of upvotes per user pair."""
    posts = table.Table("data/posts.xml")  # ["Id","PostTypeId","OwnerUserId",...]
    votes = table.Table("data/votes.xml")  # ["PostId","VoteTypeId","UserId",...]
    post_owners = select(posts, attributes=["Id", "OwnerUserId"])
    upvotes = select(votes, Condition("VoteTypeId", "==", 2), ["PostId", "UserId"])
    voted = join(upvotes, post_owners, [("PostId", "Id")])
    counted = group(voted, ["UserId", "OwnerUserId"], "Count", "cnt")
    g = graph.Graph("directed")
    g.addNodes(posts, "OwnerUserId")
    g.addNodes(votes, "UserId")
    g.addEdges(counted, "UserId", "OwnerUserId", ["Count"])
    return g
def dist(self, tname, col, metric, threshold):
    """Distance-join the working table against table *tname*.

    Pairs a row of the working table with a row of *tname* whenever the
    distance between working-column and *col* values (computed by the
    method named *metric* on self) is <= *threshold*. The joined result
    becomes the new working table and *col* the new working column.

    NOTE(review): this mutates both input tables (removeNoneInCol) and
    replaces self.wtable — callers lose the previous working table.
    """
    # TODO: This could be moved into table.py
    self.checkwcontext()
    wcolidx = self.wtable.getColIndex(self.wcol)
    table2 = self.getTable(tname)
    colidx = table2.getColIndex(col)
    if colidx is None:
        raise tb.ColumnNotFoundError(col)
    # Compute result of join in a new table
    outputTable = tb.Table()
    outputTable.cols = self.wtable.cols + table2.cols
    outputTable.types = self.wtable.types + table2.types
    # *metric* names a distance method on self (looked up dynamically).
    distMethod = getattr(self, metric)
    # First remove rows with a None value in either of the two columns:
    self.wtable.removeNoneInCol(self.wcol)
    table2.removeNoneInCol(col)
    # Use double loop to compare all pairs (slow! O(n*m))
    for row1 in self.wtable.data:
        for row2 in table2.data:
            # Note: if the user attempts to compare two columns with incompatible types,
            # the result will be empty
            if distMethod(row1[wcolidx], row2[colidx]) <= threshold:
                outputTable.data.append(row1 + row2)
    # Remove duplicate labels (cols entries appear to be sets — difference()).
    addlabels = table2.labels()
    for i in range(len(self.wtable.cols)):
        outputTable.cols[i] = outputTable.cols[i].difference(addlabels)
    # Update working table and working column
    self.wcol = col
    self.wtable = outputTable
def __init__(self, title, message):
    """Build a warning dialog with Okay/Cancel buttons.

    title: window-title text (wrapped in a Label).
    message: a single string, or a list of strings shown one per row
        next to the warning icon.
    """
    title = basic.Label(title)
    main = table.Table()
    import app
    warningIcon = basic.Image(
        app.App.app.theme.get('warningdialog.warning', '', 'image'))
    # FIX: use isinstance() rather than `type(message) is list` so list
    # subclasses are laid out as multi-row messages too.
    if isinstance(message, list):
        if message:  # empty list -> no message rows, as before
            main.tr()
            # Icon spans all message rows; first line sits beside it.
            main.td(warningIcon, rowspan=len(message), style={'margin': 5})
            main.td(basic.Label(message[0]), align=-1)
            for aMessage in message[1:]:
                main.tr()
                main.td(basic.Label(aMessage), align=-1, colspan=2)
    else:
        main.tr()
        main.td(warningIcon)
        main.td(basic.Label(message), align=-1, colspan=2)
    # Button row.
    main.tr()
    self.okayButton = button.Button("Okay")
    self.okayButton.connect(CLICK, self.okayClicked)
    self.cancelButton = button.Button("Cancel")
    self.cancelButton.connect(CLICK, self.close)
    main.td(basic.Spacer(1, 1))
    main.td(self.okayButton, align=1, style={'margin': 10})
    main.td(self.cancelButton, align=-1, style={'margin': 10})
    Dialog.__init__(self, title, main)
def general_ledger(company_name, account):
    """Fetch the general-ledger report for *account* from Jan 1 of the
    current year through today and return it as a table.Table
    (German headings). Returns None if the report query fails."""
    start_date = date(datetime.today().year, 1, 1)
    end_date = datetime.today()
    filters = {
        'company': company_name,
        'account': account,
        'from_date': start_date.strftime('%Y-%m-%d'),
        'to_date': end_date.strftime('%Y-%m-%d')
    }
    try:
        report = Api.api.query_report(report_name='General ledger',
                                      filters=filters)
    except Exception:
        # Best-effort: report unavailable -> signal "no table" to the caller.
        return None
    columns = ['posting_date', 'account', 'debit', 'credit',
               'balance', 'against', 'remarks', 'voucher_no']
    headings = ['Datum', 'Konto', 'Soll', 'Haben',
                'Stand', 'Gegenkonto', 'Bemerkungen', 'Beleg']
    rows = [entry for entry in report['result'] if is_relevat_GL(entry)]
    # Keep only the first 'Opening' row scanning forward, then only the first
    # 'Total' and 'Closing' rows scanning from the end (via the reversals).
    rows = keep_first(rows, ["'Opening'"])
    rows.reverse()
    rows = keep_first(rows, ["'Total'"])
    rows = keep_first(rows, ["'Closing (Opening + Total)'"])
    rows.reverse()
    rows = [format_GL(entry) for entry in rows]
    return table.Table(rows, columns, headings, 'Hauptbuch für ' + account)
def history_table(self, verbose):
    """Return a table of FSM transition history records; *verbose* selects
    the verbose record list instead of the regular one."""
    tbl = table.Table()
    tbl.add_row([
        ["Sequence", "Nr"],
        ["Time", "Delta"],
        ["Verbose", "Skipped"],
        ["From", "State"],
        "Event",
        ["Actions and", "Pushed Events"],
        ["To", "State"],
        ["Implicit"]])
    # Deltas are measured backwards from "now" through successively
    # older records.
    prev_time = time.time()
    records = self._verbose_records if verbose else self._records
    for rec in records:
        delta = prev_time - rec.time
        tbl.add_row([
            rec.seq_nr,
            "{:06f}".format(delta),
            rec.skipped,
            _state_to_name(rec.from_state),
            _event_to_name(rec.event),
            rec.actions_and_pushed_events,
            _state_to_name(rec.to_state),
            rec.implicit])
        prev_time = rec.time
    return tbl
def create_table(self, title, row=DEFAULT_ROW, col=DEFAULT_COLUMN, new=False):
    """Create a weight table on the worksheet and fill in its date column.

    title: table title; its digits give the week number (e.g. "Week 12").
    row, col: top-left cell of the new table.
    new: True for a freshly created sheet (initial date asked interactively);
        otherwise the initial date is derived from the previous table.
    """
    assert hasattr(self, "worksheet")
    print("[IN PROGRESS] Creating the weight table...")
    weight_table = table.Table(self.worksheet, row, col, title).format
    self.spreadsheet.batch_update(weight_table.get_body())
    if new:
        # For a just-created sheet the initial date must come from the user.
        initial_date = input("Enter the initial date (dd/mm/yyy): ")
    else:
        form = r"%d/%m/%Y"  # DD/MM/YYYY
        # Day after the last date of the previous weight table body.
        raw_date = datetime.strptime(
            self.worksheet.cell(row - 1, col).value, form
        ) + timedelta(
            days=1
        )
        initial_date = raw_date.strftime(form)
    dates_to_fill = self.construct_dates(initial_date)
    print("[IN PROGRESS] Filling the dates...")
    self.fill_data(row + 1, col, dates_to_fill)
    print("[INFO] Dates filled succesfully.")
    # FIX: the old code took only the FIRST digit of the title
    # ([int(c) for c in title if c.isdigit()][0]), so "Week 12" set week 1.
    # Concatenate all digits so multi-digit week numbers parse correctly.
    self.week = int("".join(char for char in title if char.isdigit()))
    print("[INFO] The weight table was created succesfully.")
    self.update_location()
def test_multi_line_cells():
    """Cells supplied as lists render one item per line, with every row
    padded to the height of its tallest cell."""
    tbl = table.Table()
    tbl.add_row([['First', 'Name'], ['Last', 'Name'], 'Address',
                 ['Country', 'of', 'Residence']])
    tbl.add_row(['Marge', 'Simpson',
                 ['742 Evergreen Terrace', 'Springfield'],
                 ['United', 'States', 'of', 'America']])
    tbl.add_row([['Annelies', 'Marie'], 'Frank',
                 ['263 Prinsengracht', 'Amsterdam'],
                 ['The', 'Netherlands']])
    expected = (
        "+----------+---------+-----------------------+-------------+\n"
        "| First    | Last    | Address               | Country     |\n"
        "| Name     | Name    |                       | of          |\n"
        "|          |         |                       | Residence   |\n"
        "+----------+---------+-----------------------+-------------+\n"
        "| Marge    | Simpson | 742 Evergreen Terrace | United      |\n"
        "|          |         | Springfield           | States      |\n"
        "|          |         |                       | of          |\n"
        "|          |         |                       | America     |\n"
        "+----------+---------+-----------------------+-------------+\n"
        "| Annelies | Frank   | 263 Prinsengracht     | The         |\n"
        "| Marie    |         | Amsterdam             | Netherlands |\n"
        "+----------+---------+-----------------------+-------------+\n")
    assert tbl.to_string() == expected
def history_table(self, verbose):
    """Build and return a table of FSM history records.

    verbose: when true, show the verbose record list; otherwise the
    regular one."""
    result = table.Table()
    result.add_row([
        ['Sequence', 'Nr'],
        ['Time', 'Delta'],
        ['Verbose', 'Skipped'],
        ['From', 'State'],
        'Event',
        ['Actions and', 'Pushed Events'],
        ['To', 'State'],
        ['Implicit']])
    if verbose:
        selected = self._verbose_records
    else:
        selected = self._records
    # Time deltas run backwards from "now" through successively older records.
    previous = time.time()
    for entry in selected:
        result.add_row([
            entry.seq_nr,
            "{:06f}".format(previous - entry.time),
            entry.skipped,
            _state_to_name(entry.from_state),
            _event_to_name(entry.event),
            entry.actions_and_pushed_events,
            _state_to_name(entry.to_state),
            entry.implicit])
        previous = entry.time
    return result
def command_show_engine(self, cli_session):
    """Print a two-column table of engine configuration and scheduler
    statistics to the CLI session."""
    sched = scheduler.SCHEDULER
    rows = [
        ["Stand-alone", self._stand_alone],
        ["Interactive", self._interactive],
        ["Simulated Interfaces", self.simulated_interfaces],
        ["Physical Interface", self.physical_interface_name],
        ["Telnet Port File", self._telnet_port_file],
        ["IPv4 Multicast Loopback", self.ipv4_multicast_loopback],
        ["IPv6 Multicast Loopback", self.ipv6_multicast_loopback],
        ["Number of Nodes", self.nr_nodes()],
        ["Transmit Source Address", self.tx_src_address],
        ["Flooding Reduction Enabled", self.floodred_enabled],
        ["Flooding Reduction Redundancy", self.floodred_redundancy],
        ["Flooding Reduction Similarity", self.floodred_similarity],
        ["Flooding Reduction System Random", self.floodred_system_random],
        ["Timer slips > 10ms", sched.slip_count_10ms],
        ["Timer slips > 100ms", sched.slip_count_100ms],
        ["Timer slips > 1000ms", sched.slip_count_1000ms],
        ["Max pending events processing time",
         "{:06f}".format(sched.max_pending_events_proc_time)],
        ["Max expired timers processing time",
         "{:06f}".format(sched.max_expired_timers_proc_time)],
        ["Max select processing time",
         "{:06f}".format(sched.max_select_proc_time)],
        ["Max ready-to-read processing time",
         "{:06f}".format(sched.max_ready_to_read_proc_time)],
    ]
    tab = table.Table(separators=False)
    for pair in rows:
        tab.add_row(pair)
    cli_session.print(tab.to_string())
def read_input(self, input):
    """Append a new Table built from *input*.

    input: a file name to open, or False to read via the fileinput module
        (stdin / command-line files). The Table receives a zero-argument
        callable that yields lines. NOTE: the parameter shadows the
        builtin input(); the name is kept for interface compatibility.
    """
    if input == False:  # noqa: E712 — kept: 0/''/False all select fileinput, as before
        fi = fileinput.input
    else:
        # FIX: file.xreadlines() is Python-2-only and raises AttributeError
        # on Python 3; readlines() offers the same callable-returning-lines
        # contract on both.
        # NOTE(review): the file handle is never closed (pre-existing leak).
        fi = open(input, 'r').readlines
    self.tables.append(table.Table(fi))
def tworzTabele(szukanie=None):
    """Create the results table widget and fill it from the local JSON API.

    szukanie: optional search term appended to the API query string.
    """
    global tabele
    # Create the frame holding the table widget.
    tabele = table.Table(root,
                         ['lp.', 'Opis', 'Data', 'Rodz', 'Cena', 'Kasjer'],
                         column_minwidths=[None, 300, 300, None, None, 300])
    tabele.grid(columnspan=1)
    # Download the raw JSON payload.
    url = "http://localhost/api/index.php?szukaj=" + str(szukanie)
    print(url)
    raw = urllib.request.urlopen(url).read().decode()
    # Parse the JSON object.
    obj = json.loads(raw)
    # FIX (idiom): iterate the records directly instead of the
    # range(len(obj)) + append anti-pattern.
    data = [[rec['id'], rec['opis'], rec['data'],
             rec['rodz'], rec['koszt'], rec['kasjer']]
            for rec in obj]
    tabele.set_data(data)
def table(self):
    """Convert the table based on the prime-ministers CSV file into a table
    based on the HTML page, and return it.

    The first input tuple (header row) is skipped; every other tuple is
    re-ordered/augmented into the HTML output column layout.
    """
    html_table = table.Table('output')
    html_at = html_table.attributes()
    tuples = self._input_table.tuples()
    is_first = True
    for a_tuple in tuples:
        if is_first:
            # Skip the header row.
            is_first = False
        else:
            values = a_tuple.values()
            keys = a_tuple.attributes().keys()
            # NOTE(review): 'attributes' is assigned but never used below.
            attributes = a_tuple.attributes()
            # Build the output row in HTML column order, inserting the
            # computed day count and image string.
            output = []
            output.append(values[keys.index('no')])
            output.append(values[keys.index('order')])
            output.append(values[keys.index('_names')])
            output.append(values[keys.index('kana')])
            output.append(values[keys.index('period')])
            output.append(self.compute_string_of_days(values[keys.index('period')]))
            output.append(values[keys.index('school')])
            output.append(values[keys.index('party')])
            output.append(values[keys.index('birth')])
            output.append(self.compute_string_of_image(a_tuple))
            # 'tuple' here is a project module (shadows the builtin).
            output_tuple = tuple.Tuple(html_at, output)
            self._output_table.add(output_tuple)
    return self._output_table
def getTable(self, addDefaultProduct=None, quiet=False, verbose=0):
    """
    return an in-memory instance of the product table.  This will be
    loaded from the path returned by tableFileName() (and cached for
    subsequent accesses) on-the-fly.  None is returned if this product
    is known not to have a table file.  A TableError is raised if the
    table file cannot be loaded:  if it cannot be found, a
    TableFileNotFound is raised; if it contains unparsable errors, a
    BadTableContent is raised.

    addDefaultProduct: forwarded to the Table constructor.
    quiet: when true, lowers the effective verbosity by 2.
    verbose: verbosity level passed to the Table constructor.
    """
    if quiet:
        verbose -= 2
    # Lazy-load and cache the table on first access.
    if not self._table:
        tablepath = self.tableFileName()
        if tablepath is None:
            # Product is known not to have a table file.
            return None

        if not os.path.exists(tablepath):
            raise TableFileNotFound(tablepath, self.name, self.version, self.flavor)
        self._table = mod_table.Table(tablepath, self,
                                      addDefaultProduct=addDefaultProduct,
                                      verbose=verbose,
                                      ).expandEupsVariables(self, quiet)

        if self._prodStack and self.name and self.version and self.flavor:
            # pass the loaded table back to the cache
            try:
                self._prodStack.loadTableFor(self.name, self.version,
                                             self.flavor, self._table)
            except ProductNotFound:
                # Cache update is best-effort; the table itself is still valid.
                pass

    return self._table
def events_table(self):
    """Return a two-column table listing every event and whether it is
    flagged as verbose."""
    result = table.Table()
    result.add_row(["Event", "Verbose"])
    for evt in self.event_enum:
        result.add_row([evt.name, evt in self.verbose_events])
    return result
def __init__(self, title_txt="File Browser", button_txt="Okay", cls="filedialog", path=None):
    """Build a file-browser dialog.

    title_txt: dialog title text.
    button_txt: label of the confirm button.
    cls: theme class prefix for styling the widgets.
    path: starting directory; defaults to the current working directory.
    """
    if not path:
        self.curdir = os.getcwd()
    else:
        self.curdir = path
    # Imported here (not at module level) — presumably to avoid a circular
    # import with the app module; confirm before moving it.
    import app
    self.dir_img = basic.Image(
        app.App.app.theme.get(cls+'.folder', '', 'image'))
    td_style = {'padding_left': 4, 'padding_right': 4,
                'padding_top': 2, 'padding_bottom': 2}
    self.title = basic.Label(title_txt, cls=cls+".title.label")
    self.body = table.Table()
    self.list = area.List(width=350, height=150)
    self.input_dir = input.Input(cls=cls+".input")
    self.input_file = input.Input(cls=cls+".input")
    # Populate the list with the contents of the starting directory.
    self._list_dir_()
    self.button_ok = button.Button(button_txt)
    # Layout: path row, file row (with OK button), then the listing.
    self.body.tr()
    self.body.td(basic.Label("Path", cls=cls+".label"), style=td_style, align=-1)
    self.body.td(self.input_dir, style=td_style)
    self.body.tr()
    self.body.td(basic.Label("File", cls=cls+".label"), style=td_style, align=-1)
    self.body.td(self.input_file, style=td_style)
    self.body.td(self.button_ok, style=td_style)
    self.body.tr()
    self.body.td(self.list, colspan=3, style=td_style)
    self.list.connect(CHANGE, self._item_select_changed_, None)
    self.button_ok.connect(CLICK, self._button_okay_clicked_, None)
    # Selected file path; set when the user confirms.
    self.value = None
    Dialog.__init__(self, self.title, self.body)
def command_show_interfaces(self, cli_session):
    """Print a summary table of all interfaces to the CLI session."""
    # TODO: Report neighbor uptime (time in THREE_WAY state)
    summary = table.Table()
    summary.add_row(interface.Interface.cli_summary_headers())
    for current_intf in self._interfaces.values():
        summary.add_row(current_intf.cli_summary_attributes())
    cli_session.print(summary.to_string())
def __init__(self):
    """Build the "Teachers" selection dialog: a list of teachers above an
    Okay/Cancel button row."""
    self.list = area.List(width=350, height=150)
    ok_button = button.Button("Okay", style={'width': 80,
                                             'height': 28,
                                             'margin': 8})
    ok_button.connect(CLICK, self.okayClicked)
    cancel_button = button.Button("Cancel", style={'width': 80,
                                                   'height': 28,
                                                   'margin': 8})
    cancel_button.connect(CLICK, self.close)
    layout = table.Table()
    layout.tr()
    layout.td(basic.Label("Select your teacher"), colspan=2)
    layout.tr()
    layout.td(self.list, colspan=2)
    layout.tr()
    layout.td(ok_button)
    layout.td(cancel_button)
    Dialog.__init__(self, basic.Label("Teachers"), layout)
def cli_routes_table(self, table_nr):
    """Return a table of kernel routes, optionally restricted to the route
    table numbered *table_nr* (None means all tables)."""
    links = self.ipr.get_links()
    rows = []
    for route in self.ipr.get_routes():
        family = route["family"]
        dst_prefix_str = self.kernel_route_dst_prefix_str(route)
        route_table_nr = route.get_attr('RTA_TABLE')
        if (table_nr is not None) and (table_nr != route_table_nr):
            continue
        # One cell per attribute, one entry per next-hop.
        oif_cell = []
        gateway_cell = []
        weight_cell = []
        for oif, gateway, weight in sorted(self.kernel_route_nhops(route, links)):
            oif_cell.append(oif)
            gateway_cell.append(gateway)
            weight_cell.append(weight)
        rows.append([route_table_nr,
                     self.af_str(family),
                     dst_prefix_str,
                     self.route_type_str(route["type"]),
                     self.proto_str(route["proto"]),
                     oif_cell,
                     gateway_cell,
                     weight_cell])
    rows.sort(key=self.route_row_key)
    # Now that the output is sorted, replace the numeric table ids with
    # their symbolic names.
    for row in rows:
        row[0] = self.table_nr_to_name(row[0])
    tab = table.Table()
    tab.add_row([
        "Table",
        ["Address", "Family"],
        "Destination",
        "Type",
        "Protocol",
        ["Outgoing", "Interface"],
        "Gateway",
        "Weight"])
    tab.add_rows(rows)
    return tab
def cli_route_prefix_table(self, table_nr, prefix):
    """Return a detail table for the kernel route matching (*table_nr*,
    *prefix*), or None when no such route exists."""
    route = None
    for rte in self.ipr.get_routes():
        route_table_nr = rte.get_attr('RTA_TABLE')
        dst_prefix_str = self.kernel_route_dst_prefix_str(rte)
        if (table_nr == route_table_nr) and (dst_prefix_str == str(prefix)):
            route = rte
            break
    if route is None:
        return None
    links = self.ipr.get_links()
    tab = table.Table(separators=False)
    # One "oif gateway weight" string per next-hop.
    next_hops = self.kernel_route_nhops(route, links)
    next_hops_cell = []
    for nhop in next_hops:
        next_hop_str = str(nhop[0]) + " " + str(nhop[1]) + " " + str(nhop[2])
        next_hops_cell.append(next_hop_str)
    tab.add_row(["Table", self.table_nr_to_name(table_nr)])
    # FIX: af_str was applied twice (self.af_str(self.af_str(...))),
    # mangling the address-family string. Apply it once, consistent
    # with cli_routes_table.
    tab.add_row(["Address Family", self.af_str(route["family"])])
    tab.add_row(["Destination", str(prefix)])
    tab.add_row(["Type", self.route_type_str(route["type"])])
    tab.add_row(["Protocol", self.proto_str(route["proto"])])
    tab.add_row(["Scope", self.scope_str(route["scope"])])
    tab.add_row(["Next-hops", next_hops_cell])
    tab.add_row(["Priority", self.to_str(route.get_attr('RTA_PRIORITY'))])
    tab.add_row(["Preference", self.to_str(route.get_attr('RTA_PREF'))])
    tab.add_row(["Preferred Source Address", self.to_str(route.get_attr('RTA_PREFSRC'))])
    tab.add_row(["Source", self.kernel_route_src_prefix_str(route)])
    tab.add_row(["Flow", self.to_str(route.get_attr('RTA_FLOW'))])
    tab.add_row(["Encapsulation Type", self.to_str(route.get_attr('RTA_ENCAP_TYPE'))])
    tab.add_row(["Encapsulation", self.to_str(route.get_attr('RTA_ENCAP'))])
    tab.add_row(["Metrics", self.to_str(route.get_attr('RTA_METRICS'))])
    tab.add_row(["Type of Service", route["tos"]])
    tab.add_row(["Flags", route["flags"]])
    return tab
def parse(clz, lines):
    """Parse whitespace-separated text *lines* into a table.Table.

    lines[0] holds the (possibly quoted) column names; the remaining lines
    hold data rows. Column types are inferred from the values, then
    overridden by clz.typeDefaults / clz.formatDefaults for known columns.
    """
    columnNames = [n.strip('"') for n in lines[0].split()]
    numCol = len(columnNames)
    rows = []
    for line in lines[1:]:
        # NOTE(review): [1:] drops each row's first field — presumably a
        # row-index column emitted by the writer; confirm against the
        # file format before changing.
        row = [table.bestConvert(c) for c in line.split()[1:]]
        rows.append(row)
    if rows:
        # Column-wise generators feed the type inference below; each inner
        # generator is consumed exactly once by common_type_for.
        columns = ((row[i] for row in rows) for i in range(numCol))
        columnTypes = [table.common_type_for(col) for col in columns]
        # Apply explicit type overrides for known column names.
        knownTypes = [(i, clz.typeDefaults.get(columnNames[i]))
                      for i in range(numCol)]
        for i, type_ in knownTypes:
            if type_ is not None:
                columnTypes[i] = type_
        formats = [table.standardFormats[type_] for type_ in columnTypes]
        # Apply explicit format overrides for known column names.
        knownFormats = [(i, clz.formatDefaults.get(columnNames[i]))
                        for i in range(numCol)]
        for i, format_ in knownFormats:
            if format_ is not None:
                formats[i] = format_
        # Coerce every cell to its column type ('object' means leave as-is).
        for i, row in enumerate(rows):
            rows[i] = [t(v) if t is not object else v
                       for (t, v) in zip(columnTypes, row)]
    else:
        # No data rows: default every column to str / repr formatting.
        columnTypes = numCol * (str, )
        formats = numCol * ("%r", )
    return table.Table(columnNames, columnTypes, formats, rows)
def cli_details_table(self):
    """Return a two-column detail table describing this LIE neighbor."""
    # TODO: Report capabilities (is it possible to report the unknown ones too?"
    # TODO: Report neighbor direction in show command
    your_system_id_str = (utils.system_id_str(self.neighbor_system_id)
                          if self.neighbor_system_id else "")
    your_link_id_str = ("{}".format(self.neighbor_link_id)
                        if self.neighbor_link_id else "")
    tab = table.Table(separators=False)
    tab.add_rows([
        ["Name", self.name],
        ["System ID", utils.system_id_str(self.system_id)],
        ["IPv4 Address", self.ipv4_address],
        ["IPv6 Address", self.ipv6_address],
        ["LIE UDP Source Port", self.port],
        ["Link ID", self.local_id],
        ["Level", self.level],
        ["Flood UDP Port", self.flood_port],
        ["MTU", self.link_mtu_size],
        ["POD", self.pod],
        ["Hold Time", self.holdtime],
        ["Not a ZTP Offer", self.not_a_ztp_offer],
        ["You are Flood Repeater", self.you_are_flood_repeater],
        ["Your System ID", your_system_id_str],
        ["Your Local ID", your_link_id_str],
    ])
    return tab
def evaluate_blank(args: list, i: int) -> None:
    """Resolve every blank placeholder in args[i], in place.

    Repeatedly consumes the argument following args[i], evaluates it
    (group, math expression, table roll, or dice roll), deletes the
    consumed argument(s) from *args*, and substitutes the result for the
    next blank in args[i]. Mutates *args*; returns None (the original
    ``-> str`` annotation was wrong — there is no return statement).

    Raises util.ParseException when a blank has no following value.
    """
    while blank in args[i]:
        if i + 1 == len(args):
            raise util.ParseException(
                "Failure: missing value for _ in '{}'".format(args[i]))
        if blank in args[i + 1]:
            # parse that blank first (recursively), so args[i + 1] is fully
            # resolved before we consume it
            evaluate_blank(args, i + 1)
        # replace blanks with next value after executed
        if args[i + 1] == left_sep:
            # if the next arg is the beginning of a group, parse and execute the group
            # Scan forward tracking nesting depth to find the matching
            # right separator.
            depth = 0
            j = i + 1
            while depth >= 0:
                j += 1
                depth += 1 if args[j] == left_sep else (
                    -1 if args[j] == right_sep else 0)
            nextvalue = str(Group(args[i + 2:j]).execute(print_rolls=False))
            # Delete the whole consumed group, separators included.
            del args[i + 1:j + 1]
        elif expression.is_expression(args[i + 1]):
            # if the arg is an expression, evaluate it
            nextvalue = str(int(expression.parse_math(args[i + 1])))
            del args[i + 1]
        elif table.is_table(args[i + 1]):
            # roll on a named table
            nextvalue = table.Table(args[i + 1]).execute()
            del args[i + 1]
        else:
            # try to parse next arg as a roll
            nextvalue = str(Roll(args[i + 1]).execute(print_output=False))
            del args[i + 1]
        # replace blank with resulting value (leftmost occurrence only)
        args[i] = args[i].replace(blank, nextvalue, 1)
def create_table(selected_types, first_date, last_date):
    """Build the sales records table filtered by product type and,
    when both dates are given, by the inclusive date range
    [first_date, last_date]. Results and totals are stored in the
    module-level records_list / total_rows / total_columns globals."""
    values = [["Id", "Name", "Type", "Date", "Price", "Amount", "Purchase"]]
    # The date filter applies only when BOTH bounds are non-empty;
    # otherwise only the type filter is used.
    filter_by_date = len(first_date) != 0 and len(last_date) != 0
    for item in sales_list:
        if item.prod_type not in selected_types:
            continue
        # NOTE: dates are compared as strings — assumes a sortable format.
        if filter_by_date and not (first_date <= str(item.prod_date) <= last_date):
            continue
        values.append([item.prod_id, item.prod_name, item.prod_type,
                       item.prod_date, item.prod_price, item.prod_amount,
                       item.prod_purchase])
    global total_rows
    global total_columns
    total_rows = len(values)
    total_columns = len(values[0])
    # Total income over all items of the selected types (date filter
    # deliberately not applied here, matching the table-wide total).
    total_income = 0
    for item in sales_list:
        if item.prod_type in selected_types:
            total_income += item.get_income()
    global records_list
    records_list = table.Table(sales, total_rows, total_columns, values,
                               total_income)
def main():
    """Exercise the Table class: fill it with an arithmetic progression,
    read it back, then delete a cell — awarding marks for each stage."""
    test = tester.Core('table test', 5)
    tester.includepath()
    import table

    t = table.Table(int, "abcdefg", range(7))
    first = random.randint(0, 99)
    step = random.randint(1, 9)

    # Fill every cell, column by column, with an arithmetic progression.
    expected = first
    for x in t.xlabels:
        for y in t.ylabels:
            t[y, x] = expected
            expected += step

    # Read the cells back in the same order; bail out (no marks) on any
    # mismatch.
    expected = first
    for x in t.xlabels:
        for y in t.ylabels:
            if t[y, x] != expected:
                return
            expected += step
    test.add_mark(4)

    # Deleting a cell must leave None behind.
    del t[3, 'b']
    if t[3, 'b'] is not None:
        return
    test.add_mark(1)
def runpostprocess(src_file, sample_filk, sample_filv, ann_filk, ann_filv):
    """Compute per-cluster position statistics and write them to
    <src_file>/stats.csv (columns: Center,avg,stdev,min,max).

    src_file: directory holding clusters.csv / centers.csv and receiving
        stats.csv.
    sample_filk/sample_filv, ann_filk/ann_filv: filter key/value pairs for
        the sample and annotation tables.

    NOTE(review): sample_file / annot_file / sample_sw / ann_sw are read
    from module-level globals, not parameters — confirm they are defined.
    """
    print('Started loading sample.')
    sample = t.Table(sample_file, filter_key=sample_filk,
                     filter_value=sample_filv, vstartswith=sample_sw)
    print('Finished loading sample.')
    print('Started loading annotations.')
    annotation = t.Table(annot_file, filter_key=ann_filk,
                         filter_value=ann_filv, vstartswith=ann_sw)
    print('Finished loading annotations.')
    print('Started loading clusters.')
    clusters = t.Table(src_file + '/clusters.csv')
    print('Finished loading clusters.')
    print('Started loading centers.')
    centers = t.Table(src_file + '/centers.csv')
    print('Finished loading centers.')
    with open(src_file + '/stats.csv', 'w') as out:
        print('Center,avg,stdev,min,max', file=out)
        for center in centers:
            cluster = clusters.select([('Cluster', center[0])])
            # FIX: the old locals shadowed the builtins min/max and used
            # magic sentinel bounds; use renamed locals and +/-inf.
            sumx, sumx2 = 0., 0.
            max_pos, min_pos = float('-inf'), float('inf')
            count = 0
            for CpG in cluster:
                position = float(annotation.get(CpG[0], 'pos'))
                if position < min_pos:
                    min_pos = position
                if position > max_pos:
                    max_pos = position
                sumx += position
                sumx2 += position ** 2
                count += 1
            print(count)
            if count == 0:
                # FIX: an empty cluster previously crashed with
                # ZeroDivisionError; skip it instead.
                continue
            ex, ex2 = sumx / count, sumx2 / count
            stdev = (ex2 - ex ** 2) ** 0.5
            print(center[0] + ',' + str(ex) + ',' + str(stdev) + ',' +
                  str(min_pos) + ',' + str(max_pos), file=out)
        print(file=out)
def _prepareTable(self, groups):
    """Build a two-column ('lr') table: one row per group, labelled with the
    group's label and showing its formatted mean and sum aggregates."""
    result = table.Table('lr')
    for grp in groups:
        mean_str = helpers.formatValues(grp.aggregate(self._mean))
        total_str = helpers.formatValues(grp.aggregate(self._sum))
        result.add(self._label(grp), 'avg[{0}] tot[{1}]'.format(mean_str, total_str))
    return result