def track(self, bunch):
    """One pickup turn: read the transverse signals for *bunch* and push
    them through the x/y processor chains.

    Parameters are project types (PyHEADTAIL-style bunch); side effects
    are on the internal signal buffers only.
    """
    bunch_list, local_slice_sets, signal_slice_sets_x, signal_slice_sets_y = \
        self._get_slice_sets(bunch)

    # Signal buffers are created lazily on the first call.
    if (self._signal_x is None) and (self._signal_y is None):
        self._init_signals(bunch_list, signal_slice_sets_x, signal_slice_sets_y)

    if self._processors_x is not None:
        self._read_signal(self._signal_x, signal_slice_sets_x, 'x',
                          self._phase_x, self._beta_x)
        final_parameters_x, final_signal_x = process(
            self._parameters_x, self._signal_x, self._processors_x,
            slice_sets=signal_slice_sets_x)

    if self._processors_y is not None:
        # NOTE(review): only the y branch re-checks its own buffer here —
        # presumably a guard for configurations where the joint init above
        # was skipped; confirm against the x branch.
        if self._signal_y is None:
            self._init_signals(bunch_list, signal_slice_sets_x,
                               signal_slice_sets_y)
        self._read_signal(self._signal_y, signal_slice_sets_y, 'y',
                          self._phase_y, self._beta_y)
        final_parameters_y, final_signal_y = process(
            self._parameters_y, self._signal_y, self._processors_y,
            slice_sets=signal_slice_sets_y)
def generate_plot(players, pond, matches, teams, bets):
    """Replay all weighted matches in date order and plot every player's
    cumulative score history, saving the figure to plot.jpg.

    players -- mapping of player name -> (initial) score
    pond    -- pond state (unused here; kept for interface parity)
    matches -- iterable of match dicts with "date", "weight", "ID" keys
    teams   -- team ownership mapping passed through to core.process
    bets    -- mapping of match ID -> per-player bets
    """
    report = dict()
    player_score_list = dict()
    for player_name in players:
        report[player_name] = 0
        player_score_list[player_name] = [0]
    # key= replaces the Python-2-only cmp= comparator; ordering is identical.
    for a_match in sorted(matches, key=lambda m: m["date"]):
        if a_match["weight"] > 0:
            pond_report = {"sum": 0}
            core.process(report, teams, a_match, bets[a_match["ID"]],
                         pond_report)
            for player_name in players:
                player_score_list[player_name].append(report[player_name])
    player_plot_list = []
    style_list = ['b->', 'y-d', 'k-x', 'm-o', 'c-^', 'r-1', 'g-s', 'b-p']
    # Index from the end (matching the old style_list.pop() order) and wrap
    # around so more than len(style_list) players no longer raises IndexError.
    for i, player_name in enumerate(players):
        style = style_list[-1 - (i % len(style_list))]
        scores = player_score_list[player_name]
        player_plot_list.append(
            pl.plot(range(len(scores)), scores, style, label=player_name))
    pl.xlabel('x axis')  # make axis labels
    pl.ylabel('y axis')
    pl.legend(loc=2, fontsize='10')
    savefig("plot.jpg")
def generate_plot(players, pond, matches, teams, bets):
    """Replay all weighted matches in date order and plot every player's
    cumulative score history, saving the figure to plot.jpg.

    players -- mapping of player name -> (initial) score
    pond    -- pond state (unused here; kept for interface parity)
    matches -- iterable of match dicts with "date", "weight", "ID" keys
    teams   -- team ownership mapping passed through to core.process
    bets    -- mapping of match ID -> per-player bets
    """
    report = dict()
    player_score_list = dict()
    for player_name in players:
        report[player_name] = 0
        player_score_list[player_name] = [0]
    # key= replaces the Python-2-only cmp= comparator; ordering is identical.
    for a_match in sorted(matches, key=lambda m: m["date"]):
        if a_match["weight"] > 0:
            pond_report = {"sum": 0}
            core.process(report, teams, a_match, bets[a_match["ID"]],
                         pond_report)
            for player_name in players:
                player_score_list[player_name].append(report[player_name])
    player_plot_list = []
    style_list = ['b->', 'y-d', 'k-x', 'm-o', 'c-^', 'r-1', 'g-s', 'b-p']
    # Index from the end (matching the old style_list.pop() order) and wrap
    # around so more than len(style_list) players no longer raises IndexError.
    for i, player_name in enumerate(players):
        style = style_list[-1 - (i % len(style_list))]
        scores = player_score_list[player_name]
        player_plot_list.append(
            pl.plot(range(len(scores)), scores, style, label=player_name))
    pl.xlabel('x axis')  # make axis labels
    pl.ylabel('y axis')
    pl.legend(loc=2, fontsize='10')
    savefig("plot.jpg")
def track(self, bunch):
    """One feedback turn: combine pickup signals, run the processor
    chains and apply the resulting kicks in both transverse planes."""
    bunch_list, local_slice_sets, signal_slice_sets_x, signal_slice_sets_y = \
        self._get_slice_sets(bunch)

    # Signal buffers are created lazily on the first call.
    if (self._signal_x is None) and (self._signal_y is None):
        self._init_signals(bunch_list, signal_slice_sets_x, signal_slice_sets_y)

    if self._processors_x is not None:
        params_x, corr_x = self._combiner_x.process()
        params_x, corr_x = process(params_x, corr_x, self._processors_x,
                                   slice_sets=signal_slice_sets_x)
        if corr_x is not None:
            corr_x = corr_x * self._gain_x
            self._kick_bunches(corr_x, 'x', local_slice_sets, bunch_list,
                               self._loc_signal_sets_x)

    if self._processors_y is not None:
        # NOTE(review): unlike the x plane, the combiner output is stored on
        # self here (self._parameters_y / self._signal_y) — presumably
        # intentional state keeping; confirm against the x branch.
        self._parameters_y, self._signal_y = self._combiner_y.process()
        kick_params_y, kick_corr_y = process(
            self._parameters_y, self._signal_y, self._processors_y,
            slice_sets=signal_slice_sets_y)
        if kick_corr_y is not None:
            kick_corr_y = kick_corr_y * self._gain_y
            self._kick_bunches(kick_corr_y, 'y', local_slice_sets,
                               bunch_list, self._loc_signal_sets_y)
def generate_markdown(players, players_stat, pond, matches, teams, bets,
                      filename="README.md"):
    """Render the competition report (ranking, pond, match history) as
    Markdown and write it to *filename*.

    players      -- mapping of player name -> current score
    players_stat -- mapping of player name -> {"win": float, "loss": float}
    pond         -- dict with a "sum" entry
    matches      -- iterable of match dicts
    teams        -- mapping of team name -> owner name
    bets         -- mapping of match ID -> per-player guess strings
    """
    # NOTE(review): the header literal's internal line breaks were lost in
    # the collapsed source; reconstructed from the visible words — confirm.
    md = u'''# E cup guess competition report
## real-time ranking
'''
    ISOTIMEFORMAT = '%Y-%m-%d %X'
    md += time.strftime(ISOTIMEFORMAT, time.localtime()) + '\n'
    md += "\n|rank|name|score|win|loss|\n|:---:|:---:|:---:|:---:|:---:|\n"
    rank = 0
    matches_sum = len(matches)
    # Ranking table, best score first.
    for player_name in sorted(players, key=lambda p: players[p], reverse=True):
        rank += 1
        md += '|' + str(rank) + '|' + player_name + '|' + "%.2f" % players[player_name] + '|' \
            + "%.0f%%" % (players_stat[player_name]["win"] / matches_sum * 100) + '|' \
            + "%.0f%%" % (players_stat[player_name]["loss"] / matches_sum * 100) + '|\n'
    md += "\n## Pond\n" + "%.2f" % pond["sum"] + '\n'
    md += "\n## History\n"
    md += "![image](https://github.com/Anserw/E_Cup/blob/master/plot.jpg)"
    # key= replaces the Python-2-only cmp= comparator; newest match first.
    for a_match in sorted(matches, key=lambda m: m["date"], reverse=True):
        if a_match["weight"] > 0:
            md += "\n### " + str(a_match["date"]) + ' ' + a_match["teamA"] + ' ' + str(a_match["scoreA"]) + \
                " : " + str(a_match["scoreB"]) + ' ' + a_match["teamB"] + '\n'
            if a_match["HandicapA"] == a_match["HandicapB"]:
                md += "- handicap: " + str(a_match["HandicapA"]) + '\n'
            else:
                md += "- handicap: " + str(a_match["HandicapA"]) + ' / ' + str(a_match["HandicapB"]) + '\n'
            md += "- scorer: " + a_match["scorer"] + '\n'
            md += "- owners:\n"
            md += " - " + a_match["teamA"] + ": "
            for a_team in teams:
                if a_team == a_match["teamA"]:
                    md += teams[a_team] + "\n"
            md += " - " + a_match["teamB"] + ": "
            for a_team in teams:
                if a_team == a_match["teamB"]:
                    md += teams[a_team] + "\n\n"
            md += "\n|name|guess|score change|\n|:---:|:---:|:---:|\n"
            # Re-run just this match to get per-player score deltas.
            report = dict()
            pond_report = {"sum": 0}
            for player_name in players:
                report[player_name] = 0
            core.process(report, teams, a_match, bets[a_match["ID"]], pond_report)
            for player_name in report:
                md += '|' + player_name + '|' + bets[a_match["ID"]][player_name] + '|' + "%.2f" % report[player_name] + '|\n'
            md += '|' + "Pond" + '|' + '..' + '|' + "%.2f" % pond_report["sum"] + '|\n'
    # with-statement closes the file even if write() raises (the original
    # open/write/close leaked the handle on error).
    with open(filename, 'w') as f:
        f.write(md)
def track(self, bunch):
    """One feedback turn: read the pickup signals, run the processor
    chains and apply the gain- and beta-scaled kicks in both planes."""
    bunch_list, local_slice_sets, signal_slice_sets_x, signal_slice_sets_y = \
        self._get_slice_sets(bunch)

    # Signal buffers are created lazily on the first call.
    if (self._signal_x is None) and (self._signal_y is None):
        self._init_signals(bunch_list, signal_slice_sets_x, signal_slice_sets_y)

    if self._processors_x is not None:
        self._read_signal(self._signal_x, signal_slice_sets_x, 'x',
                          self._phase_x, self._beta_x)
        out_params_x, correction_x = process(
            self._parameters_x, self._signal_x, self._processors_x,
            slice_sets=signal_slice_sets_x)
        if correction_x is not None:
            correction_x = correction_x * self._gain_x
            # Convert units via beta when pickup and kicker act on
            # different coordinates (displacement vs divergence).
            if self._pickup_axis == 'displacement' and self._kicker_axis == 'divergence':
                correction_x = correction_x / self._beta_x
            elif self._pickup_axis == 'divergence' and self._kicker_axis == 'displacement':
                correction_x = correction_x * self._beta_x
            self._kick_bunches(correction_x, 'x', local_slice_sets,
                               bunch_list, self._loc_signal_sets_x)

    if self._processors_y is not None:
        self._read_signal(self._signal_y, signal_slice_sets_y, 'y',
                          self._phase_y, self._beta_y)
        out_params_y, correction_y = process(
            self._parameters_y, self._signal_y, self._processors_y,
            slice_sets=signal_slice_sets_y)
        if correction_y is not None:
            correction_y = correction_y * self._gain_y
            # Same unit conversion as in the x plane.
            if self._pickup_axis == 'displacement' and self._kicker_axis == 'divergence':
                correction_y = correction_y / self._beta_y
            elif self._pickup_axis == 'divergence' and self._kicker_axis == 'displacement':
                correction_y = correction_y * self._beta_y
            self._kick_bunches(correction_y, 'y', local_slice_sets,
                               bunch_list, self._loc_signal_sets_y)
def run_serial(input, output, core, prefix='', indexing=True): output = codecs.open(output, 'w') index = 1 for line in codecs.open(input): print prefix, 'processing line number:', index start_time_doc = time.time() jl = json.loads(line) jl.pop('knowledge_graph', None) if 'content_extraction' in jl: ce = jl['content_extraction'] if 'inferlink_extractions' in ce: ce.pop('inferlink_extractions') jl['content_extraction'] = ce jl.pop('indexed', None) result = core.process(jl, create_knowledge_graph=True) if indexing: result = index_knowledge_graph_fields(result) if result: output.write(json.dumps(result) + '\n') time_taken_doc = time.time() - start_time_doc # if time_taken_doc > 5: # print prefix, "Took", str(time_taken_doc), " seconds" else: print 'Failed line number:', index index += 1 output.close()
def on_message(self, message):
    """Handle messages from Discord."""
    text = message.content
    author_id = message.author.id
    author = message.author.name

    if self.update['check']:
        # Hot-reload: re-exec the interpreter when any watched source file
        # is newer than the last recorded edit, or on an admin 'RLD'.
        newest_edit = max(os.stat(path)[8] for path in self.update['sources'])
        if newest_edit > self.update['last'] or (text == 'RLD' and author_id in self.admins):
            print(stime() + ' reloading')
            os.execl(sys.executable, *([sys.executable] + sys.argv))

    is_admin = author_id in self.admins
    if is_admin and text == ';masterkill':
        # Full shutdown of the process.
        print(stime() + ' masterkill by ' + author)
        self.running = False
        self.client.logout()
        exit(0)
    elif is_admin and text == ';kill':
        print(stime() + ' killed by ' + author)
        self.running = False
    elif is_admin and text == ';reload':
        print(stime() + ' reloaded by ' + author)
        self.running = True
        yield from self.send_message(message.channel, '`Bot reloaded.`')
    elif self.running:
        # Normal message path.
        yield from core.process(self, message, self.admins)
def process_wrapper(core, input, chunk_start, chunk_size, queue): results = [] with open(input) as f: f.seek(chunk_start) lines = f.read(chunk_size).splitlines() for i, line in enumerate(lines): document = json.loads(line) try: document = core.process(document, create_knowledge_graph=True) except Exception as e: print "Failed - ", e
def run_serial(input, output, core): output = codecs.open(output, 'w') index = 1 for line in codecs.open(input): print 'processing line number:', index start_time_doc = time.time() jl = json.loads(line) result = core.process(jl, create_knowledge_graph=True) output.write(json.dumps(result) + '\n') time_taken_doc = time.time() - start_time_doc if time_taken_doc > 5: print "Took", str(time_taken_doc), " seconds" index += 1 output.close()
def test_process(self):
    """core.process should wrap quoted spans in <strong> and
    parenthesised URL spans in <a> links, including nesting."""
    payload = {
        "text": 'Lorem ipsum "dolor sit" amet, consectetur "adipiscing (elit, sed https://jestocke.com( do"',
        "tags": (
            {"char": "\"", "function": tags_processors.strong},
            {"char": "(", "function": tags_processors.a},
        ),
    }
    expected = (
        "Lorem ipsum <strong>dolor sit</strong> amet, consectetur "
        "<strong>adipiscing <a href='https://jestocke.com'>elit, sed</a> do</strong>"
    )
    self.assertEqual(core.process(payload), expected)
def format_text() -> List[Tuple[str, str]]:
    """Format every input row with the strong/link tag processors.

    Returns:
        A list of (row id, processed text) pairs, one per input row.
    """
    rows = data_IO.read_input()
    # One shared request dict is reused across rows (only "text" changes),
    # mirroring the shape core.process expects.
    request = {
        "text": "",
        "tags": (
            {"char": "\"", "function": tags_processors.strong},
            {"char": "(", "function": tags_processors.a},
        ),
    }
    output = []
    for row in rows:
        request["text"] = row[1]
        output.append((row[0], core.process(request)))
    return output
def handler(data=None):
    """Process *data* through core and POST the response.

    Errors are logged and swallowed unless config['debug'] is set, in
    which case they propagate for debugging.
    """
    # Nothing to do for empty/missing input.
    if not data:
        return
    try:
        resp = core.process(data)
        post(resp)
        return
    except Exception as err:
        if config['debug']:
            raise
        log("ERROR - " + str(err))
# NOTE(review): fragment of a larger polling loop — the `except` clauses below
# belong to a `try:` that is outside this view, so this span cannot be
# restructured safely here. It appears to fetch submissions newer than
# last_check from switcharoo, process them oldest-first, persist state, and
# back off on Reddit connectivity errors — confirm against the full file.
if not last_check: last_check = get_newest_id(switcharoo, 1) submissions = [] for submission in switcharoo.new( params={"before": "t3_{}".format(last_check)}): submissions.append(submission) if submissions: print("Processing new submissions...") submissions.reverse() # Process every submission for submission in submissions: process(reddit, submission, last_switcharoo, action) action.reset() print("Checked up to", submissions[len(submissions) - 1].id) save_last_data(last_data, last_switcharoo) time.sleep(consts.sleep_time) except prawcore.exceptions.RequestException: # Unable to connect to Reddit print("Unable to connect to Reddit, is the internet down?") time.sleep(consts.sleep_time * 2) except prawcore.exceptions.ResponseException as e: print("weird other exceptions?", e) time.sleep(consts.sleep_time * 2)
# NOTE(review): the leading `f.write(md) f.close()` is the tail of a function
# whose `def` is outside this view, fused with the script's __main__ block —
# left byte-identical because the enclosing structure cannot be reconstructed
# safely. The __main__ part loads all data, replays every match through
# core.process, then generates the plot (best-effort) and the Markdown report.
f.write(md) f.close() if __name__ == "__main__": players = loader.loadPlayers() teams = loader.loadTeam() matches = loader.loadMatch() bets = loader.loadBet() pond = loader.loadPond() players_stat = copy.deepcopy(players) for a_player in players_stat: players_stat[a_player] = {"win": 0.0, "loss": 0.0} # reports = dict() for a_match in matches: core.process(players, teams, a_match, bets[a_match["ID"]], pond, players_stat) # report = dict() # pond_report = {"sum": 0} # for player_name in players: # report[player_name] = 0 # core.process(report, teams, a_match, bets[a_match["ID"]], pond_report) # report["ID"] = a_match["ID"] # report["pond"] = pond_report["sum"] # reports["ID"] = report # print "result", players # print "report", report try: plot.generate_plot(players, pond, matches, teams, bets) except: print "Error: cannot draw a plot!" generate_markdown(players, players_stat, pond, matches, teams, bets)
# NOTE(review): fragment — this collapsed line mixes comment banners and code
# indistinguishably, and the trailing `for` body continues past the visible
# end, so it is left byte-identical. It appears to run core.process to detect
# rectangles for `path` when `pos` is unset, then start scoring candidate
# rectangles into a (1, COLUMNS) feature row — confirm against the full file.
# # load data # buffering(path, BUFFERS, ORIGS, METAS, KEYS, CUTOFF) # print pos if None == pos: # # get rectangles # POSITIONS = {} RECTANGLES = {} core.process(path, POSITIONS, RECTANGLES, BUFFERS, ORIGS, KEYS, \ EPOCHES, MEAN_MUL, FRAME_SIZE, STRIDE, KOEF, LOW_VAL, HIGH_VAL, remove_zeros=False) rects = RECTANGLES[path] # # get the best rectangle # bestR = None bestP = 0 data = np.zeros((1, COLUMNS), dtype=np.float64) for i in range(len(rects)): r = rects[i] data[0, :20] = METAS[path] data[0, 20:] = r[:4]
import sys

import core

if __name__ == "__main__":
    # Require an input filename and a destination folder.
    if len(sys.argv) < 3:
        print("Usage: %s <filename> <destination folder>" % sys.argv[0])
        sys.exit(-1)
    # Exit with whatever status core.process reports.
    sys.exit(core.process(sys.argv[1], sys.argv[2]))