def calc_median_score_increase(xml):
    from statistics import median

    score_increases = []

    for chart in xml.iter("ScoresAt"):
        # Chronologically sorted scores
        scores = sorted(iter_scores(chart), key=lambda s: s.findtext("DateTime"))

        for i in range(0, len(scores) - 1):
            datetime_1 = parsedate(scores[i].findtext("DateTime"))
            datetime_2 = parsedate(scores[i + 1].findtext("DateTime"))
            time_delta = datetime_2 - datetime_1
            play_time = float(scores[i].findtext("SurviveSeconds"))
            idle_time = time_delta.total_seconds() - play_time

            # If the same chart is played twice within 60 seconds
            if idle_time < 60:
                score_1 = float(scores[i].findtext("SSRNormPercent"))
                score_2 = float(scores[i + 1].findtext("SSRNormPercent"))
                score_increase = 100 * (score_2 - score_1)
                score_increases.append(score_increase)

    if len(score_increases) == 0:
        return 0
    else:
        return median(score_increases)
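# Illustrative usage sketch (an addition, not part of the original module):
# load a savegame with ElementTree and print the statistic computed above.
# The "Etterna.xml" filename and the module-level iter_scores/parsedate
# helpers that calc_median_score_increase relies on are assumptions here.
def _demo_median_score_increase():
    import xml.etree.ElementTree as ET
    root = ET.parse("Etterna.xml").getroot()
    print(f"Median quick-retry score increase: {calc_median_score_increase(root):.2f}%")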
def divide_into_sessions(xml):
    if cache("sessions_division_cache"):
        return cache("sessions_division_cache")

    session_end_threshold = timedelta(hours=1)

    scores = list(iter_scores(xml))
    datetimes = [parsedate(s.find("DateTime").text) for s in scores]
    zipped = zip(scores, datetimes)
    zipped = sorted(zipped, key=lambda pair: pair[1])
    # zipped is a list of chronologically sorted (score object, datetime) tuples

    prev_score_datetime = zipped[0][1]  # first datetime
    current_session = [zipped[0]]  # list of (score object, datetime) tuples in current session
    sessions = []  # list of sessions where every session is like `current_session`
    for score, score_datetime in zipped[1:]:
        score_interval = score_datetime - prev_score_datetime
        # check if timedelta between two scores is too high
        if score_interval > session_end_threshold:
            sessions.append(current_session)
            current_session = []
        current_session.append((score, score_datetime))
        prev_score_datetime = score_datetime
    sessions.append(current_session)

    return cache("sessions_division_cache", sessions)
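# Illustrative usage sketch (an addition): summarize the sessions returned by
# divide_into_sessions(). The (score, datetime)-tuple structure is taken from
# the comments in the function above; the `xml` argument is assumed to be a
# parsed savegame as elsewhere in this module.
def _demo_session_summary(xml):
    sessions = divide_into_sessions(xml)
    for i, session in enumerate(sessions):
        start, end = session[0][1], session[-1][1]
        print(f"Session {i}: {len(session)} scores, {start} to {end}")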
def create(self, transaction, prec, succs=(), flag=0, parents=None,
           date=None, metadata=None):
    """obsolete: add a new obsolete marker

    * ensuring it is hashable
    * check mandatory metadata
    * encode metadata

    If you are a human writing code creating markers, you want to use
    the `createmarkers` function in this module instead.

    Returns True if a new marker has been added, False if the marker
    already existed (no op).
    """
    if metadata is None:
        metadata = {}
    if date is None:
        if 'date' in metadata:
            # as a courtesy for out-of-tree extensions
            date = util.parsedate(metadata.pop('date'))
        else:
            date = util.makedate()
    if len(prec) != 20:
        raise ValueError(prec)
    for succ in succs:
        if len(succ) != 20:
            raise ValueError(succ)
    if prec in succs:
        raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
    metadata = tuple(sorted(metadata.iteritems()))

    marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
    return bool(self.add(transaction, [marker]))
def __init__(self, repo, text="", user=None, date=None, extra=None,
             changes=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    if date:
        self._date = util.parsedate(date)
    if user:
        self._user = user
    if changes:
        self._status = list(changes[:4])
        self._unknown = changes[4]
        self._ignored = changes[5]
        self._clean = changes[6]
    else:
        self._unknown = None
        self._ignored = None
        self._clean = None

    self._extra = {}
    if extra:
        self._extra = extra.copy()
    if 'branch' not in self._extra:
        branch = self._repo.dirstate.branch()
        try:
            branch = branch.decode('UTF-8').encode('UTF-8')
        except UnicodeDecodeError:
            raise util.Abort(_('branch name not in UTF-8!'))
        self._extra['branch'] = branch
    if self._extra['branch'] == '':
        self._extra['branch'] = 'default'
def __init__(self, repo, parents=None, text="", user=None, date=None,
             extra=None, changes=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    if date:
        self._date = util.parsedate(date)
    if user:
        self._user = user
    if parents:
        self._parents = [changectx(self._repo, p) for p in parents]
    if changes:
        self._status = list(changes)

    self._extra = {}
    if extra:
        self._extra = extra.copy()
    if 'branch' not in self._extra:
        branch = self._repo.dirstate.branch()
        try:
            branch = branch.decode('UTF-8').encode('UTF-8')
        except UnicodeDecodeError:
            raise util.Abort(_('branch name not in UTF-8!'))
        self._extra['branch'] = branch
    if self._extra['branch'] == '':
        self._extra['branch'] = 'default'
def __init__(self, repo, parents, text, files, filectxfn, user=None,
             date=None, extra=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    self._date = date and util.parsedate(date) or util.makedate()
    self._user = user
    parents = [(p or nullid) for p in parents]
    p1, p2 = parents
    self._parents = [changectx(self._repo, p) for p in (p1, p2)]
    files = sorted(set(files))
    self._status = [files, [], [], [], []]
    self._filectxfn = filectxfn

    self._extra = extra and extra.copy() or {}
    if self._extra.get('branch', '') == '':
        self._extra['branch'] = 'default'
def add(self, manifest, files, desc, transaction, p1, p2,
        user, date=None, extra={}):
    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.RevlogError(_("empty username"))
    if "\n" in user:
        raise error.RevlogError(_("username %s contains a newline")
                                % repr(user))

    # strip trailing whitespace and leading and trailing empty lines
    desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')

    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra and extra.get("branch") in ("default", ""):
        del extra["branch"]
    if extra:
        extra = encodeextra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, len(self), p1, p2)
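# Illustrative sketch (an addition): the revision text that add() above joins
# together has this layout, reconstructed from the `l = [...]` line: manifest
# hex, user, "unixtime offset[ extra]", the sorted files, a blank line, then
# the description. All values below are placeholders for illustration only.
def _demo_changelog_entry_layout():
    manifest_hex = "0" * 40  # placeholder manifest hash
    lines = [manifest_hex,
             "Jane Doe <jane@example.com>",
             "0 0",
             "a.txt",
             "b.txt",
             "",
             "example commit message"]
    return "\n".join(lines)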
def __init__(self, repo, text="", user=None, date=None, extra=None,
             changes=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    if date:
        self._date = util.parsedate(date)
    if user:
        self._user = user
    if changes:
        self._status = list(changes[:4])
        self._unknown = changes[4]
        self._ignored = changes[5]
        self._clean = changes[6]
    else:
        self._unknown = None
        self._ignored = None
        self._clean = None

    self._extra = {}
    if extra:
        self._extra = extra.copy()
    if "branch" not in self._extra:
        try:
            branch = encoding.fromlocal(self._repo.dirstate.branch())
        except UnicodeDecodeError:
            raise util.Abort(_("branch name not in UTF-8!"))
        self._extra["branch"] = branch
    if self._extra["branch"] == "":
        self._extra["branch"] = "default"
def add_user(self, user):
    if user is None:
        return
    # We don't need the same user twice
    if user in self.plotted_users:
        return

    dates = user["dates"]
    x = [util.date_to_year_float(util.parsedate(d)) for d in dates]
    y = user["ratings"]
    # Duplicate the last element to satisfy pyqtgraph's stepMode, and do it
    # out-of-place so as not to modify the original data
    x = [*x, x[-1]]

    # Draw curve
    pen = (1, 1)
    item = self.ui.plot.plot(x, y, pen=pen, antialias=True, stepMode=True,
                             name=user["username"])

    # Add click callback
    item.curve.setClickable(True, 4)  # second argument is px
    item.sigClicked.connect(lambda: self.show_user_info(user))

    # Add objects to lists
    self.items.append(item)
    self.plotted_users.append(user)

    self.redistribute_colors()
def gen_text_general_info(xml, r):
    from dateutil.relativedelta import relativedelta

    total_notes = 0
    for tap_note_scores in xml.iter("TapNoteScores"):
        total_notes += sum(int(e.text) for e in tap_note_scores)
    total_notes_string = util.abbreviate(total_notes, min_precision=3)

    scores = list(iter_scores(xml))
    num_charts = len(list(xml.iter("Chart")))
    hours = sum(float(s.findtext("SurviveSeconds")) / 3600 for s in scores)
    first_play_date = min([parsedate(s.findtext("DateTime")) for s in scores])
    duration = relativedelta(datetime.now(), first_play_date)

    grades = count_nums_grades(xml)
    # ~ grades_string_1 = ", ".join(f"{name}: {grades[name]}" for name in ("AAAA", "AAA", "AA"))
    # ~ grades_string_2 = ", ".join(f"{name}: {grades[name]}" for name in ("A", "B", "C", "D"))
    grades_string = ", ".join(f"{name}: {grades[name]}" for name in "AAAA AAA AA A B C D".split())
    grade_names = list(reversed(util.grade_names))

    best_aaa = (None, 0)
    best_aaaa = (None, 0)
    for score in iter_scores(xml):
        wifescore = float(score.findtext("SSRNormPercent"))
        skillset_ssrs = score.find("SkillsetSSRs")
        if skillset_ssrs is None:
            continue
        overall = float(skillset_ssrs.findtext("Overall"))

        if wifescore < util.AAA_THRESHOLD:
            pass  # we don't care about sub-AAA scores
        elif wifescore < util.AAAA_THRESHOLD:
            if overall > best_aaa[1]:
                best_aaa = (score, overall)
        else:
            if overall > best_aaaa[1]:
                best_aaaa = (score, overall)

    def get_score_desc(score, overall) -> str:
        if score is None:
            return "[none]"
        chart = util.find_parent_chart(xml, score)
        dt = score.findtext("DateTime")
        wifescore = float(score.findtext("SSRNormPercent"))
        pack = chart.get("Pack")
        song = chart.get("Song")
        return f"{overall:.2f}, {wifescore*100:.2f}% - \"{song}\" ({pack}) - {dt[:10]}"

    return "<br>".join([
        f"You started playing {duration.years} years {duration.months} months ago",
        f"Total hours spent playing: {round(hours)} hours",
        f"Number of scores: {len(scores)}",
        f"Number of unique files played: {num_charts}",
        f"Grades: {grades_string}",
        # ~ f"Grades: {grades_string_1}",
        # ~ f"{util.gen_padding_from('Grades: ')}{grades_string_2}",
        f"Total arrows hit: {total_notes_string}",
        f"Best AAA: {get_score_desc(best_aaa[0], best_aaa[1])}",
        f"Best AAAA: {get_score_desc(best_aaaa[0], best_aaaa[1])}",
    ])
def gen_plays_by_hour(xml):
    num_plays = [0] * 24
    for score in iter_scores(xml):
        datetime = parsedate(score.find("DateTime").text)
        num_plays[datetime.hour] += 1

    # I tried to use a datetime as key (would be nicer to display), but
    # it doesn't play nicely with matplotlib, so we need to use an
    # integer to represent the hour of the day.
    #return {time(hour=i): num_plays[i] for i in range(24)}
    return list(range(24)), num_plays
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(
                            _("no match under directory %s!") % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(
                        _("can't commit %s: "
                          "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
    else:
        files = []
    try:
        return commitfunc(ui, repo, files, message, match, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
    else:
        files = []
    try:
        return commitfunc(ui, repo, files, message, match, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    m = match(repo, pats, opts)
    if pats:
        modified, added, removed = repo.status(match=m)[:3]
        files = util.sort(modified + added + removed)

        def is_dir(f):
            name = f + '/'
            i = bisect.bisect(files, name)
            return i < len(files) and files[i].startswith(name)

        for f in m.files():
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    if is_dir(f):  # deleted directory ?
                        continue
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    if not is_dir(f):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
        m = matchfiles(repo, files)
    try:
        return commitfunc(ui, repo, message, m, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
def gen_scores_per_hour(xml):
    hours_of_day = []
    overalls = []
    ids = []
    for score in xml.iter("Score"):
        skillset_ssrs = score.find("SkillsetSSRs")
        # Compare against None explicitly; ElementTree elements without
        # children are falsy even when present
        if skillset_ssrs is None:
            continue
        overalls.append(float(skillset_ssrs.findtext("Overall")))

        dt = parsedate(score.findtext("DateTime"))
        midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)
        hour_of_day = (dt - midnight).total_seconds() / 3600
        hours_of_day.append(hour_of_day)

        ids.append(score)

    return (hours_of_day, overalls), ids
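# Worked example (an addition): the fractional hour-of-day computed above
# maps 14:30:00 to 14.5.
def _demo_fractional_hour():
    from datetime import datetime
    dt = datetime(2020, 5, 17, 14, 30)
    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    return (dt - midnight).total_seconds() / 3600  # -> 14.5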
def __init__(self, repo, parents, text, files, filectxfn, user=None,
             date=None, extra=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    self._date = date and util.parsedate(date) or util.makedate()
    self._user = user
    parents = [(p or nullid) for p in parents]
    p1, p2 = parents
    self._parents = [changectx(self._repo, p) for p in (p1, p2)]
    files = sorted(set(files))
    self._status = [files, [], [], [], []]
    self._filectxfn = filectxfn

    self._extra = extra and extra.copy() or {}
    if self._extra.get("branch", "") == "":
        self._extra["branch"] = "default"
def add(self, manifest, files, desc, transaction, p1, p2,
        user, date=None, extra=None):
    # Convert to UTF-8 encoded bytestrings as the very first
    # thing: calling any method on a localstr object will turn it
    # into a str object and the cached UTF-8 string is thus lost.
    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.RevlogError(_("empty username"))
    if "\n" in user:
        raise error.RevlogError(
            _("username %s contains a newline") % repr(user))

    # strip trailing whitespace and leading and trailing empty lines
    desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra:
        branch = extra.get("branch")
        if branch in ("default", ""):
            del extra["branch"]
        elif branch in (".", "null", "tip"):
            raise error.RevlogError(
                _('the name \'%s\' is reserved') % branch)
    if extra:
        extra = encodeextra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, len(self), p1, p2)
def add(self, manifest, list, desc, transaction, p1=None, p2=None,
        user=None, date=None, extra={}):
    user, desc = util.fromlocal(user), util.fromlocal(desc)

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra and extra.get("branch") in ("default", ""):
        del extra["branch"]
    if extra:
        extra = self.encode_extra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    list.sort()
    l = [hex(manifest), user, parseddate] + list + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, self.count(), p1, p2)
def gen_plays_per_week(xml):
    datetimes = [parsedate(s.findtext("DateTime")) for s in iter_scores(xml)]
    datetimes.sort()

    weeks = {}
    week_end = datetimes[0]
    week_start = week_end - timedelta(weeks=1)
    i = 0
    while i < len(datetimes):
        if datetimes[i] < week_end:
            weeks[week_start] += 1
            i += 1
        else:
            week_start += timedelta(weeks=1)
            week_end += timedelta(weeks=1)
            weeks[week_start] = 0

    return (list(weeks.keys()), list(weeks.values()))
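# Illustrative usage sketch (an addition): the parallel week-start/count
# lists returned above can be plotted directly; matplotlib is an assumption
# here, any plotting library with a date axis works.
def _demo_plot_plays_per_week(xml):
    import matplotlib.pyplot as plt
    week_starts, counts = gen_plays_per_week(xml)
    plt.bar(week_starts, counts, width=7)  # bars one week (7 days) wide
    plt.show()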
def calc_ratings(user):
    from itertools import groupby

    # Prepare scores
    scores = [s for s in user["scores"] if s["nerf"] != 0]  # Filter invalid
    scores = zip(scores, [util.parsedate(s["datetime"]) for s in scores])  # Zip with parsed dates
    scores = sorted(scores, key=lambda pair: pair[1])  # Sort by date

    skillsets = np.empty([7, len(scores)], dtype="float64")
    ss_len = 0

    dates = []
    ratings = []
    for date, pairs in groupby(scores, lambda s: s[1]):
        # Extract nerfed skillset values into `skillsets`
        for score in (pair[0] for pair in pairs):
            # Skip score if it's invalid
            if (score["overall"] == 0  # EO says it's invalid
                    or date < datetime(year=2000, month=1, day=1)  # Too old
                    or date > datetime.today()  # In the future
                    or score["overall"] > 40  # Unreasonably high rating
                    or score["wifescore"] > 100):  # Impossible accuracy
                continue
            nerf_multiplier = score["nerf"] / score["overall"]
            for ss in range(7):  # Iterate skillsets
                skillsets[ss][ss_len] = score["skillsets"][ss] * nerf_multiplier
            ss_len += 1

        # Overall rating
        rating = find_ratings(skillsets[:, :ss_len])[0]

        # If the rating changed from the previous play-day (or if there are
        # no entries yet in the ratings list)..
        if len(ratings) == 0 or rating != ratings[-1]:
            # ..append date and overall ([0]) rating
            dates.append(util.formatdate(date))
            ratings.append(rating)

    return dates, ratings
def __init__(self, repo, parents, text, files, filectxfn, user=None,
             date=None, extra=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    self._date = date and util.parsedate(date) or util.makedate()
    self._user = user
    parents = [(p or nullid) for p in parents]
    p1, p2 = parents
    self._parents = [changectx(self._repo, p) for p in (p1, p2)]
    files = util.sort(util.unique(files))
    self._status = [files, [], [], [], []]
    self._filectxfn = filectxfn

    self._extra = extra and extra.copy() or {}
    if 'branch' not in self._extra:
        self._extra['branch'] = 'default'
    elif self._extra.get('branch') == '':
        self._extra['branch'] = 'default'
def gen_avg_score_per_hour(xml):
    nums_scores = [0] * 24
    score_sums = [0] * 24
    for score in xml.iter("Score"):
        skillset_ssrs = score.find("SkillsetSSRs")
        # Compare against None explicitly; ElementTree elements without
        # children are falsy even when present
        if skillset_ssrs is None:
            continue
        hour = parsedate(score.findtext("DateTime")).hour
        nums_scores[hour] += 1
        score_sums[hour] += float(skillset_ssrs.findtext("Overall"))

    x, y = [], []
    for i, (num_scores, score_sum) in enumerate(zip(nums_scores, score_sums)):
        x.append(i)
        try:
            y.append(score_sum / num_scores)
        except ZeroDivisionError:
            y.append(0)
    return x, y
def gen_idle_time_buckets(xml):
    # Each bucket spans 5 seconds; with 600 buckets, a total of 50 minutes
    # of idle time is tracked
    buckets = [0] * 600

    a, b = 0, 0  # debug counters: scores seen, negative idle times seen

    scores = []
    for scoresat in xml.iter("ScoresAt"):
        rate = float(scoresat.get("Rate"))
        scores.extend(((score, rate) for score in iter_scores(scoresat)))

    # Sort scores by datetime, oldest first
    scores.sort(key=lambda pair: pair[0].findtext("DateTime"))

    last_play_end = None
    for score, rate in scores:
        a += 1
        datetime = util.parsedate(score.findtext("DateTime"))
        survive_seconds = float(score.findtext("SurviveSeconds"))
        #print(survive_seconds, rate)
        length = timedelta(seconds=survive_seconds * rate)

        #print("Datetime:", datetime)
        #print("Play length:", str(length)[:-7], "(according to SurviveSeconds)")

        if last_play_end is not None:
            idle_time = datetime - last_play_end
            if idle_time >= timedelta():
                bucket_index = int(idle_time.total_seconds() // 5)
                if bucket_index < len(buckets):
                    buckets[bucket_index] += 1
            else:
                #print("Negative idle time!")
                b += 1

        last_play_end = datetime + length
        #print("Finished", last_play_end)
        #print()

    # ~ keys = [i * 5 for i in range(len(buckets))]
    keys = range(len(buckets))
    return (keys, buckets)
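# Worked example (an addition): with 5-second buckets, an idle gap of 12.3
# seconds lands in bucket int(12.3 // 5) == 2, i.e. the 10-15 s bucket; gaps
# of 3000 s (50 min) or more fall outside the 600 tracked buckets.
def _demo_idle_bucket_index(idle_seconds: float) -> int:
    return int(idle_seconds // 5)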
def add(self, manifest, files, desc, transaction, p1=None, p2=None,
        user=None, date=None, extra={}):
    user = user.strip()
    if "\n" in user:
        raise error.RevlogError(_("username %s contains a newline")
                                % repr(user))
    user, desc = util.fromlocal(user), util.fromlocal(desc)

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra and extra.get("branch") in ("default", ""):
        del extra["branch"]
    if extra:
        extra = self.encode_extra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, len(self), p1, p2)
def gen_hours_per_week(xml):
    scores = iter_scores(xml)
    pairs = [(s, parsedate(s.findtext("DateTime"))) for s in scores]
    pairs.sort(key=lambda pair: pair[1])  # Sort by datetime

    weeks = {}
    week_end = pairs[0][1]  # First (earliest) datetime
    week_start = week_end - timedelta(weeks=1)
    i = 0
    while i < len(pairs):
        score, datetime = pairs[i][0], pairs[i][1]
        if datetime < week_end:
            score_seconds = float(score.findtext("SurviveSeconds")) or 0
            weeks[week_start] += score_seconds / 3600
            i += 1
        else:
            week_start += timedelta(weeks=1)
            week_end += timedelta(weeks=1)
            weeks[week_start] = 0

    return (list(weeks.keys()), list(weeks.values()))
def map_scores(xml, mapper, *mapper_args, discard_errors=True,
               brush_color_over_10_notes=None):
    x, y = [], []
    ids = []
    if brush_color_over_10_notes:
        brushes = []

    for score in iter_scores(xml):
        if discard_errors:
            try:
                value = mapper(score, *mapper_args)
            except Exception:
                continue
        else:
            value = mapper(score, *mapper_args)

        if value is None:
            continue

        x.append(parsedate(score.findtext("DateTime")))
        y.append(value)
        ids.append(score)

        if brush_color_over_10_notes:
            tap_note_scores = score.find("TapNoteScores")
            # Compare against None; childless elements are falsy in ElementTree
            if tap_note_scores is not None:
                judgements = ["Miss", "W1", "W2", "W3", "W4", "W5"]
                total_notes = sum(int(tap_note_scores.findtext(j))
                                  for j in judgements)
            else:
                total_notes = 500  # just assume 500 as a default yolo
            brushes.append(brush_color_over_10_notes if total_notes > 10
                           else "#AAAAAA")

    if brush_color_over_10_notes:
        return (((x, y), ids), brushes)
    else:
        return ((x, y), ids)
def add(self, manifest, files, desc, transaction, p1, p2,
        user, date=None, extra=None):
    # Convert to UTF-8 encoded bytestrings as the very first
    # thing: calling any method on a localstr object will turn it
    # into a str object and the cached UTF-8 string is thus lost.
    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    user = user.strip()
    # An empty username or a username with a "\n" will make the
    # revision text contain two "\n\n" sequences -> corrupt
    # repository since read cannot unpack the revision.
    if not user:
        raise error.RevlogError(_("empty username"))
    if "\n" in user:
        raise error.RevlogError(_("username %s contains a newline") % repr(user))

    # strip trailing whitespace and leading and trailing empty lines
    desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra:
        branch = extra.get("branch")
        if branch in ("default", ""):
            del extra["branch"]
        elif branch in (".", "null", "tip"):
            raise error.RevlogError(_('the name \'%s\' is reserved') % branch)
    if extra:
        extra = encodeextra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, len(self), p1, p2)
def week_from_score(score) -> int:
    datetime = parsedate(score.findtext("DateTime"))
    week = datetime.isocalendar()[1]
    return week
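# Illustrative example (an addition): isocalendar() returns a tuple of
# (ISO year, ISO week number, ISO weekday), so index [1] is the week number.
def _demo_iso_week():
    from datetime import datetime
    return datetime(2020, 1, 1).isocalendar()[1]  # -> 1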
def localdate(text):
    """:localdate: Date. Converts a date to local date."""
    return (util.parsedate(text)[0], util.makedate()[1])
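# Illustrative sketch (an addition): Mercurial represents dates as
# (unixtime, offset) tuples with the offset in seconds west of UTC, which is
# what util.parsedate/util.makedate return; localdate() above keeps the
# parsed timestamp and swaps in the current local offset. A plain-stdlib
# rendering of such a pair:
def _demo_render_hg_date(unixtime, offset):
    from datetime import datetime, timezone, timedelta
    tz = timezone(timedelta(seconds=-offset))  # west-of-UTC -> east-of-UTC sign
    return datetime.fromtimestamp(unixtime, tz).isoformat()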
def analyze(xml, replays) -> Optional[ReplaysAnalysis]:
    import savegame_analysis
    """
    create(prefix: &str, scorekeys: Vec<&str>, wifescores: Vec<f32>,
           packs: Vec<&str>, songs: Vec<&str>, songs_root: &str)
    """

    r = ReplaysAnalysis()

    chartkeys: List[str] = []
    wifescores: List[float] = []
    packs: List[str] = []
    songs: List[str] = []
    rates: List[float] = []
    all_scores: List[Any] = []
    for chart in xml.iter("Chart"):
        pack = chart.get("Pack")
        song = chart.get("Song")
        if "Generation Rock" in song and "German Dump Mini Pack" in pack:
            continue  # this file is borked
        for scoresat in chart:
            rate = float(scoresat.get("Rate"))
            for score in scoresat:
                # We exclude failed scores because those exhibit some.. weird
                # behavior in the replay file. Not sure what exactly it is, but
                # somehow the wifescore in the xml doesn't match the wifescore
                # we get when recalculating it manually using the replay file.
                # We don't want such outliers in our graphs, so - be gone,
                # failed scores
                if score.findtext("Grade") == "Failed":
                    continue
                chartkeys.append(score.get("Key"))
                wifescores.append(float(score.findtext("SSRNormPercent")))
                packs.append(pack)
                songs.append(song)
                rates.append(rate)
                all_scores.append(score)

    prefix = os.path.join(replays, "a")[:-1]
    print("Starting replays analysis...")
    rustr = savegame_analysis.ReplaysAnalysis(prefix, chartkeys, wifescores,
                                              packs, songs, rates,
                                              app.app.prefs.songs_root)
    print("Done with replays analysis")

    def convert_combo_info(rust_combo_info):
        return FastestCombo(
            length=rust_combo_info.length,
            speed=rust_combo_info.speed,
            start_second=rust_combo_info.start_second,
            end_second=rust_combo_info.end_second,
            score=None)  # this field is set below, in the score xml iteration

    r.fastest_combo = convert_combo_info(rustr.fastest_combo)
    r.fastest_jack = convert_combo_info(rustr.fastest_jack)
    r.fastest_acc = convert_combo_info(rustr.fastest_acc)

    r.manipulations = rustr.manipulations

    if len(r.manipulations) == 0:
        # When no replay could be parsed correctly. For cases when someone
        # selects a legacy folder with 'correct' file names, but unexpected
        # (legacy) content. Happened to Providence
        util.logger.warning("No valid replays found at all in the directory")
        return None

    # This is NOT part of replays analysis; it's xml analysis. It's in here
    # anyway because it's easier. This should really be moved into a separate
    # xml analysis module (in case I'll ever get around to implementing that...?)
    r.total_notes = 0
    for tap_note_scores in xml.iter("TapNoteScores"):
        judgements = ["Miss", "W1", "W2", "W3", "W4", "W5"]
        r.total_notes += sum(int(tap_note_scores.findtext(x)) for x in judgements)

    r.wife2_wifescores = rustr.wife2_wifescores
    r.offset_mean = rustr.deviation_mean
    r.notes_per_column = rustr.notes_per_column
    r.cbs_per_column = rustr.cbs_per_column
    r.num_near_hits = sum(r.notes_per_column) / sum(r.cbs_per_column)
    r.standard_deviation = rustr.standard_deviation

    for i, num_hits in enumerate(rustr.sub_93_offset_buckets):
        r.sub_93_offset_buckets[i - 180] = num_hits

    r.current_wifescores = rustr.current_wifescores
    r.new_wifescores = rustr.new_wifescores
    r.wifescore_scores = [all_scores[i]
                          for i in rustr.timing_info_dependant_score_indices]

    r.scores = [all_scores[score_index] for score_index in rustr.score_indices]
    r.datetimes = [parsedate(score.findtext("DateTime")) for score in r.scores]

    # replace the scorekeys returned from Rust replays analysis with the
    # actual score elements
    for score in r.scores:
        scorekey = score.get("Key")

        if scorekey == rustr.longest_mcombo[1]:
            r.longest_mcombo = (rustr.longest_mcombo[0],
                                util.find_parent_chart(xml, score))
        if scorekey == rustr.fastest_combo_scorekey:
            r.fastest_combo.score = score
        if scorekey == rustr.fastest_jack_scorekey:
            r.fastest_jack.score = score
        if scorekey == rustr.fastest_acc_scorekey:
            r.fastest_acc.score = score

    # (debug output)
    print(r.fastest_acc)
    print(rustr.fastest_acc_scorekey)

    return r
def gen_cmod_over_time(xml):
    # These values were gathered through a quick-and-dirty screen-recording-based test
    perspective_mod_multipliers = {
        "Incoming": 1 / 1.2931,
        "Space": 1 / 1.2414,
        "Hallway": 1 / 1.2931,
        "Distant": 1 / 1.2759,
    }

    datetime_cmod_map = {}
    for score in xml.iter("Score"):
        modifiers = score.findtext("Modifiers").split(", ")
        cmod = None
        receptor_size = None
        perspective_mod_multiplier = 1
        for modifier in modifiers:
            if cmod is None and modifier.startswith("C") and modifier[1:].isdecimal():
                try:
                    cmod = float(modifier[1:])
                except ValueError:
                    print("huh a weird cmod:", modifier)
                    continue
            elif receptor_size is None and modifier.endswith("Mini"):
                mini_percentage_string = modifier[:-4]
                if mini_percentage_string == "":
                    receptor_size = 0.5
                else:
                    if not mini_percentage_string.endswith("% "):
                        continue  # false positive
                    mini = float(mini_percentage_string[:-2]) / 100
                    receptor_size = 1 - mini / 2
            elif any(persp_mod in modifier
                     for persp_mod in perspective_mod_multipliers.keys()):
                # modifier can either be something like "Distant" or "50% Distant"
                tokens = modifier.split(" ")
                if len(tokens) == 1:
                    perspective_mod_multiplier = perspective_mod_multipliers[tokens[0]]
                elif len(tokens) == 2:
                    perspective_mod_multiplier = perspective_mod_multipliers[tokens[1]]
                    # factor in the "50%" (or whichever number it is)
                    perspective_strength = float(tokens[0][:-1]) / 100
                    perspective_mod_multiplier **= perspective_strength
                else:
                    print(f"uhh this shouldn't happen? '{modifier}'")
        if receptor_size is None:
            receptor_size = 1

        # TODO: decide if MMod should be counted as CMod in this function
        if cmod is None:
            continue  # player's using xmod or something

        effective_cmod = cmod * receptor_size * perspective_mod_multiplier

        dt = parsedate(score.findtext("DateTime"))
        datetime_cmod_map[dt] = effective_cmod

    datetimes = list(sorted(datetime_cmod_map.keys()))
    cmods = [datetime_cmod_map[dt] for dt in datetimes]
    return datetimes, cmods
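# Worked example (an addition): a "50% Distant" modifier applies the Distant
# multiplier at half strength via the exponentiation above:
# (1 / 1.2759) ** 0.5 is roughly 0.885, versus roughly 0.784 at full strength.
def _demo_partial_perspective_multiplier():
    distant = 1 / 1.2759
    return distant ** 0.5  # ~0.885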