def update(self):
    """Fetch and normalise this user's profile from the server.

    Populates self.profile with string-valued fields ready for the
    buffer code, or sets it to None when the user does not exist
    (HTTP 404).  Other StatusNetError codes are silently ignored,
    leaving any previous profile in place (preserved behaviour --
    NOTE(review): confirm that is intended).
    """
    try:
        self.profile = self.conn.users_show(screen_name=self.id)

        # Numerical fields: convert to strings so the buffer code can
        # treat every profile field uniformly.
        for field in ["id", "created_at", "followers_count",
                      "friends_count", "favourites_count",
                      "statuses_count"]:
            self.profile[field] = str(self.profile[field])

        # Special handling for the boolean "following" flag.
        self.profile["following"] = "Yes" if self.profile["following"] else "No"

        # StatusNet emits English timestrings regardless of locale, so
        # temporarily force the "C" locale while parsing; the finally
        # clause guarantees the user's locale is restored even if
        # strptime raises.
        locale.setlocale(locale.LC_TIME, "C")
        try:
            datetime_joined = datetime.datetime.strptime(
                self.profile["created_at"], DATETIME_FORMAT)
        finally:
            locale.setlocale(locale.LC_TIME, "")

        # Derived field: average notices per day since joining.  Treat
        # accounts younger than one day as one day old so a fresh
        # account cannot cause a ZeroDivisionError.
        days_since_join = helpers.single_unit(
            helpers.time_since(datetime_joined), "days")["days"] or 1
        self.profile["notices_per_day"] = "%0.2f" % (
            float(self.profile["statuses_count"]) / days_since_join)
    except StatusNetError as e:  # "as" form: valid on Python 2.6+ and 3
        if e.errcode == 404:
            self.profile = None
def update(self):
    """Fetch and normalise this user's profile from the server.

    Sets self.profile to a dict whose fields are all strings (ready
    for the buffer code), or to None when the user does not exist
    (HTTP 404).  Other StatusNetError codes are silently ignored,
    keeping any previous profile (preserved behaviour -- NOTE(review):
    confirm that is intended).
    """
    try:
        self.profile = self.conn.users_show(screen_name=self.id)

        # Numerical fields: convert to strings so the buffer code can
        # treat every profile field uniformly.
        for field in ['id', 'created_at', 'followers_count',
                      'friends_count', 'favourites_count',
                      'statuses_count']:
            self.profile[field] = str(self.profile[field])

        # Special handling for the boolean "following" flag.
        self.profile['following'] = "Yes" if self.profile['following'] else "No"

        # StatusNet emits English timestrings regardless of locale, so
        # temporarily force the 'C' locale while parsing; restore the
        # user's locale in a finally clause so a parse failure cannot
        # leave it stuck at 'C'.
        locale.setlocale(locale.LC_TIME, 'C')
        try:
            datetime_joined = datetime.datetime.strptime(
                self.profile['created_at'], DATETIME_FORMAT)
        finally:
            locale.setlocale(locale.LC_TIME, '')

        # Derived field: average notices per day since joining.  An
        # account younger than one day counts as one day old to avoid
        # a ZeroDivisionError.
        days_since_join = helpers.single_unit(
            helpers.time_since(datetime_joined), "days")['days'] or 1
        self.profile['notices_per_day'] = "%0.2f" % (
            float(self.profile['statuses_count']) / days_since_join)
    except StatusNetError as e:  # "as" form: valid on Python 2.6+ and 3
        if e.errcode == 404:
            self.profile = None
def update(self):
    """Refresh this timeline from the server.

    While paused, only redraws the existing buffer.  Otherwise picks
    the API call matching self.timeline_type and, for user timelines,
    also refreshes the profile data shown above the notices.
    """
    self.update_name()
    if self.paused:
        self.update_buffer()
        return
    get_count = config.config['notice_limit']
    # Changing pages invalidates the cached timeline.
    if self.prev_page != self.page:
        self.timeline = []
    # Find the newest server-originated notice so the server only
    # sends notices newer than it.  (Iterating an empty timeline is a
    # no-op, so no explicit length check is needed.)
    last_id = 0
    for notice in self.timeline:
        if notice["ic__from_web"]:  # don't consider inserted posts latest
            last_id = notice['id']
            break
    if self.timeline_type == "home":
        raw_timeline = self.conn.statuses_home_timeline(
            count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "mentions":
        raw_timeline = self.conn.statuses_mentions(
            count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "direct":
        raw_timeline = self.conn.direct_messages(
            count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "user":
        raw_timeline = self.conn.statuses_user_timeline(
            user_id=self.type_params['user_id'],
            screen_name=self.type_params['screen_name'],
            count=get_count, page=self.page, since_id=last_id)
        # User timelines also carry the profile header data.
        try:
            self.profile = self.conn.users_show(
                screen_name=self.type_params['screen_name'])
            # Numerical fields: convert to strings so the buffer code
            # can treat every profile field uniformly.
            for field in ['id', 'created_at', 'followers_count',
                          'friends_count', 'favourites_count',
                          'statuses_count']:
                self.profile[field] = str(self.profile[field])
            # Special handling for the boolean "following" flag.
            self.profile['following'] = \
                "Yes" if self.profile['following'] else "No"
            # Derived field: average notices per day since joining.
            # Accounts younger than one day count as one day old to
            # avoid a ZeroDivisionError.
            datetime_joined = helpers.normalise_datetime(
                self.profile['created_at'])
            days_since_join = helpers.single_unit(
                helpers.time_since(datetime_joined), "days")['days'] or 1
            self.profile['notices_per_day'] = "%0.2f" % (
                float(self.profile['statuses_count']) / days_since_join)
        except StatusNetError as e:  # "as" form: valid on Python 2.6+ and 3
            if e.errcode == 404:
                self.profile = None
if convergence_tracker > 1: print('Convergence. Stopping training.', flush=True) abort = True else: convergence_tracker = 0 # Abort if threshold has been reached if threshold != None: if training_loss < threshold: print(f'Loss has sunken below threshold ({threshold}). Stopping training.') abort = True last_error = training_loss writer.add_scalar('epoch time', (time.time() - epoch_time) / 60, epoch) print(helpers.time_since(epoch_time), flush=True) if save_intermediate_models and epoch % save_every == 0: torch.save(model.state_dict(), savepath[0:-4] + '_' + str(epoch) + '.pth') csv_f = open(savepath[0:-4] + '.csv', 'w') csv_f.write(csv_export) csv_f.close() epoch = epoch + 1 print('Training finished.') torch.save(model.state_dict(), savepath) print(f'Model saved to {savepath}.\n\n')
def update_buffer(self):
    """Rebuild the display buffer from self.timeline.

    Two passes over the timeline: the first measures the longest
    time/user/source metadata string (used to align compact-mode
    notices), the second renders each notice as a list of
    (text, colour) cells appended to self.buffer.
    """
    self.buffer.clear()
    maxx = self.window.getmaxyx()[1]  # terminal width, for right-alignment
    c = 1  # 1-based on-screen notice number
    longest_metadata_string_len = 0
    # --- Pass 1: measure metadata so compact mode can align columns. ---
    for n in self.timeline:
        if "direct" in self.timeline_type:
            # Direct messages have explicit sender/recipient and no
            # client "source" field.
            user_string = "%s -> %s" % (n["sender"]["screen_name"],
                                        n["recipient"]["screen_name"])
            source_msg = ""
        else:
            user_string = "%s" % (n["user"]["screen_name"])
            raw_source_msg = "from %s" % (n["source"])
            # The source field may contain HTML (a link); strip the tags.
            source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                # Mark replies: "+" after the user, or "[+]" after the source.
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                # Repeats show the original author plus the repeater.
                user_string = "%s [%s's RD]" % (n["retweeted_status"]["user"]["screen_name"],
                                                n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
        datetime_notice = helpers.notice_datetime(n)
        time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                       short_form=True)
        metadata_string = time_msg + " " + user_string
        if config.config["show_source"]:
            metadata_string += " " + source_msg
        if len(metadata_string) > longest_metadata_string_len:
            longest_metadata_string_len = len(metadata_string)
    # --- Pass 2: render each notice. ---
    for n in self.timeline:
        from_user = None
        to_user = None
        repeating_user = None
        if "direct" in self.timeline_type:
            from_user = n["sender"]["screen_name"]
            to_user = n["recipient"]["screen_name"]
            source_msg = ""
        else:
            if "retweeted_status" in n:
                # Render the original notice; remember who repeated it.
                repeating_user = n["user"]["screen_name"]
                n = n["retweeted_status"]
            from_user = n["user"]["screen_name"]
            raw_source_msg = "from %s" % (n["source"])
            source_msg = self.html_regex.sub("", raw_source_msg)
            repeat_msg = ""  # NOTE(review): never used -- kept as-is
            if n["in_reply_to_status_id"] is not None:
                source_msg += " [+]"
        datetime_notice = helpers.notice_datetime(n)
        time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                       short_form=True)
        # Give each previously unseen user a random colour for
        # user_rainbow mode.
        for user in [user for user in [from_user, to_user, repeating_user]
                     if user is not None]:
            if not user in config.session_store.user_cache:
                config.session_store.user_cache[user] = random.choice(
                    identicurse.base_colours.items())[1]
        if "ic__paused_on" in n and c != 1:
            # Divider marking where the timeline was paused.
            self.buffer.append([("-", identicurse.colour_fields["pause_line"])])
            self.buffer.append([("", identicurse.colour_fields["none"])])
        # Build the line
        line = []
        if c < 10:
            cout = " " + str(c)  # pad single digits so columns stay aligned
        else:
            cout = str(c)
        line.append((cout, identicurse.colour_fields["notice_count"]))
        if (c - 1) == self.chosen_one:
            line.append((' * ', identicurse.colour_fields["selector"]))
        else:
            line.append((' ' * 3, identicurse.colour_fields["selector"]))
        if config.config['compact_notices']:
            line.append((time_msg, identicurse.colour_fields["time"]))
            line.append((" ", identicurse.colour_fields["none"]))
        if config.config['user_rainbow']:
            line.append((from_user, config.session_store.user_cache[from_user]))
        else:
            line.append((from_user, identicurse.colour_fields["username"]))
        user_length = len(from_user)  # running width of the user section
        if to_user is not None:
            line.append((" -> ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((to_user, config.session_store.user_cache[to_user]))
            else:
                line.append((to_user, identicurse.colour_fields["username"]))
            user_length += (len(" -> ") + len(to_user))
        if repeating_user is not None:
            if config.config["compact_notices"]:
                line.append((" [", identicurse.colour_fields["none"]))
            else:
                line.append((" [ repeat by ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((repeating_user,
                             config.session_store.user_cache[repeating_user]))
            else:
                line.append((repeating_user,
                             identicurse.colour_fields["username"]))
            if config.config["compact_notices"]:
                line.append(("'s RD]", identicurse.colour_fields["none"]))
                user_length += (len(" [") + len(repeating_user) + len("'s RD]"))
            else:
                line.append((" ]", identicurse.colour_fields["none"]))
                user_length += (len(" [ repeat by ") + len(repeating_user) + len(" ]"))
        if not config.config['compact_notices']:
            # Full mode: right-align time (and source) against the
            # window edge; the magic 6/5 accounts for the selector and
            # separator cells emitted above.
            if config.config["show_source"]:
                line.append((' ' * (maxx - ((len(source_msg) + len(time_msg) + user_length + (6 + len(cout))))),
                             identicurse.colour_fields["none"]))
            else:
                line.append((' ' * (maxx - ((len(time_msg) + user_length + (5 + len(cout))))),
                             identicurse.colour_fields["none"]))
            line.append((time_msg, identicurse.colour_fields["time"]))
            if config.config["show_source"]:
                line.append((' ', identicurse.colour_fields["none"]))
                line.append((source_msg, identicurse.colour_fields["source"]))
            self.buffer.append(line)
            line = []  # the notice text goes on its own line
        else:
            # Compact mode: pad metadata to the measured width, then "|".
            detail_char = ""
            if (not config.config["show_source"]):
                if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                    detail_char = "+"
                elif "retweeted_status" in n:
                    detail_char = "~"
            line.append((" %s" % (detail_char), identicurse.colour_fields["source"]))
            if config.config["show_source"]:
                line.append((" " + source_msg, identicurse.colour_fields["source"]))
                line.append((" " * ((longest_metadata_string_len - (user_length + len(time_msg) + len(source_msg) + 2))),
                             identicurse.colour_fields["none"]))
            else:
                if detail_char == "":
                    line.append((" ", identicurse.colour_fields["none"]))
                line.append((" " * ((longest_metadata_string_len - (user_length + len(time_msg) + 1))),
                             identicurse.colour_fields["none"]))
            line.append((" | ", identicurse.colour_fields["none"]))
        try:
            # Colour each entity (@user, !group, #tag) of the notice text.
            notice_entities = helpers.split_entities(n['text'])
            for entity in notice_entities:
                if len(entity['text']) > 0:
                    if entity['type'] in ['user', 'group', 'tag']:
                        entity_text_no_symbol = entity['text'][1:]  # drop @/!/#
                        cache = getattr(config.session_store,
                                        '%s_cache' % (entity['type']))
                        if not entity_text_no_symbol in cache:
                            cache[entity_text_no_symbol] = random.choice(
                                identicurse.base_colours.items())[1]
                        if config.config['%s_rainbow' % (entity['type'])]:
                            line.append((entity['text'],
                                         cache[entity_text_no_symbol]))
                        else:
                            if entity['type'] == "user":
                                line.append((entity['text'],
                                             identicurse.colour_fields["username"]))
                            else:
                                line.append((entity['text'],
                                             identicurse.colour_fields[entity['type']]))
                    else:
                        line.append((entity['text'],
                                     identicurse.colour_fields["notice"]))
            self.buffer.append(line)
        except UnicodeDecodeError:
            self.buffer.append([("Caution: Terminal too shit to display this notice.",
                                 identicurse.colour_fields["warning"])])
        if config.config["show_notice_links"]:
            # Optional extra line with the notice's web URL.
            line = []
            base_url = helpers.base_url_regex.findall(self.conn.api_path)[0][0]
            if self.timeline_type in ["direct", "sentdirect"]:
                notice_link = "%s/message/%s" % (base_url, str(n["id"]))
            else:
                notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
            line.append(("<%s>" % (notice_link),
                         identicurse.colour_fields["notice_link"]))
            self.buffer.append(line)
        if not config.config['compact_notices']:
            self.buffer.append([])  # blank line between notices in full mode
        c += 1
total += nn print('Number of parameters:', total) # print example predictions and ground truths print('Printing 8 random frame truths, predictions and output vectors...') test_dl = DataLoader(testset, batch_size=8, shuffle=True) dataiter = iter(test_dl) image, label = dataiter.next() #output = model(image) #_, prediction = torch.max(output, 1) #_, label = torch.max(label, 1) #for t, p, o in zip(label, prediction, output): # print('Truth: ' + truth_table[t] + '; prediction: ' + truth_table[p]) # print('Output: ', o) print('\nValidating over the entire set...', flush=True) acc, confusion_matrix = helpers.evaluate(testset, model, truth_table, device=device, verbose=True) print('Frame accuracy: ' + str(acc) + '\n') # If specified, add the suffix to the savepath if suffix is not None: savepath = savepath[:-4] + '_' + suffix + '.png' helpers.print_confusion_matrix(confusion_matrix, truth_table, savepath) print('All done. Time:', helpers.time_since(start)) print('\n\n\n')
# Run the train step loss = train(input_variable, target_variable, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion) # Keep track of loss print_loss_total += loss plot_loss_total += loss if epoch == 0: continue if epoch % print_every == 0: print_loss_avg = print_loss_total / print_every print_loss_total = 0 time_since = helpers.time_since(start, epoch / n_epochs) print('%s (%d %d%%) %.4f' % (time_since, epoch, epoch / n_epochs * 100, print_loss_avg)) if epoch % plot_every == 0: plot_loss_avg = plot_loss_total / plot_every plot_losses.append(plot_loss_avg) plot_loss_total = 0 # Save our models torch.save(encoder.state_dict(), '../data/encoder_params_{}'.format(args.language)) torch.save(decoder.state_dict(), '../data/decoder_params_{}'.format(args.language)) torch.save(decoder.attention.state_dict(), '../data/attention_params_{}'.format(args.language))
def update_buffer(self):
    """Rebuild the display buffer from self.timeline.

    For user/group timelines, first renders a profile header from
    self.profile (or a "no such user/group" message).  Then makes two
    passes over the timeline: one to measure the widest metadata
    string (for compact-mode alignment), one to render each notice as
    a list of (text, colour[, x-offset]) cells in self.buffer.
    """
    self.buffer.clear()
    # --- Profile header for user timelines. ---
    if self.timeline_type == "user":
        if self.profile is not None:
            for field in [
                    # display name, internal field name, skip a line after this field?
                    ("Real Name", "name", True),
                    ("Bio", "description", False),
                    ("Location", "location", False),
                    ("URL", "url", False),
                    ("User ID", "id", False),
                    ("Joined at", "created_at", True),
                    ("Followed by", "followers_count", False),
                    ("Following", "friends_count", False),
                    ("Followed by you", "following", True),
                    ("Favourites", "favourites_count", False),
                    ("Notices", "statuses_count", False),
                    ("Average daily notices", "notices_per_day", True)]:
                # Only show fields the server actually populated.
                if (self.profile[field[1]] is not None) and (self.profile[field[1]] != ""):
                    line = []
                    line.append((field[0] + ":",
                                 identicurse.colour_fields['profile_fields']))
                    line.append((" ", identicurse.colour_fields['none']))
                    line.append((self.profile[field[1]],
                                 identicurse.colour_fields['profile_values']))
                    self.buffer.append(line)
                    if field[2]:
                        self.buffer.append([("", identicurse.colour_fields['none'])])
        else:
            self.buffer.append([("There is no user called @%s on this instance." % (self.type_params['screen_name']),
                                 identicurse.colour_fields['none'])])
    # --- Profile header for group timelines. ---
    if self.timeline_type == "group":
        if self.profile is not None:
            for field in [
                    # display name, internal field name, skip a line after this field?
                    ("Name", "fullname", True),
                    ("Description", "description", False),
                    ("Location", "location", False),
                    ("Homepage", "homepage", False),
                    ("Group ID", "id", False),
                    ("Created at", "created", False),
                    ("Members", "member_count", True),
                    ]:
                if (self.profile[field[1]] is not None) and (self.profile[field[1]] != ""):
                    line = []
                    line.append((field[0] + ":",
                                 identicurse.colour_fields['profile_fields']))
                    line.append((" ", identicurse.colour_fields['none']))
                    line.append((self.profile[field[1]],
                                 identicurse.colour_fields['profile_values']))
                    self.buffer.append(line)
                    if field[2]:
                        self.buffer.append([("", identicurse.colour_fields['none'])])
        else:
            self.buffer.append([("There is no group called !%s on this instance." % (self.type_params['nickname']),
                                 identicurse.colour_fields['none'])])
    maxx = self.window.getmaxyx()[1]  # terminal width, for right-alignment
    c = 1  # 1-based on-screen notice number
    longest_metadata_string_len = 0
    # --- Pass 1: measure metadata so compact mode can align columns. ---
    for n in self.timeline:
        if n["text"] is None:
            n["text"] = ""
        if "direct" in self.timeline_type:
            user_string = "%s -> %s" % (n["sender"]["screen_name"],
                                        n["recipient"]["screen_name"])
            source_msg = ""
        else:
            # A reply whose text does not @-mention the addressee gets
            # an explicit "-> user" marker instead.
            atless_reply = False
            if "in_reply_to_screen_name" in n and n["in_reply_to_screen_name"] is not None:
                atless_reply = True
                for entity in helpers.split_entities(n["text"]):
                    if entity["type"] == "user" and entity["text"][1:].lower() == n["in_reply_to_screen_name"].lower():
                        atless_reply = False
                        break
            if atless_reply:
                if "user" in n:
                    user_string = "%s" % (n["user"]["screen_name"])
                else:
                    user_string = "<no username>"
                user_string += " -> %s" % (n["in_reply_to_screen_name"])
            else:
                if "user" in n:
                    user_string = "%s" % (n["user"]["screen_name"])
                else:
                    user_string = ""
            # Federated (ostatus) notices show the sender's home domain
            # instead of the generic "ostatus" source string.
            if (n["source"] == "ostatus") and ("user" in n) and "statusnet_profile_url" in n["user"]:
                raw_source_msg = "from %s" % (helpers.domain_regex.findall(n["user"]["statusnet_profile_url"])[0][2])
            else:
                raw_source_msg = "from %s" % (n["source"])
            # The source field may contain HTML (a link); strip the tags.
            source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                # Mark replies: "+" after the user, or "[+]" after the source.
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                # Repeats show the original author plus the repeater.
                user_string = "%s [%s's RD]" % (n["retweeted_status"]["user"]["screen_name"],
                                                n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
        datetime_notice = helpers.normalise_datetime(n["created_at"])
        time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                       short_form=True)
        metadata_string = time_msg + " " + user_string
        if config.config["show_source"]:
            metadata_string += " " + source_msg
        if len(metadata_string) > longest_metadata_string_len:
            longest_metadata_string_len = len(metadata_string)
    # --- Pass 2: render each notice. ---
    for n in self.timeline:
        if n["text"] is None:
            n["text"] = ""
        from_user = None
        to_user = None
        repeating_user = None
        if "direct" in self.timeline_type:
            from_user = n["sender"]["screen_name"]
            to_user = n["recipient"]["screen_name"]
            source_msg = ""
        else:
            if "retweeted_status" in n:
                # Render the original notice; remember who repeated it.
                repeating_user = n["user"]["screen_name"]
                n = n["retweeted_status"]
            if "user" in n:
                from_user = n["user"]["screen_name"]
            else:
                from_user = "******"  # placeholder when the author is unknown
            atless_reply = False
            if "in_reply_to_screen_name" in n and n["in_reply_to_screen_name"] is not None:
                atless_reply = True
                for entity in helpers.split_entities(n["text"]):
                    if entity["type"] == "user" and entity["text"][1:].lower() == n["in_reply_to_screen_name"].lower():
                        atless_reply = False
                        break
            if atless_reply:
                to_user = n["in_reply_to_screen_name"]
            if (n["source"] == "ostatus") and ("user" in n) and "statusnet_profile_url" in n["user"]:
                raw_source_msg = "from %s" % (helpers.domain_regex.findall(n["user"]["statusnet_profile_url"])[0][2])
            else:
                raw_source_msg = "from %s" % (n["source"])
            source_msg = self.html_regex.sub("", raw_source_msg)
            repeat_msg = ""  # NOTE(review): never used -- kept as-is
            if n["in_reply_to_status_id"] is not None:
                source_msg += " [+]"
        datetime_notice = helpers.normalise_datetime(n["created_at"])
        time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                       short_form=True)
        # Deterministic per-user colours for user_rainbow mode.
        for user in [user for user in [from_user, to_user, repeating_user] if user is not None]:
            if not user in config.session_store.user_cache:
                config.session_store.user_cache[user] = helpers.colour_from_name([item[1] for item in identicurse.base_colours.items()], user.lower())
        if "ic__paused_on" in n and c != 1:
            # Divider marking where the timeline was paused.
            self.buffer.append([("-", identicurse.colour_fields["pause_line"])])
            self.buffer.append([("", identicurse.colour_fields["none"])])
        # Build the line
        line = []
        if c < 10:
            cout = " " + str(c)  # pad single digits so columns stay aligned
        else:
            cout = str(c)
        line.append((cout, identicurse.colour_fields["notice_count"]))
        if (c - 1) == self.chosen_one:
            line.append((' * ', identicurse.colour_fields["selector"]))
        else:
            line.append((' ' * 3, identicurse.colour_fields["selector"]))
        if config.config['compact_notices']:
            line.append((time_msg, identicurse.colour_fields["time"]))
            line.append((" ", identicurse.colour_fields["none"]))
        if config.config['user_rainbow']:
            line.append((from_user, config.session_store.user_cache[from_user]))
        else:
            line.append((from_user, identicurse.colour_fields["username"]))
        if from_user is not None:
            user_length = len(from_user)
        else:
            # NOTE(review): from_user appears to always be set above, so
            # this None (which would break the += below) looks
            # unreachable -- confirm.
            user_length = None
        if to_user is not None:
            line.append((" -> ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((to_user, config.session_store.user_cache[to_user]))
            else:
                line.append((to_user, identicurse.colour_fields["username"]))
            user_length += len(" -> ") + len(to_user)
        if repeating_user is not None:
            if config.config["compact_notices"]:
                line.append((" [", identicurse.colour_fields["none"]))
            else:
                line.append((" [ repeat by ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((repeating_user, config.session_store.user_cache[repeating_user]))
            else:
                line.append((repeating_user, identicurse.colour_fields["username"]))
            if config.config["compact_notices"]:
                line.append(("'s RD]", identicurse.colour_fields["none"]))
                user_length += len(" [") + len(repeating_user) + len("'s RD]")
            else:
                line.append((" ]", identicurse.colour_fields["none"]))
                user_length += len(" [ repeat by ") + len(repeating_user) + len(" ]")
        if not config.config['compact_notices']:
            # Full mode: right-align time (and source) against the
            # window edge.
            if config.config["show_source"]:
                line.append((' ' * (maxx - ((len(source_msg) + len(time_msg) + user_length + (6 + len(cout))))),
                             identicurse.colour_fields["none"]))
            else:
                line.append((' ' * (maxx - ((len(time_msg) + user_length + (5 + len(cout))))),
                             identicurse.colour_fields["none"]))
            line.append((time_msg, identicurse.colour_fields["time"]))
            if config.config["show_source"]:
                line.append((' ', identicurse.colour_fields["none"]))
                line.append((source_msg, identicurse.colour_fields["source"]))
            self.buffer.append(line)
            line = []  # the notice text goes on its own line
        else:
            # Compact mode: pad metadata to the measured width, then "|".
            detail_char = ""
            if (not config.config["show_source"]):
                if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                    detail_char = "+"
                elif "retweeted_status" in n:
                    detail_char = "~"
            line.append((" %s" % (detail_char), identicurse.colour_fields["source"]))
            if config.config["show_source"]:
                line.append((" " + source_msg, identicurse.colour_fields["source"]))
                line.append((" "*((longest_metadata_string_len - (user_length + len(time_msg) + len(source_msg) + 2))),
                             identicurse.colour_fields["none"]))
            else:
                if detail_char == "":
                    line.append((" ", identicurse.colour_fields["none"]))
                line.append((" "*((longest_metadata_string_len - (user_length + len(time_msg) + 1))),
                             identicurse.colour_fields["none"]))
            line.append((" | ", identicurse.colour_fields["none"]))
        try:
            # determine how far along the line items beginning now would
            # be; this will be used so that wrapped lines get correct
            # indentation.  Cells are (text, colour) or
            # (text, colour, x-offset); only 3-tuples carry an offset.
            min_x_offset = reduce((lambda acc_length, block: (acc_length if (len(block) < 3) else max(acc_length, block[2])) + len(block[0])), line, 0)
            # Colour each entity (@user, !group, #tag) of the notice text.
            notice_entities = helpers.split_entities(n['text'] or "")
            for entity in notice_entities:
                if len(entity['text']) > 0:
                    if entity['type'] in ['user', 'group', 'tag']:
                        entity_text_no_symbol = entity['text'][1:]  # drop @/!/#
                        cache = getattr(config.session_store,
                                        '%s_cache' % (entity['type']))
                        if not entity_text_no_symbol in cache:
                            cache[entity_text_no_symbol] = helpers.colour_from_name([item[1] for item in identicurse.base_colours.items()], entity_text_no_symbol.lower())
                        if config.config['%s_rainbow' % (entity['type'])]:
                            line.append((entity['text'], cache[entity_text_no_symbol], min_x_offset))
                        else:
                            if entity['type'] == "user":
                                line.append((entity['text'], identicurse.colour_fields["username"], min_x_offset))
                            else:
                                line.append((entity['text'], identicurse.colour_fields[entity['type']], min_x_offset))
                    else:
                        line.append((entity['text'], identicurse.colour_fields["notice"], min_x_offset))
            self.buffer.append(line)
        except UnicodeDecodeError:
            self.buffer.append([("Caution: Terminal too shit to display this notice.",
                                 identicurse.colour_fields["warning"])])
        if config.config["show_notice_links"]:
            # Optional extra line with the notice's web URL.
            line = []
            base_url = helpers.base_url_regex.findall(self.conn.api_path)[0][0]
            if self.timeline_type in ["direct", "sentdirect"]:
                notice_link = "%s/message/%s" % (base_url, str(n["id"]))
            else:
                notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
            line.append(("<%s>" % (notice_link), identicurse.colour_fields["notice_link"]))
            self.buffer.append(line)
        if not config.config['compact_notices']:
            self.buffer.append([])  # blank line between notices in full mode
        c += 1
def update_buffer(self):
    """Rebuild the display buffer from self.timeline.

    Pass 1 measures the longest time/user/source metadata string so
    compact mode can align its columns; pass 2 renders each notice as
    a list of (text, colour) cells appended to self.buffer.
    """
    self.buffer.clear()
    maxx = self.window.getmaxyx()[1]  # terminal width, for right-alignment
    c = 1  # 1-based on-screen notice number
    longest_metadata_string_len = 0
    # --- Pass 1: measure metadata widths. ---
    for n in self.timeline:
        if "direct" in self.timeline_type:
            # Direct messages have explicit sender/recipient and no
            # client "source" field.
            user_string = "%s -> %s" % (n["sender"]["screen_name"], n["recipient"]["screen_name"])
            source_msg = ""
        else:
            user_string = "%s" % (n["user"]["screen_name"])
            raw_source_msg = "from %s" % (n["source"])
            # The source field may contain HTML (a link); strip the tags.
            source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                # Mark replies: "+" after the user, or "[+]" after the source.
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                # Repeats show the original author plus the repeater.
                user_string = "%s [%s's RD]" % (n["retweeted_status"]["user"]["screen_name"], n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
        datetime_notice = helpers.notice_datetime(n)
        time_msg = helpers.format_time(helpers.time_since(datetime_notice), short_form=True)
        metadata_string = time_msg + " " + user_string
        if config.config["show_source"]:
            metadata_string += " " + source_msg
        if len(metadata_string) > longest_metadata_string_len:
            longest_metadata_string_len = len(metadata_string)
    # --- Pass 2: render each notice. ---
    for n in self.timeline:
        from_user = None
        to_user = None
        repeating_user = None
        if "direct" in self.timeline_type:
            from_user = n["sender"]["screen_name"]
            to_user = n["recipient"]["screen_name"]
            source_msg = ""
        else:
            if "retweeted_status" in n:
                # Render the original notice; remember who repeated it.
                repeating_user = n["user"]["screen_name"]
                n = n["retweeted_status"]
            from_user = n["user"]["screen_name"]
            raw_source_msg = "from %s" % (n["source"])
            source_msg = self.html_regex.sub("", raw_source_msg)
            repeat_msg = ""  # NOTE(review): never used -- kept as-is
            if n["in_reply_to_status_id"] is not None:
                source_msg += " [+]"
        datetime_notice = helpers.notice_datetime(n)
        time_msg = helpers.format_time(helpers.time_since(datetime_notice), short_form=True)
        # Give each previously unseen user a random colour for
        # user_rainbow mode.
        for user in [user for user in [from_user, to_user, repeating_user] if user is not None]:
            if not user in config.session_store.user_cache:
                config.session_store.user_cache[user] = random.choice(identicurse.base_colours.items())[1]
        if "ic__paused_on" in n and c != 1:
            # Divider marking where the timeline was paused.
            self.buffer.append([("-", identicurse.colour_fields["pause_line"])])
            self.buffer.append([("", identicurse.colour_fields["none"])])
        # Build the line
        line = []
        if c < 10:
            cout = " " + str(c)  # pad single digits so columns stay aligned
        else:
            cout = str(c)
        line.append((cout, identicurse.colour_fields["notice_count"]))
        if (c - 1) == self.chosen_one:
            line.append((" * ", identicurse.colour_fields["selector"]))
        else:
            line.append((" " * 3, identicurse.colour_fields["selector"]))
        if config.config["compact_notices"]:
            line.append((time_msg, identicurse.colour_fields["time"]))
            line.append((" ", identicurse.colour_fields["none"]))
        if config.config["user_rainbow"]:
            line.append((from_user, config.session_store.user_cache[from_user]))
        else:
            line.append((from_user, identicurse.colour_fields["username"]))
        user_length = len(from_user)  # running width of the user section
        if to_user is not None:
            line.append((" -> ", identicurse.colour_fields["none"]))
            if config.config["user_rainbow"]:
                line.append((to_user, config.session_store.user_cache[to_user]))
            else:
                line.append((to_user, identicurse.colour_fields["username"]))
            user_length += len(" -> ") + len(to_user)
        if repeating_user is not None:
            if config.config["compact_notices"]:
                line.append((" [", identicurse.colour_fields["none"]))
            else:
                line.append((" [ repeat by ", identicurse.colour_fields["none"]))
            if config.config["user_rainbow"]:
                line.append((repeating_user, config.session_store.user_cache[repeating_user]))
            else:
                line.append((repeating_user, identicurse.colour_fields["username"]))
            if config.config["compact_notices"]:
                line.append(("'s RD]", identicurse.colour_fields["none"]))
                user_length += len(" [") + len(repeating_user) + len("'s RD]")
            else:
                line.append((" ]", identicurse.colour_fields["none"]))
                user_length += len(" [ repeat by ") + len(repeating_user) + len(" ]")
        if not config.config["compact_notices"]:
            # Full mode: right-align time (and source) against the
            # window edge.
            if config.config["show_source"]:
                line.append(
                    (
                        " " * (maxx - ((len(source_msg) + len(time_msg) + user_length + (6 + len(cout))))),
                        identicurse.colour_fields["none"],
                    )
                )
            else:
                line.append(
                    (
                        " " * (maxx - ((len(time_msg) + user_length + (5 + len(cout))))),
                        identicurse.colour_fields["none"],
                    )
                )
            line.append((time_msg, identicurse.colour_fields["time"]))
            if config.config["show_source"]:
                line.append((" ", identicurse.colour_fields["none"]))
                line.append((source_msg, identicurse.colour_fields["source"]))
            self.buffer.append(line)
            line = []  # the notice text goes on its own line
        else:
            # Compact mode: pad metadata to the measured width, then "|".
            detail_char = ""
            if not config.config["show_source"]:
                if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                    detail_char = "+"
                elif "retweeted_status" in n:
                    detail_char = "~"
            line.append((" %s" % (detail_char), identicurse.colour_fields["source"]))
            if config.config["show_source"]:
                line.append((" " + source_msg, identicurse.colour_fields["source"]))
                line.append(
                    (
                        " " * ((longest_metadata_string_len - (user_length + len(time_msg) + len(source_msg) + 2))),
                        identicurse.colour_fields["none"],
                    )
                )
            else:
                if detail_char == "":
                    line.append((" ", identicurse.colour_fields["none"]))
                line.append(
                    (
                        " " * ((longest_metadata_string_len - (user_length + len(time_msg) + 1))),
                        identicurse.colour_fields["none"],
                    )
                )
            line.append((" | ", identicurse.colour_fields["none"]))
        try:
            # Colour each entity (@user, !group, #tag) of the notice text.
            notice_entities = helpers.split_entities(n["text"])
            for entity in notice_entities:
                if len(entity["text"]) > 0:
                    if entity["type"] in ["user", "group", "tag"]:
                        entity_text_no_symbol = entity["text"][1:]  # drop @/!/#
                        cache = getattr(config.session_store, "%s_cache" % (entity["type"]))
                        if not entity_text_no_symbol in cache:
                            cache[entity_text_no_symbol] = random.choice(identicurse.base_colours.items())[1]
                        if config.config["%s_rainbow" % (entity["type"])]:
                            line.append((entity["text"], cache[entity_text_no_symbol]))
                        else:
                            if entity["type"] == "user":
                                line.append((entity["text"], identicurse.colour_fields["username"]))
                            else:
                                line.append((entity["text"], identicurse.colour_fields[entity["type"]]))
                    else:
                        line.append((entity["text"],
                                     identicurse.colour_fields["notice"]))
            self.buffer.append(line)
        except UnicodeDecodeError:
            self.buffer.append(
                [("Caution: Terminal too shit to display this notice.", identicurse.colour_fields["warning"])]
            )
        if config.config["show_notice_links"]:
            # Optional extra line with the notice's web URL.
            line = []
            base_url = helpers.base_url_regex.findall(self.conn.api_path)[0][0]
            if self.timeline_type in ["direct", "sentdirect"]:
                notice_link = "%s/message/%s" % (base_url, str(n["id"]))
            else:
                notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
            line.append(("<%s>" % (notice_link), identicurse.colour_fields["notice_link"]))
            self.buffer.append(line)
        if not config.config["compact_notices"]:
            self.buffer.append([])  # blank line between notices in full mode
        c += 1
def train(filename="poets.txt", hidden_size=128, n_layers=2,
          learning_rate=0.01, n_epochs=10000, chunk_len=20,
          batch_size=1024, print_every=100):
    """Train a character-level RNN on the sentences in *filename*.

    Resumes from a previously saved checkpoint when one exists, halves
    the learning rate every 1000 epochs, prints progress and a sample
    generation every *print_every* epochs, and saves the model both
    periodically and on KeyboardInterrupt.  Finally plots the loss
    curve with matplotlib.
    """
    # -- Data: one training sentence per line of the corpus. ---------
    # (renamed from "file", which shadows a builtin)
    text, text_len, all_characters, n_characters = helpers.read_file(filename)
    sentences = text.split("\n")
    print("There are %d unique characters in the dataset" % n_characters)
    print("There are %d sentences in the dataset with total of %d characters"
          % (len(sentences), len(text)))

    # -- Model: resume from disk if a checkpoint exists. --------------
    model_filename = helpers.pt_name
    if os.path.exists(model_filename):
        decoder = load(model_filename)
    else:
        decoder = CharRNN(n_characters, hidden_size, n_characters,
                          model=helpers.mcell, n_layers=n_layers)
    decoder_optimizer = torch.optim.Adam(decoder.parameters(),
                                         lr=learning_rate)
    criterion = nn.CrossEntropyLoss()
    if helpers.USE_CUDA:
        decoder.cuda()

    start = time.time()
    all_losses = []
    try:
        print("Training for %d epochs..." % n_epochs)
        for epoch in range(n_epochs):
            # Step-decay: halve the learning rate every 1000 epochs.
            # NOTE(review): rebuilding Adam discards its moment
            # estimates -- preserved from the original; confirm intended.
            if epoch != 0 and epoch % 1000 == 0:
                learning_rate /= 2
                decoder_optimizer = torch.optim.Adam(decoder.parameters(),
                                                     lr=learning_rate)
            inp, target = random_training_set(sentences, chunk_len,
                                              batch_size)
            loss = train_one_entry(decoder, decoder_optimizer, criterion,
                                   inp, target, chunk_len, batch_size)
            all_losses.append(loss)
            if epoch != 0 and epoch % print_every == 0:
                print('%s: [%s (%d %d%%) %.4f]' % (
                    time.ctime(), helpers.time_since(start), epoch,
                    epoch / n_epochs * 100, loss))
                # Sample from the model so progress is visible.
                print(generate(decoder, '新年', 100,
                               cuda=helpers.USE_CUDA), '\n')
                save(decoder, model_filename)
    except KeyboardInterrupt:
        # Ctrl-C stops training early without losing the model.
        save(decoder, model_filename)

    import matplotlib.pyplot as plt
    plt.plot(all_losses)
    plt.xlabel("iteration")
    plt.ylabel("train loss")
def update_buffer(self):
    """Rebuild self.buffer, the list of renderable lines, from the current
    timeline (and, for "user"/"group" timelines, from self.profile).

    Each buffer entry is a list of (text, colour) or (text, colour, min_x)
    tuples; the third element, where present, is the minimum x offset used
    so that wrapped continuation lines are indented correctly.
    """
    self.buffer.clear()
    # --- profile header for a user timeline -------------------------------
    if self.timeline_type == "user":
        if self.profile is not None:
            for field in [
                    # display name, internal field name, skip a line after this field?
                    ("Real Name", "name", True),
                    ("Bio", "description", False),
                    ("Location", "location", False),
                    ("URL", "url", False),
                    ("User ID", "id", False),
                    ("Joined at", "created_at", True),
                    ("Followed by", "followers_count", False),
                    ("Following", "friends_count", False),
                    ("Followed by you", "following", True),
                    ("Favourites", "favourites_count", False),
                    ("Notices", "statuses_count", False),
                    ("Average daily notices", "notices_per_day", True)]:
                # only show fields the profile actually filled in
                if (self.profile[field[1]] is not None) and (self.profile[field[1]] != ""):
                    line = []
                    line.append((field[0] + ":", identicurse.colour_fields['profile_fields']))
                    line.append((" ", identicurse.colour_fields['none']))
                    line.append((self.profile[field[1]], identicurse.colour_fields['profile_values']))
                    self.buffer.append(line)
                    if field[2]:
                        # blank spacer line after this field
                        self.buffer.append([("", identicurse.colour_fields['none'])])
        else:
            self.buffer.append([("There is no user called @%s on this instance." % (self.type_params['screen_name']), identicurse.colour_fields['none'])])
    # --- profile header for a group timeline ------------------------------
    if self.timeline_type == "group":
        if self.profile is not None:
            for field in [
                    # display name, internal field name, skip a line after this field?
                    ("Name", "fullname", True),
                    ("Description", "description", False),
                    ("Location", "location", False),
                    ("Homepage", "homepage", False),
                    ("Group ID", "id", False),
                    ("Created at", "created", False),
                    ("Members", "member_count", True),
            ]:
                if (self.profile[field[1]] is not None) and (self.profile[field[1]] != ""):
                    line = []
                    line.append((field[0] + ":", identicurse.colour_fields['profile_fields']))
                    line.append((" ", identicurse.colour_fields['none']))
                    line.append((self.profile[field[1]], identicurse.colour_fields['profile_values']))
                    self.buffer.append(line)
                    if field[2]:
                        self.buffer.append([("", identicurse.colour_fields['none'])])
        else:
            self.buffer.append([("There is no group called !%s on this instance." % (self.type_params['nickname']), identicurse.colour_fields['none'])])

    maxx = self.window.getmaxyx()[1]  # terminal width, for right-aligning metadata
    c = 1  # 1-based on-screen notice counter
    longest_metadata_string_len = 0

    # --- first pass: measure the widest "time + user + source" metadata ---
    # string so that the second pass can pad every notice to line up
    for n in self.timeline:
        if n["text"] is None:
            n["text"] = ""
        if "direct" in self.timeline_type:
            user_string = "%s -> %s" % (n["sender"]["screen_name"], n["recipient"]["screen_name"])
            source_msg = ""
        else:
            # an "atless" reply is one whose text never @-mentions the user
            # it replies to; those get an explicit "-> name" marker
            atless_reply = False
            if "in_reply_to_screen_name" in n and n["in_reply_to_screen_name"] is not None:
                atless_reply = True
                for entity in helpers.split_entities(n["text"]):
                    if entity["type"] == "user" and entity["text"][1:].lower() == n["in_reply_to_screen_name"].lower():
                        atless_reply = False
                        break
            if atless_reply:
                if "user" in n:
                    user_string = "%s" % (n["user"]["screen_name"])
                else:
                    user_string = "<no username>"
                user_string += " -> %s" % (n["in_reply_to_screen_name"])
            else:
                if "user" in n:
                    user_string = "%s" % (n["user"]["screen_name"])
                else:
                    user_string = ""
            # federated (ostatus) notices show the originating domain instead
            # of the client name
            if (n["source"] == "ostatus") and ("user" in n) and "statusnet_profile_url" in n["user"]:
                raw_source_msg = "from %s" % (helpers.domain_regex.findall(n["user"]["statusnet_profile_url"])[0][2])
            else:
                raw_source_msg = "from %s" % (n["source"])
            source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                # reply marker: "+" on the user string, or "[+]" on the source
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                # repeats display the original author plus the repeater
                user_string = "%s [%s's RD]" % (n["retweeted_status"]["user"]["screen_name"], n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
        datetime_notice = helpers.normalise_datetime(n["created_at"])
        time_msg = helpers.format_time(helpers.time_since(datetime_notice), short_form=True)
        metadata_string = time_msg + " " + user_string
        if config.config["show_source"]:
            metadata_string += " " + source_msg
        if len(metadata_string) > longest_metadata_string_len:
            longest_metadata_string_len = len(metadata_string)

    # --- second pass: actually build the buffer lines ---------------------
    for n in self.timeline:
        if n["text"] is None:
            n["text"] = ""
        from_user = None
        to_user = None
        repeating_user = None
        if "direct" in self.timeline_type:
            from_user = n["sender"]["screen_name"]
            to_user = n["recipient"]["screen_name"]
            source_msg = ""
        else:
            if "retweeted_status" in n:
                # unwrap the repeat: render the original notice, remember who repeated it
                repeating_user = n["user"]["screen_name"]
                n = n["retweeted_status"]
            if "user" in n:
                from_user = n["user"]["screen_name"]
            else:
                from_user = "******"
            atless_reply = False
            if "in_reply_to_screen_name" in n and n["in_reply_to_screen_name"] is not None:
                atless_reply = True
                for entity in helpers.split_entities(n["text"]):
                    if entity["type"] == "user" and entity["text"][1:].lower() == n["in_reply_to_screen_name"].lower():
                        atless_reply = False
                        break
            if atless_reply:
                to_user = n["in_reply_to_screen_name"]
            if (n["source"] == "ostatus") and ("user" in n) and "statusnet_profile_url" in n["user"]:
                raw_source_msg = "from %s" % (helpers.domain_regex.findall(n["user"]["statusnet_profile_url"])[0][2])
            else:
                raw_source_msg = "from %s" % (n["source"])
            source_msg = self.html_regex.sub("", raw_source_msg)
            repeat_msg = ""
            if n["in_reply_to_status_id"] is not None:
                source_msg += " [+]"
        datetime_notice = helpers.normalise_datetime(n["created_at"])
        time_msg = helpers.format_time(helpers.time_since(datetime_notice), short_form=True)

        # make sure every user shown on this line has a cached rainbow colour
        for user in [user for user in [from_user, to_user, repeating_user] if user is not None]:
            if not user in config.session_store.user_cache:
                config.session_store.user_cache[user] = helpers.colour_from_name([item[1] for item in identicurse.base_colours.items()], user.lower())

        # separator marking where the timeline was last paused
        if "ic__paused_on" in n and c != 1:
            self.buffer.append([("-", identicurse.colour_fields["pause_line"])])
            self.buffer.append([("", identicurse.colour_fields["none"])])

        # Build the line
        line = []
        if c < 10:
            cout = " " + str(c)  # right-align single-digit counters
        else:
            cout = str(c)
        line.append((cout, identicurse.colour_fields["notice_count"]))
        # selection marker for the currently chosen notice
        if (c - 1) == self.chosen_one:
            line.append((' * ', identicurse.colour_fields["selector"]))
        else:
            line.append((' ' * 3, identicurse.colour_fields["selector"]))
        if config.config['compact_notices']:
            line.append((time_msg, identicurse.colour_fields["time"]))
            line.append((" ", identicurse.colour_fields["none"]))
        if config.config['user_rainbow']:
            line.append((from_user, config.session_store.user_cache[from_user]))
        else:
            line.append((from_user, identicurse.colour_fields["username"]))
        # user_length tracks the printed width of the user section for padding
        if from_user is not None:
            user_length = len(from_user)
        else:
            user_length = None
        if to_user is not None:
            line.append((" -> ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((to_user, config.session_store.user_cache[to_user]))
            else:
                line.append((to_user, identicurse.colour_fields["username"]))
            user_length += len(" -> ") + len(to_user)
        if repeating_user is not None:
            if config.config["compact_notices"]:
                line.append((" [", identicurse.colour_fields["none"]))
            else:
                line.append((" [ repeat by ", identicurse.colour_fields["none"]))
            if config.config['user_rainbow']:
                line.append((repeating_user, config.session_store.user_cache[repeating_user]))
            else:
                line.append((repeating_user, identicurse.colour_fields["username"]))
            if config.config["compact_notices"]:
                line.append(("'s RD]", identicurse.colour_fields["none"]))
                user_length += len(" [") + len(repeating_user) + len("'s RD]")
            else:
                line.append((" ]", identicurse.colour_fields["none"]))
                user_length += len(" [ repeat by ") + len(repeating_user) + len(" ]")
        if not config.config['compact_notices']:
            # full layout: pad so time (and source) sit flush right, then
            # start the notice body on the next buffer line
            if config.config["show_source"]:
                line.append((' ' * (maxx - ((len(source_msg) + len(time_msg) + user_length + (6 + len(cout))))), identicurse.colour_fields["none"]))
            else:
                line.append((' ' * (maxx - ((len(time_msg) + user_length + (5 + len(cout))))), identicurse.colour_fields["none"]))
            line.append((time_msg, identicurse.colour_fields["time"]))
            if config.config["show_source"]:
                line.append((' ', identicurse.colour_fields["none"]))
                line.append((source_msg, identicurse.colour_fields["source"]))
            self.buffer.append(line)
            line = []
        else:
            # compact layout: a one-character detail marker ("+" reply,
            # "~" repeat) then a "|" separator before the notice text
            detail_char = ""
            if (not config.config["show_source"]):
                if "in_reply_to_status_id" in n and n["in_reply_to_status_id"] is not None:
                    detail_char = "+"
                elif "retweeted_status" in n:
                    detail_char = "~"
            line.append((" %s" % (detail_char), identicurse.colour_fields["source"]))
            if config.config["show_source"]:
                line.append((" " + source_msg, identicurse.colour_fields["source"]))
                line.append((" " * ((longest_metadata_string_len - (user_length + len(time_msg) + len(source_msg) + 2))), identicurse.colour_fields["none"]))
            else:
                if detail_char == "":
                    line.append((" ", identicurse.colour_fields["none"]))
                line.append((" " * ((longest_metadata_string_len - (user_length + len(time_msg) + 1))), identicurse.colour_fields["none"]))
            line.append((" | ", identicurse.colour_fields["none"]))
        try:
            # determine how far along the line items beginning now would be;
            # this will be used so that wrapped lines get correct indentation
            min_x_offset = reduce((lambda acc_length, block: (acc_length if (len(block) < 3) else max(acc_length, block[2])) + len(block[0])), line, 0)
            notice_entities = helpers.split_entities(n['text'] or "")
            for entity in notice_entities:
                if len(entity['text']) > 0:
                    if entity['type'] in ['user', 'group', 'tag']:
                        # strip the leading @/!/# symbol for the colour cache key
                        entity_text_no_symbol = entity['text'][1:]
                        cache = getattr(config.session_store, '%s_cache' % (entity['type']))
                        if not entity_text_no_symbol in cache:
                            cache[entity_text_no_symbol] = helpers.colour_from_name([item[1] for item in identicurse.base_colours.items()], entity_text_no_symbol.lower())
                        if config.config['%s_rainbow' % (entity['type'])]:
                            line.append((entity['text'], cache[entity_text_no_symbol], min_x_offset))
                        else:
                            if entity['type'] == "user":
                                line.append((entity['text'], identicurse.colour_fields["username"], min_x_offset))
                            else:
                                line.append((entity['text'], identicurse.colour_fields[entity['type']], min_x_offset))
                    else:
                        line.append((entity['text'], identicurse.colour_fields["notice"], min_x_offset))
            self.buffer.append(line)
        except UnicodeDecodeError:
            self.buffer.append([("Caution: Terminal too shit to display this notice.", identicurse.colour_fields["warning"])])
        if config.config["show_notice_links"]:
            # optional permalink line under the notice
            line = []
            base_url = helpers.base_url_regex.findall(self.conn.api_path)[0][0]
            if self.timeline_type in ["direct", "sentdirect"]:
                notice_link = "%s/message/%s" % (base_url, str(n["id"]))
            else:
                notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
            line.append(("<%s>" % (notice_link), identicurse.colour_fields["notice_link"]))
            self.buffer.append(line)
        if not config.config['compact_notices']:
            self.buffer.append([])
        c += 1
def update(self):
    """Fetch new notices for this timeline from the server.

    When paused, only re-renders the existing buffer. Otherwise requests
    the appropriate timeline for self.timeline_type, asking only for
    notices newer than the latest server-fetched one we already hold.
    For "user" timelines it also (re)fetches the profile, normalising its
    fields for display.
    """
    self.update_name()
    if self.paused:
        # paused: don't hit the network, just refresh the display
        self.update_buffer()
        return
    get_count = config.config['notice_limit']
    if self.prev_page != self.page:
        # page changed: discard the cached notices so the new page replaces them
        self.timeline = []
    last_id = 0
    if len(self.timeline) > 0:
        # find the newest notice that actually came from the server,
        # to use as since_id
        for notice in self.timeline:
            if notice["ic__from_web"]:  # don't consider inserted posts latest
                last_id = notice['id']
                break
    if self.timeline_type == "home":
        raw_timeline = self.conn.statuses_home_timeline(count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "mentions":
        raw_timeline = self.conn.statuses_mentions(count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "direct":
        raw_timeline = self.conn.direct_messages(count=get_count, page=self.page, since_id=last_id)
    elif self.timeline_type == "user":
        raw_timeline = self.conn.statuses_user_timeline(user_id=self.type_params['user_id'], screen_name=self.type_params['screen_name'], count=get_count, page=self.page, since_id=last_id)
        try:
            self.profile = self.conn.users_show(screen_name=self.type_params['screen_name'])
            # numerical fields, convert them to strings to make the buffer code more clean
            for field in ['id', 'created_at', 'followers_count', 'friends_count', 'favourites_count', 'statuses_count']:
                self.profile[field] = str(self.profile[field])
            # special handling for following
            if self.profile['following']:
                self.profile['following'] = "Yes"
            else:
                self.profile['following'] = "No"
            # create this field specially
            datetime_joined = helpers.normalise_datetime(self.profile['created_at'])
            days_since_join = helpers.single_unit(helpers.time_since(datetime_joined), "days")['days']
            self.profile['notices_per_day'] = "%0.2f" % (float(self.profile['statuses_count']) / days_since_join)
        except StatusNetError, e:
            # 404 means no such user on this instance; update_buffer shows
            # a message when self.profile is None
            if e.errcode == 404:
                self.profile = None
def fit_network(self, e_b, e_n, a_max, l, a, d, passes=1, test_data=None, global_train_err=None, global_test_err=None, num_mature_neurons=None):
    """Fit the network to self.data using the growing-neural-gas update
    steps numbered 0-9 in the comments below.

    Parameters:
        e_b, e_n -- move fractions for the winner unit and its neighbours
        a_max    -- maximum edge age before pruning
        l        -- insert a new unit every l observations
        a        -- error decay applied to q and f on insertion
        d        -- global error decay applied each step
        passes   -- number of shuffled passes over self.data
        test_data, global_train_err, global_test_err -- per-pass error
            logging; the two lists are appended to in place
        num_mature_neurons -- NOTE(review): this parameter is immediately
            shadowed by a local list below, so any value passed in is
            ignored — confirm whether that is intended.

    Stops early once the network size has essentially stopped growing
    (previous/current mature-neuron ratio > 0.99).
    """
    # logging variables
    accumulated_local_error = []
    network_order = []
    network_size = []
    total_units = []
    num_mature_neurons = []
    mature_neurons_ratio = 0
    self.units_created = 0
    # 0. start with two units a and b at random position w_a and w_b
    w_a = [np.random.uniform(-2, 2) for _ in range(np.shape(self.data)[1])]
    w_b = [np.random.uniform(-2, 2) for _ in range(np.shape(self.data)[1])]
    self.network = nx.Graph()
    self.network.add_node(self.units_created, vector=w_a, error=0)
    self.units_created += 1
    self.network.add_node(self.units_created, vector=w_b, error=0)
    self.units_created += 1
    # 1. iterate through the data
    sequence = 0
    start = time.time()
    for p in range(passes):
        # print(' Pass #%d' % (p + 1))
        np.random.shuffle(self.data)
        steps = 0
        for observation in self.data:
            # 2. find the nearest unit s_1 and the second nearest unit s_2
            nearest_units = self.find_nearest_units(observation)
            s_1 = nearest_units[0]
            s_2 = nearest_units[1]
            # 3. increment the age of all edges emanating from s_1
            for u, v, attributes in self.network.edges(data=True, nbunch=[s_1]):
                self.network.add_edge(u, v, age=attributes['age'] + 1)
            # 4. add the squared distance between the observation and the
            # nearest unit in input space
            self.network.node[s_1]['error'] += spatial.distance.euclidean(observation, self.network.node[s_1]['vector'])**2
            # 5. move s_1 and its direct topological neighbors towards the
            # observation by the fractions e_b and e_n, respectively, of the
            # total distance
            update_w_s_1 = e_b * (np.subtract(observation, self.network.node[s_1]['vector']))
            self.network.node[s_1]['vector'] = np.add(self.network.node[s_1]['vector'], update_w_s_1)
            update_w_s_n = e_n * (np.subtract(observation, self.network.node[s_1]['vector']))
            for neighbor in self.network.neighbors(s_1):
                self.network.node[neighbor]['vector'] = np.add(self.network.node[neighbor]['vector'], update_w_s_n)
            # 6. if s_1 and s_2 are connected by an edge, set the age of this
            # edge to zero; if such an edge doesn't exist, create it
            self.network.add_edge(s_1, s_2, age=0)
            # 7. remove edges with an age larger than a_max; if this results
            # in units having no emanating edges, remove them as well
            self.prune_connections(a_max)
            # 8. if the number of steps so far is an integer multiple of
            # parameter l, insert a new unit
            steps += 1
            if steps % l == 0:
                sequence += 1
                # 8.a determine the unit q with the maximum accumulated error
                q = 0
                error_max = 0
                for u in self.network.nodes():
                    if self.network.node[u]['error'] > error_max:
                        error_max = self.network.node[u]['error']
                        q = u
                # 8.b insert a new unit r halfway between q and its neighbor
                # f with the largest error variable
                f = -1
                largest_error = -1
                for u in self.network.neighbors(q):
                    if self.network.node[u]['error'] > largest_error:
                        largest_error = self.network.node[u]['error']
                        f = u
                w_r = 0.5 * (np.add(self.network.node[q]['vector'], self.network.node[f]['vector']))
                r = self.units_created
                self.units_created += 1
                # 8.c insert edges connecting the new unit r with q and f
                # remove the original edge between q and f
                self.network.add_node(r, vector=w_r, error=0)
                self.network.add_edge(r, q, age=0)
                self.network.add_edge(r, f, age=0)
                self.network.remove_edge(q, f)
                # 8.d decrease the error variables of q and f by multiplying
                # them with a; initialize the error variable of r with the
                # new value of the error variable of q
                self.network.node[q]['error'] *= a
                self.network.node[f]['error'] *= a
                self.network.node[r]['error'] = self.network.node[q]['error']
            # 9. decrease all error variables by multiplying them with a
            # constant d
            error = 0
            for u in self.network.nodes():
                error += self.network.node[u]['error']
            accumulated_local_error.append(error)
            network_order.append(self.network.order())
            network_size.append(self.network.size())
            total_units.append(self.units_created)
            for u in self.network.nodes():
                self.network.node[u]['error'] *= d
                if self.network.degree(nbunch=[u]) == 0:
                    # a unit with no edges should have been pruned; flag it
                    print(u)
        # global_error.append(self.compute_global_error())
        # per-pass evaluation: collect all unit vectors and compute mean
        # euclidean error on train and test data
        neurons = []
        for k in self.network._node:
            neurons.append(self.network._node[k]['vector'])
        mature_torch_neurons = torch.tensor(neurons)
        mean_train_error = compute_global_error(mature_torch_neurons, torch.tensor(self.data), cuda=True)
        mean_test_error = compute_global_error(mature_torch_neurons, torch.tensor(test_data), cuda=True)
        global_train_err.append(mean_train_error)
        global_test_err.append(mean_test_error)
        actual_mature_neurons = mature_torch_neurons.shape[0]
        if len(num_mature_neurons) > 1:
            # ratio of last pass's unit count to this one: near 1 means growth
            # has stalled
            mature_neurons_ratio = num_mature_neurons[-1] / actual_mature_neurons
        num_mature_neurons.append(actual_mature_neurons)
        # NOTE(review): the trailing ", 3" below is an extra positional
        # argument to str.format (silently ignored); it was presumably meant
        # to be round(p / passes * 100, 3) — confirm.
        print("\repoch [{}/{}] - Train euclidean error : {:.4f} - Test euclidean error : {:.4f} - #mature neurons: {} - Time :{} - Process:{}%".format(p + 1, passes, mean_train_error, mean_test_error, actual_mature_neurons, time_since(start), round(p / passes * 100), 3), end="")
        if mature_neurons_ratio > 0.99:
            break
# Training driver: runs the epoch loop over random training batches,
# periodically printing progress and a sample generation, and saving the
# model on completion or on Ctrl-C. Relies on `args`, `decoder`, `train`,
# `random_training_set`, `generate`, `save` and `time_since` defined
# elsewhere in this file.
decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=args.learning_rate)
criterion = nn.CrossEntropyLoss()
if args.cuda:
    decoder.cuda()
start = time.time()
all_losses = []
# NOTE(review): loss_avg is accumulated below but never read in this chunk —
# confirm whether it is used (e.g. printed) elsewhere.
loss_avg = 0
try:
    print("Training for %d epochs..." % args.n_epochs)
    for epoch in tqdm(range(1, args.n_epochs + 1)):
        loss = train(*random_training_set(args.chunk_len, args.batch_size))
        loss_avg += loss
        if epoch % args.print_every == 0:
            # progress line plus a sample generation seeded with 'Wh'
            print('[%s (%d %d%%) %.4f]' % (time_since(start), epoch, epoch / args.n_epochs * 100, loss))
            print(generate(decoder, 'Wh', 100, cuda=args.cuda), '\n')
    print("Saving...")
    save()
except KeyboardInterrupt:
    # manual stop: still persist the partially-trained model
    print("Saving before quit...")
    save()