def import_testevents():
    """Seed `web_events` with three hand-written sample events via REPLACE."""
    # Column order must line up with the tuples below.
    columns = [
        "eventid", "userid", "entrytime", "action",
        "category", "refcategory", "category1", "name1",
        "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    ]
    rows = [
        ("e.test.1", "odinlake", "2016-06-16 10:11:12", "edit", "event", "",
         "story", "tng1x01", "", "", "description", "nothing", "something",
         "reverted"),
        ("e.test.2", "odinlake", "2016-06-16 13:12:11", "new", "event",
         "featureof", "story", "tos2x02", "theme", "obsession", "weight", "",
         "choice", "merged"),
        ("e.test.3", "odinlake", "2016-06-16 15:15:15", "edit", "event",
         "featureof", "story", "tng7x02", "theme", "redemption", "motivation",
         "", "motivation goes here", "pending"),
    ]
    col_sql = ", ".join('`%s`' % c for c in columns)
    ph_sql = ", ".join("%s" for _ in columns)
    do("REPLACE INTO `web_events` (%s) values (%s)" % (col_sql, ph_sql), rows)
def setup_method(self, method):
    """Reset db state and seed an in-memory `foo` table before each test."""
    db.clear()
    db.drivers.autoregister_class(TestDriver)
    db.from_url(MEM_URL)
    # Create and seed the fixture table used by the tests.
    db.do(CREATE_FOO_SQL)
    db.do("INSERT INTO foo VALUES (1, 'foo')")
    self.db = db.get()
def import_stories(): q = """ SELECT StoryID, Title, Description, Director, StoryWriter, Airdate, Stardate FROM `meta_ext_episodes_all` """ qq = [] attr_values = [] obj_values = [] for sid, t, desc, direct, sw, ad, sd in do(q): desc += """ Director: %s Writer: %s Stardate: %s """ % (direct, sw, sd) sid = sid.encode("utf-8") t = t.encode("utf-8") desc = desc.encode("utf-8") ad = str(ad).encode("utf-8") attr_values.append(('story', sid, 'title', t)) attr_values.append(('story', sid, 'description', desc)) attr_values.append(('story', sid, 'date', str(ad))) obj_values.append(('story', sid)) print obj_values[-1] do( "INSERT INTO `web_attributes` (`category`, `name`, `attr`, `value`) " "values(%s, %s, %s, %s)", attr_values) do("INSERT INTO `web_objects` (`category`, `name`) " "values(%s, %s)", obj_values)
def create_tables(recreate = False):
    """(Re)create every table defined in TABLES.

    When *recreate* is true each table is dropped first; the drop will
    raise if the table does not yet exist.
    """
    if recreate:
        for name, _ddl in TABLES.iteritems():
            do("""DROP TABLE `%s`""" % name)
    for name, ddl in TABLES.iteritems():
        do(ddl)
def import_stories(): q = """ SELECT StoryID, Title, Description, Director, StoryWriter, Airdate, Stardate FROM `meta_ext_episodes_all` """ qq = [] attr_values = [] obj_values = [] for sid, t, desc, direct, sw, ad, sd in do(q): desc += """ Director: %s Writer: %s Stardate: %s """ % (direct, sw, sd) sid = sid.encode("utf-8") t = t.encode("utf-8") desc = desc.encode("utf-8") ad = str(ad).encode("utf-8") attr_values.append(('story', sid, 'title', t)) attr_values.append(('story', sid, 'description', desc)) attr_values.append(('story', sid, 'date', str(ad))) obj_values.append(('story', sid)) print obj_values[-1] do("INSERT INTO `web_attributes` (`category`, `name`, `attr`, `value`) " "values(%s, %s, %s, %s)", attr_values) do("INSERT INTO `web_objects` (`category`, `name`) " "values(%s, %s)", obj_values)
def import_testevents():
    """Insert (or refresh) three canned test rows in `web_events`."""
    fields = (
        "eventid", "userid", "entrytime", "action", "category", "refcategory",
        "category1", "name1", "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    )
    sample_events = [
        ("e.test.1", "odinlake", "2016-06-16 10:11:12", "edit", "event", "",
         "story", "tng1x01", "", "", "description", "nothing", "something",
         "reverted"),
        ("e.test.2", "odinlake", "2016-06-16 13:12:11", "new", "event",
         "featureof", "story", "tos2x02", "theme", "obsession", "weight", "",
         "choice", "merged"),
        ("e.test.3", "odinlake", "2016-06-16 15:15:15", "edit", "event",
         "featureof", "story", "tng7x02", "theme", "redemption", "motivation",
         "", "motivation goes here", "pending"),
    ]
    field_sql = ", ".join('`%s`' % f for f in fields)
    value_sql = ", ".join("%s" for _ in fields)
    sql = "REPLACE INTO `web_events` (%s) values (%s)" % (field_sql, value_sql)
    do(sql, sample_events)
def test_insert(self):
    """A fresh INSERT should be readable back with identical values."""
    db.do("DELETE FROM foo")
    db.do("INSERT INTO foo (foo_id, value) VALUES (1, 'baz')")
    fetched = self.items("SELECT * FROM foo")
    assert len(fetched) == 1
    first = fetched[0]
    assert first.foo_id == 1
    assert first.value == 'baz'
def setup_method(self, method):
    """Connect to the configured PostgreSQL test db and define always1()."""
    db.clear()
    import db_psycopg2
    db.from_environ("DB_PSYCOPG2_TEST_URL")
    self.db = db.get()
    # Helper pl/pgsql function used by the tests: always returns 1.
    db.do("""CREATE OR REPLACE FUNCTION always1() RETURNS INTEGER AS $$
        BEGIN
            RETURN 1;
        END
        $$ LANGUAGE 'plpgsql';
    """)
def initial_import(): do("DROP table web_events") create_tables() #import_counters() #import_testevents() #import_stories() #import_storythemes() for obj in webobject.TSTPEvent.load(): print obj
def create_tables(recreate=False, subset="web_.*$", quietish=False):
    """Create (optionally first dropping) the TABLES whose names match *subset*.

    *subset* may be a regex string (matched against table names), a set of
    names, or None for all tables. Drops run before any creates.
    """
    if isinstance(subset, str):
        subset = set(k for k in TABLES.keys() if re.match(subset, k))
    if subset is None:
        subset = set(TABLES.keys())
    if recreate:
        for name, _ddl in TABLES.iteritems():
            if name in subset:
                do("""DROP TABLE `%s`""" % name, quietish=quietish)
    for name, ddl in TABLES.iteritems():
        if name in subset:
            do(ddl, quietish=quietish)
def edit_object(cls, cat, name, attrs, vals):
    """Queue "edit" events for (cat, name) and immediately apply the updates.

    For each matching attribute row found in `web_attributes`, an event is
    written to `web_events` (state "pending") recording old and new values,
    and the new values are applied via cls.commit_updates().
    NOTE(review): values are interpolated directly into the SQL string;
    this relies on esc()/sql_filter_list() for escaping — confirm (injection risk).
    NOTE(review): LIMIT 1 caps the scan at a single attribute row — confirm intended.
    """
    table = "web_attributes"
    dt = datetime.datetime.now()
    cat, name = esc(cat, name)
    attrstr = cls.sql_filter_list(attrs)
    #vals = [ esc(x) for x in vals_in ]
    oldvalue = None
    # Map each attribute to its requested new value.
    alu = dict(izip(attrs, vals))
    # All events from this call share a uuid-derived id prefix.
    evidbase = "ev.%d-" % uuid("event")
    idx = 0
    events = []
    updates = []
    for attr, oldvalue in do("""
        SELECT attr, value FROM `%s`
        WHERE category = "%s" AND name = "%s" AND attr IN %s
        LIMIT 1
    """ % (table, cat, name, attrstr)):
        evid = evidbase + str(idx)
        idx += 1
        newvalue = alu[attr]
        event = (evid, "odinlake", dt, "edit", "event", "", cat, name,
                 "", "", attr, oldvalue, newvalue, "pending")
        update = (cat, name, attr, newvalue)
        obj = (cat, name)
        events.append(event)
        updates.append(update)
    evfields = [
        "eventid", "userid", "entrytime", "action",
        "category", "refcategory", "category1", "name1",
        "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    ]
    fstr = ", ".join('`%s`' % s for s in evfields)
    vstr = ", ".join("%s" for s in evfields)
    do("REPLACE INTO `web_events` (%s) values (%s)" % (fstr, vstr), events)
    cls.commit_updates(updates)
def propose_edit_object(cls, cat1, name1, cat2, name2, attrs, vals):
    """Build (but do not apply) "edit" events for a (cat1,name1)->(cat2,name2)
    connection in `web_connections`.

    Returns (events, updates): event tuples destined for `web_events` and
    update tuples destined for commit_updates().

    Fix: the original did `cat, name = esc(cat, name)` — `cat`/`name` are
    not parameters of this function, so every call raised NameError; the
    intent (see edit_object) is to escape the actual category/name pairs.
    NOTE(review): the remaining values are interpolated directly into SQL;
    relies on esc()/sql_filter_list() for escaping — confirm (injection risk).
    """
    table = "web_connections"
    dt = datetime.datetime.now()
    # Escape both endpoint identifiers (was: esc(cat, name) -> NameError).
    cat1, name1 = esc(cat1, name1)
    cat2, name2 = esc(cat2, name2)
    attrstr = cls.sql_filter_list(attrs)
    oldvalue = None
    # Map each attribute to its requested new value.
    alu = dict(izip(attrs, vals))
    evidbase = "ev.%d-" % uuid("event")
    idx = 0
    events = []
    updates = []
    for attr, oldvalue in do("""
        SELECT attr, value FROM `%s`
        WHERE category1 = "%s" AND name1 = "%s"
        AND category2 = "%s" AND name2 = "%s" AND attr IN %s
        LIMIT 1
    """ % (table, cat1, name1, cat2, name2, attrstr)):
        evid = evidbase + str(idx)
        idx += 1
        newvalue = alu[attr]
        event = (evid, "odinlake", dt, "edit", "event", "", cat1, name1,
                 cat2, name2, attr, oldvalue, newvalue, "pending")
        update = ("featureof", cat1, name1, cat2, name2, attr, newvalue)
        events.append(event)
        updates.append(update)
    return events, updates
def cached_special_query(act_type, req_type, obj_name):
    """
    Use cached version if available, else re-generate.
    Queries may be cached on disk or in SQL. The latter case
    is recovered through procedures.

    Returns a JSON string (or pickled cache payload), or None when the
    act_type is not recognized.
    """
    # Procedure-backed queries bypass the disk cache entirely.
    if act_type in PROC_SPECIAL_QUERIES:
        return json.dumps({
            "data": PROC_SPECIAL_QUERIES[act_type](act_type, req_type, obj_name)
        })
    path = get_cache_path(act_type, req_type, obj_name)
    if os.path.isfile(path):
        log.debug("returning cached: %s", path)
        # NOTE(review): unpickling cache files — safe only if the cache
        # directory is fully trusted.
        with open(path, "rb") as fh:
            return pickle.load(fh)
    else:
        log.warn("missing cache: %s", path)
    # Fall through: regenerate per act_type.
    if act_type == "urlimport":
        from webimport import json_story_from_url_webquery
        return json_story_from_url_webquery()
    if act_type == "themelist":
        themes = list(x[0] for x in do("""
            SELECT DISTINCT name from `web_attributes` WHERE category = "theme"
        """))
        return json.dumps(themes)
    if act_type == "metathemedata":
        from webdb import get_metatheme_data
        return json.dumps(get_metatheme_data())
    if act_type == "themesimilarity":
        from lib.datastats import get_themes_similarity_v1
        return json.dumps(get_themes_similarity_v1())
    if act_type == "stats" and req_type == "theme":
        from lib.datastats import get_theme_stats
        return json.dumps(get_theme_stats(obj_name))
    if act_type in webdb.SUPPORTED_OBJECTS:
        # "defenitions" is the external API's spelling; do not correct here.
        return json.dumps(webdb.get_defenitions(act_type))
    if act_type == "protostory_saved":
        # Collect saved proto-story drafts from the temp directory.
        rets = []
        basepath = os.path.join(tempfile.gettempdir(), "tstp", "protostory")
        if not os.path.exists(basepath):
            os.makedirs(basepath)
        for fn in os.listdir(basepath):
            if fn.endswith("st.txt"):
                path = os.path.join(basepath, fn)
                with open(path, "r+") as fh:
                    rets.append([fn, fh.read()])
        return json.dumps(rets)
    return None
def get_commit_log(*args):
    """
    Return commit log stored in SQL.

    Rows come back newest-first as (id, formatted-time, author, message).
    """
    rows = do("""
        SELECT id, time, author, message from commits_log ORDER BY time DESC
    """)
    # `when` is a datetime; render as date, newline, clock time.
    return [
        (cid, when.strftime("%Y-%m-%d\n%H:%M:%S"), author, message)
        for cid, when, author, message in rows
    ]
def react_to_commit():
    """
    M-4 will do a variety of things when there is a new commit.

    Flow: (debug only) rewind the log by N hours; find the last known
    commit; refresh commit data; then diff and mail any new commits.
    NOTE(review): timestamps are interpolated into SQL strings — confirm
    they can never contain unescaped quotes.
    """
    if "--debug" in sys.argv:
        global DEBUG
        DEBUG = True
    if DEBUG:
        # In debug mode, delete recent log rows so the run has work to do.
        # --dtN sets the lookback window in hours (default 24).
        args = [x for x in sys.argv if x.startswith("--dt")]
        hours = int(args[-1][4:]) if args else 24
        ts = (datetime.utcnow() - timedelta(hours=hours)).strftime('%Y-%m-%d %H:%M:%S')
        db.do("""DELETE FROM commits_log WHERE time > '%s'""" % ts)
    commitssql = """SELECT id, time FROM commits_log ORDER BY time DESC LIMIT 1"""
    commitslog = list(db.do(commitssql))
    if not commitslog:
        # No baseline: import everything once, then retry.
        lib.log.debug("no previous commits logged, running from scratch...")
        lib.commits.dbstore_commit_data(fromdate=None, recreate=True, quieter=False)
        commitslog = list(db.do(commitssql))
        if not commitslog:
            lib.log.debug("no previous commits logged still, failing...")
            return
    fromid, fromtime = commitslog[0]
    sfromtime = fromtime.strftime('%Y-%m-%d %H:%M:%S')
    lib.log.debug("last previously known commit is %s at %s", fromid, sfromtime)
    # Incrementally import anything newer than the last known commit.
    lib.commits.dbstore_commit_data(fromdate=fromtime, recreate=False, quieter=True)
    entries = list(db.do("""
        SELECT id, time, author, committype, message FROM commits_log
        WHERE time > '%s' ORDER BY time ASC""" % sfromtime
    ))
    if not entries:
        lib.log.debug("NO NEW CHANGES! Aborting.")
    else:
        toid, totime = entries[-1][:2]
        stotime = totime.strftime('%Y-%m-%d %H:%M:%S')
        lib.log.debug("last newly discovered commit is %s at %s", toid, stotime)
        diffcmd = 'git diff %s..%s' % (fromid, toid)
        txtdiff = subprocess.check_output(diffcmd.split()).decode("utf-8")
        if DEBUG:
            # Debug-only payload — presumably exercises a downstream
            # profanity filter; confirm before removing.
            txtdiff += """+ and some profane shit\n"""
            txtdiff += """+ you Dick\n"""
            txtdiff += """+ look, a Tit\n"""
            pass
        maildef = makemail(entries, txtdiff)
        lib.email.sendmail(maildef)
def import_storythemes():
    """Import story<->theme links from master_all_exploaded into web_connections."""
    query = """
        SELECT StoryID, FieldName, Keyword, Comment
        FROM `master_all_exploaded`
        WHERE FieldName LIKE '%Theme%'
    """
    rows = []
    for sid, fieldname, theme, comment in do(query):
        sid = sid.lower()
        # First word of the field name (e.g. "Choice Theme") is the weight.
        weight = fieldname.split()[0].lower()
        rows.append(("featureof", "story", sid, "theme", theme, "weight", weight))
        rows.append(("featureof", "story", sid, "theme", theme, "motivation", comment))
    do("REPLACE INTO `web_connections` (`category`, `category1`, `name1`, "
       "`category2`, `name2`, `attr`, `value`) "
       "values(%s, %s, %s, %s, %s, %s, %s)", rows)
def migrate(fromdb, todb):
    """Copy every table defined in dbdefine.TABLES from *fromdb* to *todb*.

    Tables are created on the target first; each table is read fully into
    memory and then bulk-inserted.
    """
    db.connect(todb)
    dbdefine.create_tables()
    for table in dbdefine.TABLES:
        db.connect(fromdb)
        rows = [row for row in db.do("SELECT * FROM `%s`" % table)]
        log.info("Read %d rows from %s@%s", len(rows), table, fromdb)
        if rows:
            db.connect(todb)
            placeholders = ', '.join("%s" for _ in rows[0])
            db.do("INSERT INTO `%s` VALUES (%s)" % (table, placeholders), rows)
            log.info("Wrote %d rows to %s@%s", len(rows), table, todb)
def commit_updates(cls, updates):
    """
    Write multiple attribute updates for this class into the database
    without further ado. Dangerous.

    Each update row matches the web_connections column list below; rows
    are written in chunks of 10000 via REPLACE.
    """
    chunksize = 10000
    columns = (
        "category", "category1", "name1",
        "category2", "name2", "attr", "value",
    )
    col_sql = ", ".join('`%s`' % c for c in columns)
    ph_sql = ", ".join("%s" for _ in columns)
    sql = "REPLACE INTO `web_connections` (%s) values (%s)" % (col_sql, ph_sql)
    for start in range(0, len(updates), chunksize):
        do(sql, updates[start:start + chunksize])
def commit_updates(cls, updates):
    """
    Write multiple attribute updates for this class into the database
    without further ado. Dangerous.

    Each update row is (category, name, attr, value); the owning
    (category, name) object row is refreshed alongside each chunk.
    Fix: the column/placeholder SQL fragments were rebuilt on every chunk
    iteration although they never change — hoisted out of the loop.
    """
    chunksize = 10000
    attrfields = [
        "category", "name", "attr", "value",
    ]
    # Loop-invariant: build once.
    fstr = ", ".join('`%s`' % s for s in attrfields)
    vstr = ", ".join("%s" for s in attrfields)
    for i in range(0, len(updates), chunksize):
        updatechunk = updates[i:i + chunksize]
        objs = [x[:2] for x in updatechunk]
        do("REPLACE INTO `web_attributes` (%s) values (%s)" % (fstr, vstr),
           updatechunk)
        do("REPLACE INTO `web_objects` (`category`, `name`) values (%s, %s)",
           objs)
def import_storythemes():
    """Populate web_connections with story/theme weight+motivation links."""
    source_sql = """
        SELECT StoryID, FieldName, Keyword, Comment
        FROM `master_all_exploaded`
        WHERE FieldName LIKE '%Theme%'
    """
    links = []
    for story_id, field_name, theme, comment in do(source_sql):
        story_id = story_id.lower()
        weight = field_name.split()[0].lower()
        links.append(
            ("featureof", "story", story_id, "theme", theme, "weight", weight))
        links.append(
            ("featureof", "story", story_id, "theme", theme, "motivation", comment))
    insert_sql = (
        "REPLACE INTO `web_connections` (`category`, `category1`, `name1`, "
        "`category2`, `name2`, `attr`, `value`) "
        "values(%s, %s, %s, %s, %s, %s, %s)")
    do(insert_sql, links)
def cached_special_query(act_type, req_type, obj_name):
    """
    Use cached version if available, else re-generate.

    Returns a JSON string (or pickled cache payload), or None when the
    act_type is not recognized.
    """
    path = get_cache_path(act_type, req_type, obj_name)
    if os.path.isfile(path):
        log.debug("returning cached: %s", path)
        # NOTE(review): unpickling cache files — safe only if the cache
        # directory is fully trusted.
        with open(path, "rb") as fh:
            return pickle.load(fh)
    else:
        log.warn("missing cache: %s", path)
    # Fall through: regenerate per act_type.
    if act_type == "urlimport":
        from webimport import json_story_from_url_webquery
        return json_story_from_url_webquery()
    if act_type == "themelist":
        themes = list(
            x[0] for x in do("""
                SELECT DISTINCT name from `web_attributes` WHERE category = "theme"
            """))
        return json.dumps(themes)
    if act_type == "metathemedata":
        from webdb import get_metatheme_data
        return json.dumps(get_metatheme_data())
    if act_type == "themesimilarity":
        from lib.datastats import get_themes_similarity_v1
        return json.dumps(get_themes_similarity_v1())
    if act_type == "stats" and req_type == "theme":
        from lib.datastats import get_theme_stats
        return json.dumps(get_theme_stats(obj_name))
    if act_type in webdb.SUPPORTED_OBJECTS:
        # "defenitions" is the external API's spelling; do not correct here.
        return json.dumps(webdb.get_defenitions(act_type))
    if act_type == "protostory_saved":
        # Collect saved proto-story drafts from the temp directory.
        rets = []
        basepath = os.path.join(tempfile.gettempdir(), "tstp", "protostory")
        if not os.path.exists(basepath):
            os.makedirs(basepath)
        for fn in os.listdir(basepath):
            if fn.endswith("st.txt"):
                path = os.path.join(basepath, fn)
                with open(path, "r+") as fh:
                    rets.append([fn, fh.read()])
        return json.dumps(rets)
    return None
def commit_updates(cls, updates):
    """
    Write multiple attribute updates for this class into the database
    without further ado. Dangerous.
    """
    chunksize = 10000
    attrfields = [
        "category", "category1", "name1",
        "category2", "name2", "attr", "value",
    ]
    fstr = ", ".join('`%s`' % s for s in attrfields)
    vstr = ", ".join("%s" for s in attrfields)
    # REPLACE in bounded chunks to keep statement size manageable.
    idx = 0
    while idx < len(updates):
        do("REPLACE INTO `web_connections` (%s) values (%s)" % (fstr, vstr),
           updates[idx:idx + chunksize])
        idx += chunksize
def commit_edit_object(cls, events, updates):
    """Persist queued edit events to `web_events`, then apply the updates."""
    evfields = [
        "eventid", "userid", "entrytime", "action",
        "category", "refcategory", "category1", "name1",
        "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    ]
    columns = ", ".join('`%s`' % f for f in evfields)
    holders = ", ".join("%s" for _ in evfields)
    do("REPLACE INTO `web_events` (%s) values (%s)" % (columns, holders),
       events)
    cls.commit_updates(updates)
def setup_method(self, method):
    """Build an in-memory sqlite db with seeded `tests` and `users` tables."""
    self.Null = drape("Null")
    self.sql = "SELECT COUNT(*) AS n FROM tests"
    db.drivers.clear()
    db.drivers.sqlite3x.register(":memory:")
    # Schema for the fixtures.
    db.do("CREATE TABLE tests (name TEXT)")
    db.do("""CREATE TABLE users (
        username TEXT,
        encrypted_pw TEXT,
        dob DATE
    )""")
    # One row per table.
    db.do("INSERT INTO tests (name) VALUES ('foo')")
    db.do("""INSERT INTO users (username, encrypted_pw, dob)
        VALUES ('john', 'drowssap', '07/22/1975')
    """)
def commit_updates(cls, updates):
    """
    Write multiple attribute updates for this class into the database
    without further ado. Dangerous.

    Each update row is (category, name, attr, value); the owning object
    row is refreshed in `web_objects` alongside each attribute chunk.
    Fix: the column/placeholder SQL fragments were recomputed on every
    chunk iteration although they are loop-invariant — hoisted.
    """
    chunksize = 10000
    attrfields = [
        "category", "name", "attr", "value",
    ]
    # Loop-invariant SQL fragments, built once.
    fstr = ", ".join('`%s`' % s for s in attrfields)
    vstr = ", ".join("%s" for s in attrfields)
    for i in range(0, len(updates), chunksize):
        updatechunk = updates[i:i + chunksize]
        objs = [x[:2] for x in updatechunk]
        do("REPLACE INTO `web_attributes` (%s) values (%s)" % (fstr, vstr),
           updatechunk)
        do(
            "REPLACE INTO `web_objects` (`category`, `name`) values (%s, %s)",
            objs)
def write_many(cls, events, chunksize=5000):
    """
    Write many events to db. This will not affect any other objects.

    *events* are objects exposing the event fields as attributes; they are
    serialized to rows and REPLACEd into `web_events` in chunks.
    Fix: the column/placeholder SQL was rebuilt inside the chunk loop even
    though it never changes — hoisted out.
    """
    evfields = [
        "eventid", "userid", "entrytime", "action",
        "category", "refcategory", "category1", "name1",
        "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    ]
    # Loop-invariant SQL, built once.
    fstr = ", ".join('`%s`' % s for s in evfields)
    vstr = ", ".join("%s" for s in evfields)
    sql = "REPLACE INTO `web_events` (%s) values (%s)" % (fstr, vstr)
    n = 0
    while n < len(events):
        evvalues = [
            [getattr(event, field) for field in evfields]
            for event in events[n:n + chunksize]
        ]
        do(sql, evvalues)
        n += chunksize
def write_many(cls, events, chunksize=5000):
    """
    Write many events to db. This will not affect any other objects.
    """
    evfields = [
        "eventid", "userid", "entrytime", "action",
        "category", "refcategory", "category1", "name1",
        "category2", "name2", "field", "oldvalue",
        "newvalue", "eventstate",
    ]
    offset = 0
    while offset < len(events):
        chunk = events[offset:offset + chunksize]
        # Serialize each event object into a row matching evfields order.
        rows = []
        for event in chunk:
            rows.append([getattr(event, field) for field in evfields])
        fstr = ", ".join('`%s`' % s for s in evfields)
        vstr = ", ".join("%s" for s in evfields)
        do("REPLACE INTO `web_events` (%s) values (%s)" % (fstr, vstr), rows)
        offset += chunksize
def query_for(cls, attrs, filters, table, limit):
    """Yield rows of *attrs* from *table* matching *filters*.

    Results are ordered by the filter keys; the row limit is scaled by the
    number of fields on the class.
    NOTE(review): values are interpolated into the SQL string; relies on
    esc()/sql_filter_list() for escaping.
    """
    where = ["category = '%s'" % cls.category]
    for key, choices in filters:
        if choices:
            where.append('%s IN ' % key + cls.sql_filter_list(choices))
    order_cols = ', '.join(esc(f[0]) for f in filters)
    select_cols = ', '.join(esc(a) for a in attrs)
    where_sql = ' AND '.join(where)
    row_limit = limit * len(cls.fields)
    sql = """
        SELECT %s
        FROM `%s`
        WHERE %s
        ORDER BY %s
        LIMIT %d
    """ % (select_cols, table, where_sql, order_cols, row_limit)
    for row in do(sql):
        yield row
def query_for(cls, attrs, filters, table, limit):
    """Generator over rows of *attrs* in *table* restricted by *filters*."""
    qfilters = ["category = '%s'" % cls.category]
    qfilters.extend(
        '%s IN ' % k + cls.sql_filter_list(v)
        for k, v in filters if v
    )
    oattrs = ', '.join(esc(f[0]) for f in filters)
    fattrs = ', '.join(esc(a) for a in attrs)
    alimit = limit * len(cls.fields)
    query = """
        SELECT %s
        FROM `%s`
        WHERE %s
        ORDER BY %s
        LIMIT %d
    """ % (fattrs, table, ' AND '.join(qfilters), oattrs, alimit)
    for row in do(query):
        yield row
def get_commits_data(period='weekly'):
    """
    Return information about the state of the repository for
    regularly spaced dates, by default Friday midnights every week.
    :return: list of (sample-datetime, stats-dict) pairs, oldest first.
    :raises ValueError: if *period* is neither 'weekly' nor 'daily'.
    """
    entries = list(
        db.do("""
            SELECT id, time, author, stats FROM commits_stats ORDER BY time ASC
        """))
    if not entries:
        return []
    dt1 = entries[0][1]   # earliest commit timestamp
    dt2 = entries[-1][1]  # latest commit timestamp
    if period == 'weekly':
        dtiter = iter_days(dt1, dt2, daysofweek="fri", attime="00:00")
    elif period == 'daily':
        dtiter = iter_days(dt1, dt2, attime="00:00")
    else:
        raise ValueError("Bad period: {}".format(period))
    atdt = next(dtiter)
    data = []
    for idx, (commit, date, author, sdata) in enumerate(entries):
        # Advance the sampling date until it is >= this commit's date.
        while atdt < date:
            try:
                atdt = next(dtiter)
            except StopIteration:
                atdt = None
                break
        if atdt is None:
            break
        if idx < len(entries) - 1:
            # Skip unless this commit is the last one before the sample date.
            if atdt >= entries[idx + 1][1]:
                continue
        # date must be the last viable date less than atdt
        datapoint = json.loads(sdata)
        nthemes = datapoint["themes"]
        # NOTE(review): presumably drops early commits before the dataset
        # grew substantial — confirm the 500-theme cutoff.
        if nthemes > 500:
            data.append((atdt, datapoint))
    return data
def import_counters():
    """Seed the `web_counters` table with the initial event counter."""
    seed_rows = [
        ("event", 0),
    ]
    do("INSERT INTO `web_counters` (id, value) values (%s, %s)", seed_rows)
def test_complex_count(self):
    """Join-count across foo and bar finds the two shared values."""
    db.do("""CREATE TABLE bar (
        bar_id INTEGER PRIMARY KEY,
        value TEXT
    )""")
    # foo already holds (1, 'foo'); add three more rows.
    for foo_id, value in ((2, 'bar'), (3, 'baz'), (4, 'bim')):
        db.do("INSERT INTO foo VALUES (%d, '%s')" % (foo_id, value))
    for bar_id, value in ((1, 'foo'), (2, 'bart'), (3, 'bazzle'), (4, 'bim')):
        db.do("INSERT INTO bar VALUES (%d, '%s')" % (bar_id, value))
    # Only 'foo' and 'bim' appear in both tables.
    assert self.count("foo, bar WHERE foo.value = bar.value") == 2
def import_counters():
    """Initialize the event id counter at zero."""
    do("INSERT INTO `web_counters` (id, value) values (%s, %s)",
       [("event", 0)])
def get(tweet, inventory, id, position):
    """Parse a player tweet and dispatch the matching game action.

    Returns (handled, response_text); (False, '') when nothing matched.
    NOTE(review): parameter `id` shadows the builtin — presumably a
    Twitter user-id string, given the hard-coded admin-id comparisons.
    """
    # Strip URLs, lowercase, and tokenize.
    rend = re.sub(r'http\S+', '', tweet).lower().split()  # remove articles here?
    if (rend[0] == 'drop') and (len(rend) >= 2):
        # drop(0) banana(1)
        # drop(0) the(1) dawn(2) porter(3)
        quantity = None
        if (len(rend) >= 3) and (rend[1] == 'all'):
            # or check if it can be converted to a valid int
            quantity = 'all'
            drop_item = cleanstr(' '.join(rend[2:len(rend)]))
        elif (len(rend) >= 3) and ((rend[1] == 'the') or (rend[1] == 'a') or (rend[1] == 'an') or (rend[1] == 'some')):
            drop_item = cleanstr(' '.join(rend[2:len(rend)]))
        else:
            drop_item = cleanstr(' '.join(rend[1:len(rend)]))
        # Only drop items that exist in the items table.
        if db.select('name', 'items', 'name', drop_item) != None:
            return (True, item.drop(drop_item, inventory, id, quantity=quantity))
    elif (rend[0] == 'give') and (len(rend) >= 3):
        # give(0) @benlundsten(1) the(2) dawn(3) porter()
        if (len(rend) >= 4) and ((rend[2] == 'the') or (rend[2] == 'a') or (rend[2] == 'an') or (rend[2] == 'some')):
            give_item = cleanstr(' '.join(rend[3:len(rend)]))
        else:
            give_item = cleanstr(' '.join(rend[2:len(rend)]))
        if db.select('name', 'items', 'name', give_item) != None:
            # rend[1][1:] strips the leading '@' from the recipient handle.
            return (True, item.give(give_item, inventory, id, position, rend[1][1:].lower()))
    elif (rend[0] == 'inventory') or (' '.join(rend) == 'check inventory') or (' '.join(rend) == 'what am i holding'):
        if inventory == {}:
            return (True, 'Your inventory is empty at the moment.')
        else:
            return (True, invbuild(inventory))
    elif (' '.join(rend) == 'delete me from lilt') or (rend[0] == u'💀💀💀'):
        # Player self-deletion.
        db.delete('users', 'id', id)
        return (True, 'You\'ve been removed from Lilt. Thanks for playing!')
    elif ((rend[0] == 'liltadd') or (rend[0] == 'la')) and ((id == '15332057') or (id == '724754312757272576') or (id == '15332062')):
        # Admin-only builder commands, gated on hard-coded user ids.
        dbrend = str(' '.join(rend[1:len(rend)])).split('~')
        if len(dbrend) >= 2:
            if dbrend[0] == 'item':
                # liltadd item~n|paste~m|10
                traits = dict(trait.split('|') for trait in dbrend[1:len(dbrend)])
                # NOTE(review): keys are deleted while iterating the dict —
                # can raise RuntimeError; confirm.
                for trait in traits:
                    if trait == 'n':
                        traits['name'] = traits['n']
                        del traits['n']
                    if trait == 'm':
                        traits['max'] = traits['m']
                        del traits['m']
                db.newitem(traits)
                return (True, traits['name'].capitalize() + ' was added to Lilt.')
            elif dbrend[0] == 'copy':
                if len(dbrend) == 3:
                    db.copymove(dbrend[1], dbrend[2], position)
                    return (True, '\'' + dbrend[2] + '\' was added to Lilt as a copy of \'' + dbrend[1] + '\'.')
            elif dbrend[0] == 'do':
                # la do~insert~moves~move|look at cat~response|It's sassy.~c|box^open~t|cat^sighted
                # la do~update~moves~c|cat^spotted~move|look at cat~response|It's sassy.~c|box^open~t|cat^sighted
                if dbrend[1] == 'select':
                    dbval = dbrend[3]
                    data = dict(key.split('|') for key in dbrend[4:len(dbrend)])
                elif dbrend[1] == 'update':
                    dbval = dict(key.split('|') for key in dbrend[3:4])
                    data = dict(key.split('|') for key in dbrend[4:len(dbrend)])
                    for key in dbval:
                        if len((dbval[key]).split('^')) >= 2:
                            dbval[key] = dict(k.split('^') for k in (dbval[key]).split('~'))
                else:
                    # insert/delete
                    dbval = None
                    data = dict(key.split('|') for key in dbrend[3:len(dbrend)])
                for key in data:
                    # shorthands
                    if key == 'n':
                        data['name'] = data['n']
                        del data['n']
                    if key == 'mx':
                        data['max'] = data['mx']
                        del data['mx']
                    if key == 'm':
                        data['move'] = data['m']
                        del data['m']
                    if key == 'p':
                        data['position'] = data['p']
                        del data['p']
                    if key == 'i':
                        data['item'] = data['i']
                        del data['i']
                    if key == 'd':
                        data['drop'] = data['d']
                        del data['d']
                    if key == 'c':
                        data['condition'] = data['c']
                        del data['c']
                    if key == 't':
                        data['trigger'] = data['t']
                        del data['t']
                    if key == 'tr':
                        data['travel'] = data['tr']
                        del data['tr']
                for key in data:
                    # convert condition/trigger to dicts
                    if len((data[key]).split('^')) >= 2:
                        data[key] = dict(k.split('^') for k in (data[key]).split('~'))
                dbfetch = db.do(dbrend[1], dbrend[2], data, val=dbval)
                if dbrend[1] == 'insert':
                    if dbrend[2] == 'moves':
                        return (True, '\'' + str(data['move']) + '\' was added to ' + dbrend[2].capitalize() + '.')
                    elif dbrend[2] == 'items':
                        return (True, '\'' + str(data['name']) + '\' was added to ' + dbrend[2].capitalize() + '.')
                    else:
                        return (True, 'That was added to ' + dbrend[2].capitalize() + '.')
                elif dbrend[1] == 'select':
                    if len(dbfetch) < 1:
                        return (True, 'Nothing was selected from ' + str(dbval) + '.')
                    elif len(dbfetch) == 1:
                        return (True, '\'' + str(dbfetch[0][0]) + '\' was fetched from ' + str(dbval) + ' in ' + dbrend[2].capitalize() + '.')
                    elif len(dbfetch) == 2:
                        return (True, '\'' + str(dbfetch[0][0]) + '\' was fetched from ' + str(dbval) + ' in ' + dbrend[2].capitalize() + ', along with ' + str(len(dbfetch) - 1) + ' other.')
                    else:
                        return (True, '\'' + str(dbfetch[0][0]) + '\' was fetched from ' + str(dbval) + ' in ' + dbrend[2].capitalize() + ', along with ' + str(len(dbfetch) - 1) + ' others.')
                elif dbrend[1] == 'update':
                    return (True, dbrend[2].capitalize() + ' was updated with ' + str(dbval) + '.')
                elif dbrend[1] == 'delete':
                    return (True, '\'' + str(data) + '\' was deleted from ' + dbrend[2].capitalize() + '.')
            else:
                # newmove
                # la(rend[0]) eat meat cake(1)~It looks pretty nasty! But you eat it...(2)~c|meat cake^inventory(3)~d|meat cake(4)
                if len(dbrend) >= 3:
                    traits = dict(trait.split('|') for trait in dbrend[2:len(dbrend)])  # this right?
                    for trait in traits:
                        # update shorthand keys
                        if trait == 'i':
                            traits['item'] = traits['i']
                            del traits['i']
                        if trait == 'd':
                            traits['drop'] = traits['d']
                            del traits['d']
                        if trait == 'c':
                            traits['condition'] = traits['c']
                            del traits['c']
                        if trait == 't':
                            traits['trigger'] = traits['t']
                            del traits['t']
                        if trait == 'tr':
                            traits['travel'] = traits['tr']
                            del traits['tr']
                    for trait in traits:
                        # convert condition/trigger to dicts
                        if len((traits[trait]).split('^')) >= 2:
                            traits[trait] = dict(t.split('^') for t in (traits[trait]).split('~'))
                else:
                    traits = None
                db.newmove(dbrend[0], dbrend[1], position, traits)
                return (True, '\'' + dbrend[0] + '\' was added to Lilt.')
    return (False, '')
def insert_another(self):
    """Insert a second fixture row into foo (id 2, value 'bar')."""
    db.do("INSERT INTO foo (foo_id, value) VALUES (2, 'bar')")
def get(tweet, inventory, id, position):
    """Parse a player tweet and return the response text ('' when unhandled).

    NOTE(review): parameter `id` shadows the builtin — presumably a
    Twitter user-id string (compared against admin-id constants below).
    """
    # Remove articles here?
    tweet_array = re.sub(r'http\S+', '', tweet).lower().split()
    print('Scanning words:', tweet_array)
    # Drop an item.
    # - drop[0] banana[1]
    # - drop[0] the[1] dawn[2] porter[3]
    if (tweet_array[0] == 'drop') and (len(tweet_array) >= 2):
        # Initialize quantity at None.
        quantity = None
        # If the first word in the tweet is 'all', drop all of that item.
        if (len(tweet_array) >= 3) and (tweet_array[1] == 'all'):
            # Set quantity to all.
            quantity = 'all'
            # Derive item to drop from tweet array.
            drop_item = filter_tweet(' '.join(tweet_array[2:len(tweet_array)]))
        # Remove articles and derive item to drop from tweet array.
        elif (len(tweet_array) >= 3) and ((tweet_array[1] == 'the') or (tweet_array[1] == 'a') or (tweet_array[1] == 'an') or (tweet_array[1] == 'some')):
            drop_item = filter_tweet(' '.join(tweet_array[2:len(tweet_array)]))
        # Derive item to drop from tweet array.
        else:
            drop_item = filter_tweet(' '.join(tweet_array[1:len(tweet_array)]))
        # Check that the derived item exists before returning it.
        if db.select('name', 'items', 'name', drop_item) != None:
            return item.drop(drop_item, inventory, id, quantity=quantity)
    # Give another player an item.
    # - give[0] @benlundsten[1] the[2] dawn[3] porter[4]
    elif (tweet_array[0] == 'give') and (len(tweet_array) >= 3):
        # Derive item to give from tweet word array.
        if (len(tweet_array) >= 4) and ((tweet_array[2] == 'the') or (tweet_array[2] == 'a') or (tweet_array[2] == 'an') or (tweet_array[2] == 'some')):
            give_item = filter_tweet(' '.join(tweet_array[3:len(tweet_array)]))
        else:
            give_item = filter_tweet(' '.join(tweet_array[2:len(tweet_array)]))
        print('Giving ' + give_item + '.')
        # Check if the item exists.
        if db.select('name', 'items', 'name', give_item) != None:
            # tweet_array[1][1:] strips the '@' from the recipient handle.
            return (item.give(give_item, inventory, id, position, tweet_array[1][1:].lower()))
    # Inventory request.
    elif (tweet_array[0] == 'inventory') or (' '.join(tweet_array) == 'check inventory') or (
            ' '.join(tweet_array) == 'check my inventory') or (' '.join(tweet_array) == 'what am i holding'):
        if inventory == {}:
            return 'Your inventory is empty at the moment.'
        else:
            return build_inventory_tweet(inventory)
    # Deletion request.
    elif (' '.join(tweet_array) == 'delete me from lilt') or (tweet_array[0] == u'💀💀💀'):
        db.delete('users', 'id', id)
        return 'You\'ve been removed from Lilt. Thanks for playing!'
    # Admin only from this point down.
    # - Add items
    # - Copy, add, or update moves
    # - Or do basically anything else with the database...
    elif (tweet_array[0] == 'liltadd' or tweet_array[0] == 'la') and (id == MKNEPPRATH or id == LILTBUILDER):
        builder_query_array = str(' '.join(
            tweet_array[1:len(tweet_array)])).split('~')
        if len(builder_query_array) >= 2:
            if builder_query_array[0] == 'item':
                # liltadd item~n|paste~m|10
                traits = dict(
                    trait.split('|')
                    for trait in builder_query_array[1:len(builder_query_array)])
                # NOTE(review): keys are deleted while iterating the dict —
                # can raise RuntimeError; confirm.
                for trait in traits:
                    if trait == 'n':
                        traits['name'] = traits['n']
                        del traits['n']
                    if trait == 'm':
                        traits['max'] = traits['m']
                        del traits['m']
                db.new_item(traits)
                return traits['name'].capitalize() + ' was added to Lilt.'
            elif builder_query_array[0] == 'copy':
                if len(builder_query_array) == 3:
                    db.copy_move(builder_query_array[1], builder_query_array[2], position)
                    return '\'' + builder_query_array[
                        2] + '\' was added to Lilt as a copy of \'' + builder_query_array[
                            1] + '\'.'
            elif builder_query_array[0] == 'do':
                # la do~insert~moves~move|look at cat~response|It's sassy.~c|box^open~t|cat^sighted
                # la do~update~moves~c|cat^spotted~move|look at cat~response|It's sassy.~c|box^open~t|cat^sighted
                if builder_query_array[1] == 'select':
                    dbval = builder_query_array[3]
                    data = dict(
                        key.split('|')
                        for key in builder_query_array[4:len(builder_query_array)])
                elif builder_query_array[1] == 'update':
                    dbval = dict(
                        key.split('|') for key in builder_query_array[3:4])
                    data = dict(
                        key.split('|')
                        for key in builder_query_array[4:len(builder_query_array)])
                    for key in dbval:
                        if len((dbval[key]).split('^')) >= 2:
                            dbval[key] = dict(
                                k.split('^') for k in (dbval[key]).split('~'))
                else:
                    # insert/delete
                    dbval = None
                    data = dict(
                        key.split('|')
                        for key in builder_query_array[3:len(builder_query_array)])
                for key in data:
                    # shorthands
                    if key == 'n':
                        data['name'] = data['n']
                        del data['n']
                    if key == 'mx':
                        data['max'] = data['mx']
                        del data['mx']
                    if key == 'm':
                        data['move'] = data['m']
                        del data['m']
                    if key == 'p':
                        data['position'] = data['p']
                        del data['p']
                    if key == 'i':
                        data['item'] = data['i']
                        del data['i']
                    if key == 'd':
                        data['drop'] = data['d']
                        del data['d']
                    if key == 'c':
                        data['condition'] = data['c']
                        del data['c']
                    if key == 't':
                        data['trigger'] = data['t']
                        del data['t']
                    if key == 'tr':
                        data['travel'] = data['tr']
                        del data['tr']
                for key in data:
                    # convert condition/trigger to dicts
                    if len((data[key]).split('^')) >= 2:
                        data[key] = dict(
                            k.split('^') for k in (data[key]).split('~'))
                dbfetch = db.do(builder_query_array[1], builder_query_array[2], data, val=dbval)
                if builder_query_array[1] == 'insert':
                    if builder_query_array[2] == 'moves':
                        return '\'' + str(
                            data['move']
                        ) + '\' was added to ' + builder_query_array[
                            2].capitalize() + '.'
                    elif builder_query_array[2] == 'items':
                        return '\'' + str(
                            data['name']
                        ) + '\' was added to ' + builder_query_array[
                            2].capitalize() + '.'
                    else:
                        return 'That was added to ' + builder_query_array[
                            2].capitalize() + '.'
                elif builder_query_array[1] == 'select':
                    if len(dbfetch) < 1:
                        return 'Nothing was selected from ' + str(dbval) + '.'
                    elif len(dbfetch) == 1:
                        return '\'' + str(
                            dbfetch[0][0]) + '\' was fetched from ' + str(
                                dbval) + ' in ' + builder_query_array[
                                    2].capitalize() + '.'
                    elif len(dbfetch) == 2:
                        return '\'' + str(
                            dbfetch[0][0]) + '\' was fetched from ' + str(
                                dbval) + ' in ' + builder_query_array[
                                    2].capitalize() + ', along with ' + str(
                                        len(dbfetch) - 1) + ' other.'
                    else:
                        return '\'' + str(
                            dbfetch[0][0]) + '\' was fetched from ' + str(
                                dbval) + ' in ' + builder_query_array[
                                    2].capitalize() + ', along with ' + str(
                                        len(dbfetch) - 1) + ' others.'
                elif builder_query_array[1] == 'update':
                    return builder_query_array[2].capitalize(
                    ) + ' was updated with ' + str(dbval) + '.'
                elif builder_query_array[1] == 'delete':
                    return '\'' + str(
                        data) + '\' was deleted from ' + builder_query_array[
                            2].capitalize() + '.'
            else:
                # new_move
                # la(tweet_array[0]) eat meat cake(1)~It looks pretty nasty! But you eat it...(2)~c|meat cake^inventory(3)~d|meat cake(4)
                if len(builder_query_array) >= 3:
                    traits = dict(
                        trait.split('|')
                        for trait in builder_query_array[2:len(builder_query_array)])  # this right?
                    for trait in traits:
                        # update shorthand keys
                        if trait == 'i':
                            traits['item'] = traits['i']
                            del traits['i']
                        if trait == 'd':
                            traits['drop'] = traits['d']
                            del traits['d']
                        if trait == 'c':
                            traits['condition'] = traits['c']
                            del traits['c']
                        if trait == 't':
                            traits['trigger'] = traits['t']
                            del traits['t']
                        if trait == 'tr':
                            traits['travel'] = traits['tr']
                            del traits['tr']
                    for trait in traits:
                        # convert condition/trigger to dicts
                        if len((traits[trait]).split('^')) >= 2:
                            traits[trait] = dict(
                                t.split('^') for t in (traits[trait]).split('~'))
                else:
                    traits = None
                db.new_move(builder_query_array[0], builder_query_array[1], position, traits)
                return ('\'' + builder_query_array[0] + '\' was added to Lilt.')
    return ''
def dbstore_commit_data(fromdate=None, recreate=False, quieter=False):
    """
    Store data for the last commit each date.

    Walks the git history (checking out each relevant commit in
    GIT_THEMING_PATH_HIST), parses the notes directory, and records one
    stats row per calendar date in `commits_stats`; the raw commit log
    goes into `commits_log`.
    NOTE(review): has side effects on the working tree (os.chdir +
    `git checkout -f`) — confirm callers expect that.
    """
    dbdefine.create_tables(subset={"commits_stats", "commits_log"}, recreate=recreate)
    commits = list(db.do("""SELECT id, time FROM commits_stats"""))
    donerevs = set(x[0] for x in commits)  # commit ids already processed
    if not commits:
        fromdate = None
    if fromdate == "<latest>":
        # Sentinel: resume from the newest stored commit time.
        fromdate = max(x[1] for x in commits)
    basepath = GIT_THEMING_PATH_HIST
    notespath = os.path.join(basepath, "notes")
    os.chdir(basepath)
    entries = list_commits(basepath)
    bydate = defaultdict(list)
    latestcommits = set()
    # Mirror the full commit log into SQL (parameterized REPLACE).
    logrows = [(commit, date, author, committype, msg)
               for commit, author, date, committype, msg in entries]
    db.do("""REPLACE INTO commits_log VALUES(%s, %s, %s, %s, %s)""", values=logrows)
    # Keep only the last commit of each calendar date.
    for commit, _, date, _, _ in entries:
        bydate[date.date()].append((date, commit))
    for datelist in bydate.values():
        date, commit = max(datelist)
        latestcommits.add(commit)
    for idx, (commit, author, date, _, _) in enumerate(entries):
        if fromdate and date <= fromdate:
            if not quieter:
                print("EARLIER:", (commit, author, date), "...SKIPPING")
        elif commit in donerevs:
            if not quieter:
                print("EXISTS:", (commit, author, date), "...SKIPPING")
        elif commit not in latestcommits:
            if not quieter:
                print("SKIPPING EARLIER COMMIT:", (commit, author, date))
        else:
            print("processing date: %s" % date)
            # Check out the commit so the notes tree reflects that point in time.
            try:
                res = subprocess.check_output(
                    ['git', 'checkout', '-f', commit]).decode("utf-8")
            except Exception as e:
                print("GIT ERROR", repr(e))
                continue
            try:
                datapoint = get_datapoint(notespath)
            except AssertionError as e:
                print("PARSE ERROR", repr(e))
                continue
            except Exception as e:
                print("UNKNOWN ERROR", repr(e))
                continue
            data = json.dumps(datapoint)
            row = (commit, date.strftime('%Y-%m-%d %H:%M:%S'), author, data)
            db.do("""REPLACE INTO commits_stats VALUES(%s, %s, %s, %s)""", values=[row])
            if not quieter:
                print("INSERTED: ", str(row)[:120], "...")
                print(dict(datapoint))