def get_or_suggest_tags():
    opt = request.args
    query = v_dict_entry(opt, 'query', vv=strv(min_len=1), optional=True)
    if query is not None:
        return {
            "suggestions": fetch_all_as_dict(
                c=get_db().cursor(),
                tables=(Table('tag'), ),
                cols=(
                    Column('id'),
                    Column('name', 'label'),
                ),
                where=Cond('name LIKE ?', f'{query}%'),
            ),
        }
    return {
        "tags": fetch_all_as_dict(
            c=get_db().cursor(),
            tables=(Table('tag'), ),
            cols=(
                Column('id'),
                Column('name'),
                Column('comment'),
            ),
            where=Cond('TRUE'),
        )
    }

def get_tag_name(**args):
    tag = v_dict_entry(args, 'tag', vv=intv(min_val=0, parse_from_str=True))
    return {
        "name": require_one(get_db().cursor().execute(
            "SELECT name FROM tag WHERE id = ?", (tag, )).fetchall())[0],
    }

def delete_transaction(**args):
    transaction = v_dict_entry(args, 'transaction',
                               vv=intv(min_val=0, parse_from_str=True))
    c = get_db().cursor()
    c.execute("DELETE FROM txn WHERE id = ?", (transaction, ))
    require_changed_row(c.rowcount)
    return {}

def create_tag():
    opt = require_json_object_body()
    name = v_dict_entry(opt, 'name', vv=strv())
    comment = v_dict_entry(opt, 'comment', vv=strv())
    c = get_db().cursor()
    c.execute("INSERT INTO tag (name, comment) VALUES (?, ?)", (name, comment))
    return {
        "id": c.lastrowid,
    }

def get_category_name(**args):
    category = v_dict_entry(args, 'category',
                            vv=intv(min_val=0, parse_from_str=True))
    return {
        "name": require_one(get_db().cursor().execute(
            "SELECT name FROM category WHERE id = ?",
            (category, )).fetchall())[0],
    }

def create_dataset_source():
    opt = require_json_object_body()
    name = v_dict_entry(opt, 'name', vv=strv(min_len=1))
    comment = v_dict_entry(opt, 'comment', optional='', vv=strv())
    c = get_db().cursor()
    # TODO Handle errors
    c.execute("INSERT INTO dataset_source (name, comment) VALUES (?, ?)",
              (name, comment))
    return {
        "id": c.lastrowid,
    }

def get_dataset_sources():
    return {
        "sources": fetch_all_as_dict(
            c=get_db().cursor(),
            tables=(Table('dataset_source'), ),
            cols=(
                Column('id'),
                Column('name'),
            ),
            where=Cond('TRUE'),
        ),
    }

def get_or_suggest_categories():
    opt = request.args
    query = v_dict_entry(opt, 'query', vv=strv(min_len=1), optional=True)
    if query is not None:
        return {
            "suggestions": fetch_all_as_dict(
                c=get_db().cursor(),
                tables=(Table('category'), ),
                cols=(
                    Column('id'),
                    Column('name', 'label'),
                ),
                where=Cond('name LIKE ?', f'{query}%'),
            ),
        }
    return {
        "categories": fetch_all_as_dict(
            c=get_db().cursor(),
            tables=(
                Table('category', 'node'),
                Table('category', 'parent'),
            ),
            cols=(
                Column('node.id', 'id'),
                Column('node.name', 'name'),
                Column('node.comment', 'comment'),
                Column('COUNT(parent.id) - 1', 'depth'),
            ),
            where=Cond(
                'node.set_start BETWEEN parent.set_start AND parent.set_end'),
            group_by='node.id',
            order_by='node.set_start',
        )
    }

def create_dataset(**args):
    source = v_dict_entry(args, 'source',
                          vv=intv(min_val=0, parse_from_str=True))
    opt = request.args
    timestamp_column = v_dict_entry(opt, 'timestamp_column',
                                    vv=intv(parse_from_str=True))
    timestamp_format = v_dict_entry(opt, 'timestamp_format', vv=strv())
    description_column = v_dict_entry(opt, 'description_column',
                                      vv=intv(parse_from_str=True))
    amount_column = v_dict_entry(opt, 'amount_column',
                                 vv=intv(parse_from_str=True))
    c = get_db().cursor()
    # TODO Handle errors
    c.execute(
        "INSERT INTO dataset (source, comment, created) VALUES (?, '', DATETIME('now'))",
        (source, ))
    dataset_id = c.lastrowid
    for row in csv.reader(StringIO(request.get_data(as_text=True))):
        raw = json.dumps(row)
        malformed = False
        try:
            # TODO Convert to UTC
            timestamp = datetime.strptime(row[timestamp_column],
                                          timestamp_format)
        except (IndexError, ValueError):
            # TODO Convert to UTC
            timestamp = datetime.now()
            malformed = True
        try:
            description = row[description_column]
        except IndexError:
            description = ''
            malformed = True
        try:
            amount = -parse_money_amount(row[amount_column])
        except (IndexError, ValueError):
            amount = 0
            malformed = True
        c.execute(
            "INSERT INTO txn (dataset, raw, comment, malformed, timestamp, description, amount) VALUES (?, ?, '', ?, ?, ?, ?)",
            (dataset_id, raw, malformed, timestamp, description, amount))
        transaction_id = c.lastrowid
        if not malformed:
            c.execute(
                "INSERT INTO txn_part (txn, comment, amount, category) VALUES (?, '', ?, NULL)",
                (transaction_id, amount))
    return {
        "id": dataset_id,
    }

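# Illustration (not executed): how one CSV row flows through create_dataset
# above. The column indices, format string, and the assumption that
# parse_money_amount returns integer minor units are hypothetical; only the
# sign flip and the fallback defaults come from the code.
#
#   POST body line:  2023-01-05,Grocery store,12.34
#   query params:    timestamp_column=0, timestamp_format=%Y-%m-%d,
#                    description_column=1, amount_column=2
#
#   -> txn row: timestamp=2023-01-05 00:00:00, description='Grocery store',
#      amount=-1234 (sign flipped, assumed minor units), malformed=0,
#      raw='["2023-01-05", "Grocery store", "12.34"]'
#   -> one txn_part row carrying the full amount with category NULL.
#
# A row that fails any of the three lookups is still inserted, but with
# malformed=1, the failing field defaulted (now(), '' or 0), and no txn_part.
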
def create_transaction_part(**args):
    transaction = v_dict_entry(args, 'transaction',
                               vv=intv(min_val=0, parse_from_str=True))
    opt = require_json_object_body()
    comment = v_dict_entry(opt, 'comment', optional='', vv=strv())
    amount = v_dict_entry(opt, 'amount', vv=intv(min_val=0))
    category = v_dict_entry(opt, 'category', optional=True, vv=intv())
    c = get_db().cursor()
    c.execute(
        "INSERT INTO txn_part (txn, comment, amount, category) VALUES (?, ?, ?, ?)",
        (transaction, comment, amount, category))
    return {
        "id": c.lastrowid,
    }

def get_transaction_parts(**args):
    transaction = v_dict_entry(args, 'transaction',
                               vv=intv(min_val=0, parse_from_str=True))
    parts = fetch_all_as_dict(
        c=get_db().cursor(),
        tables=(Table('txn_part'), ),
        cols=(
            Column('txn_part.id', 'id'),
            Column('txn_part.comment', 'comment'),
            Column('txn_part.amount', 'amount'),
            Column('category.id', 'category_id'),
            Column('category.name', 'category_name'),
            Column("GROUP_CONCAT(tag.id, '\1')", 'tag_ids'),
            Column("GROUP_CONCAT(tag.name, '\1')", 'tag_names'),
        ),
        joins=(
            Join(JoinMethod.left, Table('category'),
                 'txn_part.category = category.id'),
            Join(JoinMethod.left, Table('txn_part_tag'),
                 'txn_part.id = txn_part_tag.txn_part'),
            Join(JoinMethod.left, Table('tag'), 'txn_part_tag.tag = tag.id'),
        ),
        where=Cond('txn = ?', transaction),
        group_by="txn_part.id",
    )
    for t in parts:
        category_id = t.pop('category_id')
        category_name = t.pop('category_name')
        t['category'] = None if category_id is None else {
            "id": category_id,
            "name": category_name,
        }
        raw_tag_ids = t.pop('tag_ids')
        raw_tag_names = t.pop('tag_names')
        if not raw_tag_ids:
            t['tags'] = []
        else:
            t['tags'] = [
                {"id": tid, "name": tname}
                for tid, tname in zip(
                    map(int, raw_tag_ids.split('\1')),
                    raw_tag_names.split('\1'),
                )
            ]
    return {
        "parts": parts,
    }

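# Illustration (not executed): the two GROUP_CONCAT columns above pack each
# part's tags into parallel strings separated by the control character '\1'
# (presumably chosen because it cannot occur in tag ids or names), and the
# loop zips them back into objects. The values below are made up:
#
#   tag_ids   = '3\x015'
#   tag_names = 'food\x01lunch'
#   -> t['tags'] == [{"id": 3, "name": "food"}, {"id": 5, "name": "lunch"}]
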
def create_category():
    opt = require_json_object_body()
    name = v_dict_entry(opt, 'name', vv=strv())
    target = v_dict_entry(opt, 'target', vv=intv(min_val=0), optional=True)
    mode = v_dict_entry(opt, 'mode', vv=strv())
    # TODO Lock table
    c = get_db().cursor()
    if target is None:
        if mode != 'root':
            raise BadRequest('Target is missing')
        if c.execute("SELECT COUNT(*) FROM category").fetchone()[0] != 0:
            raise BadRequest('Root already exists')
        target_start = -1
        mode = 'first'
    else:
        c.execute("SELECT set_start, set_end FROM category WHERE id = ?",
                  (target, ))
        target_start, target_end = require_one(c.fetchall())
    if mode == 'after':
        shift_greater_than = target_end
    elif mode == 'before':
        shift_greater_than = target_start - 1
    elif mode == 'first':
        shift_greater_than = target_start
    else:
        raise BadRequest('Invalid mode')
    c.execute(
        "UPDATE category SET set_start = set_start + 2 WHERE set_start > ?",
        (shift_greater_than, ))
    c.execute("UPDATE category SET set_end = set_end + 2 WHERE set_end > ?",
              (shift_greater_than, ))
    # TODO Handle unique
    c.execute(
        "INSERT INTO category (name, comment, set_start, set_end) VALUES (?, '', ?, ?)",
        (name, shift_greater_than + 1, shift_greater_than + 2))
    category_id = c.lastrowid
    # TODO Unlock table
    return {
        "id": category_id,
    }

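# Illustration (not executed): categories are stored as a nested set, so each
# row carries a (set_start, set_end) interval and descendants nest inside
# their ancestors' intervals. The names and numbers below are made up; only
# the +2 shift and the (shift+1, shift+2) slot come from the code above.
#
#   Root (0, 5)            insert "Travel" with mode='first', target=Root:
#   ├── Food (1, 2)        every set_start > 0 and set_end > 0 moves up by 2,
#   └── Rent (3, 4)        and the new node takes (1, 2):
#
#   Root (0, 7)
#   ├── Travel (1, 2)
#   ├── Food (3, 4)
#   └── Rent (5, 6)
#
# This is what get_or_suggest_categories relies on: a node's depth is the
# number of intervals containing its set_start, minus one.
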
def update_transaction_part(**args):
    part = v_dict_entry(args, 'part', vv=intv(min_val=0, parse_from_str=True))
    opt = require_json_object_body()
    comment = v_dict_entry(opt, 'comment', optional=True, vv=strv())
    amount = v_dict_entry(opt, 'amount', optional=True, vv=intv())
    category = v_dict_entry(opt, 'category', vv=intv(min_val=0),
                            nullable=True, optional=True)
    require_changed_row(patch_row(
        c=get_db().cursor(),
        table='txn_part',
        values=(
            ('comment', comment),
            ('amount', amount),
            ('category', category),
        ),
        cond=Cond('id = ?', part),
    ))
    return {}

def get_transactions():
    opt = request.args
    dt_from = v_dict_entry(opt, 'from', optional=True,
                           vv=timestampv(parse_from_str=True))
    dt_to = v_dict_entry(opt, 'to', optional=True,
                         vv=timestampv(parse_from_str=True))
    dataset = v_dict_entry(opt, 'dataset',
                           vv=intv(min_val=0, parse_from_str=True),
                           optional=True)
    categories = v_list(opt.getlist('category'), 'categories',
                        vv=intv(min_val=0, parse_from_str=True))
    tags = v_list(opt.getlist('tag'), 'tags',
                  vv=intv(min_val=0, parse_from_str=True))
    cond = Cond('TRUE')
    if dt_from is not None:
        cond += Cond("txn.timestamp >= ?", dt_from)
    if dt_to is not None:
        cond += Cond("txn.timestamp <= ?", dt_to)
    if dataset is not None:
        cond += Cond("txn.dataset = ?", dataset)
    if categories:
        cond += Cond(
            f"txn_part.category IN ({','.join(map(str, categories))})")
    if tags:
        cond += Cond(
            f"txn_part.id IN (SELECT txn_part FROM txn_part_tag WHERE tag IN ({','.join(map(str, tags))}))"
        )
    return {
        "transactions": fetch_transactions(get_db().cursor(), cond,
                                           group_by="txn_part.txn"),
    }

def update_transaction(**args):
    transaction = v_dict_entry(args, 'transaction',
                               vv=intv(min_val=0, parse_from_str=True))
    opt = require_json_object_body()
    comment = v_dict_entry(opt, 'comment', optional=True, vv=strv())
    timestamp = v_dict_entry(opt, 'timestamp', optional=True, vv=timestampv())
    description = v_dict_entry(opt, 'description', optional=True, vv=strv())
    amount = v_dict_entry(opt, 'amount', optional=True, vv=intv())
    require_changed_row(
        patch_row(
            c=get_db().cursor(),
            table='txn',
            values=(
                ('malformed', False),
                ('comment', comment),
                ('timestamp', timestamp),
                ('description', description),
                ('amount', amount),
            ),
            cond=Cond('id = ?', transaction),
        ))
    return {}

def get_transactions_analysis():
    opt = request.args
    dt_from = v_dict_entry(opt, 'from', optional=True,
                           vv=timestampv(parse_from_str=True))
    dt_to = v_dict_entry(opt, 'to', optional=True,
                         vv=timestampv(parse_from_str=True))
    split_by = v_dict_entry(opt, 'split_by',
                            vv=enumv(options=['category', 'none']))
    time_unit = v_dict_entry(
        opt, 'time_unit', vv=enumv(options=['year', 'month', 'day', 'none']))
    categories = v_list(opt.getlist('category'), 'categories',
                        vv=intv(min_val=0, parse_from_str=True))
    tags = v_list(opt.getlist('tag'), 'tags',
                  vv=intv(min_val=0, parse_from_str=True))
    columns = [
        Column('SUM(txn_part.amount)', 'combined_amount'),
    ]
    group_by = []
    if split_by == 'category':
        columns.append(Column('category.name', 'category_name'))
        group_by.append('category.id')
    if time_unit != 'none':
        if time_unit == 'year':
            time_unit_fmt = '%Y'
        elif time_unit == 'month':
            time_unit_fmt = '%Y-%m'
        elif time_unit == 'day':
            time_unit_fmt = '%Y-%m-%d'
        else:
            assert False
        columns.append(
            Column(f"strftime('{time_unit_fmt}', txn.timestamp)", 'time_unit'))
        group_by.append(f"strftime('{time_unit_fmt}', txn.timestamp)")
    cond = Cond('txn_part.amount > 0')
    if dt_from is not None:
        cond += Cond("txn.timestamp >= ?", dt_from)
    if dt_to is not None:
        cond += Cond("txn.timestamp <= ?", dt_to)
    if categories:
        cond += Cond(
            f"txn_part.category IN ({','.join(map(str, categories))})")
    if tags:
        cond += Cond(
            f"txn_part.id IN (SELECT txn_part FROM txn_part_tag WHERE tag IN ({','.join(map(str, tags))}))"
        )
    return {
        "analysis": fetch_all_as_dict(
            c=get_db().cursor(),
            tables=(Table('txn_part'), ),
            cols=columns,
            joins=(
                Join(JoinMethod.left, Table('txn'), 'txn_part.txn = txn.id'),
                Join(JoinMethod.left, Table('category'),
                     'txn_part.category = category.id'),
            ),
            where=cond,
            group_by=','.join(group_by),
        ),
    }

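# Illustration (not executed): with split_by='category' and time_unit='month',
# the query above groups txn_part rows by category and by strftime('%Y-%m')
# bucket, so each "analysis" entry looks roughly like the dict below. The
# concrete values are made up; the key names come from the Column aliases,
# and only positive txn_part amounts are summed (Cond('txn_part.amount > 0')).
#
#   {"combined_amount": 4200, "category_name": "Food", "time_unit": "2023-01"}
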
def get_dataset(**opt):
    dataset = v_dict_entry(opt, 'dataset',
                           vv=intv(min_val=0, parse_from_str=True))
    return {
        "datasets": fetch_datasets(get_db().cursor(),
                                   Cond('dataset.id = ?', dataset))
    }

def set_name():
    opt = require_json_object_body()
    name = v_dict_entry(opt, 'name', vv=strv())
    get_db().cursor().execute(
        "UPDATE setting SET value = ? WHERE name = 'name'", (name, ))
    return {}

def get_name():
    return {
        "name": get_db().cursor().execute(
            "SELECT value FROM setting WHERE name = 'name'").fetchone()[0]
    }

def get_datasets():
    return {
        "datasets": fetch_datasets(get_db().cursor(), Cond('TRUE'))
    }