def dbInit():
    """Initialise the weather database and return the CityGraph model.

    Uses a ClearDB MySQL instance when CLEARDB_DATABASE_URL is set
    (e.g. on Heroku); otherwise falls back to a local SQLite file.

    :returns: the CityGraph model class, bound to the connected database
    """
    if 'CLEARDB_DATABASE_URL' in os.environ:
        # NOTE(review): PROD is only a local variable here and is never
        # read again in this function — confirm whether `global PROD`
        # was intended.
        PROD = True
        url = urlparse.urlparse(os.environ['CLEARDB_DATABASE_URL'])
        # url.path starts with '/'; strip it to get the database name.
        db = peewee.MySQLDatabase(url.path[1:], host=url.hostname,
                                  user=url.username, passwd=url.password)
    else:
        db = SqliteExtDatabase('weather.db')

    class BaseModel(Model):
        class Meta:
            database = db

    class CityGraph(BaseModel):
        # One graph image per (city, date) pair.
        image_url = CharField()
        city = CharField()
        title = CharField()
        date = DateField()

        class Meta:
            primary_key = CompositeKey('city', 'date')

    db.connect()
    # safe=True: no-op if the table already exists.
    db.create_tables([CityGraph], safe=True)
    return CityGraph
def makedb(dbfile):
    """Create (or open) the strings database and define its models.

    The model classes are published as module globals (BaseModel,
    UniqueString, Entity, String) so other code in this module can
    query them after this call.

    :param dbfile: path of the SQLite database file
    :returns: the connected SqliteExtDatabase instance
    """
    db = SqliteExtDatabase(dbfile)
    db.connect()

    global BaseModel

    class BaseModel(Model):
        class Meta:
            database = db

    global UniqueString

    class UniqueString(BaseModel):
        # raw: the string itself; count / malware_count track how often
        # it appears overall vs. in malware samples.
        raw = CharField(index=True)
        count = IntegerField()
        malware_count = IntegerField()
        p_malware = FloatField(index=True)

    global Entity

    class Entity(BaseModel):
        name = CharField(index=True)
        label = FloatField()

    global String

    class String(BaseModel):
        # One occurrence of a UniqueString inside an Entity.
        uniquestring = ForeignKeyField(UniqueString, index=True)
        entity = ForeignKeyField(Entity, index=True)
        source = CharField(index=True, null=True)

    # create database
    db.create_tables([Entity, String, UniqueString])
    return db
def makedb(dbfile):
    """Create (or open) the strings database and define its models.

    Publishes BaseModel, UniqueString, Entity and String as module
    globals so callers can use them after this function returns.

    :param dbfile: path of the SQLite database file
    :returns: the connected SqliteExtDatabase instance
    """
    db = SqliteExtDatabase(dbfile)
    db.connect()

    global BaseModel

    class BaseModel(Model):
        class Meta:
            database = db

    global UniqueString

    class UniqueString(BaseModel):
        # p_malware: score derived from count vs. malware_count.
        raw = CharField(index=True)
        count = IntegerField()
        malware_count = IntegerField()
        p_malware = FloatField(index=True)

    global Entity

    class Entity(BaseModel):
        name = CharField(index=True)
        label = FloatField()

    global String

    class String(BaseModel):
        # Link table: a UniqueString observed in an Entity.
        uniquestring = ForeignKeyField(UniqueString,index=True)
        entity = ForeignKeyField(Entity,index=True)
        source = CharField(index=True,null=True)

    # create database
    db.create_tables([Entity,String,UniqueString])
    return db
def test_database():
    """Fixture: bind ALL_MODELS to a throwaway in-memory SQLite database.

    Tables exist for the duration of the `yield`; they are dropped and
    the database closed afterwards, even when the test body raises.
    """
    in_memory = SqliteExtDatabase(":memory:")
    with in_memory.bind_ctx(ALL_MODELS):
        in_memory.create_tables(ALL_MODELS)
        try:
            yield
        finally:
            in_memory.drop_tables(ALL_MODELS)
            in_memory.close()
class Database(object):
    def __init__(self, database):
        """Create a Database object.

        :param database: database file name (without the '.db' suffix)
        :type database: str
        :returns: Database
        """
        self.database = SqliteExtDatabase('%s.db' % database)
        self.models = models

    def create(self, force = False, verbose = True):
        """Create the database: create the file and the database tables.

        :param force: force the creation of a database even if another
            with the same name already exists
        :type db: bool
        :param verbose: print a notice when the database already exists
        """
        if not os.path.isfile(self.database.database) or force == True:
            # self.database.database corresponds to the database file
            try:
                self.database.create_tables([self.models.Category, self.models.Music, self.models.Rating, self.models.Score])
            except Exception:
                # Best effort: report the failure (message in French) and continue.
                print("Une erreur est survenue lors de la création de la base de données.")
        else:
            if verbose:
                print("La base de données existe déjà.")

    def connect(self):
        """Connect to the database."""
        try:
            self.database.connect()
        except Exception:
            print("Une erreur est survenue lors de la connexion à la base de données.")

    def populate(self, what):
        """Populate the database from CSV files.

        :param what: either "categories" or "musics"
        """
        if what == "categories":
            csv_data = csv.get_csv_data("%s/categories" % settings.POPULATE_FILES_DIR)
            for data in csv_data:
                try:
                    models.Category.create(name=data[0])
                except Exception:
                    print("Une erreur est survenue lors du remplissage de la base de données.")
        elif what == "musics":
            # Hardcoded number of musics CSV file should be fixed
            csv_data_files = csv.get_csv_data("%s/musics" % settings.POPULATE_FILES_DIR, 7)
            for csv_data in csv_data_files:
                for data in csv_data:
                    try:
                        models.Music.create(name=data[0], category=data[1])
                    except Exception:
                        print("Une erreur est survenue lors du remplissage de la base de données.")
def memdb():
    """Fixture: yield an in-memory database with the Job table created."""
    database = SqliteExtDatabase(':memory:')
    bound_models = [db.Job]
    database.bind(bound_models)
    database.connect()
    database.create_tables(bound_models)
    yield database
    database.close()
def main(config: CommandConfig):
    """Forward EnvSensorLog rows from SQLite to InfluxDB.

    Reads the last shipped position from InfluxdbPostPosition, then
    posts batches of up to 1000 sensor-log rows to InfluxDB in a loop
    until it is caught up, persisting the position after each batch.
    """
    influxdb_config = config.influxdb
    client = InfluxDBClient(influxdb_config.host, influxdb_config.port,
                            influxdb_config.username,
                            influxdb_config.password,
                            influxdb_config.database)
    client.create_database(influxdb_config.database)
    db = SqliteExtDatabase(config.database.filename)
    try:
        db_model.database_proxy.initialize(db)
        db.create_tables([db_model.InfluxdbPostPosition])
    except IntegrityError:
        # BUG FIX: the original called logger.error("...", ex), which
        # passes the exception as a lone %-style argument with no
        # placeholder in the message — the exception was never logged.
        # exc_info=True records the full traceback instead.
        logger.error("database open error.", exc_info=True)
        db.rollback()
        exit(1)
    while True:
        db_influx_last_position = read_influx_post_position(db)
        last_post_position = db_influx_last_position.post_log_position
        logger.info(f'db_influx_last_position={last_post_position}')
        with db.transaction() as tran:
            db_result = db_model.EnvSensorLog.select() \
                .where(db_model.EnvSensorLog.id > last_post_position) \
                .order_by(db_model.EnvSensorLog.id) \
                .limit(1000)
            # Cache the count: every .count() call issues a COUNT query.
            result_count = db_result.count()
            logger.info(f"db select count={result_count}")
            if result_count == 0:
                break
            influx_data = []
            for env_sensor_log in db_result:
                logger.debug(
                    f"log_id={env_sensor_log.id}, data={env_sensor_log.data}")
                # Track the highest shipped id so the next batch resumes here.
                db_influx_last_position.post_log_position = env_sensor_log.id
                db_influx_last_position.timestamp = datetime.now()
                influx_data.append(convert(env_sensor_log.data))
            logger.debug(f"write_data={influx_data}")
            logger.info(
                f"processed position={db_influx_last_position.post_log_position}"
            )
            client.write_points(influx_data)
            db_influx_last_position.save()
    logger.info(F"process complete.")
# RssFeedItem model body (the class header is outside this chunk).
    title = CharField()
    url = CharField()
    rss_feed = CharField()
    description = TextField(default='')
    created_date = DateTimeField(default=datetime.now)

    class Meta:
        database = db

    def jointext(self, rss_feed):
        """Assemble this item's display text, honouring the Show* flags
        on the given rss_feed configuration object.

        The description HTML is converted to plain text (links dropped).
        """
        text = ''
        h = html2text.HTML2Text()
        h.ignore_links = True  # plain text only; strip hyperlinks
        description = h.handle(self.description)
        if rss_feed.ShowName is True:
            text += '_' + rss_feed.Name + '_\n'
        if rss_feed.ShowTitle is True:
            text += '*' + self.title + '*\n'
        if rss_feed.ShowDescription is True:
            text += description + '\n'
        if rss_feed.ShowUrl is True:
            text += self.url
        return text


db.create_tables([RssFeedItem], safe=True)
class BaseModel(Model):
    class Meta:
        database = db


class CityTemp(BaseModel):
    # One temperature sample per city per fetch.
    id = PrimaryKeyField()
    station_id = IntegerField()
    city = CharField()
    temp = FloatField()
    date = DateTimeField()


db.connect()
db.create_tables([CityTemp], safe=True)

# NOTE(review): utcnow() here but now() in the cleanup below — confirm
# which timezone CityTemp.date is stored in.
now = dt.datetime.utcnow()
current_hour = now.hour
cities = pd.read_csv("data/cities.csv")

# Removes old rows
CityTemp.delete().where(
    CityTemp.date < dt.datetime.now() - dt.timedelta(days=10)).execute()
# NOTE(review): "optimize table" is MySQL syntax — confirm `db` is a
# MySQL database here and not SQLite.
db.execute_sql("optimize table CityTemp;")

# Fetch current weather for every configured city.
for index, city in cities.iterrows():
    url = "http://api.openweathermap.org/data/2.5/weather?q=%s,%s&APPID=%s" % (
        city["City"], city["Country"], os.environ["OWMKEY"])
    r = requests.get(url)
# Apply the pending schema migration once, as flagged in the config file.
config = read_config()
if config['database_migrate']:
    log.debug("=====================")
    log.debug("Migration stuff...")
    try:
        from playhouse.migrate import *
        migrator = SqliteMigrator(db)
        open_count = IntegerField(default=0)
        migrate(migrator.add_column('Entry', 'open_count', open_count))
        log.debug("Migration success")
        log.debug("=====================")
        # Clear the flag so the migration only runs once.
        config['database_migrate'] = False
        write_config(config)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit and KeyboardInterrupt; catch Exception instead.
        log.error("Could not migrate", exc_info=True)
        log.debug("=====================")
# endregion

log.info(" ".join(["Using DB", str(db), "At path:", str(db_path)]))

# On init make sure we create database
db.connect()
db.create_tables([Entry])
# endregion
# Episode model tail (the class header is outside this chunk).
    pubdate = DateTimeField()

    class Meta:
        # Unique composite index on (podcast, title, pubdate).
        indexes = ((('podcast', 'title', 'pubdate'), True), )


# Model: EpisodeIndex
class EpisodeIndex(FTSModel):
    # Full-text search index.
    rowid = RowIDField()
    title = SearchField()
    description = SearchField()

    class Meta:
        database = db
        # Porter stemmer for the FTS tokenizer.
        options = {'tokenize': 'porter'}


# Connect
db.connect()

if __name__ == '__main__':
    # Create tables and seed one sample podcast when run directly.
    db.create_tables([Podcast, Episode, EpisodeIndex])
    p1 = {
        'name': 'Film Junk Podcast',
        'short_name': 'fjp',
        'feed_url': 'http://feeds.feedburner.com/filmjunk',
        'color': '00C3E2'
    }
    Podcast.create(**p1)
class UserLanguageLevel(BaseModel):
    # A user's level in one language.
    user = IntegerField()
    lang = FixedCharField(max_length=8)
    level = IntegerField()


class SentenceRating(BaseModel):
    sentence = IntegerField()
    rating = IntegerField(default=0)


class Link(BaseModel):
    # Directed src -> dest link between sentence ids.
    src = IntegerField(primary_key=True)
    dest = IntegerField()

    def dump_object(self):
        """Return this link as a plain dict (for serialisation)."""
        o = dict()
        o['src'] = self.src
        o['dest'] = self.dest
        return o


db.connect()

if __name__ == '__main__':
    db.create_tables([User, Sentence, Link, UserLanguageLevel, SentenceRating])
# Click model tail (the class header is outside this chunk).
    query_id = ForeignKeyField(Query, backref='clicks')
    document_id = CharField()
    field = CharField()

    class Meta:
        database = db


class Stay(Model):
    # Time spent on a document for a given query.
    duration = BigIntegerField()
    query_id = ForeignKeyField(Query, backref='stays')
    document_id = CharField()

    class Meta:
        database = db


class Drag(Model):
    query_id = ForeignKeyField(Query, backref='drags')
    document_id = CharField()

    class Meta:
        database = db


db.connect()
# perform migrate
# http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#migrate
# db.drop_tables([Query, Hover, Click, Drag, Stay])
db.create_tables([Query, Hover, Click, Drag, Stay])
# ...a class to represent users... class User(BaseModel): username = CharField(unique=True) # ...and a class to represents tweets. class Tweet(BaseModel): user = ForeignKeyField(User, related_name='tweets') message = TextField() is_published = BooleanField(default=True) # Connect to the database and create a few things. db.connect() try: db.create_tables([User, Tweet]) charlie = User.create(username='******') huey = User.create(username='******') Tweet.create(user=charlie, message='My first tweet') except OperationalError: pass # tables already exist # A simple query selecting a user. u = User.get(User.username == 'charlie') print('user is', u.username) # "<<" corresponds to the SQL "IN" operator. usernames = ['charlie', 'huey', 'mickey'] users = User.select().where(User.username << usernames) print('all selected users', [u.username for u in users]) tweets = Tweet.select().where(Tweet.user << users)
# NOTE: this chunk uses Python 2 print statements.
class Channel(BaseModel):
    name = CharField(unique=True)
    num = IntegerField(default=0)


class ChannelPublisher(BaseModel):
    # Users allowed to publish to a channel.
    ch_id = ForeignKeyField(Channel, related_name='channel_pub')
    pub_id = ForeignKeyField(User, related_name='publisher')


class ChannelSubscriber(BaseModel):
    ch_id = ForeignKeyField(Channel, related_name='channel_sub')
    sub_id = ForeignKeyField(User, related_name='subscriber')
    ts = IntegerField()


class Feed(BaseModel):
    # An item published to a channel by a user.
    ch_id = ForeignKeyField(Channel, related_name='channel_feed')
    pub_id = ForeignKeyField(User, related_name='publisher_feed')
    text = CharField()
    sid = IntegerField()


db.connect()
print 'Database Connected'

if __name__ == "__main__":
    # `clear` argument: delete the SQLite file and recreate all tables.
    if len(sys.argv) >= 2 and sys.argv[1] == 'clear':
        db.close()
        os.remove('my_database.db')
        db.connect()
        db.create_tables([User, Channel, ChannelPublisher, ChannelSubscriber, Feed])
        db.close()
        print 'Database cleared'
# Command model tail (the class header is outside this chunk).
    class Meta:
        db_table = "commands"


class Quote(BaseModel):
    # A saved chat message, with the game it was recorded during.
    username = ForeignKeyField(User)
    channel = ForeignKeyField(Channel)
    created_at = DateTimeField(default=datetime.datetime.now)
    message = CharField(null=False)
    game = CharField(null=False)

    class Meta:
        db_table = "quotes"


class Message(BaseModel):
    username = ForeignKeyField(User)
    channel = ForeignKeyField(Channel)
    message = CharField(null=False)
    created_at = DateTimeField(default=datetime.datetime.now)

    class Meta:
        db_table = "messages"


try:
    db.connect()
    db.create_tables([User, Channel, ChannelUser, Command, Quote, Message])
except OperationalError:
    # Best effort: already connected / tables already exist.
    pass
# NOTE(review): Windows-style separator inside the second component;
# os.path.join(BASE_DIR, 'data', 'amazon.db') would be portable.
db = SqliteExtDatabase(os.path.join(BASE_DIR, 'data\\amazon.db'))


class BaseModel(Model):
    class Meta:
        database = db


class Url(BaseModel):
    key = CharField(unique=True, null=True)
    name = CharField(null=True)
    url = CharField(unique=True)
    type = IntegerField()  # URL_TYPE
    has_crawled = BooleanField(default=False)


db.connect()
db.create_tables([
    Url,
], safe=True)


class URL_TYPE():
    # Constants stored in Url.type.
    PRODUCT_URL = 99
    BEST_SELL_CATEGORY = 2
class BaseModel(Model):
    class Meta:
        database = db


class User(BaseModel):
    username = CharField(unique=True)


class Tweet(BaseModel):
    user = ForeignKeyField(User, related_name='tweets')
    message = TextField()
    created_date = DateTimeField(default=datetime.datetime.now)
    is_published = BooleanField(default=True)


db.connect()
# Second positional argument is `safe` (skip tables that already exist).
db.create_tables([User, Tweet], True)

'''
charlie = User.create(username='******')
huey = User(username='******')
huey.save()
# No need to set `is_published` or `created_date` since they
# will just use the default values we specified.
Tweet.create(user=charlie, message='My first tweet')
# A simple query selecting a user.
User.get(User.username == 'charles')
'''

# Get tweets created by one of several users. The "<<" operator
# corresponds to the SQL "IN" operator.
class RssFeedItem(Model):
    """A single item fetched from an RSS feed."""

    title = CharField()
    url = CharField()
    rss_feed = CharField()
    description = TextField(default='')
    created_date = DateTimeField(default=datetime.now)

    class Meta:
        database = db

    def jointext(self, rss_feed):
        """Build this item's display text according to the feed's
        Show* flags; the description HTML is flattened to plain text
        with links removed."""
        converter = html2text.HTML2Text()
        converter.ignore_links = True
        plain_description = converter.handle(self.description)

        pieces = []
        if rss_feed.ShowName is True:
            pieces.append('_' + rss_feed.Name + '_\n')
        if rss_feed.ShowTitle is True:
            pieces.append('*' + self.title + '*\n')
        if rss_feed.ShowDescription is True:
            pieces.append(plain_description + '\n')
        if rss_feed.ShowUrl is True:
            pieces.append(self.url)
        return ''.join(pieces)


db.create_tables([RssFeedItem], safe=True)
class Member(BaseModel):
    member_id = peewee.IntegerField(unique=True)
    member_name = peewee.CharField(max_length=32)
    member_room_id = peewee.IntegerField(null=True)


class LiveSubscription(BaseModel):
    # Soft-deletable link between a subscriptor and a member
    # (deleted=1 marks a cancelled subscription).
    subscriptor = peewee.ForeignKeyField(Subscriptor, related_name='subscriptors')
    member = peewee.ForeignKeyField(Member, related_name='subscriptors')
    deleted = peewee.IntegerField(default=0)


db.connect()
try:
    db.create_tables([Subscriptor, Member, LiveSubscription])
except peewee.OperationalError:
    # Tables already exist.
    pass


def get_live_subscription():
    """Return value format:
    {
        member id: list of subscriptors,
    }
    """
    lss = LiveSubscription.select().where(LiveSubscription.deleted==0)
    result = {}
    for ls in lss:
        # (function continues past this chunk)
        subscriptors = result.get(ls.member.member_id, [])
# Tail of the preceding function (its header is outside this chunk).
    return current_entries


def get_graphing_data(room, minute_grouping=10, max_age_minutes=2880):
    """Return averaged temperature/humidity readings for `room`.

    Samples from the last `max_age_minutes` are grouped into
    `minute_grouping`-minute buckets and averaged, newest first.
    """
    query = (SensorData.select(
        SensorData.room, SensorData.timestamp,
        peewee.fn.Avg(SensorData.temperature),
        peewee.fn.Avg(SensorData.humidity)).where(
            SensorData.room == room,
            SensorData.timestamp.between(
                datetime.datetime.now() -
                datetime.timedelta(minutes=max_age_minutes),
                datetime.datetime.now())).group_by(
                    # Bucket by epoch seconds / (minutes * 60).
                    peewee.SQL("room, strftime('%%s', timestamp) / (%d * 60)" %
                               (minute_grouping, ))).order_by(
                                   SensorData.timestamp.desc()))
    data = [{
        "time": arrow.get(entry.timestamp).to('local').format("YYYY-MM-DD HH:mm:ss"),
        "temperature": entry.temperature,
        "humidity": entry.humidity
    } for entry in query]
    return {"room": PRINTABLE_ROOM_NAMES[room], "room_id": room, "data": data}


db.connect()
db.create_tables([SensorData, BatteryData], safe=True)
name = TextField(unique=True) sendtime = DateTimeField(default=datetime.now, unique=True) class Meta: db_table = 'notification' database = sqlite_db from telegram import send_msg def send_notification(msg, parse_mode=None): ''' msg: Must contain id and content attributes parse_mode: Telegram parse mode ''' msg_id = msg['id'] if not Notification.select().where(Notification.name == msg_id).exists(): results = send_msg(msg['content'], parse_mode=parse_mode) if results['ok']: n = Notification.create(name=msg_id) n.save() return True return False if __name__ == '__main__': sqlite_db.create_tables([Notification]) # create_tables()
# Tail of a click handler (its header and opening `if` are outside this
# chunk): record the click position for whichever axes was clicked.
            self.current_point.second_pos_x = x
            self.current_point.second_pos_y = y
        elif event.inaxes == self.ax3:
            self.current_point.third_pos_x = x
            self.current_point.third_pos_y = y

    def save(self):
        """Persist the current point and start a fresh one."""
        if self.current_point is not None:
            self.current_point.save()
            self.points.append(self.current_point)
            self.current_point = MultiDimensionalPoint()


#connect to db
db.connect()
#create tables
db.create_tables([MultiDimensionalPoint])

finder = ImageFinder()

#add buttons
resetax = plt.axes([0.7, 0.025, 0.1, 0.04])
button = Button(resetax, 'Load files', color='lightgoldenrodyellow', hovercolor='0.975')
resetax_save = plt.axes([0.85, 0.025, 0.1, 0.04])
button_save = Button(resetax_save, 'Save point', color='lightgoldenrodyellow', hovercolor='0.975')


def load(event):
    """Button callback: (re)load and transform the three source images."""
    finder.read_and_transform_image(finder.first_path, 1)
    finder.read_and_transform_image(finder.second_path, 2)
    finder.read_and_transform_image(finder.third_path, 3)
# import sqlite3 as sql import json import os, sys, inspect from peewee import * from playhouse.sqlite_ext import SqliteExtDatabase from flask import Flask from model.entities.partnerEntity import * #========================================================================= # Initialize #========================================================================= with open('config.json', 'r') as f: config = json.load(f) app = Flask(__name__) #========================================================================= # Run script #========================================================================= if os.path.exists(config.get("database_file")) == False: db = SqliteExtDatabase(config.get("database_file")) db.connect() db.create_tables([Partner]) else: print "The database file: ", config.get("database_file"), " has existed!"
class CoAuthorPLOSArticle(BaseModel):
    """
    CoAuthorPLOSArticle is a link table to relate a co-author with a
    PLOS article.

    Fields:
        corr_author: This field is linked to CorrespondingAuthor table.
        article: This field is linked to PLOSArticle table.
    """
    corr_author = ForeignKeyField(CorrespondingAuthor)
    article = ForeignKeyField(PLOSArticle)


db.connect()
db.create_tables([
    Journal, PLOSArticle, ArticleType, CoAuthorPLOSArticle,
    CorrespondingAuthor, JATSType, Affiliations, Country,
    SubjectsPLOSArticle, Subjects
])

# Use the starter corpus only when --starter was given.
corpus_dir = starterdir if args.starter else None
all_files = Corpus(corpus_dir)
# --random caps how many articles are processed.
num_files = len(all_files) if args.random is None else args.random

for article in tqdm(islice(all_files, args.random), total=num_files):
    journal_name = journal_title_dict[article.journal.upper()]
    with db.atomic() as atomic:
        try:
            journal = Journal.create(journal=journal_name)
        except IntegrityError:
            # Journal row already exists; roll back and fetch it.
            db.rollback()
            journal = Journal.get(Journal.journal == journal_name)
# Route logs to the handler selected in the config.
# NOTE(review): if "method" is none of the three values, `handler` is
# unbound and the setFormatter call below raises NameError — confirm
# the config is validated upstream.
if config["logging"]["method"] == "stdout":
    handler = logging.StreamHandler(sys.stdout)
elif config["logging"]["method"] == "syslog":
    handler = logging.handlers.SysLogHandler(
        address=config["logging"]["address"])
elif config["logging"]["method"] == "file":
    handler = logging.FileHandler(config["logging"]["file"])

handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info("Logging enabled.")

# Make a DB connection and create the tables if they need to be created.
db = SqliteExtDatabase(config["general"]["database"])
db.connect()
db.create_tables([Setting, Feed, FeedEvent], safe=True)

# Create a shared feeding queue that will be used by several workers.
# Create a notification queue for sending status notifications.
feed_queue = queue.Queue()
notification_queue = queue.Queue()

# Our semaphore for feeding and pausing.
petfeedd.feeding_semaphore = True

# Holds our thread pool.
thread_pool = []

# Start the web worker if requested.
if strtobool(config["web"]["enabled"]) == 1:
    thread_pool.append(WebWorker(feed_queue, config).begin())
from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase

# threadlocals=True: per-thread connection state (older peewee option).
db = SqliteExtDatabase('store/virus_manager.db', threadlocals=True)


class BaseModel(Model):
    class Meta:
        database = db


class ManagedMachine(BaseModel):
    # A machine image under management, cloned from a reference image.
    image_name = TextField(unique=True)
    reference_image = TextField()
    creation_time = IntegerField()


class Infection(BaseModel):
    name = TextField()
    machine = ForeignKeyField(ManagedMachine, related_name='infections')


# Second positional argument is `safe` (skip tables that already exist).
db.create_tables([ManagedMachine, Infection], True)
# NOTE: this chunk is Python 2 (print statement below).
# Tail of a query helper (its header is outside this chunk): look up the
# words for every token and flatten the results into one list.
    r = list()
    for t in ts:
        ws = query_word(t.word)
        r.extend(ws)
    return r


def print_result(r):
    print str(r)


if __name__ == '__main__':
    # Tiny CLI: first argument selects the operation, second the query.
    import sys
    opt = sys.argv[1]
    if opt == 'create':
        db.create_tables([Word, Kanji, Translation, Sentence, WordSentence])
    elif opt == 'qw':
        text = sys.argv[2]
        print_result(query_words(text))
    elif opt == 'qt':
        text = sys.argv[2]
        print_result(query_translations(text))
    elif opt == 'qwt':
        print_result(query_word_translations(sys.argv[2]))
    elif opt == 'qtw':
        print_result(query_translation_words(sys.argv[2]))
from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase
import datetime
from data_models import User
from data_models import ActionType
from data_models import EncsPPC_Action
from settings_provider import SettingsProvider
from pprint import pprint

# Resolve the database location from settings.json.
settings_provider = SettingsProvider('settings.json')
database_path = settings_provider.database_path()

db = SqliteExtDatabase(database_path)
db.connect()
db.create_tables([User, ActionType, EncsPPC_Action])

# Seed the ActionType lookup table.
action_type_list = ['in', 'out']
for action_type_value in action_type_list:
    action_type = ActionType()
    action_type.description = action_type_value
    action_type.save()
# User model tail (the class header is outside this chunk).
    followers_count = IntegerField()
    friends_count = IntegerField()
    favourites_count = IntegerField()
    statuses_count = IntegerField()
    utc_offset = IntegerField(null=True)


class Tweet(BaseModel):
    # Twitter's own id serves as the primary key.
    id = BigIntegerField(primary_key=True)
    user = ForeignKeyField(User, related_name='tweets')
    text = TextField()
    favorite_count = IntegerField()
    lang = CharField()
    place = CharField(null=True)
    timestamp_ms = TimestampField()
    created_at = TextField()


class TweetOriginal(BaseModel):
    # Raw tweet JSON, keyed to the parsed Tweet row.
    tweet = ForeignKeyField(Tweet, related_name='tweetorigin')
    tweet_json = CharField()


class Hashtag(BaseModel):
    hashtag = CharField()
    tweet = ForeignKeyField(Tweet, related_name='hashtags')


class UserMention(BaseModel):
    screen_name = CharField()
    tweet = ForeignKeyField(Tweet, related_name='user_mentions')


db.connect()
# NOTE(review): TweetOriginal is defined above but absent from this
# list — confirm whether its table should be created here too.
db.create_tables([User, Tweet, Hashtag, UserMention], safe=True)
#!/usr/bin/env python from peewee import * from playhouse.sqlite_ext import SqliteExtDatabase import datetime db = SqliteExtDatabase('data/data.db') class BaseModel(Model): class Meta: database = db class Website(BaseModel): chat_id = CharField() url = CharField() created_date = DateTimeField(default=datetime.datetime.now) last_seen = DateTimeField(default=datetime.datetime.now) last_status_code = IntegerField(default=0) msg_send = IntegerField(default=0) last_checked = DateTimeField(default=datetime.datetime.now) db.connect() if not Website.table_exists(): db.create_tables([Website])
class Instrument(BaseModel):
    ticker = CharField(unique=True)


class OrderBookUpdate(BaseModel):
    # One update event per instrument.
    created_utc = DateTimeField(default=datetime.datetime.utcnow)
    ms_out = IntegerField()
    instrument = ForeignKeyField(Instrument)


class OrderBook(BaseModel):
    # One price level belonging to an update (bid or ask side).
    is_bid = BooleanField()
    cm = FloatField()
    price = FloatField()
    qty = FloatField()
    update = ForeignKeyField(OrderBookUpdate)


class Summary(BaseModel):
    high = FloatField()
    last = FloatField()
    low = FloatField()
    update = ForeignKeyField(OrderBookUpdate)


# NOTE(review): tables are created only when the DB file is missing and
# connect() is skipped entirely when it exists — confirm that callers
# connect elsewhere.
if not os.path.exists(DB_PATH):
    db.connect()
    db.create_tables([Instrument, OrderBookUpdate, OrderBook, Summary])
def __init__(self, db_path):
    """Open (or create) the SQLite database at `db_path`, bind the
    Issue model to it, and make sure its table exists.

    Note: the database handle is not kept on self.
    """
    database = SqliteExtDatabase(db_path)
    database.bind([Issue])
    database.connect()
    database.create_tables([Issue])
# Tail of a path-lookup helper (its header is outside this chunk).
        return path
    return None


class ReleasePath(BaseModel):
    # Where a release's installer/dist artifacts live, per OS.
    release = ForeignKeyField(Release, backref="paths")
    operating_system = CharField()
    installer_path = CharField()
    dist_path = CharField()


CONN.connect()

# The embed template is read once at import time.
with open("./update_embed.py", "r") as update_embed:
    UPDATE_EMBED = update_embed.read()

if FIRST_INIT:
    CONN.create_tables([Product, Release, ReleasePath])

"""
Folder structure on the server
./
    server.py
    server.db
    build/
        ...temp files...
    dist/
        ...temp files...
    releases/
        my_app/
            0.0.0/
                myapp 0.0.0.dmg
            1.0.0/
                myapp 1.0.0.dmg
"""
# Both databases live under $SNAP_COMMON (or the CWD outside a snap).
cdb = SqliteExtDatabase(
    os.path.join(os.getenv('SNAP_COMMON', './'), 'config.db'))
cdb.connect()

qdb = SqliteExtDatabase(
    os.path.join(os.getenv('SNAP_COMMON', './'), 'queue.db'))
qdb.connect()


class NodeConfig(Model):
    # One row per node; the config payload is stored as JSON.
    node_id = TextField(primary_key=True)
    config = JSONField(null=True)
    access_key = DateTimeField(null=True)

    class Meta:
        database = cdb


class NVQueue(Model):
    # Use integer timestamp as default row ID
    id = PrimaryKeyField()
    item = JSONField()

    class Meta:
        database = qdb


# Create tables if they don't already exist
cdb.create_tables([NodeConfig], safe=True)
qdb.create_tables([NVQueue], safe=True)
# Group model tail (the class header is outside this chunk).
    name = peewee.CharField()
    students = ManyToManyField(Student, related_name='groups')


# Implicit junction table for the Group<->Student many-to-many.
StudentGroup = Group.students.get_through_model()


class Discipline(BaseModel):
    name = peewee.CharField(unique=True)


class Task(BaseModel):
    # A graded assignment within a discipline.
    name = peewee.CharField()
    maxRate = peewee.IntegerField()
    rateWeight = peewee.IntegerField()
    discipline = peewee.ForeignKeyField(Discipline, related_name='tasks')
    date = peewee.DateTimeField()


class Mark(BaseModel):
    task = peewee.ForeignKeyField(Task, related_name='marks')
    student = peewee.ForeignKeyField(Student, related_name='marks')
    rate1 = peewee.FloatField()
    rate2 = peewee.FloatField(null=True)
    date = peewee.DateTimeField()


# NOTE(review): this drops and recreates every table at import time —
# all data is lost each run; confirm this reset is intended.
db.drop_tables([Student, Group, StudentGroup, Discipline, Task, Mark], safe=True)
db.create_tables([Student, Group, StudentGroup, Discipline, Task, Mark])
# ChannelUser model tail (the class header is outside this chunk).
    class Meta:
        db_table = "channelusers"


class Quote(BaseModel):
    # A saved chat message, with the game it was recorded during.
    username = ForeignKeyField(User)
    channel = ForeignKeyField(Channel)
    created_at = DateTimeField(default=datetime.datetime.now)
    message = CharField(null=False)
    game = CharField(null=False)

    class Meta:
        db_table = "quotes"


class Message(BaseModel):
    username = ForeignKeyField(User)
    channel = ForeignKeyField(Channel)
    message = CharField(null=False)
    created_at = DateTimeField(default=datetime.datetime.now)

    class Meta:
        db_table = "messages"


try:
    db.connect()
    db.create_tables([User, Channel, ChannelUser, Quote, Message])
except OperationalError:
    # Best effort: already connected / tables already exist.
    pass
# NOTE: this chunk is Python 2 (`print e` below).
# BaseModel.Meta tail (the class headers are outside this chunk).
        database = db


class User(BaseModel):
    # id en auto-incrementing
    # NOTE(review): "fistname" is likely a typo for "firstname", but
    # renaming it would change the DB column name.
    name = CharField()
    fistname = CharField()
    ip = CharField(null=True)
    mac = CharField(null=True)
    team = CharField(null=True)
    pizza = CharField(null=True)


db.connect()
try:
    db.create_tables([User])
except Exception as e:
    print e


UPLOAD_FOLDER = './uploads'

app = Flask(__name__, static_url_path="", static_folder="contents")
app.config.from_object(__name__)


def check_auth(username, password):
    """This function is called to check if a username / password
    combination is valid.
    """
    return username == 'asimov' and password == 'tagada72'
class Meta: database = db class House(TimestampModel): job_id = ForeignKeyField(Job, backref='houses') house_id = CharField() list_meta = JSONField(null=True, json_dumps=friendly_json_dumps) detail_meta = JSONField(null=True, json_dumps=friendly_json_dumps) rough_gps = JSONField(null=True, json_dumps=friendly_json_dumps) class Meta: database = db indexes = ((('job_id', 'house_id'), True), ) class HouseStats(TimestampModel): job_id = ForeignKeyField(Job, backref='houses') house_id = CharField() list_count = IntegerField(default=0) detail_count = IntegerField(default=0) gps_count = IntegerField(default=0) is_vip = BooleanField(default=False) class Meta: database = db indexes = ((('job_id', 'house_id'), True), ) db.create_tables([Job, House, HouseStats])
# Commented-out remains of an earlier snippets/FTS schema:
# language = CharField(index = True)
#
# class Meta:
#     database = db
#
# class SnippetsIndex(FTSModel):
#     uuid = CharField()
#     title = SearchField()
#     content = SearchField()
#
#     class Meta:
#         database = db

db.create_tables([Note, NoteIndex, Tags], safe = True)


def load_json(f):
    """Parse the JSON file at path `f` and return the resulting object."""
    # BUG FIX: the original used json.loads(open(f, 'r').read()), which
    # never closed the file handle; use a context manager instead.
    with open(f, 'r') as fh:
        return json.loads(fh.read())


# Store notes
with db.atomic():
    for notebook in iglob(libpath + "/*.qvnotebook"):
        meta = load_json(list(iglob(notebook + "/meta.json"))[0])
        nb_name = meta["name"]
        for c in iglob(notebook + "/*.qvnote/content.json"):
            meta = load_json(c.replace("content.json", "meta.json"))
            content = load_json(c)
            tagset = meta["tags"]
            # Join all cell bodies into one searchable text blob.
            full_content = ' '.join([x["data"] for x in content["cells"]])