def makedb(dbfile):
    db = SqliteExtDatabase(dbfile)
    db.connect()

    global BaseModel

    class BaseModel(Model):
        class Meta:
            database = db

    global UniqueString

    class UniqueString(BaseModel):
        raw = CharField(index=True)
        count = IntegerField()
        malware_count = IntegerField()
        p_malware = FloatField(index=True)

    global Entity

    class Entity(BaseModel):
        name = CharField(index=True)
        label = FloatField()

    global String

    class String(BaseModel):
        uniquestring = ForeignKeyField(UniqueString, index=True)
        entity = ForeignKeyField(Entity, index=True)
        source = CharField(index=True, null=True)

    # create database
    db.create_tables([Entity, String, UniqueString])
    return db

def dbInit():
    if 'CLEARDB_DATABASE_URL' in os.environ:
        PROD = True
        url = urlparse.urlparse(os.environ['CLEARDB_DATABASE_URL'])
        db = peewee.MySQLDatabase(url.path[1:], host=url.hostname,
                                  user=url.username, passwd=url.password)
    else:
        db = SqliteExtDatabase('weather.db')

    class BaseModel(Model):
        class Meta:
            database = db

    class CityGraph(BaseModel):
        image_url = CharField()
        city = CharField()
        title = CharField()
        date = DateField()

        class Meta:
            primary_key = CompositeKey('city', 'date')

    db.connect()
    db.create_tables([CityGraph], safe=True)
    return CityGraph

class Database(object):
    def __init__(self, database):
        """Create a Database object.

        :param database: database file name
        :type database: str
        :returns: Database
        """
        self.database = SqliteExtDatabase('%s.db' % database)
        self.models = models

    def create(self, force=False, verbose=True):
        """Create the database: create the file and the database tables.

        :param force: force the creation of a database even if another with
                      the same name already exists
        :type force: bool
        """
        # self.database.database corresponds to the database file
        if not os.path.isfile(self.database.database) or force:
            try:
                self.database.create_tables([self.models.Category, self.models.Music,
                                             self.models.Rating, self.models.Score])
            except Exception:
                print("An error occurred while creating the database.")
        else:
            if verbose:
                print("The database already exists.")

    def connect(self):
        """Connect to the database."""
        try:
            self.database.connect()
        except Exception:
            print("An error occurred while connecting to the database.")

    def populate(self, what):
        """Populate the database with CSV files."""
        if what == "categories":
            csv_data = csv.get_csv_data("%s/categories" % settings.POPULATE_FILES_DIR)
            for data in csv_data:
                try:
                    models.Category.create(name=data[0])
                except Exception:
                    print("An error occurred while populating the database.")
        elif what == "musics":
            # Hardcoded number of musics CSV files; should be fixed
            csv_data_files = csv.get_csv_data("%s/musics" % settings.POPULATE_FILES_DIR, 7)
            for csv_data in csv_data_files:
                for data in csv_data:
                    try:
                        models.Music.create(name=data[0], category=data[1])
                    except Exception:
                        print("An error occurred while populating the database.")

def memdb():
    mdb = SqliteExtDatabase(':memory:')
    models = [db.Job]
    mdb.bind(models)
    mdb.connect()
    mdb.create_tables(models)
    yield mdb
    mdb.close()

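# Hedged usage sketch (not from the original source): it assumes memdb above is registered
# as a pytest fixture (e.g. decorated with @pytest.fixture) and that db.Job is an ordinary
# peewee model with a CharField called "name"; the field name is an assumption made purely
# for illustration.
def test_job_roundtrip(memdb):
    job = db.Job.create(name='example')  # write into the bound in-memory database
    assert db.Job.get(db.Job.name == 'example').id == job.id
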
def get_tape_mgr(db=None):
    changer = get_changer()
    if db is None:
        db = SqliteExtDatabase(':memory:')
        db.connect()
    database_proxy.initialize(db)
    return tapemgr.TapeManager(db, changer, storage_choosers, cleaning_chooser, init_db=True)

def _init_db(self, db_path):
    from playhouse.sqlite_ext import SqliteExtDatabase

    if db_path is None:
        db_path = os.path.join(os.path.expanduser('~'), '.etesync', 'data.db')

    directory = os.path.dirname(db_path)
    if directory != '' and not os.path.exists(directory):
        os.makedirs(directory)

    database = SqliteExtDatabase(db_path, pragmas={'foreign_keys': 1})
    database.connect()
    self._set_db(database)

def test_get_relations(self):
    test_db = 'test.db'
    if isfile(test_db):
        remove(test_db)
    db = SqliteExtDatabase(test_db)
    db.connect()
    Subreddit, Relationship = obtain_models(db)
    s_a = Subreddit.create(name='subreddit A', subs=666, over18=True,
                           iconURL='https://domain/pic.jpg')
    s_b = Subreddit.create(name='subreddit B', subs=777, over18=False,
                           iconURL='https://domain/pic2.jpg')
    Relationship.create(a=s_a, b=s_b)
    relations_a = s_a.find_relations()
    self.assertIn(s_b, relations_a)

def main():
    '''setup db, get connection to bittorrent, download movies'''
    movie_database = SqliteExtDatabase('movies.db')

    class Movie(Model):
        '''movie object for database'''
        title = TextField()
        name = TextField()
        label = TextField()
        magnet_link = TextField()
        downloaded = BooleanField()

        class Meta:
            '''set database for the model'''
            database = movie_database

    movie_database.connect()
    movie_database.create_tables([Movie], safe=True)
    bittorrent = setup_bittorrent()
    while True:
        download_movies(bittorrent, Movie)
        time.sleep(60)

#!/usr/bin/env python2
from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase
import datetime
import os

db = SqliteExtDatabase(os.path.dirname(os.path.realpath(__file__)) + '/corpus.db')
db.connect()


class BaseModel(Model):
    class Meta:
        database = db


class Tweet(BaseModel):
    id = BigIntegerField(unique=True)
    mentioning = CharField()
    screen_name = CharField()
    text = TextField()
    created_at = DateTimeField(null=True)
    ingested_at = DateTimeField(default=datetime.datetime.now)
    classification = CharField(null=True)

# db.create_tables([Tweet])

import logging
import os

from playhouse.sqlite_ext import Model, SqliteExtDatabase, PrimaryKeyField, TextField, DateTimeField, JSONField

logger = logging.getLogger(__name__)

# Create separate database files for configuration storage and for non-volatile queue storage.
# Keeping them apart should reduce the chance that the config file gets corrupted by problems
# writing to the queue file (e.g. in case of a power outage).
cdb = SqliteExtDatabase(os.path.join(os.getenv('SNAP_COMMON', './'), 'config.db'))
cdb.connect()

qdb = SqliteExtDatabase(os.path.join(os.getenv('SNAP_COMMON', './'), 'queue.db'))
qdb.connect()


class NodeConfig(Model):
    node_id = TextField(primary_key=True)
    config = JSONField(null=True)
    access_key = DateTimeField(null=True)

    class Meta:
        database = cdb


class NVQueue(Model):
    # Use integer timestamp as default row ID
    id = PrimaryKeyField()

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
from collections import Counter
import NLP, utiltools
import TFIDF
import re
import random
import requests
import numpy as np
import dealSQL

DBPLACE = '/Users/masaMikam/Dropbox/Project/umiA/Data/LANGUAGEsid.sqlite3'

from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase

database = SqliteExtDatabase(DBPLACE, autocommit=False, journal_mode='persist')
database.connect()


class BaseModel(Model):
    class Meta:
        database = database


class trigram(BaseModel):
    W1 = CharField(null=True)
    W2 = CharField(null=True)
    W3 = CharField(null=True)
    P1 = CharField(null=True)
    P2 = CharField(null=True)
    P3 = CharField(null=True)
    cnt = IntegerField(null=True)
    ok = IntegerField(null=True)
    ng = IntegerField(null=True)

    class Meta:

import json
import time

from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase

from polymath.constants import DB_NAME

client = SqliteExtDatabase(DB_NAME, pragmas=(
    ('journal_mode', 'WAL'),
    ('cache_size', 10000),
    ('mmap_size', 1024 * 1024 * 32),
))
client.connect()


class SqliteModel(Model):
    class Meta(object):
        database = client


class Category(SqliteModel):
    # CategoryID, CategoryName, CategoryLevel, BestOfferEnabled, CategoryParentID
    category_id = PrimaryKeyField(null=False)
    category_parent_id = ForeignKeyField('self', null=True, backref='children')
    category_level = IntegerField(null=False)
    category_name = TextField()
    category_updated = IntegerField(null=False)
    best_offer_enabled = BooleanField(default=False)
    expired = BooleanField(default=False)
    last_updated = IntegerField(null=False)

def __init__(self, db_path):
    db = SqliteExtDatabase(db_path)
    db.bind([Issue])
    db.connect()
    db.create_tables([Issue])

class BaseModel(Model):
    class Meta:
        database = DB


class Server(BaseModel):
    server_id = CharField(unique=True)
    created_at = DateTimeField(default=datetime.datetime.now)
    user_id = CharField(unique=True)
    # ebs_volume_id = TextField(unique=True)

    @classmethod
    def new_server(cls, server_id, user_id):
        cls.create(server_id=server_id, user_id=user_id)

    @classmethod
    def get_server(cls, user_id):
        return cls.get(user_id=user_id)

    @classmethod
    def get_server_count(cls):
        return cls.select().count()

    @classmethod
    def remove_server(cls, server_id):
        cls.delete().where(cls.server_id == server_id).execute()


DB.connect()
Server.create_table(True)

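# Illustrative usage of the Server helpers above (not part of the original source);
# the server and user IDs below are placeholders.
Server.new_server(server_id='i-0123456789abcdef0', user_id='user-42')
assert Server.get_server_count() == 1
srv = Server.get_server('user-42')
Server.remove_server(srv.server_id)
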
def run_save_command(task_id, current_print, cmd_queue):
    from ...env import Env
    from ...data.db import Database
    from playhouse.sqlite_ext import SqliteExtDatabase
    import zmq

    Env.setup()
    conn = SqliteExtDatabase(
        Database.path,
        pragmas=(
            ('cache_size', -1024 * 64),  # 64MB page-cache.
            ('journal_mode', 'wal'),     # Use WAL-mode (you should always use this!).
            ('foreign_keys', 1),
            ('threadlocals', True)))
    conn.connect()
    # res = conn.execute_sql("PRAGMA journal_size_limit = -1;").fetchall()
    # res = conn.execute_sql("PRAGMA wal_autocheckpoint = -1;").fetchall()
    res = conn.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);").fetchall()
    print(f'PROCESS INITIAL WAL CHECKPOINT = {res}', flush=True)
    # res = conn.execute_sql("PRAGMA wal_autocheckpoint;").fetchall()

    PrinterCommand._meta.database = conn
    Print._meta.database = conn

    save_queue = OrderedDict()
    agent = SaveAgent(save_queue)
    agent_thread = Thread(target=agent.start, daemon=True)
    agent_thread.start()

    start_time = time.time()
    cnt = 1
    running = True
    while running:
        try:
            # payload = resp_queue.get()
            payload = None
            polltimeout = 0.0001
            respcommand = None
            # if queuesize >= maxqueuesize:
            #     polltimeout = 10
            # res = cmd_queue.poll(polltimeout)
            # if res:
            payload = cmd_queue.recv()
            # payload = cmd_queue.recv_multipart()
            # queuesize -= 1
            if payload:
                (key, prnt, respcommand) = payload
                # print(f"JSONCMD = {jsoncmd}")
                # respcommand = pickle.loads(pcb)
                # respcommand = PrinterCommand(**jsoncmd)
                if key == "cmd":
                    cnt += 1
                    # respcommand = json.loads(resp.decode('utf-8'))
                    # if cnt % 20 == 0:
                    #     print(f"Save command cnt: {cnt} {time.time()}")
                    if respcommand:
                        # print(f"has resp command {respcommand._meta.database.__dict__}")
                        with agent.condition:
                            save_queue[prnt.id] = prnt.state
                        if respcommand.status == "error":
                            break
                        # if respcommand.status == "finished":
                        #     current_print.state["pos"] = pos
                        #     current_print.save()
                        #     prnt.save()
                        #     Print.update(state=prnt.state).where(Print.id == prnt.id).execute()
                        #     respcommand.save()
                        #     cmd_queue.send(('done', respcommand.id))
                elif key == "close":
                    running = False
        except Exception as e:
            print(f"RES READ EXCEPTION {type(e).__name__}, {str(e)}", flush=True)
            # cmd_queue.put(("state", {"status": "error", "reason": str(e)}))
            cmd_queue.send(("state", {"status": "error", "reason": str(e)}))

    agent.running = False
    res = conn.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);").fetchall()
    print(f'WAL CHECKPOINT = {res}')

def __init__(self, tokens, db_path):
    self.tokens = tokens
    db = SqliteExtDatabase(db_path)
    db.bind([RateLimit])
    db.connect()
    db.create_tables([RateLimit])

from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase
import datetime
import time

custom_db = SqliteExtDatabase('blah.db')


class BaseModel(Model):
    class Meta:
        database = custom_db


class User(BaseModel):
    name = CharField(unique=True)
    time = IntegerField()


custom_db.connect()
# custom_db.create_tables([User])
# db.connect()

try:
    charlie = User.create(name='charlie', time=0)
    charlie.save()
except:
    print "user creation error"

# User.create(name='benard').save()

try:
    print User.get(User.name == 'sally')
except DoesNotExist:
    print "Does not exist"

user = User.get(User.name == 'charlie')

import math
from collections import Counter
import natural_language_processing
import re
import random
import numpy as np
from itertools import chain
# from numba import double
# from numba.decorators import jit

from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase

db = SqliteExtDatabase('../twtrData.sqlite3', autocommit=False, journal_mode='persist')
db.connect()


def f7(seq):
    seen = set()
    seen_add = seen.add
    return [x for x in seq if x not in seen and not seen_add(x)]


class BaseModel(Model):
    class Meta:
        database = db


class Tweets(BaseModel):
    createdat = DateTimeField(db_column='createdAt')

class BefrankPipeline(object):
    def __init__(self):
        self.db = None
        self.setup_db_connection()
        self.create_tables()

    def setup_db_connection(self):
        self.db = SqliteExtDatabase('movies.db')
        self.db.connect()

    def create_tables(self):
        return self.db.create_tables([Movie, Star, Writer, Director, Genre, PlotKeyword])

    def close_db(self):
        self.db.close()

    def process_item(self, item, spider):
        for key, value in six.iteritems(item):
            if key == "CastMembers":
                continue
            if isinstance(value, list):
                if value:
                    templist = []
                    for obj in value:
                        temp = self.stripHTML(obj)
                        templist.append(temp)
                    item[key] = templist
                else:
                    item[key] = ""
            elif key != 'MainPageUrl':
                item[key] = self.stripHTML(value)
            else:
                item[key] = value
        self.store_in_db(item)
        return item

    def store_in_db(self, item):
        self.storeFilmInfoInDb(item)
        film_id = self.cursor.lastrowid
        for cast in item['CastMembers']:
            self.storeActorInfoInDb(cast, film_id)

    def storeFilmInfoInDb(self, item):
        self.cur.execute(
            "INSERT INTO Films("
            "title, rating, ranking, release_date, page_url, director, writers, runtime, "
            "sinopsis, genres, mpaa_rating, budget, language, country, gross_profit, "
            "opening_weekend_profit, aspect_ratio, sound_mix, color) "
            "VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (
                item.get('Title', ''),
                float(item.get('Rating', 0.0)),
                int(item.get('Ranking', 0)),
                item.get('ReleaseDate', ''),
                item.get('MainPageUrl', ''),
                ', '.join(item.get('Director', '')),
                ', '.join(item.get('Writers', '')),
                item.get('Runtime', ''),
                item.get('Sinopsis', '').strip(),
                ', '.join(item.get('Genres', '')),
                item.get('MpaaRating', ''),
                self.cleanMoney(item.get('Budget', '')),
                item.get('Language', ''),
                item.get('Country', ''),
                self.cleanMoney(item.get('GrossProfit', '')),
                self.cleanMoney(item.get('OpeningWeekendProfit', '')),
                item.get('AspectRatio', '').strip(),
                ', '.join(item.get('SoundMix', '')),
                item.get('Color', '')
            ))
        self.con.commit()

    def storeActorInfoInDb(self, item, film_id):
        self.cur.execute(
            "INSERT INTO Actors(film_id, actor_name, charecter_name, ranking) "
            "VALUES(?, ?, ?, ?)",
            (film_id,
             self.stripHTML(item.get('ActorName', '')).strip(),
             self.stripHTML(item.get('CharacterName', '')).strip(),
             item.get('Ranking', 0)))
        self.con.commit()

    def stripHTML(self, string):
        tagStripper = MLStripper()
        tagStripper.feed(string)
        return tagStripper.get_data()

    def cleanMoney(self, string):
        # You could add more symbols to this, but it gets complex with some of the
        # symbols being unicode, so that is skipped for now.
        currencySymbles = "$"
        cleanMoneyString = ""
        stopAdding = False
        for index, char in enumerate(list(string)):
            if char in currencySymbles and not stopAdding:
                cleanMoneyString += char
            elif char == "," and not stopAdding:
                cleanMoneyString += char
            elif char.isdigit() and not stopAdding:
                cleanMoneyString += char
            elif char in ' ':
                # Numbers do not contain spaces, so once the number has started,
                # a space means it has ended.
                if len(cleanMoneyString) > 0:
                    stopAdding = True
        return cleanMoneyString

def libgen(mol_list, output_name):
    """
    function to generate a database format library of fragments from a mol,
    list of mol objects, .smi, or .sdf file

    :param mol_list: list of molecules, a single molecule, or a filename of molecules to read
    :type mol_list: str|Chem.Mol|[Chem.Mol]
    :param output_name: name of the database to use
    :type output_name: str
    :return:
    """
    # if a file, not a list, then read into a list
    if isinstance(mol_list, str) and mol_list.endswith(".smi"):
        mol_list = Chem.SmilesMolSupplier(mol_list, delimiter="\t", titleLine=False)
    elif isinstance(mol_list, str) and mol_list.endswith(".sdf"):
        mol_list = Chem.SDMolSupplier(mol_list)
    elif type(mol_list) == Chem.Mol:
        mol_list = [mol_list]
    elif type(mol_list) == list:
        assert type(mol_list[0]) == Chem.Mol
    else:
        raise Exception("Did you provide a list of mol objects? Input type error.")

    fragment_dict_deque = deque()
    heritage_dict_deque = deque()
    atoms_dict_deque = deque()
    pseudoatoms_dict_deque = deque()

    logger.info("Fragmenting:")
    n = len(mol_list)
    i = 0
    t0 = time.time()
    for mol in mol_list:
        re_mol = RecomposerMol.fromMol(mol=mol)
        frag_list, heritage_list, atoms_list, pseudo_atoms_list = re_mol.get_all_fragments(7)
        fragment_dict_deque.extend(frag_list)
        heritage_dict_deque.extend(heritage_list)
        atoms_dict_deque.extend(atoms_list)
        pseudoatoms_dict_deque.extend(pseudo_atoms_list)
        logger.info("DONE: %d/%d %.f" % (i, n, 1000 * (time.time() - t0) / (i + 1)))
        i += 1
    logger.info("Done")

    logger.info("Saving:")
    # create the database for the output
    db = SqliteExtDatabase(
        output_name,
        pragmas={
            'cache_size': -1024 * 64,  # 64MB page-cache.
            'journal_mode': 'wal',     # Use WAL-mode (you should always use this!).
            'foreign_keys': 0,
            'wal_autocheckpoint': 10,
        })
    db.connect()

    # get the models
    Fragment, Heritage, PseudoAtoms, Atoms = lib_read(db)
    Fragment.create_table(safe=True)
    Heritage.create_table(safe=True)
    PseudoAtoms.create_table(safe=True)
    Atoms.create_table(safe=True)

    with db.atomic():
        if len(fragment_dict_deque) > 0:
            for ents in chunked(fragment_dict_deque, 200):
                query = Fragment.replace_many(ents)
                query.execute()
            for ents in chunked(heritage_dict_deque, 200):
                query = Heritage.replace_many(ents)
                query.execute()
            for ents in chunked(pseudoatoms_dict_deque, 200):
                query = PseudoAtoms.replace_many(ents)
                query.execute()
            for ents in chunked(atoms_dict_deque, 200):
                query = Atoms.replace_many(ents)
                query.execute()

    db.close()
    clean(output_name)
    return 1

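# Illustrative call to libgen above (not from the original source); it assumes a SMILES
# file named "molecules.smi" exists in the working directory:
#   libgen("molecules.smi", "fragments.db")
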
db = SqliteExtDatabase('links.db')


class BaseModel(Model):
    class Meta:
        database = db


class Link(BaseModel):
    url = CharField()

    class Meta:
        primary_key = CompositeKey('url')


db.connect(reuse_if_open=True)
db.create_tables([Link], safe=True)


def checkDuplicate(link):
    try:
        # Truncate link to at most 254 chars
        link = link[:254]
        Link.create(url=link)
        return True
    except IntegrityError:
        return False

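# Illustrative usage of checkDuplicate above (not part of the original source): the first
# call inserts the URL and returns True; repeating the same URL violates the primary key,
# raises IntegrityError, and returns False.
assert checkDuplicate('https://example.com/article') is True
assert checkDuplicate('https://example.com/article') is False
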
from sklearn.ensemble import RandomForestClassifier
from sklearn.decomposition import TruncatedSVD
from sklearn import datasets
from sklearn.cross_validation import cross_val_score
import NLP
from gensim import corpora, matutils
import re
import numpy as np
from itertools import chain

from peewee import *
from playhouse.sqlite_ext import SqliteExtDatabase

db = SqliteExtDatabase('../Data/twtrData.sqlite3', autocommit=False, journal_mode='persist')
db.connect()


class TBaseModel(Model):
    class Meta:
        database = db


class Tweets(TBaseModel):
    createdat = DateTimeField(db_column='createdAt')
    name = CharField(null=True)
    screen_name = CharField(null=True)
    text = CharField(null=True)
    updatedat = DateTimeField(db_column='updatedAt')
    user = CharField(db_column='user_id', null=True)

def get_path(self, operating_system: str) -> 'ReleasePath':
    for path in self.paths:
        if path.operating_system == operating_system:
            return path
    return None


class ReleasePath(BaseModel):
    release = ForeignKeyField(Release, backref="paths")
    operating_system = CharField()
    installer_path = CharField()
    dist_path = CharField()


CONN.connect()

with open("./update_embed.py", "r") as update_embed:
    UPDATE_EMBED = update_embed.read()

if FIRST_INIT:
    CONN.create_tables([Product, Release, ReleasePath])

"""
Folder structure on the server
./
    server.py
    server.db
    build/
        ...temp files...
    dist/
        ...temp files...
    releases/
        my_app/