class PersistentEventBus(EventBus):
    """Event bus backed by a Postgres table: one row per (topic, execution id)."""

    def __init__(self, config):
        super().__init__(config)
        # Bind the shared peewee proxy to this concrete database connection.
        self.db = PostgresqlExtDatabase(**config['db'])
        database_proxy.initialize(self.db)
        self._migrations()

    def _migrations(self):
        """Create the backing table if it does not already exist."""
        self.db.create_tables([EventModel], safe=True)

    def publish(self, event: Event):
        """Insert the event, or overwrite the stored copy for the same
        topic / blueprint_execution_id pair (last write wins)."""
        execution_id = event.metadata['blueprint_execution_id']
        stored = self.get_event(event.topic, execution_id)
        if not stored:
            EventModel(topic=event.topic, body=event.body, metadata=event.metadata).save()
            return
        stored.body = event.body
        stored.metadata = event.metadata
        stored.save()

    def get_event(self, topic: str, blueprint_execution_id: str):
        """Return the stored EventModel row for (topic, execution id), or None."""
        criteria = ((EventModel.topic == topic)
                    & (EventModel.metadata['blueprint_execution_id'] == blueprint_execution_id))
        try:
            return EventModel.get(criteria)
        except DoesNotExist:
            return None
from playhouse.postgres_ext import PostgresqlExtDatabase

# Connection settings for the local "mega_chat" database.
db = PostgresqlExtDatabase(
    "mega_chat",
    user="******",
    host="localhost",
    port=5432,
    password="******",
)

# Imported after `db` exists, preserving the original import-time ordering.
from data import models

# One-shot schema bootstrap: open, create any missing tables, close.
TABLES = [models.User, models.Message, models.Chat, models.Salt, models.UsersChats]

db.connect()
db.create_tables(TABLES)
db.close()
return super(BaseModel, self).save(*args, **kwargs) class Meta: database = db # Check this one out here http://docs.peewee-orm.com/en/latest/peewee/models.html#table-names legacy_table_names = False class UserProfile(BaseModel): username = pw.CharField() password = pw.CharField() class TodoList(BaseModel): name = pw.CharField() user_profile = pw.ForeignKeyField(UserProfile, backref="lists") class TodoTask(BaseModel): task = pw.CharField() completed = pw.BooleanField(default=False) todo_list = pw.ForeignKeyField(TodoList, backref="todos") db.connect() db.create_tables([UserProfile, TodoList, TodoTask]) # Define your models here # class if __name__ == "__main__":
class PersistentBlueprintInstructionExecutionStore(BlueprintInstructionExecutionStore):
    """Durable instruction-execution store.

    Execution/instruction state is persisted in Postgres (peewee models) and
    pending instructions are fanned out to workers through an SQS queue.
    """

    def __init__(self, manager: BlueprintManager, config):
        super().__init__(manager, config)
        self.config = config
        self.db = PostgresqlExtDatabase(**config['db'])
        self._initialize()
        self._migrations()
        # instruction_state_id -> SQS receipt handle; needed later to delete
        # the corresponding message from the queue.
        self.receipthandle_by_instructionstateid = dict()

    def remove_effects(self):
        """Destroy the queue and drop the tables (teardown/cleanup helper)."""
        self.sqs.delete_queue(QueueUrl=self._queue_url)
        self.db.drop_tables([BlueprintExecutionModel, BlueprintInstructionStateModel], safe=True)

    def _initialize(self):
        """Bind the database proxy and build the SQS client."""
        database_proxy.initialize(self.db)
        sqs_region = self.config.get('sqs', {}).get('region_name')
        kwargs = {} if not sqs_region else dict(region_name=sqs_region)
        self.sqs = boto3.client('sqs', **kwargs)

    def _get_queue_name(self):
        """Queue name, with the optional configured prefix applied."""
        queue_name = 'BlueprintInstructionExecutionStore'
        return self.config['sqs'].get('prefix', '') + queue_name

    def _migrations(self):
        """Create the tables and the SQS queue if they do not already exist."""

        def does_queue_exist(queue_name):
            # Substring match against existing queue URLs; caches the URL too.
            response = self.sqs.list_queues()
            for url in response.get('QueueUrls', []):
                if queue_name in url:
                    self._queue_url = url
                    return True
            return False

        queue_name = self._get_queue_name()
        self.db.create_tables([BlueprintExecutionModel, BlueprintInstructionStateModel], safe=True)
        if not does_queue_exist(queue_name):
            response = self.sqs.create_queue(QueueName=queue_name)
            self._queue_url = response['QueueUrl']

    def _store_blueprint_execution(self, blueprint_execution: BlueprintExecution):
        """Persist a new blueprint execution row."""
        bem = BlueprintExecutionModel(execution_id=blueprint_execution.execution_id,
                                      execution_context=blueprint_execution.execution_context,
                                      blueprint=asdict(blueprint_execution.blueprint))
        bem.save()

    def _store_instruction_state(self, instruction_state: BlueprintInstructionState):
        """Persist the instruction state row, then enqueue it for workers."""
        instruction_definition = asdict(instruction_state.instruction)
        instr_state_model = BlueprintInstructionStateModel(
            instruction_state_id=instruction_state.id_,
            blueprint_execution_id=instruction_state.blueprint_execution_id,
            instruction=instruction_definition,
            status=instruction_state.status.value)
        instr_state_model.save()
        self.sqs.send_message(
            QueueUrl=self._queue_url,
            MessageBody=superjson(instruction_state)
        )

    def _get_instruction_to_process(self, worker_id) -> Optional[BlueprintInstructionState]:
        """Pull one SQS message and lock its IDLE row; None if nothing usable."""
        response = self.sqs.receive_message(QueueUrl=self._queue_url, MaxNumberOfMessages=1)
        messages = response.get('Messages')
        if not messages:
            return
        b = json.loads(messages[0]['Body'])
        self.receipthandle_by_instructionstateid[b['id_']] = messages[0]['ReceiptHandle']
        try:
            # SELECT ... FOR UPDATE so two workers cannot claim the same row.
            instruction_state_model = BlueprintInstructionStateModel.select().for_update().where(
                (BlueprintInstructionStateModel.instruction_state_id == b['id_'])
                & (BlueprintInstructionStateModel.status == InstructionStatus.IDLE.value)).get()
        except DoesNotExist:
            instruction_state_model = None
        if not instruction_state_model:
            # Bug fix: the original string lacked the f-prefix, so the literal
            # text "{b}" was logged instead of the message body.
            log.info(f"Got message with body {b} but did not get corresponding row in table. Might be a race condition.")
            return
        return BlueprintInstructionState(
            instruction=self.manager.objectify_instruction(b['instruction']),
            blueprint_execution_id=b['blueprint_execution_id'],
            status=InstructionStatus(b['status']),
            id_=b['id_']
        )

    def _remove_from_queue(self, instruction_state: BlueprintInstructionState):
        """Delete the queue message that delivered this instruction state."""
        self.sqs.delete_message(
            QueueUrl=self._queue_url,
            ReceiptHandle=self.receipthandle_by_instructionstateid[instruction_state.id_]
        )

    def _set_status_for_instruction(self, instruction_state: BlueprintInstructionState, status: InstructionStatus):
        """Update status in the DB and in memory; terminal states leave the queue."""
        log.info(f"Setting instruction_state {instruction_state.id_}'s status to be {status}")
        terminal_states = [InstructionStatus.SUCCESS, InstructionStatus.FAILED, InstructionStatus.END]
        instruction_state.status = status
        BlueprintInstructionStateModel.update(status=status.value).where(
            BlueprintInstructionStateModel.instruction_state_id == instruction_state.id_).execute()
        if instruction_state.status in terminal_states:
            self._remove_from_queue(instruction_state)
        else:
            # We're relying on the visibility timeout to retry
            pass

    def get_execution_context_from_id(self, blueprint_execution_id) -> Dict:
        """Return the stored execution context for an execution id."""
        model: BlueprintExecutionModel = BlueprintExecutionModel.select().where(
            BlueprintExecutionModel.execution_id == blueprint_execution_id).get()
        return dict(model.execution_context)

    def get_instruction_to_process(self, worker_id=None) -> Optional[BlueprintInstructionState]:
        # Wrap the claim in a transaction so the row lock taken by
        # _get_instruction_to_process is actually held.
        with self.db.atomic():
            return super().get_instruction_to_process(worker_id)
if account_id: return Response( app.config['JAVASCRIPT'] % (app.config['DOMAIN'], account_id), mimetype='text/javascript') return Response('', mimetype='text/javascript') @app.errorhandler(404) def not_found(e): return Response('<h3>Not found.</h3>') # Request handlers -- these two hooks are provided by flask and we will use them # to create and tear down a database connection on each request. @app.before_request def before_request(): g.db = database g.db.connection() @app.after_request def after_request(response): g.db.close() return response if __name__ == '__main__': database.create_tables([Account, PageView], safe=True) app.run(debug=True)
__author__ = 'Ken' import unittest import requests from playhouse.test_utils import test_database from playhouse.postgres_ext import PostgresqlExtDatabase from models import Student TEST_DB = PostgresqlExtDatabase(database='test', user='******') TEST_DB.connect() TEST_DB.drop_tables([Student]) TEST_DB.create_tables([Student], safe=True) STUDENT_DATA = { 'th_username': '******', 'email': '*****@*****.**', 'first_name': 'Ken', 'last_name': 'Alger', 'password': '******', 'github_username': '******', 'city': 'Keizer', 'state': 'OR', 'country': 'USA' } STUDENT_LIST = ['kenalger', 'craigsdennis', 'kennethlove'] class StudentModelTestCase(unittest.TestCase):
class BinaryBank(SQLModel):
    """Binary blob storage with optional JSON sidecar metadata."""

    # Bug fix: defaults must be callables, not call results.  The original
    # passed `uuid_generater()` / `datetime.now(JST)`, which are evaluated
    # ONCE at import time, so every inserted row shared the same default
    # UUID and timestamp.  peewee invokes a callable default per row.
    # (Assumes uuid_generater is a zero-arg function — TODO confirm.)
    _id = UUIDField(primary_key=True, unique=True, default=uuid_generater)
    url = TextField(null=True, default='')
    filename = TextField(null=True, default='')
    _format = TextField(null=True, default='')
    data = BlobField()
    owner = TextField(null=True, default='')
    json = BinaryJSONField(dumps=None, null=True)
    created_at = DateTimeField(default=lambda: datetime.now(JST))

    class Meta:
        db_table = 'binarybank'


if __name__ == '__main__':
    s = '山って知ってる?'
    # Second positional argument is `safe` (CREATE TABLE IF NOT EXISTS).
    db.create_tables([BinaryBank], True)
@app.route('/a.js')
def script():
    """Serve the tracking snippet, parameterized by the ?id= account id."""
    account_id = request.args.get('id')
    if not account_id:
        # No account: serve an empty (but valid) javascript response.
        return Response('', mimetype='text/javascript')
    body = app.config['JAVASCRIPT'] % (app.config['DOMAIN'], account_id)
    return Response(body, mimetype='text/javascript')


@app.errorhandler(404)
def not_found(e):
    return Response('<h3>Not found.</h3>')


# Request handlers -- these two hooks are provided by flask and we will use them
# to create and tear down a database connection on each request.
@app.before_request
def before_request():
    g.db = database
    g.db.connection()


@app.after_request
def after_request(response):
    g.db.close()
    return response


if __name__ == '__main__':
    database.create_tables([Account, PageView], safe=True)
    app.run(debug=True)
value = IntegerField() time = DateTimeField(default=datetime.now) class Meta: database = db class NetworkSentMonitor(Model): """ Table used for storing sent bytes over the network Fields: - value: the value (in bytes) of the sent bytes over the network - time: the time at the sent bytes had the value """ value = IntegerField() time = DateTimeField(default=datetime.now) class Meta: database = db if __name__ == '__main__': db.connect() db.drop_tables([ CPUMonitor, MemoryMonitor, DiskMonitor, Partition, NetworkReceiveMonitor, NetworkSentMonitor ]) db.create_tables([ CPUMonitor, MemoryMonitor, DiskMonitor, Partition, NetworkReceiveMonitor, NetworkSentMonitor ])
def is_authenticated(self): return self.authenticated def is_active(self): return self.active def is_anonymous(self): return False def get_id(self): return self.login def set_password(self, password): self.pw_hash = generate_password_hash(password) def check_password(self, password): return check_password_hash(self.pw_hash, password) class Package(BaseModel): id = UUIDField(primary_key=True) class UserPackage(BaseModel): user = ForeignKeyField(User) package = ForeignKeyField(Package) db.create_tables([User, Package, UserPackage], safe=True)
for ville in villes: Proximite.create(id_ville=ville.id_ville, nombre_ecole='nombre inconnue', nombre_habitant='nombre inconnue', moins_25ans='nombre inconnue', nombre_commerce='nombre inconnue') if __name__ == '__main__': crea_table = False test_utilisateur = False if crea_table: tout = (Logement, Ville, Types_bien, Utilisateurs, Mot_de_passe, Proximite) #création d'une table db.drop_tables([ Logement, Ville, Types_bien, Utilisateurs, Mot_de_passe, Proximite, Cave, Jardin, Exterieur, Stationnement, Coordonnees ]) db.create_tables([ Logement, Ville, Types_bien, Utilisateurs, Mot_de_passe, Proximite, Cave, Jardin, Exterieur, Stationnement, Coordonnees ]) if test_utilisateur: for utilisateur in (Utilisateurs.select().where( Utilisateurs.pseudo == "Sarah")): Sarah = utilisateur Sarah._test_SUCCESS()
description = CharField() is_training = BooleanField() trained_model = BlobField(null=True) # Trained model for aggregate data class Observation(BaseModel): id_ = AutoField(primary_key=True) station = ForeignKeyField(model=Station, backref='observations') time = DateTimeField() is_training = BooleanField() sample_frequency = FloatField() sample_count = IntegerField() sample_data = ArrayField(FloatField) rms = FloatField() crest = FloatField() peak_to_peak = FloatField() kurtosis = FloatField() is_anomaly = BooleanField(null=True) def clear_db(): pg_db.drop_tables([Station, Observation]) pg_db.create_tables([Station, Observation]) pg_db.create_tables([Station, Observation]) log.info('Loaded DB')
host=os.environ.get('POSTGRES_HOST'), port=os.environ.get('POSTGRES_PORT'), register_hstore=False) class Product(Model): class Meta: database = db name = CharField() amount = IntegerField() price = BigIntegerField(null=True) sales = BigIntegerField(null=True, default=0) db.create_tables([Product]) app = Flask(__name__) auth = HTTPBasicAuth() USERS = { 'name': os.environ.get('BASIC_AUTH_NAME'), 'password': os.environ.get('BASIC_AUTH_PW') } ERROR_TEMPLATE_PATH = './templates/401.html' @auth.verify_password def verify_password(name, password): if (USERS['name'] != name and USERS['password'] != password): return False return True
from vk.settings import DATABASE_CONNECTION

# Database connection settings come from the project config.
db = PostgresqlExtDatabase(register_hstore=False, **DATABASE_CONNECTION)


class BaseModel(peewee.Model):
    """Shared base: surrogate integer primary key + common database binding."""
    id = peewee.PrimaryKeyField()

    class Meta:
        database = db


class Users(BaseModel):
    """A VK user, with raw profile metadata and crawl depth."""
    vk_id = peewee.IntegerField(index=True)
    meta = BinaryJSONField()
    depth = peewee.IntegerField()


class Friends(BaseModel):
    """A friendship edge between two users."""
    user1_id = peewee.IntegerField(index=True)
    user2_id = peewee.IntegerField(index=True)


# Run this module directly to create the tables without manual DDL.
if __name__ == '__main__':
    db.create_tables([Users, Friends])
last_status = IntegerField(null=True) class Meta: indexes = ((('year', 'month', 'day', 'hour'), False), ) table_name = 'request_ts' class RegionTS(BaseTSModel): top_region = CharField() sub_region = CharField() count = IntegerField(null=True) class Meta: indexes = ((('year', 'month', 'day', 'hour', 'top_region'), True), ) table_name = 'region_ts' class HouseTS(BaseTSModel, BaseHouse): class Meta: indexes = ( (('year', 'month', 'day', 'hour', 'vendor', 'vendor_house_id'), True), (('created', 'deal_status'), False), ) table_name = 'house_ts' db.connect() db.create_tables( [RegionTS, HouseTS, RequestTS, House, HouseEtc, Vendor, SubRegion])
def __repr__(self): return f"Store name is {self.name}" # warehouse table | indexing warehouse to make sure that each store ONLY has one warehouse class Warehouse(BaseModel): # indexed warehouse store, dont need to do index=true, as foreignKey are indexed already by default store = pw.ForeignKeyField(Store, backref='warehouses', unique=True) location = pw.TextField() # product table | indexed on product name class Product(BaseModel): name = pw.CharField(index=True) # indexed product name. description = pw.TextField() warehouse = pw.ForeignKeyField(Warehouse, backref='products') color = pw.CharField(null=True) # connect db to python & create `orm_practise.db` db.connect() # create the tables that we defined in python db.create_tables([Store, Warehouse, Product]) print(" ") print('orm_practise.py have successfully imported') print(" ")