import logging
import sys
import traceback
from flask import Flask, jsonify
from pesto_common.log.logger_factory import LoggerFactory
from pesto_example.router.example import app_example

# Application logger for this entry point.
logger = LoggerFactory.get_logger('main')

# Silence werkzeug's per-request INFO chatter; warnings and errors still show.
log = logging.getLogger('werkzeug')
log.setLevel(logging.WARNING)

app = Flask(__name__)
app.register_blueprint(app_example)


@app.route('/')
def index():
    """Identity endpoint: report the service name as JSON."""
    return jsonify({'name': 'pesto-example'})


if __name__ == '__main__':
    port = 8080
    try:
        app.run(host='0.0.0.0', port=port)
    except (KeyboardInterrupt, SystemExit):
        # Exit quietly on Ctrl+C / SystemExit instead of printing a traceback.
        print('')
# -*- coding:utf8 -*- import sys import threading from enum import Enum import mysql.connector import mysql.connector.cursor import mysql.connector.pooling from mysql.connector.conversion import MySQLConverter from pesto_common.config.configer import Configer from pesto_common.log.logger_factory import LoggerFactory from pesto_orm.error import reraise, DBError logger = LoggerFactory.get_logger('dialect.mysql.factory') class ExecuteMode(Enum): ONE_MODE = 0 MANY_MODE = 1 class ResultMode(Enum): STORE_RESULT_MODE = 0 USE_RESULT_MODE = 1 class CursorMode(Enum): CURSOR_MODE = 0 DICT_CURSOR_MODE = 1
# -*- coding:utf8 -*-
from abc import ABCMeta, abstractmethod

from pesto_common.log.logger_factory import LoggerFactory
from pesto_common.utils.reflect_utils import ReflectUtils
from pesto_orm.core.base import ExecuteMode
from pesto_orm.core.error import DBError, DBErrorType

logger = LoggerFactory.get_logger('core.repository')


class BaseRepository(object):
    """Abstract repository base: captures table metadata from a model instance.

    :param model_class: model type this repository manages
    :param module: optional module name (defaults to None; comment in the
        original suggests model_class.__module__ is the intended value)
    :param class_name: optional class name (likewise model_class.__name__)
    """
    __metaclass__ = ABCMeta

    def __init__(self, model_class, module=None, class_name=None):
        self.model_class = model_class
        self.module = module  # model_class.__module__
        self.class_name = class_name  # model_class.__name__
        self.db_name = None
        self.table_name = None
        self.table_alias = None
        self.primary_key = None
        self.sequence = None

        # Probe a model instance for table metadata attributes.
        model = self._create_instance()
        if not isinstance(model, dict):
            # BUG FIX: was hasattr(model, 'db.name') — a dotted string can
            # never match a regular attribute, so db_name always stayed None
            # even when the model defined it. The read below uses model.db_name.
            if hasattr(model, 'db_name'):
                self.db_name = model.db_name
            if hasattr(model, 'table_name'):
                self.table_name = model.table_name
import multiprocessing
import os
import signal
import traceback

from pesto_common.log.logger_factory import LoggerFactory

logger = LoggerFactory.get_logger('mode.downgrade')


def fallback(exceptions=None, timeout=-1, callback=None):
    """Decorator guarding a call with exception/timeout downgrade behavior.

    :param exceptions: list of exception classes to downgrade on
    :param timeout: timeout in seconds; -1 presumably disables it — confirm
        against the dispatch logic below this view
    :param callback: called on timeout; if unset a timeout error is raised
    """
    # FIX: the default was a shared mutable list (exceptions=[]); use a None
    # sentinel so each call gets its own fresh list.
    if exceptions is None:
        exceptions = []

    def wrap(func):
        def handle(result, **kwargs):
            # Run the original function and publish its return value through
            # the manager-backed list shared with the spawning process.
            func = kwargs['func']
            args = kwargs['args']
            kwargs = kwargs['kwargs']
            return_value = func(*args, **kwargs)
            logger.info("Handle method: " + func.__name__)
            result.append(return_value)

        def to_do(*args, **kwargs):
            manager = multiprocessing.Manager()
            new_kwargs = {'func': func, 'args': args, 'kwargs': kwargs}
            result = manager.list()
# coding=utf-8
import traceback

from pesto_common.config.configer import Configer
from pesto_common.log.logger_factory import LoggerFactory
from pesto_orm.dialect.mysql.mysql_factory import MysqlFactory
from pesto_orm.dialect.mysql.mysql_factory import NumpyMySQLConverter

logger = LoggerFactory.get_logger('dialect.mysql.domain')

# Module-level executor; only built when DB settings exist in configuration.
mysqlExecutor = None
if Configer.contains('db.'):
    db_config = {
        'user': Configer.get("db.user"),
        'password': Configer.get("db.password"),
        'host': Configer.get("db.host"),
        'port': Configer.get("db.port"),
        'database': Configer.get("db.database"),
        # NOTE(review): bool() on a config value is True for ANY non-empty
        # string, including "False" — confirm Configer returns real booleans
        # here, otherwise this flag cannot be switched off via config text.
        'raise_on_warnings': bool(Configer.get("db.raise_on_warnings", False)),
        'charset': Configer.get("db.charset", 'utf8mb4'),
        'connection_timeout': int(Configer.get('db.connection_timeout', 180)),
        'autocommit': bool(Configer.get('db.autocommit', True)),
        'converter_class': NumpyMySQLConverter
    }
    mysqlExecutor = MysqlFactory.get_executor(db_config)


def transaction(rollback_exceptions=None):
    """Decorator running the wrapped call inside a DB transaction.

    :param rollback_exceptions: exception classes that trigger a rollback
    """
    # FIX: the default was a shared mutable list (rollback_exceptions=[]);
    # use a None sentinel so each call gets its own fresh list.
    if rollback_exceptions is None:
        rollback_exceptions = []

    def wrap(func):
        def handle(result, **kwargs):
            # Execute the original method.
            func = kwargs['func']
# -*- coding:utf8 -*- from pesto_common.log.logger_factory import LoggerFactory from pesto_orm.dialect.mysql.domain import mysqlExecutor from pesto_orm.dialect.mysql.mysql_factory import ExecuteMode, DBErrorType from pesto_orm.error import DBError from pesto_orm.repository import BaseRepository logger = LoggerFactory.get_logger('dialect.mysql.repository') class MysqlBaseRepository(BaseRepository): def __init__(self, model_class=None): super(MysqlBaseRepository, self).__init__(model_class) self.db_name = None self.table_name = None self.primary_key = None self.from_sql = None model = self._create_instance() if not isinstance(model, dict): if hasattr(model, "db.name"): self.db_name = model.db_name if hasattr(model, "table_name"): self.table_name = model.table_name if hasattr(model, "primary_key"): self.primary_key = model.primary_key def _assembly_from_sql(self): if self.from_sql is None: self.from_sql = ("`" + self.db_name +
from threading import Condition

from pesto_common.log.logger_factory import LoggerFactory
from pesto_orm.core.error import DBError
from pesto_orm.db.connection import Connection, InvalidConnection

logger = LoggerFactory.get_logger('db.pool')


class TooManyConnections(DBError):
    """Raised by the pool — presumably when max_size connections are already
    in use; confirm against the acquire logic beyond this view."""
    pass


class ConnectionPool(object):
    """Bounded connection pool guarded by a single Condition.

    :param target: callable/factory handed to the pool — presumably produces
        raw connections via self.__connection(); that method is defined
        beyond this view, so confirm there.
    :param core_size: number of connections created eagerly at construction
    :param max_size: upper bound on total connections (enforced elsewhere)
    :param max_wait: wait limit (units not visible here — TODO confirm)
    """

    def __init__(self, target, core_size=20, max_size=100, max_wait=100, *args, **kwargs):
        self._target = target
        self._core_size = core_size
        self._max_size = max_size
        self._max_wait = max_wait
        self._curr_size = 0          # connections handed out / created so far
        self._curr_wait = 0          # waiters currently blocked on the pool
        self._lock = Condition()     # guards the two lists below
        self._args, self._kwargs = args, kwargs  # forwarded to connection creation
        self._using_conns = []       # connections currently checked out
        # Pre-warm the pool with core_size idle connections.
        # NOTE(review): self.__connection() is defined beyond this view;
        # _curr_size is not incremented here — confirm it counts only
        # on-demand connections.
        self._idle_conns = [self.__connection() for i in range(core_size)]
import sys import threading from abc import ABCMeta from pesto_common.log.logger_factory import LoggerFactory from pesto_orm.core.base import ExecuteMode, CursorMode from pesto_orm.core.error import DBErrorType, DBError, reraise from pesto_orm.db.pool import ConnectionPool logger = LoggerFactory.get_logger('core.executor') class Executor(object): __metaclass__ = ABCMeta def __init__(self, pool, show_sql=False): self.__pool = pool self.__show_sql = show_sql # 保留本地conn self.__local_conn = threading.local() self.__local_conn.conn = None self.__local_conn.use_transaction = False def show_sql(self, sql, params=None): if isinstance(params, list) and isinstance(params[0], tuple): log_params = params[:5] logger.info('Execute sql: {}, params(top5): \n{}'.format( sql, ', \n'.join([str(param_tuple) for param_tuple in log_params]))) else:
#!/usr/bin/env python
# encoding: utf-8
from abc import abstractmethod, ABCMeta

from pesto_common.log.logger_factory import LoggerFactory
from pesto_orm.core.model import Model

logger = LoggerFactory.get_logger("pipeline.step")


class PipelineError(RuntimeError):
    """Raised for failures while running a pipeline."""
    pass


class PipelineStepData(Model):
    """Model subclass acting as the data/result container for steps."""
    pass


class PipelineStep:
    """One step of a pipeline: holds input data, result data, and the link
    to the next step.

    :param data: input for the step — a PipelineStepData, or a plain dict
        used to build one
    :param next_step: the step to run after this one, if any
    """
    # NOTE(review): __metaclass__ is the Python 2 spelling and has no effect
    # on Python 3 — confirm which interpreter this project targets.
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, data=None, next_step=None):
        # FIX: the default was a shared mutable dict (data={}); normalize a
        # None sentinel to a fresh dict per call so instances never alias
        # one another's data. Behavior for the default case is unchanged.
        if data is None:
            data = {}
        if isinstance(data, PipelineStepData):
            self.data = data
        else:
            self.data = PipelineStepData(init_attrs=data)  # data needed for execution
        self.result = PipelineStepData()  # result data produced by execution
        self.next_step = next_step        # the next step to execute
        self.index = 1
        self.exit = False