Ejemplo n.º 1
0
class SQLManager(object):
    """Owns a MySQL connection pool and five eagerly checked-out
    connection/cursor pairs.

    Exposes the historical attributes ``cnx1``..``cnx5`` and
    ``cur1``..``cur5`` so existing callers keep working.
    """

    # Number of connections checked out eagerly; also the pool size.
    _NUM_CONNECTIONS = 5

    def __init__(self):
        self.connect_to_database()

    def connect_to_database(self):
        """Create the pool and pre-open the numbered connections/cursors."""
        db_name = settings.get_database()

        self.connpool = MySQLConnectionPool(user=settings.DatabaseConfig.USERNAME,
                                            password=settings.DatabaseConfig.PASSWORD,
                                            host=settings.DatabaseConfig.HOST,
                                            pool_name="order_manager",
                                            pool_size=self._NUM_CONNECTIONS,
                                            autocommit=True,
                                            database=db_name
                                            )
        # Build cnxN/curN in a loop instead of five copy-pasted pairs;
        # attribute names are unchanged for backward compatibility.
        for i in range(1, self._NUM_CONNECTIONS + 1):
            cnx = self.connpool.get_connection()
            setattr(self, 'cnx%d' % i, cnx)
            setattr(self, 'cur%d' % i, cnx.cursor())
Ejemplo n.º 2
0
class DataSource:
    """Thin wrapper around a MySQL connection pool for remote queries."""

    def __init__(self,
                 host='127.0.0.1',
                 port='3306',
                 user='******',
                 password='',
                 database='jground',
                 _pool_name='remote_query',
                 _pool_size=10):
        # Sessions are reset when returned to the pool and commits are
        # implicit (autocommit=True).
        self.pool = MySQLConnectionPool(
            pool_size=_pool_size,
            host=host,
            port=port,
            user=user,
            password=password,
            database=database,
            pool_reset_session=True,
            autocommit=True,
            charset='utf8mb4',
        )

    def getConnection(self):
        """Check a connection out of the pool."""
        return self.pool.get_connection()

    def returnConnection(self, con):
        """Give *con* back to the pool (closing a pooled connection recycles it)."""
        con.close()
Ejemplo n.º 3
0
class DatabaseConnectionPool(object):
    """Lazy singleton wrapper around a named, ten-connection MySQL pool."""

    # The single shared instance, created on first get_instance() call.
    INSTANCE = None

    def __init__(self, filename='../resources/db_properties.ini'):
        # Direct construction is forbidden once the singleton exists.
        if self.INSTANCE is not None:
            raise ValueError("An instantiation already exists!")
        db_config = read_db_config(filename)
        self.__cnxPool = MySQLConnectionPool(pool_name="myPool",
                                             pool_size=10,
                                             **db_config)

    @classmethod
    def get_instance(cls, filename='../resources/db_properties.ini'):
        """Return the shared pool wrapper, creating it on first use."""
        print("get_instance() ------------", filename)
        if cls.INSTANCE is None:
            cls.INSTANCE = DatabaseConnectionPool(filename)
        return cls.INSTANCE

    def get_connection(self):
        """Check a connection out of the underlying pool."""
        return self.__cnxPool.get_connection()

    @classmethod
    def pool_close(cls):
        # Only drops the singleton reference; pooled connections are not
        # explicitly closed here.
        cls.INSTANCE = None
Ejemplo n.º 4
0
class MySQLPooledConnection(MySQLConnectionABC):
    """ MySQL Connection from connection-pool """
    def __init__(self,
                 *args,
                 db_schema: DBSchema,
                 dictionary: bool = False,
                 named_tuple: bool = False,
                 pool_size: int = 16,
                 **kwargs):
        # *args/**kwargs are forwarded BOTH to the pool constructor and to
        # the ABC base class; pool_size only goes to the pool.
        print('MySQLPooledConnection: Init with pool_size=%d' % pool_size)
        self.cnx_pool = MySQLConnectionPool(pool_size=pool_size,
                                            *args,
                                            **kwargs)
        super().__init__(*args,
                         db_schema=db_schema,
                         dictionary=dictionary,
                         named_tuple=named_tuple,
                         **kwargs)

    def new_cnx(self, *args, **kwargs):
        # Extra arguments are accepted but ignored; the pool decides.
        return self.cnx_pool.get_connection()

    def create_cursor(self, *args, **options) -> 'MySQLCursor':
        # Closes the current connection and checks a fresh one out of the
        # pool for every new cursor — presumably to avoid sharing state
        # between cursors; TODO(review): confirm that is the intent.
        self.close()
        self.cnx = self.new_cnx()
        return MySQLCursor(
            self, self.cnx.cursor(*args, **self._cursor_options(**options)))
Ejemplo n.º 5
0
class MySQL(object):
    """Small helper around a shared MySQL connection pool.

    ``query`` returns fetched rows; ``modify`` commits a write.  Connections
    always go back to the pool, even when the statement raises (the original
    leaked them on error).
    """

    def __init__(self):
        self._cnxpool = MySQLConnectionPool(pool_name="mysqlPool",
                                            pool_size=3,
                                            **Config.dbconfig)

    def _getConnection(self):
        """Check a connection out of the pool."""
        return self._cnxpool.get_connection()

    def query(self, query, queryArgsTuple=None):
        """Execute a read statement and return ``cursor.fetchall()``."""
        # closing() on the connection guarantees it is returned to the pool
        # even if execute()/fetchall() raises.
        with closing(self._getConnection()) as cnx:
            with closing(cnx.cursor()) as cursor:
                cursor.execute(query, queryArgsTuple)
                return cursor.fetchall()

    def modify(self, query, queryArgsTuple=None):
        """Execute a write statement and commit it."""
        with closing(self._getConnection()) as cnx:
            with closing(cnx.cursor()) as cursor:
                cursor.execute(query, queryArgsTuple)
                cnx.commit()
Ejemplo n.º 6
0
class Event(Flask):
    """Flask application that serves shared static assets and exposes
    DB-backed enums as nested namedtuples on ``self.enums``."""

    def __init__(self, *args, **kwargs):
        # Force template/static folders to the shared assets path and serve
        # static files from the URL root.
        kwargs.update({
            'template_folder': settings.STATIC_ASSETS_PATH,
            'static_folder': settings.STATIC_ASSETS_PATH,
            'static_url_path': '',
        })
        super(Event, self).__init__(__name__, *args, **kwargs)
        # Populated later by create_connection_pool() / load_enums().
        self.connection_pool = None
        self.enums = None

    def create_connection_pool(self):
        """Build the MySQL connection pool from the EVENT_MYSQL_* settings."""
        pool_config = {
            "host": settings.EVENT_MYSQL_DATABASE_HOST,
            "port": settings.EVENT_MYSQL_DATABASE_PORT,
            "database": settings.EVENT_MYSQL_DATABASE_NAME,
            "user": settings.EVENT_MYSQL_DATABASE_USER,
            "password": settings.EVENT_MYSQL_DATABASE_PASSWORD,
            "charset": settings.EVENT_MYSQL_DATABASE_CHARSET,
            "use_unicode": True,
            "get_warnings": True,
            "ssl_verify_cert": False,
            "pool_name": self.__class__.__name__,
            "pool_reset_session":
            settings.EVENT_MYSQL_DATABASE_POOL_RESET_SESSION,
            "pool_size": settings.EVENT_MYSQL_DATABASE_POOL_SIZE,
            "autocommit": True
        }
        # TLS verification is opt-in: enabled (with its CA bundle) only when
        # the setting says so; otherwise it stays off.
        if settings.EVENT_MYSQL_DATABASE_SSL_VERIFY_CERT:
            pool_config["ssl_verify_cert"] = True
            pool_config["ssl_ca"] = settings.EVENT_MYSQL_DATABASE_SSL_CA_PATH
        self.connection_pool = MySQLConnectionPool(**pool_config)

    def load_enums(self):
        """Read the c_enum table and build ``self.enums``.

        Layout: ``self.enums.<ENUM_TYPE_NAME>.<enum_code>`` -> VALUE(value, name).
        Also extends the module-level REGISTRATION_TYPE_LIST as a side effect.
        """
        sql = 'SELECT * FROM c_enum'
        cnx = self.connection_pool.get_connection()
        enum_list = execute_sql_statement(sql, sql_cnx=cnx)
        enum_map = defaultdict(list)
        enum_obj_map = {}
        # Group rows by their enum type name.
        for enum in enum_list:
            enum_map[enum.get("enum_type_name")].append(enum)
        value_tuple = namedtuple("VALUE", ["value", "name"])
        for enum_name_code, enum_value_list in enum_map.items():
            # Map each enum_code to a VALUE(value, name) pair.
            value_map = {
                enum_value.get("enum_code"):
                value_tuple(enum_value.get("enum_value"),
                            enum_value.get("enum_name"))
                for enum_value in enum_value_list
            }
            temp_named_tuple = namedtuple(enum_name_code,
                                          list(value_map.keys()))
            enum_obj_map[enum_name_code.upper()] = temp_named_tuple(
                **value_map)
        enum_type_tuple = namedtuple("ENUM", enum_obj_map.keys())
        self.enums = enum_type_tuple(**enum_obj_map)
        cnx.close()
        REGISTRATION_TYPE_LIST.extend(
            self.enums.REGISTRATION_TYPE._asdict().keys())
Ejemplo n.º 7
0
class SQLPool:
    """Minimal pool wrapper whose execute() returns the last inserted row id."""

    __slots__ = ('conn', )

    def __init__(self, **kwargs):
        self.conn = MySQLConnectionPool(autocommit=True, **kwargs)

    def execute(self, query: str, params: SQLParams = ()) -> int:
        """Execute *query* with *params* and return ``cursor.lastrowid``.

        Raises:
            Exception: when no pooled connection could be retrieved.
        """
        # Default changed from the mutable [] to an immutable tuple; both
        # are accepted by cursor.execute(), so callers are unaffected.
        if not (cnx := self.conn.get_connection()):
            raise Exception('MySQL: Failed to retrieve a worker.')

        try:
            cur = cnx.cursor()
            try:
                cur.execute(query, params)
                cur.fetchmany()  # drain any pending result set

                # Since we are executing a command, we
                # simply return the last row affected's id.
                return cur.lastrowid
            finally:
                # The original leaked cursor and connection on exceptions.
                cur.close()
        finally:
            cnx.close()
Ejemplo n.º 8
0
class MySqlDriver:
    """MySQL driver class backed by a small connection pool."""
    def __init__(self):
        """Build the pool configuration from the environment and create the pool."""
        self.config = {
            "host":
            "db-espresso-dev" if environ.get("APP_ENV") == "development" else
            environ.get("MYSQL_HOST"),
            "database":
            environ.get("MYSQL_DATABASE", "espresso_db"),
            "user":
            environ.get("MYSQL_USER", "root"),
            "password":
            environ.get("MYSQL_ROOT_PASSWORD", "strawberry"),
            "port":
            int(environ.get("MYSQL_PORT", 3306)),
            "pool_name":
            "mysql_pool",
            "pool_size":
            5,
            "pool_reset_session":
            False,  # MySQL version 5.7.2 and earlier does not support COM_RESET_CONNECTION.
        }
        try:
            self.cnxpool = MySQLConnectionPool(**self.config)
        except errors.Error as err:
            print(err)

    def query(self, query: str, data: tuple = ()) -> dict or False:
        """Execute a query; return the fetched rows, or False on error."""
        cnx = None
        cursor = None
        try:
            cnx = self.cnxpool.get_connection()
            cursor = cnx.cursor()
            cursor.execute(query, data)
            result = cursor.fetchall()
            return result

        except errors.Error as err:
            print(err)
            return False

        finally:
            # Guarded: the original raised NameError here whenever
            # get_connection() itself failed (cursor/cnx never bound).
            if cursor is not None:
                cursor.close()
            if cnx is not None:
                cnx.close()
Ejemplo n.º 9
0
class ExplicitlyConnectionPool(object):
    """Singleton pool configured entirely from the 'config.conf' option file."""

    # Shared instance, created lazily by get_instance().
    INSTANCE = None

    def __init__(self):
        # Guard against direct construction once the singleton exists.
        if self.INSTANCE is not None:
            raise ValueError("An instantiation already exists!")
        self.__cnxPool = MySQLConnectionPool(pool_name="myPool",
                                             pool_size=5,
                                             option_files='config.conf')

    @classmethod
    def get_instance(cls):
        """Create the singleton on first call; return it thereafter."""
        if cls.INSTANCE is None:
            cls.INSTANCE = ExplicitlyConnectionPool()
        return cls.INSTANCE

    def get_connection(self):
        """Hand out a connection from the private pool."""
        return self.__cnxPool.get_connection()
class DatabaseConnectionPool:
    """Singleton pool built from the [mysql] section of Config.ini."""

    INSTANCE = None  # lazily created by get_instance()

    def __init__(self):
        # Refuse direct construction after the singleton exists.
        if self.INSTANCE is not None:
            raise ValueError("An instantiation already exists!")
        config = read_db_config(filename='Config.ini', section='mysql')
        print(type(config), config)
        self.__cnxPool = MySQLConnectionPool(pool_name='myPool',
                                             pool_size=5,
                                             **config)

    @classmethod
    def get_instance(cls):
        """Return the lone instance, constructing it lazily."""
        if cls.INSTANCE is None:
            cls.INSTANCE = DatabaseConnectionPool()
        return cls.INSTANCE

    def get_connection(self):
        """Fetch a connection from the wrapped pool."""
        return self.__cnxPool.get_connection()
class MysqlPooledDataSource(AbstractDataSource):
    """Data source whose connections come from a shared MySQL pool."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Pool settings are layered on top of whatever the base class parsed.
        self._conn_params['use_pure'] = True
        self._conn_params['pool_name'] = 'test_pool'
        self._conn_params['pool_size'] = 10
        try:
            # Kept for interface compatibility: callers/subclasses may read it.
            self._traceback = None
            self._pool = MySQLConnectionPool(**self._conn_params)
        except InterfaceError as e:
            print('MysqlPooledDataSource connect error')
            # Bare re-raise keeps the original traceback; the old
            # e.with_traceback(self._traceback) only cleared it to no effect.
            raise

    def get_conn(self):
        """Wrap a pooled connection in the project's PooledMySQLConnection."""
        return PooledMySQLConnection(self, self._pool.get_connection())
Ejemplo n.º 12
0
class DatabaseConnectionPool(object):
    """Singleton wrapper over a five-connection MySQL pool."""

    INSTANCE = None  # the one shared instance

    def __init__(self):
        # Disallow a second construction once the singleton exists.
        if self.INSTANCE is not None:
            raise ValueError("An instantiation already exists!")
        self.__cnxPool = MySQLConnectionPool(pool_name="myPool",
                                             pool_size=5,
                                             **read_db_config())

    @classmethod
    def get_instance(cls):
        """Return the shared instance, building it on first use."""
        if cls.INSTANCE is None:
            cls.INSTANCE = DatabaseConnectionPool()
        return cls.INSTANCE

    def get_connection(self):
        """Check a connection out of the private pool."""
        return self.__cnxPool.get_connection()
Ejemplo n.º 13
0
class Database(object, metaclass=Singleton):
    """Singleton holding a small TLS-verified MySQL connection pool."""

    def __init__(self):
        cfg = self.__get_cfg()
        self.pool = MySQLConnectionPool(pool_name='dbpool',
                                        pool_size=5,
                                        **cfg)

    def __get_cfg(self):
        """Load the 'production' section of database.yml as connector kwargs."""
        with open('database.yml') as f:
            # safe_load: a config file must not be able to execute arbitrary
            # Python (yaml.load without a Loader is unsafe and deprecated).
            cfg = yaml.safe_load(f.read())['production']

        keys = ['username', 'password', 'host', 'port', 'database', 'ssl_ca',
                'ssl_cert', 'ssl_key']

        # YAML keys are spelled without underscores; map them onto the
        # underscore-spelled connector argument names.
        no_ = lambda s: s.replace('_', '')
        cfg = { k: cfg.get(no_(k)) for k in keys if no_(k) in cfg }
        cfg['ssl_verify_cert'] = True
        # Mysql documentation incorrectly says 'username' is an alias of 'user'.
        cfg['user'] = cfg.pop('username')
        return cfg

    @contextmanager
    def get_connection(self):
        """Yield a pooled connection, retrying briefly while the pool is empty."""
        def get_conn(attempt=0):
            try:
                return self.pool.get_connection()
            except PoolError:
                # Up to 20 retries, 0.2 s apart (~4 s), before giving up.
                if 20 > attempt:
                    time.sleep(0.2)
                    return get_conn(attempt + 1)
                else:
                    raise
        connection = get_conn()
        try:
            yield connection
        finally:
            # The finally guarantees the connection goes back to the pool
            # even when the with-body raises (the original leaked it then).
            # connection.close() will fail with an unread_result, so drain
            # any pending rows first.
            if connection._cnx.unread_result:
                connection._cnx.get_rows()
            connection.close()

    def close_all(self):
        # NOTE(review): MySQLConnectionPool does not obviously expose
        # reset_session(); confirm this call does what close_all implies.
        self.pool.reset_session()
Ejemplo n.º 14
0
class Db(object):
    """
    Instantiate a db connection pool, exposed through the read-only
    ``session`` property (each read checks a fresh connection out).
    """

    def __init__(self):

        dbconfig = {
            "database": "geoDev",
            "user": "******",
            "password": "******",
            "host": "localhost",
            "pool_name": "geo_pool",
            "pool_size": 20,
            "pool_reset_session": True
        }

        # The original wrapped this in `try: ... except Exception: raise`,
        # a no-op: any connector error still propagates unchanged.
        self.__conn_pool = MySQLConnectionPool(**dbconfig)

    def __get_session(self):
        """
        Returns a fresh connection checked out of the private pool.
        """
        return self.__conn_pool.get_connection()

    def __cant_set(self, value=None):
        """Raises runtime error.

        The value parameter is required so property assignment reaches this
        method (the original raised TypeError instead of RuntimeError when
        used as a setter, because property setters are called with a value).
        """
        raise RuntimeError("Private property cannot be set.")

    def __cant_get(self):
        """Raises runtime error."""
        raise RuntimeError("Cannot get protected property.")

    # db_conn is fully protected; session hands out pooled connections.
    db_conn = property(__cant_get, __cant_set)
    session = property(__get_session, __cant_set)
class ConnectionPool:
    """Singleton MySQL pool configured from 'conf/db_config.conf'."""

    INSTANCE = None  # lazily built by get_instance()

    def __init__(self):
        # Block direct construction once the singleton exists.
        if self.INSTANCE is not None:
            raise ValueError("An instantiation already exists.")
        self.__cnxPool = MySQLConnectionPool(pool_name='myPool',
                                             pool_size=5,
                                             option_files='conf/db_config.conf')

    @classmethod
    def get_instance(cls):
        """Return the shared instance, creating it on demand."""
        if cls.INSTANCE is None:
            cls.INSTANCE = ConnectionPool()
        return cls.INSTANCE

    def get_connection(self):
        """Check a connection out of the private pool."""
        return self.__cnxPool.get_connection()

    @classmethod
    def pool_close(cls):
        # Drops the singleton reference only; no connections are closed here.
        cls.INSTANCE = None
Ejemplo n.º 16
0
class MysqlPooledDataSource(AbstractDataSource):
    """Pooled MySQL data source built on AbstractDataSource's _conn_params."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # self._conn_params['use_pure'] = True
        # Layer pool settings on top of the params the base class prepared.
        self._conn_params['pool_name'] = 'test_pool'
        self._conn_params['pool_size'] = 10
        try:
            self._pool = MySQLConnectionPool(**self._conn_params)
            self._closed = False
        except InterfaceError as e:
            log.error('MysqlPooledDataSource connect error')
            log.error(formatErrorMsg(e))
            raise e

    def _inner_close(self):
        # NOTE(review): mysql.connector's MySQLConnectionPool exposes no
        # public close() — confirm self._pool is a pool type that does.
        self._pool.close()

    def get_conn(self):
        # Wrap the raw pooled connection in the project's wrapper type.
        return PooledMySQLConnection(self, self._pool.get_connection())

    def release_conn(self, conn):
        # Returning a connection is done by closing it.
        conn.close()
Ejemplo n.º 17
0
class DbPlugin(plugins.SimplePlugin):
    """CherryPy plugin that attaches one pooled DB connection to each request."""

    def __init__(self, bus, config):
        plugins.SimplePlugin.__init__(self, bus)
        self.engine = None
        # 32-connection pool shared by all requests.
        self._pool = MySQLConnectionPool(pool_size=32, **config)

    def start(self):
        """Hook connection setup/teardown into the request lifecycle."""
        self.bus.subscribe('before_request', self._create_connection)
        self.bus.subscribe('after_request', self._close_connection)
        self.bus.log('DB plugin STARTED')

    def stop(self):
        """Detach the request hooks and drop the pool reference."""
        self.bus.unsubscribe('before_request', self._create_connection)
        self.bus.unsubscribe('after_request', self._close_connection)
        if self._pool:
            del self._pool
        self.bus.log('DB plugin STOPPED')

    def _create_connection(self):
        # Expose the per-request connection as cp.request.db.
        cp.request.db = self._pool.get_connection()

    def _close_connection(self):
        # Return the connection to the pool and drop the request attribute.
        cp.request.db.close()
        del cp.request.db
Ejemplo n.º 18
0
class Database:
    """Pool-backed helper whose execute() returns rows or the last row id."""

    def __init__(self, **kwargs):
        self.pool = MySQLConnectionPool(**kwargs)

    def get_connection(self):
        """Check a connection out of the pool."""
        return self.pool.get_connection()

    def execute(self, sql, multi=False):
        """Run *sql*; return fetched rows, or ``cursor.lastrowid`` for writes."""
        con = None
        try:
            con = self.get_connection()
            cursor = con.cursor()
            cursor.execute(sql, multi=multi)
            try:
                res = cursor.fetchall()
                con.commit()
                return res
            except Exception:
                # Statements without a result set (INSERT/UPDATE/...) make
                # fetchall() raise; commit and hand back the generated id.
                con.commit()
                return cursor.lastrowid
        except Exception as e:
            print(e)
            raise e
        finally:
            # Guarded: the original hit NameError here whenever
            # get_connection() itself failed (con never bound).
            if con is not None:
                con.close()
Ejemplo n.º 19
0
def main(argv):
    """Parse a list of IAA IIIF filenames and register them in SQE_image.

    Usage: import_iaa_iiif.py -i <inputfile> -d <database_name>
    """
    inputfile = ''
    database = ''
    try:
        opts, args = getopt.getopt(argv, "hi:d:", ["ifile=", "db="])
    except getopt.GetoptError:
        print('import_iaa_iiif.py -i <inputfile> -d <database_name>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('import_iaa_iiif.py -i <inputfile> -d <database_name>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-d", "--db"):
            database = arg
    print('Input file is', inputfile)
    print('Database is', database.rstrip(' '))

    dbconfig = {
        'host': "localhost",
        'port': "3307",
        'user': "******",
        'password': "******",
        'database': database
    }

    cnxpool = MySQLConnectionPool(pool_name="mypool", pool_size=30, **dbconfig)

    db = cnxpool.get_connection()
    cursor = db.cursor()
    # Filenames that did not match the expected pattern.
    unprocessed = []
    # Plate numbers to skip when inserting.
    exclude = [
        '1094', '1095', '1096', '1097', '1098', '1099', '1100', '1101', '1102',
        '1103', '1104', '1106', '1107', '998'
    ]
    # NOTE(review): the input file handle is never closed explicitly.
    lines = [line.rstrip('\n') for line in open(inputfile)]
    for line in lines:
        print(line)
        # Extract plate (P...), fragment (Fg...), side (R/V) and the
        # wavelength/type marker from the filename.
        m = re.search(
            r'P([\*]{0,1}\d{1,5})(\_\d|[a-zA-Z]{0,1}).*Fg(\d{1,5}).*-(R|V)-.*(LR445|LR924|ML445|ML924|_026|_028)',
            line)
        if m is not None and len(m.groups(0)) == 5:
            plate = str(m.group(1)) + m.group(2).replace('_', '/')
            fragment = str(m.group(3)).lstrip('0')
            # Side code: 0 = recto (R), 1 = verso (V).
            side = '0'
            if ('R' in str(m.group(4))):
                side = '0'
            else:
                side = '1'
            wvStart = '0'
            wvEnd = '0'
            # NOTE: `type` shadows the builtin; it holds the image type code.
            type = '1'
            master = '0'
            # Wavelength marker -> wavelength range / image type / master flag.
            if ('445' in str(m.group(5))):
                wvStart = '445'
                wvEnd = '704'
                type = '0'
                master = '1'
            elif ('26' in str(m.group(5))):
                wvStart = '924'
                wvEnd = '924'
                type = '2'
            elif ('28' in str(m.group(5))):
                wvStart = '924'
                wvEnd = '924'
                type = '3'
            elif ('924' in str(m.group(5))):
                wvStart = '924'
                wvEnd = '924'
            # NOTE(review): SQL is built by string concatenation — injectable
            # if filenames are untrusted; consider parameterized queries.
            sql = 'SELECT image_catalog_id, edition_catalog_id FROM image_catalog '\
                'JOIN image_to_edition_catalog USING(image_catalog_id) '\
                'WHERE institution = "IAA" '\
                'AND catalog_number_1 = "' + plate + '" '\
                'AND catalog_number_2 = "' + fragment + '" '\
                'AND catalog_side = ' + side + ';'
            cursor.execute(sql)
            result_set = cursor.fetchall()
            print(plate, fragment, side, result_set)
            # I should perhaps have an else clause following
            # this conditional that sticks images without edition
            # cataloguing data into 4Q9999 or something like that.
            if (cursor.rowcount != 0):
                imageCatalogId = str(result_set[0][0])
                editionCatalogId = str(result_set[0][1])
                print(plate, fragment, side, wvStart, wvEnd, type,
                      imageCatalogId, editionCatalogId)
                # NOTE(review): any(x not in plate ...) is true unless plate
                # contains EVERY excluded number; all(...) may have been the
                # intent — confirm before changing.
                if any(x not in plate for x in exclude):
                    sql = 'INSERT IGNORE INTO SQE_image '\
                    '(image_urls_id, filename, native_width, native_height, dpi, type, wavelength_start, wavelength_end, is_master, image_catalog_id, edition_catalog_id) '\
                    'VALUES(2,"' + line + '",7216,5412,1215,' + type +',' + wvStart + ',' + wvEnd + ',' + master + ',' + imageCatalogId + ',' + editionCatalogId + ');'
                    cursor.execute(sql)
                    db.commit()
                    print('New id:', cursor.lastrowid)
        else:
            unprocessed.append(line)
    cursor.close()
    db.close()
    print(unprocessed)
Ejemplo n.º 20
0
class PooledQueryExecutor:
    def __init__(self, host, port, username, password, database, poolsize=1):
        """Store connection settings and build the pool immediately."""
        self.host, self.port = host, port
        self.user, self.password = username, password
        self.database = database
        self._poolsize = poolsize

        # Pool handle plus a mutex guarding its (re)creation.
        self._pool = None
        self._pool_mutex = Lock()
        # At most `poolsize` connections may be checked out at once.
        self._connection_semaphore = Semaphore(poolsize)

        self._init_pool()

    def _init_pool(self):
        """Create the named pool under the mutex from the stored settings."""
        logger.info("Connecting to DB")
        config = {
            "host": self.host,
            "port": self.port,
            "user": self.user,
            "password": self.password,
            "database": self.database,
        }
        with self._pool_mutex:
            self._pool = MySQLConnectionPool(pool_name="db_wrapper_pool",
                                             pool_size=self._poolsize,
                                             **config)

    def close(self, conn, cursor):
        """
        A method used to close connection of mysql.
        :param conn:
        :param cursor:
        :return:
        """
        # Cursor first, then the connection that owns it.
        for resource in (cursor, conn):
            resource.close()

    def setup_cursor(self, conn, **kwargs):
        """Build a cursor on *conn*, honoring use_dict/prepared flags in kwargs."""
        cursor_kwargs = {}
        if kwargs.get('use_dict', False):
            cursor_kwargs['dictionary'] = True
        if kwargs.get('prepared', False):
            cursor_kwargs['prepared'] = True
        return conn.cursor(**cursor_kwargs)

    def execute(self, sql, args=(), commit=False, **kwargs):
        """
        Execute a sql, it could be with args and with out args. The usage is
        similar with execute() function in module pymysql.
        :param sql: sql clause
        :param args: args need by sql clause
        :param commit: whether to commit
        :return: if commit, return None, else, return result
        """
        # Bound the number of concurrently checked-out connections.
        # NOTE(review): if get_connection()/setup_cursor() raises, the
        # semaphore is never released — the try does not cover them.
        self._connection_semaphore.acquire()
        conn = self._pool.get_connection()
        cursor = self.setup_cursor(conn, **kwargs)
        get_id = kwargs.get('get_id', False)
        get_dict = kwargs.get('get_dict', False)
        raise_exc = kwargs.get('raise_exc', False)
        suppress_log = kwargs.get('suppress_log', False)
        # We do not want to display binary data
        has_binary = False
        disp_args = []
        if args and type(args) is tuple:
            for value in args:
                if isinstance(value, bytes):
                    # Log only a 10-byte prefix of binary values.
                    disp_args.append(value[:10])
                    has_binary = True
                else:
                    disp_args.append(value)
        else:
            disp_args = (args)
        try:
            multi = False
            # Scalars are wrapped so cursor.execute always gets a tuple.
            if type(args) != tuple and args is not None:
                args = (args,)
            if sql.count(';') > 1:
                # Multi-statement script: bypass the cursor and drain the raw
                # query iterator instead.
                multi = True
                for _ in conn.cmd_query_iter(sql):
                    pass
            else:
                cursor.execute(sql, args)
            if not has_binary:
                logger.debug3(cursor.statement)
            else:
                logger.debug3("SQL: {}", sql)
                logger.debug3("Args: {}", disp_args)
            if commit is True:
                conn.commit()
                # Multi-statement scripts return None even when committing.
                if not multi:
                    affected_rows = cursor.rowcount
                    if get_id:
                        return cursor.lastrowid
                    else:
                        return affected_rows
            else:
                if not multi:
                    res = cursor.fetchall()
                    if get_dict:
                        return self.__convert_to_dict(cursor.column_names, res)
                    return res
        except mysql.connector.Error as err:
            if not suppress_log:
                logger.error("Failed executing query: {} ({}), error: {}", sql, disp_args, err)
            if raise_exc:
                raise err
            return None
        except Exception as e:
            logger.error("Unspecified exception in dbWrapper: {}", str(e))
            return None
        finally:
            # Always recycle the cursor/connection and free the semaphore.
            self.close(conn, cursor)
            self._connection_semaphore.release()

    def executemany(self, sql, args, commit=False, **kwargs):
        """
        Execute with many args. Similar with executemany() function in pymysql.
        args should be a sequence.
        :param sql: sql clause
        :param args: args
        :param commit: commit or not.
        :return: if commit, return None, else, return result
        """
        # get connection form connection pool instead of create one.
        self._connection_semaphore.acquire()
        conn = None
        cursor = None
        try:
            # Acquisition now happens inside the try: the original leaked
            # the semaphore (and connection) if get_connection()/cursor()
            # raised before its try block started.
            conn = self._pool.get_connection()
            cursor = conn.cursor()
            cursor.executemany(sql, args, **kwargs)

            if commit is True:
                conn.commit()
                return None
            else:
                res = cursor.fetchall()
                return res
        except mysql.connector.Error as err:
            logger.error("Failed executing query: {}", str(err))
            return None
        except Exception as e:
            logger.error("Unspecified exception in dbWrapper: {}", str(e))
            return None
        finally:
            # Only close what was actually acquired.
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.close()
            self._connection_semaphore.release()

    # ===================================================
    # =============== DB Helper Functions ===============
    # ===================================================

    def __convert_to_dict(self, descr, rows):
        """Zip column names *descr* with each row, yielding one dict per row."""
        # The original first copied descr into a new list; zip() does not
        # mutate its arguments, so the copy was pointless.
        return [dict(zip(descr, row)) for row in rows]

    def __create_clause(self, col_names, col_subs):
        """ Creates a clause and handles lists
        Args:
            col_names (list): List of column names
            col_subs (list): List of column value substitutions
        Returns (list):
            List of elements for the clause
        """
        clause = []
        for name, sub in zip(col_names, col_subs):
            # A comma in the substitution means multiple values -> IN (...).
            template = "`%s` IN (%s)" if "," in sub else "`%s` = %s"
            clause.append(template % (name, sub))
        return clause

    def __fix_table(self, table):
        """ Encapsulates the table (and optional schema prefix) in backticks
        Args:
            table (str): Table to encapsulate
        Returns (str):
            Encapsulated table
        """
        parts = table.split(".")
        if len(parts) > 2:
            raise Exception("Invalid table format, %s" % table)
        # Strip stray backticks, then quote each dotted component.
        return ".".join("`%s`" % part.replace("`", "") for part in parts)

    def __process_literals(self, optype, keyvals, literals):
        """ Processes literals and returns a tuple containing all data required for the query
        Args:
            keyvals (dict): Data to insert into the table
            literals (list): Datapoints that should not be escaped
            optype (str): Type of operation
        Returns (tuple):
            (Column names, Column Substitutions, Column Values, Literal Values, OnDuplicate)
        """
        column_names = []
        column_substituion = []
        column_values = []
        literal_values = []
        ondupe_out = []
        for key, value in keyvals.items():
            # List values are only accepted for WHERE-style operations.
            if type(value) is list and optype not in ["DELETE", "UPDATE"]:
                raise Exception("Unable to process a list in key %s" % key)
            column_names += [key]
            # Determine the type of data to insert
            # "%%s" survives one round of %-interpolation as the driver
            # placeholder "%s"; literal keys are interpolated directly.
            sub_op = "%%s"
            if key in literals:
                sub_op = "%s"
            # Number of times to repeat
            num_times = 1
            if type(value) is list:
                num_times = len(value)
            column_substituion += [",".join(sub_op for _ in range(0, num_times))]
            # Add to the entries
            # Literal values are interpolated into the SQL text later;
            # non-literal values are escaped by the driver.
            if key in literals:
                if type(value) is list:
                    literal_values += value
                else:
                    literal_values += [value]
            else:
                if type(value) is list:
                    column_values += value
                else:
                    column_values += [value]
        for key, value in keyvals.items():
            # Second pass builds ON DUPLICATE KEY UPDATE assignments.
            if optype == "ON DUPLICATE":
                tmp_value = "`%s` = %%s" % key
                if key in literals:
                    tmp_value = tmp_value % value
                else:
                    column_values += [value]
                ondupe_out += [tmp_value]
        return (column_names, column_substituion, column_values, literal_values, ondupe_out)

    def autofetch_all(self, sql, args=(), **kwargs):
        """ Fetch all data and have it returned as a dictionary """
        # Delegates to execute() with get_dict so rows come back as dicts;
        # raise_exc propagates DB errors instead of returning None.
        return self.execute(sql, args=args, get_dict=True, raise_exc=True, **kwargs)

    def autofetch_value(self, sql, args=(), **kwargs):
        """ Fetch the first value from the first row """
        rows = self.execute(sql, args=args, raise_exc=True, **kwargs)
        if rows:
            return rows[0][0]
        # No rows (or query returned None) -> nothing to fetch.
        return None

    def autofetch_row(self, sql, args=(), **kwargs):
        """ Fetch the first row and have it return as a dictionary """
        # TODO - Force LIMIT 1
        rows = self.execute(sql, args=args, get_dict=True, raise_exc=True, **kwargs)
        if rows:
            return rows[0]
        # No rows -> empty dict, matching callers that index into the result.
        return {}

    def autofetch_column(self, sql, args=None, **kwargs):
        """ get one field for 0, 1, or more rows in a query and return the result in a list
        """
        data = self.execute(sql, args=args, raise_exc=True, **kwargs)
        # Comprehension instead of the manual append loop; None (no result)
        # still yields an empty list as before.
        if data is None:
            return []
        return [row[0] for row in data]

    def autoexec_delete(self, table, keyvals, literals=None, where_append=None, **kwargs):
        """ Performs a delete
        Args:
            table (str): Table to run the query against
            keyvals (dict): Column/value pairs that become the WHERE clause
            literals (list): Keys of ``keyvals`` whose values are %-substituted
                directly into the SQL instead of being driver-escaped
            where_append (list): Pre-built clauses ANDed in front of the
                generated ones
        """
        if literals is None:
            literals = []
        if where_append is None:
            where_append = []
        if type(keyvals) is not dict:
            raise Exception("Data must be a dictionary")
        if type(literals) is not list:
            raise Exception("Literals must be a list")
        table = self.__fix_table(table)
        # Split keyvals into driver-escaped values vs. literal substitutions.
        parsed_literals = self.__process_literals("DELETE", keyvals, literals)
        (column_names, column_substituion, column_values, literal_values, _) = parsed_literals
        query = "DELETE FROM %s\nWHERE "
        where_clauses = where_append + self.__create_clause(column_names, column_substituion)
        query += "\nAND ".join(k for k in where_clauses)
        # Table name and literal values are %-formatted into the SQL; all the
        # remaining values travel to the driver as parameters.
        literal_values = [table] + literal_values
        query = query % tuple(literal_values)
        self.execute(query, args=tuple(column_values), commit=True, raise_exc=True, **kwargs)

    def autoexec_insert(self, table, keyvals, literals=None, optype="INSERT", **kwargs):
        """ Auto-inserts into a table and handles all escaping
        Args:
            table (str): Table to run the query against
            keyvals (dict): Data to insert into the table
            literals (list): Keys of ``keyvals`` whose values are substituted
                directly into the SQL instead of being driver-escaped
            optype (str): Type of operation.  Valid operations are ["INSERT", "REPLACE", "INSERT IGNORE",
                "ON DUPLICATE"]
            **kwargs: Forwarded to ``execute`` (e.g. log/logger options)
        Returns (int):
            Primary key for the row
        Raises:
            ProgrammingError: if ``optype`` is not one of the valid operations
        """
        if literals is None:
            literals = []
        optype = optype.upper()
        if optype not in ["INSERT", "REPLACE", "INSERT IGNORE", "ON DUPLICATE"]:
            # Fixed the error message: the original lacked the space before
            # "got", producing "...'ON DUPLICATE',got 'X'".
            raise ProgrammingError("MySQL operation must be 'INSERT', 'REPLACE', 'INSERT IGNORE', 'ON DUPLICATE', "
                                   "got '%s'" % optype)
        if type(keyvals) is not dict:
            raise Exception("Data must be a dictionary")
        if type(literals) is not list:
            raise Exception("Literals must be a list")
        table = self.__fix_table(table)
        parsed_literals = self.__process_literals(optype, keyvals, literals)
        (column_names, column_substituion, column_values, literal_values, ondupe_out) = parsed_literals
        # "ON DUPLICATE" is issued as a plain INSERT with an extra clause
        # appended below.
        inital_type = "INSERT" if optype == "ON DUPLICATE" else optype
        if inital_type in ["INSERT", "REPLACE"]:
            inital_type += " INTO"
        rownames = ",".join("`%s`" % k for k in column_names)
        rowvalues = ", ".join(column_substituion)
        query = "%s %s\n" \
                "(%s)\n" \
                "VALUES(%s)" % (inital_type, table, rownames, rowvalues) % tuple(literal_values)
        if optype == "ON DUPLICATE":
            # The parameter values for the update clause were already appended
            # to column_values by __process_literals, so only the SQL text is
            # added here.  (Removed the dead, always-empty 'ondupe_values'
            # list the original concatenated onto column_values.)
            query += "\nON DUPLICATE KEY UPDATE\n%s" % ",\n".join(ondupe_out)
        return self.execute(query, args=tuple(column_values), commit=True, get_id=True, raise_exc=True, **kwargs)

    def autoexec_update(self, table, set_keyvals, literals=None, where_keyvals=None, where_literals=None, **kwargs):
        """ Auto-updates into a table and handles all escaping
        Args:
            table (str): Table to run the query against
            set_keyvals (dict): Data to set
            literals (list): Keys of ``set_keyvals`` whose values are
                substituted directly into the SQL instead of driver-escaped
                (the original docstring called this ``set_literals``, but the
                parameter is named ``literals``)
            where_keyvals (dict): Data used in the where clause
            where_literals (list): Keys of ``where_keyvals`` that should not
                be escaped
        """
        if literals is None:
            literals = []
        if where_keyvals is None:
            where_keyvals = {}
        if where_literals is None:
            where_literals = []
        if type(set_keyvals) is not dict:
            raise Exception("Set Keyvals must be a dictionary")
        if type(literals) is not list:
            raise Exception("Literals must be a list")
        if type(where_keyvals) is not dict:
            raise Exception("Where Keyvals must be a dictionary")
        if type(where_literals) is not list:
            raise Exception("Literals must be a list")
        # NOTE(review): unlike autoexec_delete/autoexec_insert, ``table`` is
        # NOT passed through __fix_table here - confirm whether intentional.
        parsed_set = self.__process_literals("SET", set_keyvals, literals)
        (set_col_names, set_col_sub, set_val, set_literal_val, _) = parsed_set
        parsed_where = self.__process_literals("UPDATE", where_keyvals, where_literals)
        (where_col_names, where_col_sub, where_val, where_literal_val, _) = parsed_where
        first_sub = [table]
        # Driver-escaped parameters: SET values first, then WHERE values.
        actual_values = set_val + where_val
        set_clause = self.__create_clause(set_col_names, set_col_sub)
        first_sub.append(",".join(set_clause) % tuple(set_literal_val))
        query = "UPDATE %s\n" \
                "SET %s"
        if where_col_names:
            query += "\nWHERE %s"
            where_clause = self.__create_clause(where_col_names, where_col_sub)
            first_sub.append("\nAND".join(where_clause) % tuple(where_literal_val))
        query = query % tuple(first_sub)
        self.execute(query, args=tuple(actual_values), commit=True, raise_exc=True, **kwargs)
Ejemplo n.º 21
0
class DatabaseManager:
    """MySQL access layer for the ProjectOrganiser database.

    Every public method checks a connection out of a shared pool, runs its
    statement(s), and returns the connection to the pool before exiting -
    including when the statement raises.  All VALUES are passed to the driver
    as query parameters (never interpolated into the SQL string) to prevent
    SQL injection; table/column identifiers cannot be parameterized and must
    therefore come from trusted code, not user input.
    """

    def __init__(self):
        print("DatabaseManager: __init__")
        self.createConnectionPool()

    def createConnectionPool(self):
        """Create the shared connection pool, retrying until MySQL is up."""
        dbconfig = {
            "user": "******",
            "password": "******",
            "host": 'mihass-g-mysql',  # docker run --link hostname of the MySQL container
            "database": 'ProjectOrganiser',
            "port": '3306'
        }
        # Retry in a loop rather than recursively: the original recursive
        # retry could exhaust the interpreter's recursion limit if the MySQL
        # container stayed down long enough.
        while True:
            try:
                self.cnxpool = MySQLConnectionPool(
                    pool_name="mypool",
                    pool_size=32,
                    **dbconfig)
                return
            except Exception:
                # Most likely the MySQL container is simply not up yet.
                print("Exception... sleeping for 5 seconds then retry")
                print("tb: " + traceback.format_exc())
                time.sleep(5)

    def _run(self, stmt, params=(), dictionary=False, commit=False, fetch=False):
        """Execute one parameterized statement on a pooled connection.

        The cursor and connection are always released (try/finally), even
        when execution raises - the original methods leaked both on error.
        Returns the fetched rows when ``fetch`` is True, else None.
        """
        connector = self.cnxpool.get_connection()
        cursor = connector.cursor(dictionary=dictionary)
        try:
            cursor.execute(stmt, params)
            data = cursor.fetchall() if fetch else None
            if commit:
                connector.commit()
            return data
        finally:
            cursor.close()
            connector.close()

    def _write_dict(self, verb, table_name, my_dict):
        """Build and run an INSERT/REPLACE for the columns in ``my_dict``.

        Keys become column names (must be trusted); values are parameterized.
        """
        placeholders = ", ".join(["%s"] * len(my_dict))
        stmt = "{verb} INTO `{table}` ({columns}) VALUES ({values});".format(
            verb=verb,
            table=table_name,
            columns=",".join(my_dict.keys()),
            values=placeholders
        )
        self._run(stmt, list(my_dict.values()), dictionary=True, commit=True)

    def insert_into_table(self, table_name, my_dict):
        """Insert ``my_dict`` as one row into ``table_name``."""
        self._write_dict("INSERT", table_name, my_dict)
        print("complete")

    def update_table(self, table_name, tracker_arr):
        """Append each tracker entry (as JSON) to the ``logs`` column of the
        row whose ``email`` equals the entry's userID.
        """
        connector = self.cnxpool.get_connection()
        cursor = connector.cursor()
        try:
            # Parameterized: the old string-formatted statement was injectable
            # and, via CONCAT(IFNULL(logs, value), value), appended the value
            # TWICE whenever logs was NULL.  IFNULL(logs, '') fixes that.
            stmt = ("UPDATE `{table}` SET logs = CONCAT(IFNULL(logs, ''), %s) "
                    "WHERE email = %s;").format(table=table_name)
            for entry in tracker_arr:
                cursor.execute(stmt, (json.dumps(entry), entry["userID"]))
            connector.commit()
        finally:
            cursor.close()
            connector.close()
        print("table updated")

    def replace_into_table(self, table_name, my_dict):
        """REPLACE ``my_dict`` into ``table_name`` (insert or overwrite)."""
        self._write_dict("REPLACE", table_name, my_dict)

    def select_all_from_table(self, table_name):
        """Return every row of ``table_name`` as a list of dicts."""
        return self._run("SELECT * FROM `" + table_name + "`;",
                         dictionary=True, fetch=True)

    def delete_assignment(self, id):
        """Delete the assignment with the given primary key."""
        self._run("DELETE FROM Assignments WHERE Assignments.id=%s LIMIT 1",
                  (id,), dictionary=True, commit=True)

    def delete_user(self, email):
        """Delete the user with the given email address."""
        self._run("DELETE FROM Users WHERE Users.email=%s LIMIT 1",
                  (email,), dictionary=True, commit=True)

    def check_password(self, email, password):
        """Return True when exactly one user matches email + password.

        NOTE(review): this compares the password in SQL, i.e. the schema
        appears to store it in recoverable form - hashing is a separate fix.
        """
        rows = self._run(
            "SELECT * FROM Users WHERE Users.email=%s AND Users.password=%s",
            (email, password), dictionary=True, fetch=True)
        return len(rows) == 1

    def get_user_info(self, message_data):
        """Return the Users row for ``message_data["email"]`` as a dict.

        Raises IndexError when no such user exists (unchanged behaviour).
        """
        email = message_data["email"]
        rows = self._run("SELECT * FROM Users WHERE Users.email=%s",
                         (email,), dictionary=True, fetch=True)
        return rows[0]

    def get_all_users(self):
        """Return [{email, name, surname}, ...] for every user."""
        rows = self._run("SELECT * FROM Users", dictionary=True, fetch=True)
        return [{"email": row["email"],
                 "name": row["name"],
                 "surname": row["surname"]} for row in rows]

    def select_submissions_for_user(self, user_id):
        """Return all Submissions rows belonging to ``user_id``."""
        print("select_submissions_from_assignments")
        return self._run(
            "SELECT * FROM Submissions WHERE Submissions.user_id=%s",
            (user_id,), dictionary=True, fetch=True)

    def _get_submission(self, submission_id):
        """Fetch one Submissions row by primary key (IndexError if absent)."""
        rows = self._run("SELECT * FROM Submissions WHERE Submissions.id=%s",
                         (submission_id,), dictionary=True, fetch=True)
        return rows[0]

    def add_review(self, data):
        """Append ``data`` to the feedbacks JSON list of its submission."""
        print("add_review")
        submission = self._get_submission(data["submission_id"])
        feedbacks = json.loads(submission["feedbacks"])
        feedbacks.append(data)
        submission["feedbacks"] = json.dumps(feedbacks)
        self.replace_into_table("Submissions", submission)

    def update_review(self, data):
        """Replace the matching reviewer/iteration entry in the submission's
        feedbacks JSON list with ``data["review"]``.
        """
        print("update_review")
        submission = self._get_submission(data["submission_id"])
        feedbacks = json.loads(submission["feedbacks"])
        for feedback in feedbacks:
            if (feedback["reviewer_id"] == data["reviewer_id"]
                    and feedback["iteration_submitted"] == data["iteration_submitted"]):
                feedback["review"] = data["review"]
        submission["feedbacks"] = json.dumps(feedbacks)
        self.replace_into_table("Submissions", submission)
Ejemplo n.º 22
0
class ConnectionPool:
    """Builds and hands out clients/pools for the configured data stores.

    The class attributes below are config-key constants plus None defaults;
    ``__init__`` assigns instance attributes over the latter.  (Removed the
    original ``global`` declarations throughout: every one of those names is
    an instance/class attribute that is never assigned at module scope, so
    the statements were misleading no-ops.)
    """
    CASSANDRA = 'Cassandra'
    REDIS = 'Redis'
    MONGO = 'MongoDB'
    MYSQL = 'MySQL'
    HOST = 'host'
    PORT = 'port'
    USER = '******'
    PASS = '******'
    DATABASE = 'database'
    KEYSPACE = 'keyspace'
    COMMA = ','
    configs = None
    redisPool = None
    mongoPool = None
    cassandraPool = None
    mysqlPool = None
    keyspace = None

    def __init__(self):
        self.configs = get_configs()
        self.create_cassandra()
        # self.create_redis()
        self.create_mongo()
        # self.create_mysql()

    def get_params(self, config):
        """Return (host, port, user, password) from one store's config dict."""
        return config[self.HOST], config[self.PORT], config[self.USER], config[
            self.PASS]

    # perform exception handling with logging

    def create_cassandra(self):
        """Build the Cassandra Cluster from the 'Cassandra' config section."""
        config = self.configs[self.CASSANDRA]
        hosts, port, user, password = self.get_params(config)
        hosts = hosts.strip().split(self.COMMA)
        self.keyspace = config[self.KEYSPACE]
        self.cassandraPool = Cluster(hosts, port)

    def create_redis(self):
        """Build the redis connection pool.

        NOTE(review): ``ConnectionPool`` below resolves to *this* class, not
        ``redis.ConnectionPool`` - calling it would recurse into __init__
        with unexpected keyword arguments.  Import/alias redis's pool class
        explicitly before enabling this method.
        """
        config = self.configs[self.REDIS]
        hosts, port, user, password = self.get_params(config)
        self.redisPool = ConnectionPool(host=hosts, port=port)

    def create_mysql(self, database=None):
        """Build the MySQL pool; ``database`` overrides the configured name."""
        config = self.configs[self.MYSQL]
        hosts, port, user, password = self.get_params(config)
        if not database:
            database = config[self.DATABASE]
        dbconfig = {
            'database': database,
            'user': user,
            'password': password,
            'host': hosts,
            'port': port
        }
        self.mysqlPool = MySQLConnectionPool(pool_size=CNX_POOL_MAXSIZE,
                                             pool_name='POOL',
                                             **dbconfig)

    def create_mongo(self):
        """Build a lazy MongoClient (connect=False defers the handshake)."""
        config = self.configs[self.MONGO]
        hosts, port, user, password = self.get_params(config)
        self.mongoPool = MongoClient(host=hosts, port=port, connect=False)

    def get_redis(self):
        """Return a Redis client bound to the shared pool."""
        return Redis(connection_pool=self.redisPool)

    def get_cassandra(self):
        """Return a new Cassandra session on the configured keyspace."""
        return self.cassandraPool.connect(self.keyspace)

    def get_mysql(self):
        """Check a connection out of the MySQL pool."""
        return self.mysqlPool.get_connection()

    def get_mongo(self):
        """Return the shared MongoClient."""
        return self.mongoPool
Ejemplo n.º 23
0
class DbWrapperBase(ABC):
    def_spawn = 240

    def __init__(self, args):
        # ``args`` is the parsed application configuration; only the db*
        # fields and db_poolsize are read here.
        self.application_args = args
        self.host = args.dbip
        self.port = args.dbport
        self.user = args.dbusername
        self.password = args.dbpassword
        self.database = args.dbname
        self.pool = None
        self.pool_mutex = Lock()
        # Caps concurrent connection checkouts at the pool size; acquired in
        # execute()/executemany(), released in their finally blocks.
        self.connection_semaphore = Semaphore(
            self.application_args.db_poolsize)
        self.dbconfig = {
            "database": self.database,
            "user": self.user,
            "host": self.host,
            "password": self.password,
            "port": self.port
        }
        self._init_pool()

    def _init_pool(self):
        """Create the MySQL connection pool under the pool mutex.

        The mutex is held via ``with`` so it is released even when
        MySQLConnectionPool raises; the original acquire()/release() pair
        leaked the lock on failure, deadlocking every later caller.
        """
        logger.info("Connecting pool to DB")
        with self.pool_mutex:
            self.pool = MySQLConnectionPool(
                pool_name="db_wrapper_pool",
                pool_size=self.application_args.db_poolsize,
                **self.dbconfig)

    def _check_column_exists(self, table, column):
        query = ("SELECT count(*) "
                 "FROM information_schema.columns "
                 "WHERE table_name = %s "
                 "AND column_name = %s "
                 "AND table_schema = %s")
        vals = (
            table,
            column,
            self.database,
        )

        return int(self.execute(query, vals)[0][0])

    def _check_create_column(self, field):
        """Ensure ``field["table"]`` has column ``field["column"]`` of type
        ``field["ctype"]``, adding it via ALTER TABLE when missing.

        Exits the whole process (sys.exit(1)) when creation fails.
        """
        if self._check_column_exists(field["table"], field["column"]) == 1:
            return

        # Identifiers cannot be parameterized; ``field`` must come from
        # trusted in-code definitions, never user input.
        alter_query = ("ALTER TABLE {} "
                       "ADD COLUMN {} {}".format(field["table"],
                                                 field["column"],
                                                 field["ctype"]))

        self.execute(alter_query, commit=True)

        # Re-check to verify the ALTER actually took effect.
        if self._check_column_exists(field["table"], field["column"]) == 1:
            logger.info("Successfully added '{}.{}' column", field["table"],
                        field["column"])
            return
        else:
            logger.error("Couldn't create required column {}.{}'",
                         field["table"], field["column"])
            sys.exit(1)

    def close(self, conn, cursor):
        """
        A method used to close connection of mysql.
        :param conn:
        :param cursor:
        :return:
        """
        cursor.close()
        conn.close()

    def execute(self, sql, args=None, commit=False):
        """
        Execute a sql, it could be with args and with out args. The usage is
        similar with execute() function in module pymysql.
        :param sql: sql clause
        :param args: args need by sql clause.  NOTE: any *falsy* value (e.g.
            an empty tuple) is treated the same as "no args".
        :param commit: whether to commit
        :return: if commit, return the affected row count; else, return the
            fetched rows.  Returns None when execution raised (errors are
            logged and swallowed, never re-raised).
        """
        # Semaphore bounds concurrent checkouts to the pool size.
        self.connection_semaphore.acquire()
        conn = self.pool.get_connection()
        cursor = conn.cursor()

        # TODO: consider catching OperationalError
        # try:
        #     cursor = conn.cursor()
        # except OperationalError as e:
        #     logger.error("OperationalError trying to acquire a DB cursor: {}", str(e))
        #     conn.rollback()
        #     return None
        try:
            if args:
                cursor.execute(sql, args)
            else:
                cursor.execute(sql)
            if commit is True:
                # rowcount must be read before the cursor is closed below.
                affected_rows = cursor.rowcount
                conn.commit()
                return affected_rows
            else:
                res = cursor.fetchall()
                return res
        except mysql.connector.Error as err:
            logger.error("Failed executing query: {}", str(err))
            return None
        except Exception as e:
            logger.error("Unspecified exception in dbWrapper: {}", str(e))
            return None
        finally:
            # Always return the connection and release the semaphore, even on
            # error, so the pool can never leak.
            self.close(conn, cursor)
            self.connection_semaphore.release()

    def executemany(self, sql, args, commit=False):
        """
        Execute with many args. Similar with executemany() function in pymysql.
        args should be a sequence.
        :param sql: sql clause
        :param args: sequence of parameter tuples, one per execution
        :param commit: commit or not.
        :return: if commit, return None, else, return result.  Returns None
            when execution raised (errors are logged, never re-raised).
        """
        # get connection form connection pool instead of create one.
        self.connection_semaphore.acquire()
        conn = self.pool.get_connection()
        cursor = conn.cursor()

        try:
            cursor.executemany(sql, args)

            if commit is True:
                conn.commit()
                return None
            else:
                res = cursor.fetchall()
                return res
        except mysql.connector.Error as err:
            logger.error("Failed executing query: {}", str(err))
            return None
        except Exception as e:
            logger.error("Unspecified exception in dbWrapper: {}", str(e))
            return None
        finally:
            # Connection and semaphore are always released (see execute()).
            self.close(conn, cursor)
            self.connection_semaphore.release()

    # --- Abstract interface (part 1): schema-specific subclasses implement
    # --- these with the appropriate SQL for their map database flavour.
    @abstractmethod
    def auto_hatch_eggs(self):
        """
        Check the entire DB for unhatched level 5 eggs and updates the mon ID if there is only one
        possible raidmon
        """
        pass

    @abstractmethod
    def db_timestring_to_unix_timestamp(self, timestring):
        """
        Converts a DB timestring to a unix timestamp (seconds since epoch)
        """
        pass

    @abstractmethod
    def get_next_raid_hatches(self, delay_after_hatch, geofence_helper=None):
        """
        In order to build a priority queue, we need to be able to check for the next hatches of raid eggs
        The result may not be sorted by priority, to be done at a higher level!
        :return: unsorted list of next hatches within delay_after_hatch
        """
        pass

    @abstractmethod
    def submit_raid(self,
                    gym,
                    pkm,
                    lvl,
                    start,
                    end,
                    type,
                    raid_no,
                    capture_time,
                    unique_hash="123",
                    MonWithNoEgg=False):
        """
        Insert or update raid in DB and send webhook
        :return: if raid has all the required values = True, else False
        """
        pass

    @abstractmethod
    def read_raid_endtime(self, gym, raid_no, unique_hash="123"):
        """
        Check if a raid already has an endtime and return True/False appropriately
        :return: if raid has endtime = True, else False
        """
        pass

    @abstractmethod
    def get_raid_endtime(self, gym, raid_no, unique_hash="123"):
        """
        Retrieves the time the requested raid ends - if present
        :return: returns (Boolean, Value) with Value being the time or None, Boolean being True/False appropriately
        """
        pass

    @abstractmethod
    def raid_exist(self, gym, type, raid_no, unique_hash="123", mon=0):
        """
        Checks if a raid is already present in the DB
        :return: returns True/False indicating if a raid is already present in the database
        """
        pass

    @abstractmethod
    def refresh_times(self, gym, raid_no, capture_time, unique_hash="123"):
        """
        Update last_modified/last_scanned/updated of a gym
        """
        pass

    @abstractmethod
    def get_near_gyms(self, lat, lng, hash, raid_no, dist, unique_hash="123"):
        """
        Retrieve gyms around a given lat, lng within the given dist
        :return: returns list of gyms within dist sorted by distance
        """
        pass

    @abstractmethod
    def set_scanned_location(self, lat, lng, capture_time):
        """
        Update scannedlocation (in RM) of a given lat/lng
        """
        pass

    @abstractmethod
    def check_stop_quest(self, lat, lng):
        """
        Check the stop at the given lat/lng for a quest.
        NOTE(review): the original docstring ("Update scannedlocation...") was
        copy-pasted from set_scanned_location; semantics inferred from the
        method name - confirm against subclasses.
        """
        pass

    @abstractmethod
    def get_gym_infos(self, id=False):
        """
        Retrieve all the gyminfos from DB
        :return: returns dict containing all the gyminfos contained in the DB
        """
        pass

    @abstractmethod
    def gyms_from_db(self, geofence_helper):
        """
        Retrieve all the gyms valid within the area set by geofence_helper
        :return: numpy array with coords
        """
        pass

    @abstractmethod
    def update_encounters_from_db(self, geofence_helper, latest=0):
        """
        Retrieve all encountered ids inside the geofence.
        :return: the new value of latest and a dict like encounter_id: disappear_time
        """
        pass

    @abstractmethod
    def stops_from_db(self, geofence_helper):
        """
        Retrieve all the pokestops valid within the area set by geofence_helper
        :return: numpy array with coords
        """
        pass

    @abstractmethod
    def quests_from_db(self, GUID=None, timestamp=None):
        """
        Retrieve quests, optionally filtered by GUID and/or timestamp.
        NOTE(review): the original docstring ("numpy array with coords") was
        copy-pasted from stops_from_db - confirm the real return shape
        against subclasses.
        """
        pass

    # --- Abstract interface (part 2): proto ingestion, change feeds and
    # --- statistics, implemented by schema-specific subclasses.
    @abstractmethod
    def update_insert_weather(self,
                              cell_id,
                              gameplay_weather,
                              capture_time,
                              cloud_level=0,
                              rain_level=0,
                              wind_level=0,
                              snow_level=0,
                              fog_level=0,
                              wind_direction=0,
                              weather_daytime=0):
        """
        Updates the weather in a given cell_id
        """
        pass

    @abstractmethod
    def submit_mon_iv(self, origin, timestamp, encounter_proto):
        """
        Update/Insert a mon with IVs
        """
        pass

    @abstractmethod
    def submit_mons_map_proto(self, origin, map_proto, mon_ids_ivs):
        """
        Update/Insert mons from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_pokestops_map_proto(self, origin, map_proto):
        """
        Update/Insert pokestops from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_pokestops_details_map_proto(self, map_proto):
        """
        Update/Insert pokestop details from a GMO
        :param map_proto:
        :return:
        """
        pass

    @abstractmethod
    def submit_gyms_map_proto(self, origin, map_proto):
        """
        Update/Insert gyms from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_raids_map_proto(self, origin, map_proto):
        """
        Update/Insert raids from a map_proto dict
        """
        pass

    @abstractmethod
    def get_pokemon_spawns(self, hours):
        """
        Get Pokemon Spawns for dynamic rarity
        """
        pass

    @abstractmethod
    def submit_weather_map_proto(self, origin, map_proto, received_timestamp):
        """
        Update/Insert weather from a map_proto dict
        """
        pass

    @abstractmethod
    def download_gym_images(self):
        """Download gym images - inferred from the name (no docstring in the
        original); confirm against subclasses."""
        pass

    @abstractmethod
    def get_to_be_encountered(self, geofence_helper, min_time_left_seconds,
                              eligible_mon_ids):
        """Retrieve mons still to be encountered - inferred from the name;
        confirm against subclasses."""
        pass

    @abstractmethod
    def stop_from_db_without_quests(self, geofence_helper):
        """Retrieve pokestops without a quest recorded - inferred from the
        name; confirm against subclasses."""
        pass

    @abstractmethod
    def get_raids_changed_since(self, timestamp):
        """Raid rows changed since ``timestamp`` - inferred from the name."""
        pass

    @abstractmethod
    def get_mon_changed_since(self, timestamp):
        """Mon rows changed since ``timestamp`` - inferred from the name."""
        pass

    @abstractmethod
    def get_quests_changed_since(self, timestamp):
        """Quest rows changed since ``timestamp`` - inferred from the name."""
        pass

    @abstractmethod
    def get_gyms_changed_since(self, timestamp):
        """Gym rows changed since ``timestamp`` - inferred from the name."""
        pass

    @abstractmethod
    def get_weather_changed_since(self, timestamp):
        """Weather rows changed since ``timestamp`` - inferred from the name."""
        pass

    # NOTE(review): missing @abstractmethod, unlike its statistics siblings -
    # subclasses silently inherit this no-op; confirm whether intentional.
    def statistics_get_pokemon_count(self, days):
        pass

    @abstractmethod
    def statistics_get_gym_count(self, days):
        """Gym count statistics over ``days`` - inferred from the name."""
        pass

    @abstractmethod
    def statistics_get_stop_quest(self, days):
        """Stop/quest statistics over ``days`` - inferred from the name."""
        pass

    def create_hash_database_if_not_exists(self):
        """Create the ``trshash`` table (crop/image hash store) when absent.

        :return: True (unconditionally)
        """
        logger.debug(
            "DbWrapperBase::create_hash_database_if_not_exists called")
        logger.debug('Creating hash db in database')

        # Adjacent string literals concatenate implicitly; the SQL text is
        # identical to the original.
        ddl = (' Create table if not exists trshash ( '
               ' hashid MEDIUMINT NOT NULL AUTO_INCREMENT, '
               ' hash VARCHAR(255) NOT NULL, '
               ' type VARCHAR(10) NOT NULL, '
               ' id VARCHAR(255) NOT NULL, '
               ' count INT(10) NOT NULL DEFAULT 1, '
               ' modify DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, '
               ' PRIMARY KEY (hashid))')
        self.execute(ddl, commit=True)

        return True

    def create_quest_database_if_not_exists(self):
        """Create the ``trs_quest`` table when absent.

        :return: True (unconditionally)
        """
        logger.debug(
            "DbWrapperBase::create_quest_database_if_not_exists called")
        logger.debug('Creating hash db in database')

        # Adjacent string literals concatenate implicitly; the SQL text is
        # identical to the original.
        ddl = (' Create table if not exists trs_quest ( '
               ' GUID varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,'
               ' quest_type tinyint(3) NOT NULL, '
               ' quest_timestamp int(11) NOT NULL,'
               ' quest_stardust smallint(4) NOT NULL,'
               ' quest_pokemon_id smallint(4) NOT NULL,'
               ' quest_reward_type smallint(3) NOT NULL,'
               ' quest_item_id smallint(3) NOT NULL,'
               ' quest_item_amount tinyint(2) NOT NULL,'
               ' quest_target tinyint(3) NOT NULL,'
               ' quest_condition varchar(500), '
               ' PRIMARY KEY (GUID), '
               ' KEY quest_type (quest_type))')
        self.execute(ddl, commit=True)

        return True

    def check_for_hash(self,
                       imghash,
                       type,
                       raid_no,
                       distance,
                       unique_hash="123"):
        """Look ``imghash`` up in trshash by hamming distance.

        Returns:
            (True, id, hash, count, modify) for the closest stored hash within
            ``distance`` - the ORDER BY guarantees the first row is closest,
            and the loop below returns on that first row - or
            (False, None, None, None, None) when nothing matches.
        """
        logger.debug("DbWrapperBase::check_for_hash called")
        logger.debug("[Crop: {} ({})] check_for_hash: Checking for hash in db",
                     str(raid_no), str(unique_hash))

        query = (
            "SELECT id, hash, "
            "BIT_COUNT( "
            "CONVERT((CONV(hash, 16, 10)), UNSIGNED) "
            "^ "
            "CONVERT((CONV(%s, 16, 10)), UNSIGNED)) as hamming_distance, type, count, modify "
            "FROM trshash "
            "HAVING hamming_distance < %s AND type = %s "
            "ORDER BY hamming_distance ASC")
        vals = (str(imghash), distance, str(type))

        res = self.execute(query, vals)
        number_of_rows = len(res)

        logger.debug(
            "[Crop: {} ({})] check_for_hash: Found hashes in database: {}",
            str(raid_no), str(unique_hash), str(number_of_rows))

        if number_of_rows > 0:
            logger.debug("[Crop: {} ({})] check_for_hash: returning found ID",
                         str(raid_no), str(unique_hash))
            # Returns inside the first loop iteration: only the closest match
            # is ever reported.
            for row in res:
                logger.debug("[Crop: {} ({})] check_for_hash: ID = {}",
                             str(raid_no), str(unique_hash), str(row[0]))
                logger.debug("DbWrapperBase::check_for_hash done")
                return True, row[0], row[1], row[4], row[5]
        else:
            logger.debug(
                "[Crop: {} ({})] check_for_hash: No matching hash found",
                str(raid_no), str(unique_hash))
            logger.debug("DbWrapperBase::check_for_hash done")
            return False, None, None, None, None

    def get_all_hash(self, type):
        """Return every trshash row (id, hash, type, count, modify) whose
        type equals ``type``."""
        logger.debug("DbWrapperBase::get_all_hash called")
        sql = ("SELECT id, hash, type, count, modify "
               "FROM trshash "
               "HAVING type = %s")
        logger.debug(sql)
        return self.execute(sql, (str(type), ))

    def insert_hash(self, imghash, type, id, raid_no, unique_hash="123"):
        """Insert ``imghash`` into trshash, or bump its counter when an
        equivalent hash (hamming distance < 4) is already stored.

        :return: True (unconditionally)
        """
        logger.debug("DbWrapperBase::insert_hash called")
        # Both branches of the original ``if type == 'raid'`` assigned the
        # same value, so the conditional was dead code.
        distance = 4

        double_check = self.check_for_hash(imghash, type, raid_no, distance)

        # TODO: consider INSERT... ON DUPLICATE KEY UPDATE ??
        if double_check[0]:
            logger.debug(
                "[Crop: {} ({})] insert_hash: Already in DB, updating counter",
                str(raid_no), str(unique_hash))
            query = ("UPDATE trshash "
                     "SET count=count+1, modify=NOW() "
                     "WHERE hash=%s")
            vals = (str(imghash), )
        else:
            query = ("INSERT INTO trshash (hash, type, id) "
                     "VALUES (%s, %s, %s)")
            vals = (str(imghash), str(type), id)

        self.execute(query, vals, commit=True)
        logger.debug("DbWrapperBase::insert_hash done")
        return True

    def delete_hash_table(self, ids, type, mode=' not in ', field=' id '):
        """Delete trshash rows whose *field* is (or is not) in *ids*.

        :param ids: value compared against *field*; bound as ONE parameter,
            so a comma-separated string does NOT expand into an SQL value
            list - presumably callers pass a single value; verify against callers
        :param type: hash type, matched with LIKE
        :param mode: SQL operator fragment inserted verbatim (default ' not in ')
        :param field: column name inserted verbatim into the WHERE clause
        :return: True

        NOTE(review): *mode* and *field* are concatenated straight into the
        SQL text - only trusted, hard-coded values must ever be passed here.
        """
        logger.debug("DbWrapperBase::delete_hash_table called")
        logger.debug('Deleting old Hashes of type {}', type)
        logger.debug('Valid ids: {}', ids)

        query = ("DELETE FROM trshash "
                 "WHERE " + field + " " + mode + " (%s) "
                 "AND type like %s")
        vals = (str(ids), str(type))
        logger.debug(query)

        self.execute(query, vals, commit=True)
        return True

    def clear_hash_gyms(self, mons):
        """Delete raid hashes whose mon id is not in the given list.

        :param mons: pipe-separated mon ids, e.g. '3|6|9'
        """
        logger.debug("DbWrapperBase::clear_hash_gyms called")
        data = []
        # Fix: the LIKE pattern used to be embedded in the SQL text as
        # '%\"mon\":\"%s\"%', mixing literal % wildcards with the %s
        # placeholder, which breaks the connector's parameter substitution.
        # Bind the complete pattern as a parameter instead.
        query = ("SELECT hashid "
                 "FROM trshash "
                 "WHERE id LIKE %s AND type='raid'")

        mon_split = mons.split('|')
        for mon in mon_split:
            args = ('%"mon":"{}"%'.format(int(mon)), )
            res = self.execute(query, args)
            for dbid in res:
                data.append(int(dbid[0]))

        _mon_list = ','.join(map(str, data))
        logger.debug('clearHashGyms: Read Raid Hashes with known Mons')
        if len(data) > 0:
            # Interpolation is safe here: _mon_list is built from ints only.
            query = ('DELETE FROM trshash ' + ' WHERE hashid NOT IN (' +
                     _mon_list + ')' + ' AND type=\'raid\'')
            self.execute(query, commit=True)
        logger.info('clearHashGyms: Deleted Raidhashes with unknown mons')

    def getspawndef(self, spawn_id):
        """Map each spawnpoint id in *spawn_id* to its stored spawndef value.

        :param spawn_id: iterable of spawnpoint ids; falsy input returns False
        :return: dict {spawnpoint: spawndef}, or False for empty input
        """
        if not spawn_id:
            return False
        logger.debug("DbWrapperBase::getspawndef called")

        id_csv = ",".join(str(sid) for sid in spawn_id)

        # Ids are ints, so interpolating the csv into the query is safe.
        query = ("SELECT spawnpoint, spawndef "
                 "FROM trs_spawn where spawnpoint in (%s)" % (id_csv))

        rows = self.execute(query)
        return {spawnpoint: spawndef for (spawnpoint, spawndef) in rows}

    def submit_spawnpoints_map_proto(self, origin, map_proto):
        """Upsert all spawnpoints found in a GMO proto into trs_spawn.

        Spawnpoints with a plausible time_till_hidden get last_scanned and a
        calculated despawn minute:second; the rest are recorded as not yet
        fully scanned.

        :param origin: worker name, used for logging only
        :param map_proto: dict with a 'cells' list, each cell containing
            'wild_pokemon' entries - assumes each entry has 'spawnpoint_id'
            (hex string) and 'time_till_hidden'; TODO confirm proto schema
        :return: False when 'cells' is missing, otherwise None
        """
        logger.debug(
            "DbWrapperBase::submit_spawnpoints_map_proto called with data received by {}",
            str(origin))
        cells = map_proto.get("cells", None)
        if cells is None:
            return False
        spawnpoint_args, spawnpoint_args_unseen = [], []
        spawnids = []

        # Upsert for spawnpoints seen with a usable despawn timer.
        query_spawnpoints = (
            "INSERT INTO trs_spawn (spawnpoint, latitude, longitude, earliest_unseen, "
            "last_scanned, spawndef, calc_endminsec) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE last_scanned=VALUES(last_scanned), "
            "earliest_unseen=LEAST(earliest_unseen, VALUES(earliest_unseen)), "
            "spawndef=VALUES(spawndef), calc_endminsec=VALUES(calc_endminsec)"
            "")

        # Upsert for spawnpoints without a usable despawn timer.
        query_spawnpoints_unseen = (
            "INSERT INTO trs_spawn (spawnpoint, latitude, longitude, earliest_unseen, last_non_scanned, spawndef) "
            "VALUES (%s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE spawndef=VALUES(spawndef), last_non_scanned=VALUES(last_non_scanned)"
            "")

        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        dt = datetime.now()

        # First pass: collect all spawnpoint ids (hex -> int) so their
        # current spawndefs can be fetched in one query.
        for cell in cells:
            for wild_mon in cell["wild_pokemon"]:
                spawnids.append(int(str(wild_mon['spawnpoint_id']), 16))

        spawndef = self.getspawndef(spawnids)

        # Second pass: build the argument tuples for both upserts.
        for cell in cells:
            for wild_mon in cell["wild_pokemon"]:
                spawnid = int(str(wild_mon['spawnpoint_id']), 16)
                # Derive lat/lng from the S2 cell encoded in the id.
                lat, lng, alt = S2Helper.get_position_from_cell(
                    int(str(wild_mon['spawnpoint_id']) + '00000', 16))
                despawntime = wild_mon['time_till_hidden']

                minpos = self._get_min_pos_in_array()
                # TODO: retrieve the spawndefs by a single executemany and pass that...

                # Update the quarter-hour bit group in the known spawndef,
                # falling back to the class default for new spawnpoints.
                spawndef_ = spawndef.get(spawnid, False)
                if spawndef_:
                    newspawndef = self._set_spawn_see_minutesgroup(
                        spawndef_, minpos)
                else:
                    newspawndef = self._set_spawn_see_minutesgroup(
                        DbWrapperBase.def_spawn, minpos)

                last_scanned = None
                last_non_scanned = None

                # time_till_hidden is in milliseconds; values outside
                # (0, 90000] are treated as unknown/bogus.
                if 0 <= int(despawntime) <= 90000:
                    fulldate = dt + timedelta(milliseconds=despawntime)
                    earliest_unseen = int(despawntime)
                    last_scanned = now
                    calcendtime = fulldate.strftime("%M:%S")

                    spawnpoint_args.append(
                        (spawnid, lat, lng, earliest_unseen, last_scanned,
                         newspawndef, calcendtime))

                else:
                    # Unknown despawn: store a sentinel earliest_unseen.
                    earliest_unseen = 99999999
                    last_non_scanned = now
                    calcendtime = None

                    spawnpoint_args_unseen.append(
                        (spawnid, lat, lng, earliest_unseen, last_non_scanned,
                         newspawndef))

        self.executemany(query_spawnpoints, spawnpoint_args, commit=True)
        self.executemany(query_spawnpoints_unseen,
                         spawnpoint_args_unseen,
                         commit=True)

    def submitspsightings(self, spid, encid, secs):
        """Record one spawnpoint sighting; store tth_secs only when plausible.

        :param spid: spawnpoint id
        :param encid: encounter id
        :param secs: time-till-hidden in ms; kept only if within [0, 90000]
        """
        logger.debug("DbWrapperBase::submitspsightings called")
        plausible_tth = 0 <= int(secs) <= 90000

        if plausible_tth:
            query = (
                "INSERT INTO trs_spawnsightings (encounter_id, spawnpoint_id, tth_secs) "
                "VALUES (%s, %s, %s)")
            vals = (encid, spid, secs)
        else:
            query = (
                "INSERT INTO trs_spawnsightings (encounter_id, spawnpoint_id) "
                "VALUES (%s, %s)")
            vals = (encid, spid)

        self.execute(query, vals, commit=True)

    def get_spawn_infos(self):
        """Log spawnpoint statistics and return the detected-endtime percentage.

        :return: percentage (float) of spawnpoints with a known despawn time
        """
        logger.debug("DbWrapperBase::get_spawn_infos called")
        query = (
            "SELECT count(spawnpoint), "
            "ROUND ( "
            "(COUNT(calc_endminsec) + 1) / (COUNT(*) + 1) * 100, 2) AS percent "
            "FROM trs_spawn")

        rows = self.execute(query)
        total_count, detected_percent = rows[0][0], rows[0][1]
        logger.info(
            "Spawnpoint statistics: {}, Spawnpoints with detected endtime: {}",
            str(total_count), str(detected_percent))

        return float(detected_percent)

    def get_detected_spawns(self, geofence_helper):
        """Return coordinates of all known spawnpoints.

        :param geofence_helper: optional helper to filter coordinates
        :return: geofenced coordinate list when a helper is given, otherwise
            a numpy array of shape (n, 2)
        """
        logger.debug("DbWrapperBase::get_detected_spawns called")

        query = ("SELECT latitude, longitude " "FROM trs_spawn")
        logger.debug(
            "DbWrapperBase::get_detected_spawns executing select query")
        res = self.execute(query)
        logger.debug("DbWrapperBase::get_detected_spawns result of query: {}",
                     str(res))
        list_of_coords = [[latitude, longitude]
                          for (latitude, longitude) in res]

        if geofence_helper is None:
            logger.debug(
                "DbWrapperBase::get_detected_spawns converting to numpy")
            coords = np.zeros(shape=(len(list_of_coords), 2))
            for idx, (lat, lng) in enumerate(list_of_coords):
                coords[idx][0] = lat
                coords[idx][1] = lng
            return coords

        logger.debug(
            "DbWrapperBase::get_detected_spawns applying geofence")
        geofenced_coords = geofence_helper.get_geofenced_coordinates(
            list_of_coords)
        logger.debug(geofenced_coords)
        return geofenced_coords

    def get_undetected_spawns(self, geofence_helper):
        """Return coordinates of spawnpoints without a known despawn time.

        :param geofence_helper: optional helper to filter coordinates
        :return: geofenced coordinate list when a helper is given, otherwise
            a numpy array of shape (n, 2)
        """
        logger.debug("DbWrapperBase::get_undetected_spawns called")

        query = ("SELECT latitude, longitude "
                 "FROM trs_spawn "
                 "WHERE calc_endminsec is NULL")
        logger.debug(
            "DbWrapperBase::get_undetected_spawns executing select query")
        res = self.execute(query)
        logger.debug(
            "DbWrapperBase::get_undetected_spawns result of query: {}",
            str(res))
        list_of_coords = [[latitude, longitude]
                          for (latitude, longitude) in res]

        if geofence_helper is None:
            logger.debug(
                "DbWrapperBase::get_undetected_spawns converting to numpy")
            coords = np.zeros(shape=(len(list_of_coords), 2))
            for idx, (lat, lng) in enumerate(list_of_coords):
                coords[idx][0] = lat
                coords[idx][1] = lng
            return coords

        logger.debug(
            "DbWrapperBase::get_undetected_spawns applying geofence")
        geofenced_coords = geofence_helper.get_geofenced_coordinates(
            list_of_coords)
        logger.debug(geofenced_coords)
        return geofenced_coords

    def get_detected_endtime(self, spawn_id):
        """Return the 'MM:SS' despawn string for a spawnpoint, or False.

        :param spawn_id: spawnpoint id to look up
        :return: calc_endminsec as str, or False when unknown/absent
        """
        logger.debug("DbWrapperBase::get_detected_endtime called")

        query = ("SELECT calc_endminsec "
                 "FROM trs_spawn "
                 "WHERE spawnpoint=%s")
        args = (spawn_id, )

        rows = self.execute(query, args)

        if rows and len(rows) > 0 and rows[0][0]:
            return str(rows[0][0])
        return False

    def _gen_endtime(self, known_despawn):
        hrmi = known_despawn.split(':')
        known_despawn = datetime.now().replace(hour=0,
                                               minute=int(hrmi[0]),
                                               second=int(hrmi[1]),
                                               microsecond=0)
        now = datetime.now()
        if now.minute <= known_despawn.minute:
            despawn = now + timedelta(
                minutes=known_despawn.minute - now.minute,
                seconds=known_despawn.second - now.second)
        elif now.minute > known_despawn.minute:
            despawn = now + timedelta(hours=1) - timedelta(
                minutes=(now.minute - known_despawn.minute),
                seconds=now.second - known_despawn.second)
        else:
            return None
        return time.mktime(despawn.timetuple())

    def _get_min_pos_in_array(self):
        min = datetime.now().strftime("%M")

        if 0 <= int(min) < 15:
            pos = 4
        elif 15 <= int(min) < 30:
            pos = 5
        elif 30 <= int(min) < 45:
            pos = 6
        elif 45 <= int(min) < 60:
            pos = 7
        else:
            pos = None

        self.__globaldef = pos

        return pos

    def _set_spawn_see_minutesgroup(self, spawndef, pos):
        # b = BitArray([int(digit) for digit in bin(spawndef)[2:]])
        b = BitArray(uint=spawndef, length=8)
        if pos == 4:
            b[0] = 0
            b[4] = 1
        if pos == 5:
            b[1] = 0
            b[5] = 1
        if pos == 6:
            b[2] = 0
            b[6] = 1
        if pos == 7:
            b[3] = 0
            b[7] = 1
        return b.uint

    def check_and_create_spawn_tables(self):
        """Create the trs_spawn and trs_spawnsightings tables if missing.

        Idempotent: both statements use CREATE TABLE IF NOT EXISTS, so this
        is safe to call on every startup.
        """
        logger.debug("DbWrapperBase::check_and_create_spawn_tables called")

        # Spawnpoint master table; `spawnpoint` is unique (upsert target).
        query_trs_spawn = (
            'CREATE TABLE IF NOT EXISTS `trs_spawn` ('
            '`spawnpoint` varchar(16) COLLATE utf8mb4_unicode_ci NOT NULL, '
            '`latitude` double NOT NULL, '
            '`longitude` double NOT NULL, '
            '`spawndef` int(11) NOT NULL DEFAULT "240", '
            '`earliest_unseen` int(6) NOT NULL, '
            '`last_scanned` datetime DEFAULT NULL, '
            '`first_detection` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, '
            '`last_non_scanned` datetime DEFAULT NULL, '
            '`calc_endminsec` varchar(5) COLLATE utf8mb4_unicode_ci DEFAULT NULL, '
            'UNIQUE KEY `spawnpoint_2` (`spawnpoint`), '
            'KEY `spawnpoint` (`spawnpoint`) '
            ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;'
        )

        # Individual sighting log, one row per encounter at a spawnpoint.
        query_trs_spawnsightings = (
            'CREATE TABLE IF NOT EXISTS `trs_spawnsightings` ('
            '`id` int(11) NOT NULL AUTO_INCREMENT, '
            '`encounter_id` bigint(20) UNSIGNED NOT NULL, '
            '`spawnpoint_id` bigint(20) UNSIGNED NOT NULL, '
            '`scan_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, '
            '`tth_secs` int(11) DEFAULT NULL, '
            'PRIMARY KEY (`id`), '
            'KEY `trs_spawnpointdd_spawnpoint_id` (`spawnpoint_id`) '
            ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;'
        )

        self.execute(query_trs_spawn, commit=True)
        self.execute(query_trs_spawnsightings, commit=True)

    def download_spawns(self):
        """Dump every spawnpoint as a pretty-printed JSON string.

        :return: JSON object string keyed by spawnpoint id
        """
        logger.debug("dbWrapper::download_spawns")

        query = ("SELECT spawnpoint, latitude, longitude, calc_endminsec, "
                 "spawndef, last_scanned "
                 "FROM `trs_spawn`")

        spawn = {
            spawnid: {
                'lat': lat,
                'lon': lon,
                'endtime': endtime,
                'spawndef': spawndef,
                'lastscan': str(last_scanned)
            }
            for (spawnid, lat, lon, endtime, spawndef,
                 last_scanned) in self.execute(query)
        }

        return str(json.dumps(spawn, indent=4, sort_keys=True))

    def retrieve_next_spawns(self, geofence_helper):
        """
        Retrieve spawnpoints that are about to (re)spawn, as
        (unixtimestamp, Location) tuples.
        NOTE(review): the docstring previously said "next 300 seconds", but
        the SQL window below spans roughly the next 10 minutes - confirm the
        intended window.
        :param geofence_helper: optional helper; coordinates outside the
            include-geofence are skipped
        :return: list of (timestamp, Location) tuples
        """
        current_time_of_day = datetime.now().replace(microsecond=0)

        logger.debug("DbWrapperBase::retrieve_next_spawns called")
        # Select spawns whose despawn minute:second lies in a 10-minute
        # window offset by the spawn duration (60 min when spawndef=15,
        # otherwise 30), i.e. spawns that will appear shortly.
        query = (
            "SELECT latitude, longitude, spawndef, calc_endminsec FROM trs_spawn WHERE calc_endminsec IS NOT NULL and "
            "DATE_FORMAT(STR_TO_DATE(calc_endminsec,'%i:%s'),'%i:%s') between DATE_FORMAT(DATE_ADD(NOW(), "
            "INTERVAL if(spawndef=15,60,30) MINUTE),'%i:%s') and DATE_FORMAT(DATE_ADD(NOW(), "
            "INTERVAL if(spawndef=15,70,40) MINUTE),'%i:%s')")
        res = self.execute(query)
        next_up = []
        current_time = time.time()
        for (latitude, longitude, spawndef, calc_endminsec) in res:
            if geofence_helper and not geofence_helper.is_coord_inside_include_geofence(
                [latitude, longitude]):
                continue
            # calc_endminsec is 'MM:SS' within the hour; project it onto a
            # concrete datetime, rolling into the next hour when the minute
            # has already passed.
            endminsec_split = calc_endminsec.split(":")
            minutes = int(endminsec_split[0])
            seconds = int(endminsec_split[1])
            temp_date = current_time_of_day.replace(minute=minutes,
                                                    second=seconds)
            if minutes < datetime.now().minute:
                temp_date = temp_date + timedelta(hours=1)

            if temp_date < current_time_of_day:
                # spawn has already happened, we should've added it in the past, let's move on
                # TODO: consider crosschecking against current mons...
                continue

            spawn_duration_minutes = 60 if spawndef == 15 else 30

            # Spawn start = despawn time minus the spawn duration.
            timestamp = time.mktime(temp_date.timetuple()) - \
                spawn_duration_minutes * 60
            # check if we calculated a time in the past, if so, add an hour to it...
            timestamp = timestamp + 60 * 60 if timestamp < current_time else timestamp
            # TODO: consider the following since I am not sure if the prio Q clustering handles stuff properly yet
            # if timestamp >= current_time + 600:
            #     # let's skip monspawns that are more than 10minutes in the future
            #     continue
            next_up.append((timestamp, Location(latitude, longitude)))
        return next_up

    def submit_quest_proto(self, map_proto):
        """Parse a quest proto and upsert it into trs_quest.

        :param map_proto: dict expected to contain 'fort_id' and a
            'challenge_quest' -> 'quest' structure
        :return: False when fort_id or challenge_quest is missing, otherwise
            True (also True when the quest carried no rewards and nothing
            was written)
        """
        logger.debug("DbWrapperBase::submit_quest_proto called")
        fort_id = map_proto.get("fort_id", None)
        if fort_id is None:
            return False
        if 'challenge_quest' not in map_proto:
            return False
        quest_type = map_proto['challenge_quest']['quest'].get(
            "quest_type", None)
        quest_template = map_proto['challenge_quest']['quest'].get(
            "template_id", None)
        # Only quests that actually carry rewards are persisted.
        # NOTE(review): the reward extraction below indexes
        # quest_rewards[0]['item'] and [0]['pokemon_encounter'] directly and
        # would raise KeyError if a reward lacks those keys - presumably the
        # proto always includes them; verify against real payloads.
        if map_proto['challenge_quest']['quest'].get("quest_rewards", None):
            rewardtype = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0].get("type", None)
            reward = map_proto['challenge_quest']['quest'].get(
                "quest_rewards", None)
            item = map_proto['challenge_quest']['quest']['quest_rewards'][0][
                'item'].get("item", None)
            itemamount = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0]['item'].get("amount", None)
            stardust = map_proto['challenge_quest']['quest']['quest_rewards'][
                0].get("stardust", None)
            pokemon_id = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0]['pokemon_encounter'].get(
                    "pokemon_id", None)
            target = map_proto['challenge_quest']['quest']['goal'].get(
                "target", None)
            condition = map_proto['challenge_quest']['quest']['goal'].get(
                "condition", None)

            # Human-readable task description derived from type/condition/target.
            task = questtask(int(quest_type), str(condition), int(target))

            query_quests = (
                "INSERT INTO trs_quest (GUID, quest_type, quest_timestamp, quest_stardust, quest_pokemon_id, "
                "quest_reward_type, quest_item_id, quest_item_amount, quest_target, quest_condition, quest_reward, "
                "quest_task, quest_template) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                "ON DUPLICATE KEY UPDATE quest_type=VALUES(quest_type), quest_timestamp=VALUES(quest_timestamp), "
                "quest_stardust=VALUES(quest_stardust), quest_pokemon_id=VALUES(quest_pokemon_id), "
                "quest_reward_type=VALUES(quest_reward_type), quest_item_id=VALUES(quest_item_id), "
                "quest_item_amount=VALUES(quest_item_amount), quest_target=VALUES(quest_target), "
                "quest_condition=VALUES(quest_condition), quest_reward=VALUES(quest_reward), "
                "quest_task=VALUES(quest_task), quest_template=VALUES(quest_template)"
            )
            vals = (fort_id, quest_type, time.time(), stardust, pokemon_id,
                    rewardtype, item, itemamount, target, str(condition),
                    str(reward), task, quest_template)
            logger.debug(
                "DbWrapperBase::submit_quest_proto submitted quest typ {} at stop {}",
                str(quest_type), str(fort_id))
            self.execute(query_quests, vals, commit=True)

        return True

    def create_status_database_if_not_exists(self):
        """Create the trs_status worker-status table if it does not exist.

        Idempotent; `origin` (the worker name) is the primary key, which the
        save_status/save_last_* upserts rely on.

        :return: True
        """
        logger.debug(
            "DbWrapperBase::create_status_database_if_not_exists called")

        query = (' Create table if not exists trs_status (  '
                 'origin VARCHAR(50) NOT NULL , '
                 ' currentPos VARCHAR(50) NULL DEFAULT NULL, '
                 ' lastPos VARCHAR(50) NULL DEFAULT NULL, '
                 ' routePos INT(11) NULL DEFAULT NULL, '
                 ' routeMax INT(11) NULL DEFAULT NULL, '
                 ' routemanager VARCHAR(255) NULL DEFAULT NULL, '
                 ' rebootCounter INT(11) NULL DEFAULT NULL, '
                 ' lastProtoDateTime VARCHAR(50) NULL DEFAULT NULL, '
                 ' lastPogoRestart VARCHAR(50) NULL DEFAULT NULL, '
                 ' init TEXT NOT NULL, '
                 ' rebootingOption TEXT NOT NULL, '
                 ' restartCounter TEXT NOT NULL, '
                 ' PRIMARY KEY (origin))')

        self.execute(query, commit=True)

        return True

    def create_usage_database_if_not_exists(self):
        """Create the trs_usage resource-usage table if it does not exist.

        Idempotent; rows are appended by insert_usage().

        :return: True
        """
        logger.debug(
            "DbWrapperBase::create_usage_database_if_not_exists called")

        query = ('CREATE TABLE if not exists trs_usage ( '
                 'usage_id INT(10) AUTO_INCREMENT , '
                 'instance varchar(100) NULL DEFAULT NULL, '
                 'cpu FLOAT NULL DEFAULT NULL , '
                 'memory FLOAT NULL DEFAULT NULL , '
                 'garbage INT(5) NULL DEFAULT NULL , '
                 'timestamp INT(11) NULL DEFAULT NULL, '
                 'PRIMARY KEY (usage_id))')

        self.execute(query, commit=True)

        return True

    def insert_usage(self, instance, cpu, mem, garbage, timestamp):
        """Persist one resource-usage sample for an instance.

        :param instance: instance name
        :param cpu: cpu usage value
        :param mem: memory usage value
        :param garbage: garbage counter
        :param timestamp: unix timestamp of the sample
        """
        logger.debug("dbWrapper::insert_usage")

        query = (
            "INSERT into trs_usage (instance, cpu, memory, garbage, timestamp) VALUES "
            "(%s, %s, %s, %s, %s)")
        self.execute(query, (instance, cpu, mem, garbage, timestamp),
                     commit=True)

        return

    def save_status(self, data):
        """Upsert a worker status row keyed by its origin.

        :param data: dict with Origin, CurrentPos, LastPos, RoutePos,
            RouteMax, Routemanager, RebootCounter, LastProtoDateTime, Init,
            RebootingOption and RestartCounter entries
        """
        logger.debug("dbWrapper::save_status")

        query = (
            "INSERT into trs_status (origin, currentPos, lastPos, routePos, routeMax, "
            "routemanager, rebootCounter, lastProtoDateTime, "
            "init, rebootingOption, restartCounter) values "
            "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            "ON DUPLICATE KEY UPDATE currentPos=VALUES(currentPos), "
            "lastPos=VALUES(lastPos), routePos=VALUES(routePos), "
            "routeMax=VALUES(routeMax), routemanager=VALUES(routemanager), "
            "rebootCounter=VALUES(rebootCounter), lastProtoDateTime=VALUES(lastProtoDateTime), "
            "init=VALUES(init), rebootingOption=VALUES(rebootingOption), restartCounter=VALUES(restartCounter)"
        )
        # Positions are stringified for storage; the rest is passed through.
        vals = tuple([
            data["Origin"],
            str(data["CurrentPos"]),
            str(data["LastPos"]),
            data["RoutePos"],
            data["RouteMax"],
            data["Routemanager"],
            data["RebootCounter"],
            data["LastProtoDateTime"],
            data["Init"],
            data["RebootingOption"],
            data["RestartCounter"],
        ])
        self.execute(query, vals, commit=True)
        return

    def save_last_reboot(self, origin):
        """Record the current time as *origin*'s last pogo reboot and bump the counter."""
        logger.debug("dbWrapper::save_last_reboot")
        reboot_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        query = (
            "insert into trs_status(origin, lastPogoReboot, globalrebootcount) "
            "values (%s, %s, %s) "
            "ON DUPLICATE KEY UPDATE lastPogoReboot=VALUES(lastPogoReboot), globalrebootcount=(globalrebootcount+1)"
        )

        self.execute(query, (origin, reboot_time, 1), commit=True)
        return

    def save_last_restart(self, origin):
        """Record the current time as *origin*'s last pogo restart and bump the counter."""
        logger.debug("dbWrapper::save_last_restart")
        restart_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        query = (
            "insert into trs_status(origin, lastPogoRestart, globalrestartcount) "
            "values (%s, %s, %s) "
            "ON DUPLICATE KEY UPDATE lastPogoRestart=VALUES(lastPogoRestart), globalrestartcount=(globalrestartcount+1)"
        )

        self.execute(query, (origin, restart_time, 1), commit=True)
        return

    def download_status(self):
        """Return all worker status rows as a pretty-printed JSON string."""
        logger.debug("dbWrapper::download_status")

        query = (
            "SELECT origin, currentPos, lastPos, routePos, routeMax, "
            "routemanager, rebootCounter, lastProtoDateTime, lastPogoRestart, "
            "init, rebootingOption, restartCounter, globalrebootcount, globalrestartcount, lastPogoReboot "
            "FROM trs_status")

        # Column names in the exact order selected above.
        fields = ("origin", "currentPos", "lastPos", "routePos", "routeMax",
                  "routemanager", "rebootCounter", "lastProtoDateTime",
                  "lastPogoRestart", "init", "rebootingOption",
                  "restartCounter", "globalrebootcount", "globalrestartcount",
                  "lastPogoReboot")
        # These two values are stringified for JSON output.
        stringified = {"lastProtoDateTime", "lastPogoRestart"}

        workerstatus = []
        for row in self.execute(query):
            status = {
                key: (str(value) if key in stringified else value)
                for key, value in zip(fields, row)
            }
            workerstatus.append(status)

        return str(json.dumps(workerstatus, indent=4, sort_keys=True))

    def statistics_get_quests_count(self, days):
        """Count collected quests per hour, optionally limited to the last *days* days.

        :param days: number of days to look back; falsy means no limit
        :return: result rows (Timestamp, Count) from execute()
        """
        logger.debug('Fetching quests count from db')
        query_date = "unix_timestamp(DATE_FORMAT(FROM_UNIXTIME(quest_timestamp), '%y-%m-%d %k:00:00')) * 1000 " \
            "as Timestamp"

        query_where = ''
        if days:
            # Avoid rebinding the parameter: compute the cutoff separately.
            cutoff = datetime.utcnow() - timedelta(days=days)
            query_where = ' WHERE FROM_UNIXTIME(quest_timestamp) > \'%s\' ' % str(
                cutoff)

        query = (
            "SELECT %s, count(GUID) as Count  FROM trs_quest %s "
            "group by day(FROM_UNIXTIME(quest_timestamp)), hour(FROM_UNIXTIME(quest_timestamp))"
            "order by quest_timestamp" % (str(query_date), str(query_where)))

        return self.execute(query)

    def statistics_get_usage_count(self, minutes=120, instance=None):
        """Fetch usage samples from the last *minutes*, optionally for one instance.

        :param minutes: look-back window in minutes; falsy means no limit
        :param instance: optional instance name filter
        :return: result rows (cpu, memory, garbage, timestamp, instance)
        """
        logger.debug('Fetching usage from db')

        query_where = ''
        if minutes:
            cutoff = datetime.now() - timedelta(minutes=int(minutes))
            query_where = ' WHERE FROM_UNIXTIME(timestamp) > \'%s\' ' % str(
                cutoff)

        if instance is not None:
            query_where = query_where + \
                ' and instance = \'%s\' ' % str(instance)

        query = (
            "SELECT cpu, memory, garbage, timestamp, instance FROM trs_usage %s "
            "order by timestamp" % (str(query_where)))

        return self.execute(query)
Ejemplo n.º 24
0
class MySQLStorage(object):
    """MySQL-backed storage for request/response rules, using a connection pool."""

    # Re-exported so callers can catch storage.DoesNotExist.
    DoesNotExist = DoesNotExist

    def __init__(self, **kwargs):
        # All keyword arguments are forwarded to the connection pool.
        self.pool = MySQLConnectionPool(**kwargs)

    def get_list_by_path(self, path):
        """Return every rule stored under *path*; raise DoesNotExist when none."""
        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()

            cursor = connection.cursor(cursor_class=MySQLCursorDict)
            cursor.execute('SELECT * FROM rules WHERE path=%s', (path,))

            rows = cursor.fetchall()
            if not rows:
                raise DoesNotExist(
                    'Rule with path={path} does not exist.'.format(path=path)
                )

            return [_rule_from_row(row) for row in rows]
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()

    def get_by_id(self, _id):
        """Return the rule with id *_id*; raise DoesNotExist when absent."""
        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()
            cursor = connection.cursor(cursor_class=MySQLCursorDict)
            cursor.execute('SELECT * FROM rules WHERE id=%s', (_id,))

            row = cursor.fetchone()
            if not row:
                raise DoesNotExist(
                    'Rule with id={id} does not exist.'.format(id=_id)
                )

            return _rule_from_row(row)
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()

    def get_list(self):
        """Return every stored rule."""
        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()
            cursor = connection.cursor(cursor_class=MySQLCursorDict)
            cursor.execute('SELECT * FROM `rules`')

            return [_rule_from_row(row) for row in cursor.fetchall()]
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()

    def create(self, rule):
        """Insert *rule*, set its generated id, and return it; roll back on error."""
        insert_sql = 'INSERT INTO `rules` (`path`, `request`, `response`)' \
              'VALUES (%s, %s, %s)'

        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()
            cursor = connection.cursor(cursor_class=MySQLCursorDict)

            cursor.execute(insert_sql, _row_from_rule(rule))
            rule['id'] = cursor.lastrowid

            connection.commit()

            return rule
        except BaseException:
            # Roll back the partial insert, then let the error propagate.
            if connection is not None:
                connection.rollback()

            raise
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()

    def delete_by_id(self, _id):
        """Delete the rule with id *_id*; raise DoesNotExist when nothing matched."""
        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()
            cursor = connection.cursor()
            cursor.execute('DELETE FROM rules WHERE id=%s', (_id,))
            connection.commit()

            if cursor.rowcount == 0:
                raise DoesNotExist(
                    'Rule with id={id} does not exist.'.format(id=_id)
                )
        except BaseException:
            if connection is not None:
                connection.rollback()

            raise
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()

    def clear(self):
        """Remove all rules; roll back on error."""
        connection = None
        cursor = None

        try:
            connection = self.pool.get_connection()
            cursor = connection.cursor()
            cursor.execute('TRUNCATE TABLE `rules`')
            connection.commit()
        except BaseException:
            if connection is not None:
                connection.rollback()

            raise
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None:
                connection.close()
Ejemplo n.º 25
0
class MysqlConnection:
    """
    Wrapper object for the MySQL connection.
    """
    def __init__(self, host, database, user, password):
        """
        Creates a new instance of a MySQL connection with its own connection pool.
        :param host: the mysql server
        :param database: the database on the mysql server
        :param user: the username for the mysql server
        :param password: the password for the mysql server
        """
        self.host = host
        self.database = database
        self.user = user
        self.password = password
        self.logger = Logger(self.__class__.__name__)
        self.connection_pool = MySQLConnectionPool(pool_name="pynative_pool",
                                                   pool_size=20,
                                                   pool_reset_session=True,
                                                   host=host,
                                                   database=database,
                                                   user=user,
                                                   password=password)

    def create_connection(self):
        """
        Opens a new connection to MySQL
        :return: the established connection
        """
        connection = connect(host=self.host,
                             database=self.database,
                             user=self.user,
                             passwd=self.password,
                             auth_plugin="mysql_native_password")
        return connection

    def get_connection(self):
        """
        Retrieves a connection from the connection pool or forces to create a new connection. These connections will be
        returned to the connection pool, if you close them.
        :return: a pooled mysql connection
        """
        try:
            con = self.connection_pool.get_connection()
            if not con.is_connected():
                # attempt to fetch a different connection
                # NOTE(review): recursion is unbounded if the pool keeps
                # handing out dead connections - consider a retry limit.
                return self.get_connection()
            return con
        except PoolError as pe:
            # no connection available
            self.logger.exception("No connection available: {}".format(pe))
            # Bug fix: _get_new_connection() takes no arguments; the previous
            # code called self._get_new_connection(con), which raised a
            # TypeError and also referenced `con`, unbound when
            # get_connection() itself raised the PoolError.
            return self._get_new_connection()

    def _get_new_connection(self):
        """
        Creates a new pooled mysql connection and adds it to the existing connection pool.
        :return: a pooled mysql connection
        """
        connection = self.create_connection()
        pooled_connection = PooledMySQLConnection(self.connection_pool,
                                                  connection)
        return pooled_connection

    def cursor(self, prepared=False):
        """
        Proxy to create a new cursor.
        :param prepared: boolean flag whether this cursor is a prepared statement cursor
        :return: a tuple containing a cursor allowing for DB access and the connection,
            or None when no connection could be obtained (both except paths
            fall through without returning)
        """
        try:
            connection = self.connection_pool.get_connection()
            cursor = connection.cursor(prepared)
            return cursor, connection
        except PoolError as pe:
            # no connection in pool or pool full
            self.logger.exception(
                "No connection in pool or pool full: {}".format(pe))
        except OperationalError as oe:
            # reconnect in case of disconnect after a while
            self.logger.exception("Operational error: {}".format(oe))
Ejemplo n.º 26
0
# and for the geom_tables folder.

import mysql.connector
from mysql.connector.pooling import MySQLConnectionPool

# Connection settings for the local SQE_DEV database (credentials redacted).
dbconfig = {
    'host': "localhost",
    'port': "3307",
    'user': "******",
    'password': "******",
    'database': "SQE_DEV"
}

# Shared pool; the script below checks a single connection out of it.
cnxpool = MySQLConnectionPool(pool_name="mypool", pool_size=30, **dbconfig)

db = cnxpool.get_connection()
cursor = db.cursor()
sql = 'SHOW TABLES'
cursor.execute(sql)
result_set = cursor.fetchall()
path = '/tmp/backup/'
owner_tables = set()  # base names of tables that have an "_owner" companion
non_owner_tables = set()  # not populated in this excerpt -- TODO confirm later use
# Tables deliberately excluded from processing -- presumably session/audit
# tables that should not be copied; verify against the backup scripts.
exclude_tables = {
    'user', 'user_sessions', 'sqe_session', 'artefact_shape', 'scroll_version',
    'external_font_glyph', 'image_to_image_map', 'single_action',
    'main_action', 'roi_shape'
}
# SHOW TABLES yields 1-tuples; strip the "_owner" suffix to record base names.
for result in result_set:
    if 'owner' in result[0]:
        owner_tables.add(result[0].replace("_owner", ""))
class DatabaseManager:
	"""Thin wrapper around a MySQL connection pool for the ProjectOrganiser DB.

	Each public method checks a connection out of the pool, runs a single
	statement, commits, and returns the connection to the pool.
	"""

	def __init__(self):
		print("DatabaseManager: __init__")
		self.createConnectionPool()

	def createConnectionPool(self):
		"""Create the shared connection pool, retrying until MySQL is reachable."""
		dbconfig = {
		"user": "******",
		"password":"******",
		"host":'mysql', #set host to mysql using docker run link
		"database":'ProjectOrganiser',
		"port":'3306'
		}

		try:
			self.cnxpool = MySQLConnectionPool(
				pool_name = "mypool",
				pool_size = 32,
				**dbconfig)
		except Exception:
			# The MySQL container might not be up and running yet:
			# log the traceback, wait, then retry.
			print("Exception... sleeping for 5 seconds then retry")
			tb = traceback.format_exc()
			print("tb: " + tb)
			time.sleep(5)
			# try again
			return self.createConnectionPool()

	@staticmethod
	def _build_insert(table_name, my_dict):
		"""Return (sql, values) for inserting *my_dict* into *table_name*.

		Values are returned separately so the driver escapes them
		(parameterized query), never interpolated into the SQL string.
		"""
		columns = ", ".join(my_dict.keys())
		placeholders = ", ".join(["%s"] * len(my_dict))
		stmt = "INSERT INTO `{table}` ({columns}) VALUES ({values});".format(
			table=table_name,
			columns=columns,
			values=placeholders
		)
		return stmt, list(my_dict.values())

	def insert_into_table(self, table_name, my_dict):
		"""Insert the dictionary *my_dict* (column -> value) as one row."""
		connector = self.cnxpool.get_connection()
		cursor = connector.cursor(dictionary=True)

		stmt, values = self._build_insert(table_name, my_dict)
		cursor.execute(stmt, values)

		connector.commit()
		cursor.close()
		connector.close()


	def delete_user(self, email):
		"""Delete the Users row matching *email*."""
		print("delete user")
		connector = self.cnxpool.get_connection()
		cursor = connector.cursor(dictionary=True)

		# Fixed: 'DELETE * FROM ...' is invalid MySQL syntax, and the email
		# is now bound as a parameter instead of concatenated (SQL injection).
		stmt = "DELETE FROM Users WHERE Users.email=%s"
		print("stmt:")
		print(stmt)

		cursor.execute(stmt, (email,))

		connector.commit()
		cursor.close()
		connector.close()

	def check_password(self, email, password):
		"""Return True when exactly one Users row matches *email*/*password*."""
		print("check_password")
		result = False

		connector = self.cnxpool.get_connection()
		cursor = connector.cursor(dictionary=True)

		# Parameterized to prevent SQL injection via email/password; the
		# template (not the credentials) is what gets printed.
		query = "SELECT * FROM Users WHERE Users.email=%s AND Users.password=%s"
		print("query:")
		print(query)

		cursor.execute(query, (email, password))
		cursor.fetchall()

		if cursor.rowcount == 1:
			result = True

		cursor.close()
		connector.close()

		return result

	def get_user_info(self, message_data):
		"""Fill message_data with role/name/surname for message_data['email'].

		NOTE(review): raises IndexError when no user matches -- confirm that
		callers guarantee the email exists.
		"""
		print ("get_user_data")
		email = message_data["email"]

		connector = self.cnxpool.get_connection()
		cursor = connector.cursor(dictionary=True)
		# Parameterized instead of string concatenation (SQL injection).
		query = "SELECT * FROM Users WHERE Users.email=%s"

		print(query)

		cursor.execute(query, (email,))
		data = cursor.fetchall()

		print ("Cursor : ", data)

		message_data["role"] = data[0]["role"]
		message_data["name"] = data[0]["name"]
		message_data["surname"] = data[0]["surname"]

		cursor.close()
		connector.close()
Ejemplo n.º 28
0
def main(argv):
    """Import IAA image filenames listed in an input file into the database.

    Usage: test.py -i <inputfile> -d <database_name>

    For each filename, the IAA naming scheme is parsed to recover
    plate/fragment/side/wavelength; the matching image_catalog row is looked
    up (or, for PAM/IDAM/IAA prefixes, created) and a SQE_image row is
    upserted.  Afterwards "tiled" images (more than 4 files per catalog
    entry) are exported to CSV and deleted, and image-to-image maps are
    generated for the remaining images.  Successes and failures are written
    to import_succeeded.txt / import_failed.txt.
    """
    inputfile = ''
    database = ''
    try:
        opts, args = getopt.getopt(argv, "hi:d:", ["ifile=", "db="])
    except getopt.GetoptError:
        print('test.py -i <inputfile> -d <database_name>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('test.py -i <inputfile> -d <database_name>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-d", "--db"):
            database = arg
    print('Input file is', inputfile)
    print('Database is', database.rstrip(' '))

    dbconfig = {
        'host': "localhost",
        'port': "3307",
        'user': "******",
        'password': "******",
        'database': database
    }

    cnxpool = MySQLConnectionPool(pool_name="mypool", pool_size=10, **dbconfig)

    db = cnxpool.get_connection()
    cursor = db.cursor()
    unprocessed = []
    processed = []
    # Read all lines up front; `with` closes the handle (the old bare open()
    # leaked it).
    with open(inputfile) as infile:
        lines = [line.rstrip('\n') for line in infile]
    for line in tqdm(lines):
        try:
            # req = requests.get('https://www.qumranica.org/image-proxy?address=http://192.114.7.208:8182/iiif/2/' + line + '/info.json')
            # resp = req.json()
            height = 7216  #resp["height"]
            width = 5412  #resp["width"]
            # NOTE(review): the class [X|\*] also matches a literal '|' --
            # probably meant [X*]; left unchanged to preserve current matching.
            m = re.search(
                r'([X|\*]{0,1}\d{1,5}.*)(-Fg|Fg)(\d{1,5}).*-(R|V)-.*(LR445|LR924|ML445|ML924|_026|_028)',
                line)
            if m is not None and len(m.groups(0)) == 5:
                # Normalize the plate designation out of the filename.
                plate = str(
                    m.group(1).replace('Rec', '').replace('Vrs', '').replace(
                        '_', '/').replace('X', '*').replace('-',
                                                            '/').rstrip('/'))
                fragment = str(m.group(3)).lstrip('0')
                # catalog_side: 0 = recto, 1 = verso.
                side = '0'
                if ('R' in str(m.group(4))):
                    side = '0'
                else:
                    side = '1'
                # Wavelength range / image type / master flag from the
                # filename's imaging-series suffix.
                wvStart = '0'
                wvEnd = '0'
                imgType = '1'
                master = '0'
                if ('445' in str(m.group(5))):
                    wvStart = '445'
                    wvEnd = '704'
                    imgType = '0'
                    master = '1'
                elif ('26' in str(m.group(5))):
                    wvStart = '924'
                    wvEnd = '924'
                    imgType = '2'
                elif ('28' in str(m.group(5))):
                    wvStart = '924'
                    wvEnd = '924'
                    imgType = '3'
                elif ('924' in str(m.group(5))):
                    wvStart = '924'
                    wvEnd = '924'
                sql = """
                    SELECT image_catalog_id FROM image_catalog
                    WHERE institution = "IAA"
                    AND catalog_number_1 = %s
                    AND catalog_number_2 = %s
                    AND catalog_side = %s;
                    """
                cursor.execute(sql, (plate, fragment, side))
                result_set = cursor.fetchall()
                if (len(result_set) == 1):
                    # Unambiguous catalog match: upsert the SQE_image row.
                    imageCatalogId = str(result_set[0][0])
                    sql = """
                        INSERT INTO SQE_image
                        (image_urls_id, filename,
                            dpi, type, wavelength_start, wavelength_end, is_master,
                            image_catalog_id, native_width, native_height)
                        VALUES(2,%s,1215,%s,%s,%s,%s,%s,%s,%s)
                        ON DUPLICATE KEY UPDATE sqe_image_id=LAST_INSERT_ID(sqe_image_id);
                        """
                    cursor.execute(sql, (line, imgType, wvStart, wvEnd, master,
                                         imageCatalogId, width, height))
                    db.commit()
                    processed.append("%s %s" % (
                        line,
                        cursor.lastrowid,
                    ))
                else:
                    unprocessed.append(line + " " + plate + " " + fragment)
            else:
                # Filename did not match the IAA scheme: try the PAM /
                # IDAM-IAA / IAA-plate prefixes instead.
                institution = ""
                number_1 = ""
                number_2 = None
                if line[0] == "M" or line[0] == "m":
                    institution = "PAM"
                    pam = re.search(r'[M|m](\d{2})(\d{1,5})-', line)
                    if pam is not None and len(pam.groups()) == 2:
                        number_1 = str(pam.group(1))
                        number_2 = str(pam.group(2))
                elif line[0] == "I":
                    institution = "IDAM-IAA"
                    pam = re.search(r'I(\d{1,7})-', line)
                    if pam is not None and len(pam.groups()) == 1:
                        number_1 = str(pam.group(1))
                        number_2 = None
                elif line[0] == "P":
                    institution = "IAA"
                    pam = re.search(r'P(\d{1,7}.*?\d{1,2})(-F|_n|F)', line)
                    if pam is not None and len(pam.groups()) > 0:
                        number_1 = str(pam.group(1))
                        number_2 = None
                # Fixed: was `is not ""` (identity comparison on strings,
                # unreliable and a SyntaxWarning on modern Python).
                if institution != "" and number_1 != "":
                    sql = """
                        INSERT INTO image_catalog (institution, catalog_number_1, catalog_number_2, catalog_side)
                        VALUE (%s, %s, %s, 0)
                        ON DUPLICATE KEY UPDATE image_catalog_id = LAST_INSERT_ID(image_catalog_id)
                        """
                    cursor.execute(sql, (institution, number_1, number_2))
                    db.commit()
                    insert_id = cursor.lastrowid
                    sql = """
                        INSERT INTO SQE_image
                        (image_urls_id, filename,
                            dpi, type, wavelength_start, wavelength_end, is_master,
                            image_catalog_id, native_width, native_height)
                        VALUES(2,%s,800,0,0,0,0,%s,%s,%s)
                        ON DUPLICATE KEY UPDATE sqe_image_id=LAST_INSERT_ID(sqe_image_id);
                        """
                    cursor.execute(sql, (line, insert_id, width, height))
                    db.commit()
                    processed.append("%s %s" % (
                        line,
                        cursor.lastrowid,
                    ))
                else:
                    if number_2 is None:
                        number_2 = ""
                    unprocessed.append(line + " " + institution + " " +
                                       number_1 + " " + number_2)
        except Exception:
            # Best-effort import: record the failing line and keep going.
            # (Narrowed from a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.)
            unprocessed.append(line)
    print("Collecting tiled images.")
    sql = """
        SELECT image_catalog.institution, image_catalog.catalog_number_1 AS plate, image_catalog.catalog_number_2 AS fragment,  IF(image_catalog.catalog_side = 0, 'R', 'V') AS side, filename
        FROM SQE_image
        JOIN image_catalog USING(image_catalog_id)
        WHERE image_catalog_id in (
                    SELECT image_catalog_id
                    FROM (
                        SELECT image_catalog_id
                        FROM SQE_image
                        GROUP BY image_catalog_id
                        HAVING COUNT(sqe_image_id) > 4
                        ) t
                )
        """
    cursor.execute(sql)
    results = cursor.fetchall()
    with open('tiled_images.csv', 'w') as f:
        f.write("%s\n" % 'institution,plate,fragment,side,filename')
        for entry in results:
            f.write("%s\n" % ','.join(map(str, entry)))

    db.commit()

    print("Deleting tiled images.")
    sql = """
        DELETE FROM SQE_image
        WHERE image_catalog_id in (
            SELECT image_catalog_id
            FROM (
                SELECT image_catalog_id
                FROM SQE_image
                GROUP BY image_catalog_id
                HAVING COUNT(sqe_image_id) > 4
            ) t
        )
        """
    cursor.execute(sql)
    db.commit()

    print("Writing the image_to_image_maps.")
    # Map every non-master image of a catalog entry onto its master via
    # full-frame polygons.
    sql = """
        INSERT IGNORE INTO image_to_image_map (image1_id, image2_id, region_on_image1, region_on_image2)
        SELECT  sqe1.sqe_image_id,
                sqe2.sqe_image_id,
                PolygonFromText(CONCAT('POLYGON((0 0,', sqe1.native_width, ' 0,', sqe1.native_width, ' ', sqe1.native_height, ',0 ', sqe1.native_height, ',0 0))')),
                PolygonFromText(CONCAT('POLYGON((0 0,', sqe2.native_width, ' 0,', sqe2.native_width, ' ', sqe2.native_height, ',0 ', sqe2.native_height, ',0 0))'))
        FROM SQE_image sqe1
        JOIN SQE_image sqe2 ON sqe2.image_catalog_id = sqe1.image_catalog_id
            AND sqe2.sqe_image_id != sqe1.sqe_image_id
        WHERE sqe1.is_master = 1
        """
    cursor.execute(sql)
    db.commit()
    # Attribute the generated maps to the sqe_api user (only where missing).
    sql = """
        INSERT INTO image_to_image_map_author (image_to_image_map_id, user_id)
        SELECT image_to_image_map.image_to_image_map_id, (SELECT user_id FROM user WHERE user_name = "sqe_api")
        FROM image_to_image_map
        LEFT JOIN image_to_image_map_author ON image_to_image_map_author.image_to_image_map_id = image_to_image_map.image_to_image_map_id
            AND image_to_image_map_author.user_id = (SELECT user_id FROM user WHERE user_name = "sqe_api")
        WHERE image_to_image_map_author.image_to_image_map_id IS NULL
        """
    cursor.execute(sql)
    db.commit()
    cursor.close()
    db.close()
    with open('import_failed.txt', 'w') as f:
        for item in unprocessed:
            f.write("%s\n" % item)
    with open('import_succeeded.txt', 'w') as f:
        for item in processed:
            f.write("%s\n" % item)
    print("%s unsuccessful matches." % (len(unprocessed), ))
    print("%s successful matches." % (len(processed), ))
Ejemplo n.º 29
0
class DealerDatabase():
    '''
    Wrapper for the Dealer MySQL database.

    All access goes through a MySQLConnectionPool; helper methods check a
    connection out, run the statement, commit, and always hand the
    connection back to the pool (even on failure).
    '''
    def __init__(self,
                 host: str,
                 port: int,
                 database: str,
                 user: str,
                 password=None,
                 pool_size=5):
        '''
        Create a new DB wrapper class an instantiate the MySQL connection pool.
        '''

        self.connection_pool = MySQLConnectionPool(pool_name='connection_pool',
                                                   pool_size=pool_size,
                                                   host=host,
                                                   port=port,
                                                   user=user,
                                                   password=password,
                                                   database=database)

        self._init_tables()

    def add_exchange_order(self, order_id: str, exchange: str, symbol: str,
                           side: str, qty: str, price: str, ts: str) -> None:
        '''
        Add an exchange order to the database.

        :param id: The exchange-specific order ID.
        :param exchange: The name of the exchange the order was posted to.
        :param symbol: The market symbol of the order.
        :param side: The side of the order (buy/sell).
        :param qty: The quantity of the base asset in the order (size).
        :param price: The price of the order in the quote asset.
        :param ts: The timestamp the order was posted at.
        '''

        qs = (
            "INSERT INTO `exchange_order_history` (`order_id`, `exchange`, `pair`, `side`, `size`, `price`, `time_placed`) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s)")

        try:
            self._execute_single_query(
                qs, (order_id, exchange, symbol, side, qty, price, ts))
        except Exception as error:
            raise DealerDatabaseError('failed to add exchange order',
                                      error.args)

    def add_zero_ex_order(self, quote_id: str, side: str, pair: str, size: str,
                          price: str, expiration: str, fee: str, status: str,
                          tx_id: str) -> None:
        '''
        Add a filled (or failed) 0x order into the database.

        :param quote_id: The UUID sent with the quote.
        :param side: The side of the quote (bid or ask).
        :param pair: The market symbol the quote is for (BASE/QUOTE).
        :param size: The size of the quote in units of the base asset.
        :param price: The price of the quote in units of the quote asset.
        :param expiration: Quote expiration time.
        :param fee: The fee sent with the quote.
        :param status: Order status at time of insertion.
        :param tx_id: The order transaction hash.
        '''

        qs = (
            "INSERT INTO `zero_ex_order_history` ( `quote_id`, `side`, `pair`, `size`, `price`, `expiration`, `fee`, `status`, `transaction_id` ) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)")

        try:
            self._execute_single_query(qs, (quote_id, side, pair, size, price,
                                            expiration, fee, status, tx_id))
        except Exception as error:
            raise DealerDatabaseError('failed to add 0x order', error.args)

    def _init_tables(self) -> None:
        '''Create the order-history tables if they do not already exist.'''
        exchange_order_table_query = (
            "CREATE TABLE IF NOT EXISTS `dealer_db`.`exchange_order_history` "
            "(`id` INT AUTO_INCREMENT PRIMARY KEY, `order_id` VARCHAR(45) NOT NULL, `exchange` VARCHAR(45) NOT NULL,"
            "`pair` VARCHAR(45) NULL, `side` VARCHAR(45) NULL, `size` VARCHAR(45) NULL, `price` VARCHAR(45) NULL,"
            "`time_placed` VARCHAR(45) NULL, `filled_size` VARCHAR(45) NULL, `status` VARCHAR(45) NULL);"
        )

        zrx_order_table_query = (
            "CREATE TABLE IF NOT EXISTS `dealer_db`.`zero_ex_order_history` (`id` INT AUTO_INCREMENT PRIMARY KEY, "
            "`quote_id` VARCHAR(45) NOT NULL, `side` VARCHAR(45) NULL, `pair` VARCHAR(45) NULL, `size` VARCHAR(45) NULL, "
            "`price` VARCHAR(45) NULL, `expiration` VARCHAR(45) NULL, `fee` VARCHAR(45) NULL, `status` VARCHAR(45) NULL, "
            "`transaction_id` VARCHAR(66) NULL);")

        try:
            # Route through _execute_single_query so the connection/cursor
            # are always returned to the pool, even if a statement fails.
            self._execute_single_query(exchange_order_table_query)
            self._execute_single_query(zrx_order_table_query)
        except Exception as error:
            raise DealerDatabaseError('failed to create tables', error.args)

    def _execute_single_query(self, query: str, args=None) -> None:
        '''
        Execute a provided SQL query, but do not return any values (write only).

        The cursor is closed and the connection returned to the pool even if
        execution raises (the previous version leaked both on failure).

        :param query: The valid SQL query string.
        :param args: Optional parameter tuple bound to the query.
        '''
        connection = self._get_connection()
        try:
            cursor = self._get_cursor(connection)
            try:
                self._execute_query(cursor, query, args)
                self._commit(connection)
            finally:
                self._close_cursor(cursor)
        finally:
            self._give_connection(connection)

    def _get_connection(self) -> PooledMySQLConnection:
        '''Check a connection out of the pool.'''
        return self.connection_pool.get_connection()

    def _give_connection(self, connection: PooledMySQLConnection) -> None:
        '''Return a connection to the pool (close() on a pooled connection).'''
        connection.close()

    def _get_cursor(self, connection: PooledMySQLConnection) -> MySQLCursor:
        '''Open a cursor on the given connection.'''
        return connection.cursor()

    def _close_cursor(self, cursor: MySQLCursor) -> None:
        '''Close a cursor.'''
        cursor.close()

    def _execute_query(self,
                       cursor: MySQLCursor,
                       query: str,
                       args=None) -> MySQLCursor:
        '''Execute a (possibly parameterized) query on the cursor.'''
        cursor.execute(query, args)
        return cursor

    def _commit(self,
                connection: PooledMySQLConnection) -> PooledMySQLConnection:
        '''Commit the current transaction on the connection.'''
        connection.commit()
        return connection
Ejemplo n.º 30
0
class TheFlashServer:
    """Tornado-based HTTP server wiring Flask, MySQL, and Redis together."""

    def __init__(self, debug=False):
        if debug:
            logging.info('Initializing The Flash Server as debug mode')
        else:
            logging.info('Initializing The Flash Server as production mode')

        self.debug = debug

        self.app = Flask(__name__)
        logging.info('Flask Initialized')

        dbconfig = { "user" : settings.db_user_name,
                     "password" : settings.db_password,
                     "database" : settings.db_name,
                     "host" : settings.db_host
                     }
        self.mysql_pool = MySQLConnectionPool(pool_name=None, pool_size=4, pool_reset_session=True, **dbconfig)
        logging.info('Database Initialized')

        self.redis_pool = redis.ConnectionPool(host=settings.redis_host, port=settings.redis_port, db=0)
        logging.info('Redis Initialized')

        # Every handler shares the same MySQL and Redis pools.
        self.application = tornado.web.Application([
            (r"/auth", RequestAuth, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/get_users", RequestGetUsers, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/get_beacons", RequestGetBeacons, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/update_user_pos", RequestUpdateUserPosition, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/register_beacon", RequestRegisterBeacon, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/unregister_beacon", RequestUnregisterBeacon, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool)),
            (r"/", RequestPage, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool, page_name="index.html")),
            (r"/manage", RequestPage, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool, page_name="manage.html")),
            (r"/about", RequestPage, dict(mysql_pool=self.mysql_pool, redis_pool=self.redis_pool, page_name="about.html")),
        ],
            debug=debug,
            autoreload=debug,
            static_path=settings.static_path
        )
        logging.info('Application Initialized')

        self.init_cache()

    def init_cache(self):
        """Warm the Redis caches for beacons and users from MySQL."""
        conn = None
        cursor = None

        try:
            conn = self.mysql_pool.get_connection()
            cursor = conn.cursor()

            beacon_cache(conn, cursor, self.redis_pool)
            user_cache(conn, cursor, self.redis_pool)

        except Exception as e:
            logging.error(str(e))
            raise

        finally:
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.close()

    def start(self):
        """Start serving: single process in debug mode, one per core otherwise."""
        logging.info('The Flash Server Start')

        server = tornado.httpserver.HTTPServer(self.application)
        if self.debug:
            # Fixed: was a Python 2 print statement ("print '- Start as debug
            # mode'"), a SyntaxError under Python 3.
            print('- Start as debug mode')
            server.listen(settings.service_port)
        else:
            server.bind(settings.service_port)
            server.start(0)

        IOLoop.current().start()
Ejemplo n.º 31
0
class Registry():
    def __init__(self, broker_cloud, mode, db_config, time_inactive_platform, time_update_conf, time_check_platform_active):
        # Timing knobs (presumably seconds -- confirm with callers): how often
        # to poll for config changes, how often to scan for dead platforms,
        # and how long a platform may stay silent before being marked inactive.
        self.time_update_conf = time_update_conf
        self.time_check_platform_active = time_check_platform_active
        self.time_inactive_platform = time_inactive_platform
        # MySQL connection pool shared by all registry operations.
        self.cnxpool = MySQLConnectionPool(pool_name="mypool", pool_size=32, **db_config)
        self.mode = mode

        # Separate AMQP connections for publishing and consuming.
        self.producer_connection = Connection(broker_cloud)
        self.consumer_connection = Connection(broker_cloud)

        # Direct exchange that registry/driver traffic is routed through.
        self.exchange = Exchange("IoT", type="direct")

    def update_config_changes_by_platform_id(self, platform_id):
        """Ask the driver of *platform_id* whether its configuration changed.

        Publishes a request message on the driver's request queue; the driver
        replies asynchronously to the 'reply_to' routing key, which is handled
        by handle_configuration_changes.
        """

        message = {
            'reply_to': 'driver.response.registry.api_check_configuration_changes',
            'platform_id': platform_id
        }

        # send request to Driver
        queue = Queue(name='driver.request.api_check_configuration_changes', exchange=self.exchange,
                      routing_key='driver.request.api_check_configuration_changes')
        routing_key = 'driver.request.api_check_configuration_changes'
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message),
                exchange=self.exchange.name,
                routing_key=routing_key,
                declare=[queue],  # declare the queue so the publish never drops
                retry=True
            )

    def check_platform_active(self):
        """Flag platforms that have been silent too long as inactive, then re-arm."""
        for entry in self.get_list_platforms("active"):
            silent_for = time.time() - entry['last_response']
            if silent_for > self.time_inactive_platform:
                self.mark_inactive(str(entry['platform_id']))
                self.send_notification_to_collector()

        # Schedule the next scan.
        threading.Timer(self.time_check_platform_active, self.check_platform_active).start()

    def update_changes_to_db(self, new_info, platform_id):
        """Reconcile the DB with *new_info*, the platform's current thing/item list.

        Diffs the platform's things/items currently stored in the DB against
        *new_info*:
          - matched entries are updated in place and re-marked 'active';
          - DB entries absent from *new_info* are marked 'inactive';
          - *new_info* entries absent from the DB are inserted as 'active'.
        """
        # print("Update change of {} to database".format(platform_id))
        now_info = self.get_things_by_platform_id(platform_id, "all", "all")
        # Shallow copies used as worklists: entries matched below are removed,
        # so whatever remains is either vanished (inactive) or brand new.
        inactive_things = now_info[:]
        new_things = new_info[:]
        cnx_1 = self.get_connection_to_db()
        cursor_1 = cnx_1.cursor()
        for now_thing in now_info:
            for new_thing in new_info:
                if now_thing["thing_global_id"] == new_thing["thing_global_id"]:

                    # Thing exists in both: sync mutable fields if changed.
                    if (now_thing['thing_name'] != new_thing['thing_name'] \
                            or now_thing['thing_type'] != new_thing['thing_type'] \
                            or now_thing['location'] != new_thing['location']):
                        cursor_1.execute(
                            """UPDATE Thing SET thing_name=%s, thing_type=%s, location=%s, thing_status=%s  WHERE thing_global_id=%s""",
                            (new_thing["thing_name"], new_thing["thing_type"], new_thing["location"], 'active',
                             now_thing["thing_global_id"]))
                    if now_thing['thing_status'] == 'inactive':
                        # Thing came back: re-activate it.
                        cursor_1.execute("""UPDATE Thing SET thing_status=%s  WHERE thing_global_id=%s""",
                                         ('active', now_thing["thing_global_id"]))

                    # Same worklist technique one level down, for the items.
                    inactive_items = now_thing["items"][:]
                    new_items = new_thing['items'][:]

                    for now_item in now_thing["items"]:
                        for new_item in new_thing["items"]:
                            if now_item["item_global_id"] == new_item["item_global_id"]:
                                if (now_item["item_name"] != new_item["item_name"] or
                                        now_item["item_type"] != new_item["item_type"] or
                                        now_item['can_set_state'] != new_item['can_set_state']):

                                    cursor_1.execute(
                                        """UPDATE Item SET item_name=%s, item_type=%s, can_set_state=%s  WHERE item_global_id=%s""",
                                        (new_item["item_name"], new_item["item_type"], new_item["can_set_state"],
                                         now_item['item_global_id']))
                                if now_item['item_status'] == 'inactive':
                                    cursor_1.execute("""UPDATE Item SET item_status=%s  WHERE item_global_id=%s""",
                                                     ('active', now_item['item_global_id']))

                                inactive_items.remove(now_item)
                                new_items.remove(new_item)
                                break

                    if len(inactive_items) != 0:
                        # Items present in DB but gone from the platform.
                        # print("Item inactive")
                        for item_inactive in inactive_items:
                            cursor_1.execute("""UPDATE Item SET item_status=%s  WHERE item_global_id=%s""",
                                             ("inactive", item_inactive['item_global_id']))
                    if len(new_items) != 0:
                        # Items newly reported by the platform for this thing.
                        # print("New Item ")
                        for item in new_items:
                            cursor_1.execute("""INSERT INTO Item VALUES (%s,%s,%s,%s,%s,%s,%s)""",
                                             (item['item_global_id'], new_thing['thing_global_id'], item['item_name'],
                                              item['item_type'], item['item_local_id'], item['can_set_state'],
                                              "active"))
                    inactive_things.remove(now_thing)
                    new_things.remove(new_thing)
                    break
        if len(inactive_things) != 0:
            # Things present in DB but gone from the platform: mark the thing
            # and all of its items inactive.
            # print("Thing inactive")
            for thing_inactive in inactive_things:
                cursor_1.execute("""UPDATE Thing SET thing_status=%s  WHERE thing_global_id=%s""",
                                 ("inactive", thing_inactive['thing_global_id']))
                for item_inactive in thing_inactive['items']:
                    cursor_1.execute("""UPDATE Item SET item_status=%s  WHERE item_global_id=%s""",
                                     ("inactive", item_inactive['item_global_id']))

        if len(new_things) != 0:
            # Things newly reported by the platform: insert thing + items.

            # print("New Thing")
            for thing in new_things:
                cursor_1.execute("""INSERT INTO Thing VALUES (%s,%s,%s,%s,%s,%s,%s)""",
                                 (thing['thing_global_id'], platform_id, thing['thing_name'],
                                  thing['thing_type'], thing['thing_local_id'], thing['location'], "active"))
                # print('Updated Things')
                for item in thing['items']:
                    # print("{}".format(item['item_global_id']))
                    cursor_1.execute("""INSERT INTO Item VALUES (%s,%s,%s,%s,%s,%s,%s)""",
                                     (item['item_global_id'], thing['thing_global_id'], item['item_name'],
                                      item['item_type'], item['item_local_id'], item['can_set_state'], "active"))
                    # print('Updated Items')
        # Single commit covers every statement above.
        cnx_1.commit()
        cursor_1.close()
        cnx_1.close()

    def get_things_by_platform_id(self, platform_id, thing_status, item_status):
        """Return the things (filtered by the given statuses) owned by *platform_id*."""
        everything = self.get_things(thing_status, item_status)
        return [thing for thing in everything
                if thing['platform_id'] == platform_id]

    def handle_configuration_changes(self, body, message):
        """Consume a driver's reply about configuration changes.

        :param body: JSON string with 'platform_id', 'have_change', 'new_info'.
        :param message: AMQP message object (unused here).

        Applies the reported changes to the DB and, in every case, refreshes
        the platform's last_response timestamp and marks it 'active'.
        """
        cnx_2 = self.get_connection_to_db()
        cursor_2 = cnx_2.cursor()
        body = json.loads(body)
        platform_id = body['platform_id']

        if body['have_change'] == False:
            # print('Platform have Id: {} no changes'.format(platform_id))
            cursor_2.execute("""SELECT platform_status FROM Platform WHERE platform_id=%s""",
                             (str(platform_id),))
            platform_status = cursor_2.fetchone()[0]
            if platform_status == 'active':
                pass
            else:
                # Platform was marked inactive but is answering again:
                # resync its state and tell the collector.
                new_info = body['new_info']
                self.update_changes_to_db(new_info, platform_id)
                self.send_notification_to_collector()

        else:
            print('Platform have Id: {} changed the configuration file'.format(platform_id))
            new_info = body['new_info']
            self.update_changes_to_db(new_info, platform_id)

        #Update last_response and status of platform
        cursor_2.execute("""UPDATE Platform SET last_response=%s, platform_status=%s WHERE platform_id=%s""",
                         (time.time(), 'active', platform_id))

        cnx_2.commit()
        cursor_2.close()
        cnx_2.close()

    def mark_inactive(self, platform_id):
        """Mark the platform and all of its things/items as 'inactive' in the DB."""
        print('Mark Thing and Item inactive')
        cnx_1 = self.get_connection_to_db()
        cursor_1 = cnx_1.cursor()
        cursor_1.execute("""SELECT thing_global_id FROM Thing WHERE platform_id = %s""", (str(platform_id),))
        list_thing_global_id = cursor_1.fetchall()
        # print('List_thing {}'.format(list_thing_global_id))
        for thing_global_id in list_thing_global_id:
            # print(thing_global_id[0])
            # thing_global_id[0] extracts the value; thing_global_id itself is
            # still a 1-tuple row, e.g.:
            #  ('d32d30b4-8917-4eb1-a273-17f7f440b240/sensor.humidity',)
            cursor_1.execute("""UPDATE Item SET item_status=%s  WHERE thing_global_id=%s""",
                             ("inactive", str(thing_global_id[0])))

        cnx_1.commit()
        cursor_1.execute("""UPDATE Thing SET thing_status=%s  WHERE platform_id=%s""", ("inactive", str(platform_id)))
        cursor_1.execute("""UPDATE Platform SET platform_status=%s  WHERE platform_id=%s""",
                         ("inactive", str(platform_id)))
        cnx_1.commit()
        cursor_1.close()
        cnx_1.close()

    def update_all_config_changes(self):
        """Poll every active platform for configuration changes, then re-arm."""
        for active_platform in self.get_list_platforms("active"):
            self.update_config_changes_by_platform_id(active_platform['platform_id'])

        # Re-schedule this poll to run again after time_update_conf seconds.
        threading.Timer(self.time_update_conf, self.update_all_config_changes).start()

    def get_list_platforms(self, platform_status):
        """Return platform rows as dicts, filtered by status.

        :param platform_status: 'active', 'inactive' or 'all'; any other
            value yields an empty list.
        :return: list of dicts with keys platform_id, platform_name, host,
            port, last_response, platform_status.
        """
        # One base query + a WHERE suffix per status replaces the three
        # duplicated SELECT statements of the original.
        where_by_status = {
            "active": " WHERE platform_status='active'",
            "inactive": " WHERE platform_status='inactive'",
            "all": "",
        }
        if platform_status not in where_by_status:
            return []

        cnx_1 = self.get_connection_to_db()
        cursor_1 = cnx_1.cursor()
        try:
            cursor_1.execute("""SELECT platform_id, platform_name, host, port, last_response, platform_status
                                FROM Platform""" + where_by_status[platform_status])
            rows = cursor_1.fetchall()
        finally:
            # Release the pooled connection even if the query raised.
            cursor_1.close()
            cnx_1.close()

        return [
            {
                "platform_id": row[0],
                "platform_name": row[1],
                "host": row[2],
                "port": row[3],
                "last_response": row[4],
                "platform_status": row[5],
            }
            for row in rows
        ]

    def api_get_list_platforms(self, body, message):
        """AMQP endpoint: reply with platforms matching ``platform_status``.

        ``body`` is a JSON string carrying ``platform_status`` and the
        ``reply_to`` routing key for the response.
        """
        print("API get list platform with platform_status")
        request = json.loads(body)  # parse once instead of once per field
        platform_status = request['platform_status']
        reply_to = request['reply_to']
        message_response = {
            "list_platforms": self.get_list_platforms(platform_status)
        }
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message_response),
                exchange=self.exchange.name,
                routing_key=reply_to,
                retry=True
            )

    def api_add_platform(self, body, message):
        """AMQP endpoint: register a platform (new or returning).

        If the request carries a ``platform_id`` the platform is re-activated
        in place; otherwise a fresh UUID is minted and a new Platform row is
        inserted.  Replies on 'registry.response.driver.api_add_platform'
        and then notifies the collector.
        """
        body = json.loads(body)
        print ("api_add_platform")
        print (body)
        host = body['host']
        port = body['port']
        platform_name = body['platform_name']
        cnx_1 = self.get_connection_to_db()
        cursor_1 = cnx_1.cursor()

        if "platform_id" in body:
            # Known platform coming back online: just reactivate it.
            platform_id = body['platform_id']
            print("Platform {} have id: {} come back to system".format(platform_name, platform_id))
            cursor_1.execute("""UPDATE Platform SET platform_status=%s, last_response=%s  WHERE platform_id=%s""",
                             ('active', time.time(), platform_id))
        else:
            # Brand-new platform: generate an id and insert the row.
            platform_id = str(uuid.uuid4())
            print('Add {} have address {}:{} to system '.format(platform_name, host, port))
            print('Generate id for this platform : ', platform_id)
            cursor_1.execute("""INSERT INTO Platform VALUES (%s,%s,%s,%s,%s,%s)""",
                             (platform_id, platform_name, host, port, time.time(), "active"))

        message_response = {
            'platform_id': platform_id,
            'host': host,
            'port': port,
            'platform_name': platform_name
        }

        # check connection and publish message
        queue_response = Queue(name='registry.response.driver.api_add_platform', exchange=self.exchange,
                               routing_key='registry.response.driver.api_add_platform')
        routing_key = 'registry.response.driver.api_add_platform'
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message_response),
                exchange=self.exchange.name,
                routing_key=routing_key,
                declare=[queue_response],
                retry=True
            )

        # NOTE(review): the reply is published before the DB commit; a crash
        # in between would acknowledge a registration that was never stored.
        cnx_1.commit()
        cursor_1.close()
        cnx_1.close()
        self.send_notification_to_collector()

    def get_things(self, thing_status, item_status):
        """Return all things, each with its nested items, filtered by status.

        :param thing_status: 'active', 'inactive' or 'all'.
        :param item_status: 'active', 'inactive' or 'all'.
        :return: list of thing dicts, each carrying an 'items' list.

        Unrecognised status values return [] (the original left the query
        string empty and crashed in ``cursor.execute("")``).
        """
        thing_where = {
            'active': "WHERE Thing.thing_status = 'active'",
            'inactive': "WHERE Thing.thing_status = 'inactive'",
            'all': "",
        }
        item_where = {
            'active': "WHERE Item.item_status='active'",
            'inactive': "WHERE Item.item_status='inactive'",
            'all': "",
        }
        if thing_status not in thing_where or item_status not in item_where:
            return []

        query_thing = """SELECT Thing.platform_id, Thing.thing_global_id, Thing.thing_name,
                                Thing.thing_type, Thing.location, Thing.thing_local_id, Thing.thing_status
                          FROM  Thing {};""".format(thing_where[thing_status])
        query_item = """SELECT Item.thing_global_id, Item.item_global_id, Item.item_name,
                               Item.item_type, Item.can_set_state, Item.item_local_id, Item.item_status
                          FROM Item {};""".format(item_where[item_status])

        cnx_1 = self.get_connection_to_db()
        cursor_1 = cnx_1.cursor()
        try:
            cursor_1.execute(query_thing)
            thing_rows = cursor_1.fetchall()
            cursor_1.execute(query_item)
            item_rows = cursor_1.fetchall()
        finally:
            cursor_1.close()
            cnx_1.close()

        # Group items by parent thing once: O(n + m) instead of the original
        # O(n * m) rescan of item_rows per thing.
        items_by_thing = {}
        for item in item_rows:
            items_by_thing.setdefault(item[0], []).append({
                'item_global_id': item[1],
                'item_name': item[2],
                'item_type': item[3],
                'can_set_state': item[4],
                'item_local_id': item[5],
                'item_status': item[6]
            })

        things = []
        for thing in thing_rows:
            things.append({
                'platform_id': thing[0],
                'thing_global_id': thing[1],
                'thing_name': thing[2],
                'thing_type': thing[3],
                'location': thing[4],
                'thing_local_id': thing[5],
                'thing_status': thing[6],
                'items': items_by_thing.get(thing[1], [])
            })

        return things

    def get_thing_by_global_id(self, thing_global_id):
        """Return the thing(s) matching *thing_global_id*, each with its items."""
        connection = self.get_connection_to_db()
        cursor = connection.cursor()

        cursor.execute("""SELECT Thing.platform_id, Thing.thing_global_id, Thing.thing_name,
                                Thing.thing_type, Thing.location, Thing.thing_local_id, Thing.thing_status
                          FROM  Thing
                          WHERE Thing.thing_global_id=%s; """, (thing_global_id,))
        thing_rows = cursor.fetchall()

        cursor.execute("""SELECT Item.thing_global_id, Item.item_global_id, Item.item_name,
                                   Item.item_type, Item.can_set_state, Item.item_local_id, Item.item_status
                              FROM Item 
                              WHERE Item.thing_global_id=%s;""", (thing_global_id,))
        item_rows = cursor.fetchall()
        cursor.close()
        connection.close()

        things = []
        for thing_row in thing_rows:
            # Collect the items whose thing_global_id matches this thing.
            nested_items = [
                {
                    'item_global_id': item_row[1],
                    'item_name': item_row[2],
                    'item_type': item_row[3],
                    'can_set_state': item_row[4],
                    'item_local_id': item_row[5],
                    'item_status': item_row[6]
                }
                for item_row in item_rows
                if item_row[0] == thing_row[1]
            ]
            things.append({
                'platform_id': thing_row[0],
                'thing_global_id': thing_row[1],
                'thing_name': thing_row[2],
                'thing_type': thing_row[3],
                'location': thing_row[4],
                'thing_local_id': thing_row[5],
                'thing_status': thing_row[6],
                'items': nested_items
            })

        return things

    def api_get_things(self, body, message):
        """AMQP endpoint: reply with all things filtered by the given statuses."""
        print('API Get All Things')
        request = json.loads(body)  # parse once instead of three times
        reply_to = request['reply_to']
        thing_status = request['thing_status']
        item_status = request['item_status']
        things = self.get_things(thing_status, item_status)
        message_response = {
            'things': things
        }
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message_response),
                exchange=self.exchange.name,
                routing_key=reply_to,
                retry=True
            )

    def api_get_thing_by_global_id(self, body, message):
        """AMQP endpoint: reply with the thing matching ``thing_global_id``."""
        print('API Get Thing by thing_global_id')
        request = json.loads(body)  # parse once instead of twice
        reply_to = request['reply_to']
        thing_global_id = request['thing_global_id']

        things = self.get_thing_by_global_id(thing_global_id)

        message_response = {
            'things': things
        }
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message_response),
                exchange=self.exchange.name,
                routing_key=reply_to,
                retry=True
            )

    def api_get_things_by_platform_id(self, body, message):
        """AMQP endpoint: reply with a platform's things filtered by status."""
        print('Get Thing by platform_id')
        request = json.loads(body)  # parse once instead of four times
        reply_to = request['reply_to']
        platform_id = request['platform_id']
        thing_status = request['thing_status']
        item_status = request['item_status']
        things = self.get_things_by_platform_id(platform_id, thing_status, item_status)

        message_response = {
            'things': things
        }
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(message_response),
                exchange=self.exchange.name,
                routing_key=reply_to,
                retry=True
            )

    def get_connection_to_db(self):
        """Block until a pooled MySQL connection is available, then return it.

        The pool raises when exhausted; retry after a short pause instead of
        the original bare ``except: pass`` busy-spin, which also swallowed
        KeyboardInterrupt/SystemExit and pegged a CPU core.
        """
        while True:
            try:
                return self.cnxpool.get_connection()
            except Exception:
                # Pool exhausted or transient failure: back off briefly.
                time.sleep(0.05)

    def send_notification_to_collector(self):
        """Tell the Collector that some platform's configuration changed."""
        print('Send notification to Collector')
        routing_key = 'collector.request.notification'
        notification = {
            'notification': 'Have Platform_id change'
        }

        # Declare the queue on publish so the Collector side always exists.
        notification_queue = Queue(name='collector.request.notification', exchange=self.exchange,
                                   routing_key='collector.request.notification')
        self.producer_connection.ensure_connection()
        with Producer(self.producer_connection) as producer:
            producer.publish(
                json.dumps(notification),
                exchange=self.exchange.name,
                routing_key=routing_key,
                declare=[notification_queue],
                retry=True
            )

    def run(self):
        """Main loop: declare the registry's request queues and consume forever.

        Six consumers share one connection; on broker failure the loop logs
        and re-enters, re-establishing the connection and consumers.
        """
        queue_get_things = Queue(name='registry.request.api_get_things', exchange=self.exchange,
                                 routing_key='registry.request.api_get_things', ttl=20)
        queue_get_list_platforms = Queue(name='registry.request.api_get_list_platforms', exchange=self.exchange,
                                         routing_key='registry.request.api_get_list_platforms')
        queue_add_platform = Queue(name='registry.request.api_add_platform', exchange=self.exchange,
                                   routing_key='registry.request.api_add_platform')
        queue_check_config = Queue(name='driver.response.registry.api_check_configuration_changes', exchange=self.exchange,
                                   routing_key='driver.response.registry.api_check_configuration_changes')
        queue_get_thing_by_global_id = Queue(name='registry.request.api_get_thing_by_global_id', exchange=self.exchange,
                                             routing_key='registry.request.api_get_thing_by_global_id')
        queue_get_things_by_platform_id = Queue(name='registry.request.api_get_things_by_platform_id',
                                                exchange=self.exchange,
                                                routing_key='registry.request.api_get_things_by_platform_id')

        # In PULL mode the registry polls the drivers itself on a timer.
        if self.mode == 'PULL':
            self.update_all_config_changes()

        self.check_platform_active()

        while 1:
            try:
                self.consumer_connection.ensure_connection(max_retries=1)
                # nested() binds all six consumers on the same connection;
                # messages are dispatched to the callbacks by drain_events().
                with nested(Consumer(self.consumer_connection, queues=queue_get_things_by_platform_id,
                                     callbacks=[self.api_get_things_by_platform_id], no_ack=True),
                            Consumer(self.consumer_connection, queues=queue_get_thing_by_global_id,
                                     callbacks=[self.api_get_thing_by_global_id], no_ack=True),
                            Consumer(self.consumer_connection, queues=queue_add_platform, callbacks=[self.api_add_platform],
                                     no_ack=True),
                            Consumer(self.consumer_connection, queues=queue_get_things, callbacks=[self.api_get_things],
                                     no_ack=True),
                            Consumer(self.consumer_connection, queues=queue_get_list_platforms,
                                     callbacks=[self.api_get_list_platforms], no_ack=True),
                            Consumer(self.consumer_connection, queues=queue_check_config,
                                     callbacks=[self.handle_configuration_changes], no_ack=True)):
                    while True:
                        self.consumer_connection.drain_events()
            except (ConnectionRefusedError, exceptions.OperationalError):
                print('Connection lost')
            except self.consumer_connection.connection_errors:
                print('Connection error')
Ejemplo n.º 32
0
class DatabaseModule(mp_module.MPModule):
    def __init__(self, mpstate):
        """Create the DB pool and initialise all cached telemetry state.

        Blocks (retrying every 10 s) until the MySQL connection pool can be
        created.  Each telemetry kind is modelled as a namedtuple type plus a
        "latest value" instance updated by the handle_* callbacks.
        """
        super(DatabaseModule, self).__init__(mpstate, "database", "database module", public=True)

        # Retry forever until MySQL is reachable.
        # NOTE(review): credentials appear scrubbed ("******") — confirm
        # real values come from configuration, not this literal.
        while True:
            try:
                self.pool = MySQLConnectionPool(pool_name="mavproxy_pool", pool_size=30, host=os.environ.get("DATABASE_HOST", "localhost"), user="******", passwd="aeolus", db="mavproxy", auth_plugin='mysql_native_password')
                break
            except mysql.connector.Error as e:
                logger.error("Cannot connect to MYSql database. Please start MYSql ('mysql.server start') or run setup_db.sh if MYSql has not been installed: " + str(e))
                logger.info("Retrying connection in 10 seconds...")

            time.sleep(10)

        # Autopilot clock bookkeeping (see _get_packet_time).
        self.autopilot_boot_time = None
        self.time_inconsistent = False

        # Off AirAPI we also subscribe to interop obstacle updates.
        if not mpstate.airapi:
            self.interop_instance = interop.get_instance()
            self.interop_instance.bind_to_new_obstacle(self.handle_interop_obstacles)

        self.Heartbeat = namedtuple('Heartbeat', ['time', 'plane_link'])
        self.heartbeat = self.Heartbeat(0, False)

        self.GPS_INT = namedtuple('GlobalPositionInt', ['time', 'rel_alt', 'asl_alt', 'lat', 'lon', 'heading', 'groundvx', 'groundvy', 'groundvz'])
        self.global_position_int = self.GPS_INT(0, 0, 0, 0, 0, 0, 0, 0, 0)

        self.GPS_STATUS = namedtuple('GPSStatus', ['time', 'satellite_number'])
        self.GPSStatus = self.GPS_STATUS(0, 0)

        self.VFR_HUD = namedtuple('VFRHUD', ['time', 'airvx', 'airvy', 'airvz', 'speed', 'climb', 'throttle'])
        self.vfr_hud = self.VFR_HUD(0, 0, 0, 0, 0, 0, 0)

        self.FLIGHT_TIME = namedtuple("FlightTime", ['time', 'time_start', 'is_flying'])
        self.flight_time = self.FLIGHT_TIME(0, None, False)

        self.WIND = namedtuple('Wind', ['time', 'windvx', 'windvy', 'windvz'])
        self.wind = self.WIND(0, 0, 0, 0)

        self.ATTITUDE = namedtuple("Attitude", ['time', 'roll', 'pitch', 'yaw', 'rollspeed', 'pitchspeed', 'yawspeed'])
        self.attitude = self.ATTITUDE(0, 0, 0, 0, 0, 0, 0)

        self.BATTERY = namedtuple("Battery", ['time', 'batterypct', 'batteryvoltage', 'batterycurrent'])
        self.battery = self.BATTERY(0, 0, 0, 0)

        # Latest STATUSTEXT message per severity class.
        self.MAV_INFO = namedtuple("MAVInfo", ['time', 'text', 'severity'])
        self.mav_info = self.MAV_INFO(0, "", 0)

        self.MAV_WARNING = namedtuple("MAVWarning", ['time', 'text', 'severity'])
        self.mav_warning = self.MAV_WARNING(0, "", 0)

        self.MAV_ERROR = namedtuple("MAVError", ['time', 'text', 'severity'])
        self.mav_error = self.MAV_ERROR(0, "", 0)

        self.CAMERA_FEEDBACK = namedtuple("CameraFeedback", ['ds_time', 'lat', 'lon', 'alt_rel', 'roll', 'pitch', 'yaw', 'mavproxy_time'])

        # Flight-mode name <-> number mapping used by handle_mode.
        self.mode = "MAV"
        self.mode2num = {"UNKNOWN": -1, "MAV": -1, "MANUAL": 0, "CIRCLE": 1, "STABILIZE": 2, "TRAINING": 3, "ACRO": 4, "FBWA": 5, "FBWB": 6, "CRUISE": 7, "AUTOTUNE": 8, "AUTO": 10, "TL": 11, "LOITER": 12, "AVOID_ADSB": 14, "GUIDED": 15, "QSTABILIZE": 17, "QHOVER": 18, "QLOITER": 19, "QLAND": 20, "RTL" : 21, "INITIALISING" : 22}
        self.num2mode = {n: mode for mode, n in self.mode2num.items()}
        self.num2mode[-1] = "MAV"

        self.SIGNAL = namedtuple("Signal", ['time', 'signal_strength'])
        self.signal = self.SIGNAL(0, 0)

        self.STATIONARY_OBSTACLE = namedtuple("STATIONARY_OBSTACLES", ['time', 'lat', 'lon', 'cylinder_radius', 'cylinder_height'])

        self.WAYPOINT = namedtuple("WAYPOINT", ['command', 'current', 'param1', 'param2', 'param3', 'param4', 'lat', 'lon', 'alt', 'index'])
        self.wps = []  # these are mavwaypoints. use get_wps to get a list of WAYPOINT objects 

        self.current_waypoint_index = 0

    def _get_packet_time(self, msg):
        """Return a timestamp for *msg* as a datetime.

        Currently this is simply the local system time.  An earlier version
        (removed here, previously kept as commented-out code) derived it
        from ``msg.time_boot_ms`` plus the autopilot boot time and synced
        the system clock on AirAPI, falling back to system time when the
        GPS was unavailable; that path was disabled and every packet is now
        stamped with ``time.time()``.
        """
        return datetime.fromtimestamp(time.time())

    def _db_connector(func):
        """Decorator: run *func* with a pooled connection and buffered cursor.

        The wrapped handler receives ``(self, cursor, arg)``.  Afterwards the
        connection is committed and returned to the pool.  IntegrityErrors
        (e.g. duplicate-key inserts) are deliberately ignored; other MySQL
        errors are logged; any unexpected exception is logged with a
        traceback and swallowed so a bad packet cannot kill the module.
        """
        def wrapper(self, arg=None):
            try:
                conn = self.pool.get_connection()
                cursor = conn.cursor(buffered=True)  # set so we can fetchone when we don't want all the results
                r_value = None
                try:
                    r_value = func(self, cursor, arg)

                except mysql.connector.IntegrityError as e:
                    # Expected for duplicate rows; intentionally ignored.
                    pass
                except mysql.connector.Error as e:
                    logger.error("MYSQl error in " + str(func) + ": " + str(e))
                conn.commit()
                conn.close()
                return r_value
            except Exception as e:
                logger.error(traceback.format_exc())

        return wrapper

    # @param data: [ [t1, t1_data1, ...], [t2, t2_data1, ...], ... ] where the
    #   first column is a datetime and the remaining columns are numeric
    # @param req_time: unix time (seconds) to interpolate to
    # @returns [req_time, tgoal_data1, tgoal_data2, ...]
    def interpolate(self, data, req_time):
        """Linearly interpolate each data column to *req_time*.

        Empty input is returned unchanged.  Only the 10 most recent rows
        are used to avoid interpolating over stale data.
        """
        if not data or not data[0]:
            logger.debug("Empty data, cannot interpolate")
            return data
        data = data[:10]  # don't use tons of old data
        interpolated = [req_time]
        # time.mktime(timetuple()) instead of strftime("%s"): "%s" is a
        # non-portable glibc extension and yields *strings* rather than
        # numbers, relying on numpy to coerce them back to floats.
        times = [time.mktime(d[0].timetuple()) for d in data]
        for col in range(1, len(data[0])):
            values = [d[col] for d in data]
            interpolated.append(np.interp(req_time, times, values))
        return interpolated

    # Fetch rows near req_time, widening the window linearly until something
    # comes back or the maximum allowed time variance is reached.
    def get_nearby_data(self, cursor, req_time, query, fetch_one=False):
        """Run *query* over a growing time window centred on *req_time*."""
        widen = 0
        closest = []
        while not closest and (TIME_VARIANCE + widen) < TIME_VARIANCE_MAX:
            window_start = req_time - TIME_VARIANCE - widen
            window_end = req_time + TIME_VARIANCE + widen
            cursor.execute(query, (window_start, window_end, req_time))
            closest = cursor.fetchone() if fetch_one else cursor.fetchall()
            widen += 1
        return closest

    def make_interpolated_query(self, cursor, req_time, query):
        """Fetch rows near *req_time* and interpolate them to that instant."""
        nearby_rows = self.get_nearby_data(cursor, req_time, query)
        return self.interpolate(nearby_rows, req_time)


    # returns a list of waypoints as dicts
    def mavwp_to_wps(self, wplist):
        """Convert MAVLink waypoint objects into WAYPOINT ordered-dicts.

        Returns a real list rather than a lazy ``map``: callers pass the
        result to ``json.dumps`` (see handle_waypoints), which cannot
        serialize a map object under Python 3.
        """
        def wp2dict(wp):
            return self.WAYPOINT(wp.command, wp.current, float(wp.param1), float(wp.param2), float(wp.param3), float(wp.param4), float(wp.x), float(wp.y), float(wp.z), wp.seq)._asdict()

        return [wp2dict(wp) for wp in wplist]

    def get_wps(self):
        """Return the cached mav waypoints converted to WAYPOINT dicts."""
        cached_waypoints = self.wps
        return self.mavwp_to_wps(cached_waypoints)

    # ###################
    #
    #
    #    HANDLES (POSTS)
    #
    #
    # ###################

    @_db_connector
    def handle_waypoints(self, cursor, arg):
        """Persist the waypoint list whenever it changes.

        Reads the 'wp' module's loader and acts only when the loaded points
        differ from our cache AND the loader has received the full expected
        count (i.e. the transfer is complete, not mid-download).
        """
        wploader = self.mpstate.public_modules['wp'].wploader

        # Update if our waypoints are out of date, and the loader is at the expected count.
        if wploader.wpoints != self.wps and len(wploader.wpoints) == wploader.expected_count:
            # If the waypoints change, then the minimum distance to each waypoint calculated before
            # is no longer valid, so reset it.


            self.wps = list(wploader.wpoints)

            data = (datetime.now(), json.dumps(self.mavwp_to_wps(self.wps)))
            query = """
                    INSERT INTO
                      waypoints (time, json)
                    VALUES
                      (%s, %s);
                    """

            if not self.mpstate.airapi:
                self.mpstate.public_modules['mission'].reset_min_dists()
            cursor.execute(query, data)

    @_db_connector
    def handle_mode(self, cursor, arg):
        """Record flight-mode transitions, keeping ``self.mode`` in sync."""
        if self.mode != self.master.flightmode:
            # mode has changed

            # The autopilot sometimes reports an unmapped mode as "Mode(N)";
            # translate the number back through num2mode.  This is janky and
            # someone should trace why the "Mode(N)" form occurs at all.
            if self.master.flightmode.startswith("Mode"):
                self.master.flightmode = self.num2mode[int(self.master.flightmode[5:-1])]
            self.mode = self.master.flightmode
            data = (datetime.now(), self.mode2num[self.mode])
            query = """
                    INSERT INTO
                      mode (time, mode)
                    VALUES
                      (%s, %s);
                    """
            if not self.mpstate.airapi:
                cursor.execute(query, data)

    @_db_connector
    def handle_heartbeat(self, cursor, msg):
        """Record a plane heartbeat.

        This only runs after a heartbeat has been received, so there is
        never a link error at this point; the stored boolean is kept solely
        to avoid a database schema change.
        """
        self.heartbeat = self.Heartbeat(self._get_packet_time(msg), not self.master.linkerror)

        insert_sql = """
                INSERT INTO plane_link
                  (time, plane_link)
                VALUES
                  (%s, %s);
                """
        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.heartbeat)

    @_db_connector
    def handle_global_position_int(self, cursor, msg):
        """Store a GLOBAL_POSITION_INT packet, converted to SI-ish units."""
        packet_time = self._get_packet_time(msg)
        self.global_position_int = self.GPS_INT(
            packet_time,                       # seconds
            float(msg.relative_alt) / 1000,    # meters
            float(msg.alt) / 1000,             # meters
            float(msg.lat) / pow(10, 7),       # decimal degrees
            float(msg.lon) / pow(10, 7),       # decimal degrees
            msg.hdg / float(100),              # degrees
            msg.vx / float(100),               # m/s
            msg.vy / float(100),               # m/s
            msg.vz / float(100))               # m/s

        insert_sql = """
                INSERT INTO global_position_int
                  (time, rel_alt, asl_alt, lat, lon, heading, groundvx, groundvy, groundvz)
                VALUES
                  (%s,%s,%s,%s,%s,%s,%s,%s,%s);
                """
        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.global_position_int)

    @_db_connector
    def handle_vfr_hud(self, cursor, msg):
        """Store a VFR_HUD packet, decomposing airspeed into a vector."""
        air_vx, air_vy = heading_to_vector(msg.heading, msg.airspeed)
        # VFR_HUD carries no vertical airspeed; reuse the last ground vz.
        air_vz = self.global_position_int.groundvz
        self.vfr_hud = self.VFR_HUD(self._get_packet_time(msg),  # seconds
                                    air_vx,                      # m/s
                                    air_vy,                      # m/s
                                    air_vz,                      # m/s
                                    msg.airspeed,                # m/s
                                    msg.climb,                   # m/s
                                    float(msg.throttle))         # pct

        insert_sql = """
                INSERT INTO
                  vfr_hud (time, airspeedvx, airspeedvy, airspeedvz, speed, climb, throttle)
                VALUES
                  (%s,%s,%s,%s,%s,%s,%s);
                """
        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.vfr_hud)

    @_db_connector
    def handle_flight_time(self, cursor, msg):
        """Track whether the plane is flying and when the flight started."""
        takeoff_detected = (msg.throttle > 2 and msg.climb > 2
                            and not self.flight_time.is_flying)
        still_airborne = (msg.airspeed > 2 and self.global_position_int.rel_alt > 2
                          and self.flight_time.is_flying)

        if takeoff_detected:
            # Throttle up and climbing for the first time: flight starts now.
            self.flight_time = self.FLIGHT_TIME(self._get_packet_time(msg), self._get_packet_time(msg), True)
        elif still_airborne:
            # Flight already started and the plane is still moving/off the ground.
            self.flight_time = self.FLIGHT_TIME(self._get_packet_time(msg), self.flight_time.time_start, True)
        else:
            self.flight_time = self.FLIGHT_TIME(self._get_packet_time(msg), self.flight_time.time_start, False)

        insert_sql = """
                INSERT INTO
                  flight_time (time, time_start, is_flying)
                VALUES
                  (%s, %s, %s);
                """
        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.flight_time)

    @_db_connector
    def handle_signal(self, cursor, msg):
        """Store the radio link signal strength (RSSI)."""
        packet_time = self._get_packet_time(msg)
        self.signal = self.SIGNAL(packet_time, msg.rssi)

        insert_sql = """
                INSERT INTO signal_status
                  (time, signal_strength)
                VALUES
                  (%s, %s);
                """

        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.signal)

    @_db_connector
    def handle_attitude(self, cursor, msg):
        """Store an ATTITUDE packet (angles in rad, rates in rad/s)."""
        packet_time = self._get_packet_time(msg)
        self.attitude = self.ATTITUDE(packet_time,     # seconds
                                      msg.roll,        # rad -pi...+pi
                                      msg.pitch,       # rad -pi...+pi
                                      msg.yaw,         # rad -pi...+pi
                                      msg.rollspeed,   # rad/s
                                      msg.pitchspeed,  # rad/s
                                      msg.yawspeed)    # rad/s

        insert_sql = """
                INSERT INTO attitude
                  (time, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed)
                VALUES
                  (%s,%s,%s,%s,%s,%s,%s);
                """
        if not self.mpstate.airapi:
            cursor.execute(insert_sql, self.attitude)

    @_db_connector
    def handle_status_text(self, cursor, msg):
        """Store a STATUSTEXT message and cache it by severity class.

        Messages containing '#EP:lat,lon' additionally patch the first
        cached waypoint's coordinates in place.
        """
        if '#EP' in msg.text and len(self.wps) > 0:
            # Emergency-position message: update waypoint 0 directly.
            lat, lon = msg.text.split('#EP:')[1].split(',')
            self.wps[0].x = float(lat)
            self.wps[0].y = float(lon)

        data = (self._get_packet_time(msg),
                msg.text,
                msg.severity)            # 0: emergency, 1: alert, 2: critical, 3: error, 4: warning, 5: notice, 6: info

        # Keep the most recent message of each class for quick access.
        # NOTE(review): severity values above 6 (e.g. debug=7) are stored in
        # the DB but not cached here — confirm that is intended.
        if msg.severity == 6:
            self.mav_info = self.MAV_INFO(*data)
        elif msg.severity in (5, 4):
            self.mav_warning = self.MAV_WARNING(*data)
        elif msg.severity < 4:
            self.mav_error = self.MAV_ERROR(*data)

        query = """
                INSERT INTO mav_message
                  (time, text, severity)
                VALUES
                  (%s, %s, %s)
                """
        if not self.mpstate.airapi:
            cursor.execute(query, data)

    @_db_connector
    def handle_sys_status(self, cursor, msg):
        """Cache battery state and persist it to the battery table."""
        packet_time = self._get_packet_time(msg)              # seconds
        self.battery = self.BATTERY(packet_time,
                                    float(msg.battery_remaining),         # percent remaining
                                    float(msg.voltage_battery) / 1000.0,  # mV -> V
                                    float(msg.current_battery) / 10.0)    # raw value scaled down by 10

        query = """
                INSERT INTO battery
                  (time, batterypct, batteryvoltage, batterycurrent)
                VALUES
                  (%s, %s, %s, %s)
                """
        if not self.mpstate.airapi:
            cursor.execute(query, self.battery)

    @_db_connector
    def handle_wind(self, cursor, msg):
        """Convert wind direction/speed into x/y components and persist them."""
        wind_x, wind_y = heading_to_vector(msg.direction, msg.speed)
        self.wind = self.WIND(self._get_packet_time(msg),  # seconds
                              wind_x,                      # m/s
                              wind_y,                      # m/s
                              msg.speed_z)                 # m/s

        query = """
                INSERT INTO wind
                  (time, windx, windy, windz)
                VALUES
                  (%s, %s, %s, %s);
                """

        if not self.mpstate.airapi:
            cursor.execute(query, self.wind)

    @_db_connector
    def handle_current_wp(self, cursor, msg):
        """Track the active mission item index and log it to current_wp."""
        self.current_waypoint_index = msg.seq

        row = (self._get_packet_time(msg),  # datetime
               msg.seq)                     # int

        query = """
                INSERT INTO current_wp
                  (time, current_wp)
                VALUES
                  (%s, %s);
                """

        if not self.mpstate.airapi:
            cursor.execute(query, row)

    @_db_connector
    def handle_gps_status(self, cursor, msg):
        """Cache the visible-satellite count and insert it into gps_status."""
        self.GPSStatus = self.GPS_STATUS(self._get_packet_time(msg),
                                         msg.satellites_visible)

        query = """
                INSERT INTO gps_status
                  (time, satellite_number)
                VALUES
                  (%s, %s);
                """
        if not self.mpstate.airapi:
            cursor.execute(query, self.GPSStatus)


    def handle_ekf(self, msg):
        """EKF_STATUS_REPORT handler; currently a no-op (nothing is persisted)."""
        pass

    def handle_hw_status(self, msg):
        """HWSTATUS handler; currently a no-op (nothing is persisted)."""
        pass

    def handle_power_status(self, msg):
        """POWER_STATUS handler; currently a no-op (nothing is persisted)."""
        pass

    def handle_radio(self, msg):
        """RADIO / RADIO_STATUS handler; currently a no-op (nothing is persisted)."""
        pass

    def handle_nav_controller(self, msg):
        """NAV_CONTROLLER_OUTPUT handler; currently a no-op (nothing is persisted)."""
        pass

    def handle_param(self, msg):
        """PARAM_VALUE handler; currently a no-op (nothing is persisted)."""
        pass

    @_db_connector
    def handle_interop_obstacles(self, cursor, obstacles):
        """Persist interop stationary obstacles as a JSON blob.

        ``obstacles`` is expected to be a dict with a 'stationary_obstacles'
        key, e.g. {"lat": ..., "lon": ..., "cylinder_radius": ...,
        "cylinder_height": ...} entries.
        """
        if 'stationary_obstacles' not in obstacles:
            logger.error("handle_interop_obstacles: incorrect obstacle format: {}".format(obstacles.keys()))
            return

        row = (datetime.now(),
               json.dumps(obstacles['stationary_obstacles']))

        query = """
                INSERT INTO obstacles
                  (time, st_obstacles)
                VALUES
                  (%s, %s);
                """

        if not self.mpstate.airapi:
            cursor.execute(query, row)

    @_db_connector
    def handle_camera_feedback(self, cursor, msg):
        """Insert one camera trigger event into camera_feedback.

        lat/lng arrive scaled by 1e7; roll/pitch/yaw arrive as sensor
        degrees and are stored as radians in -pi..+pi.
        """
        row = (msg.time_usec,
               float(msg.lat) / pow(10, 7),
               float(msg.lng) / pow(10, 7),
               msg.alt_rel,
               degrees_to_rads(msg.roll),
               degrees_to_rads(msg.pitch),
               degrees_to_rads(msg.yaw),
               datetime.fromtimestamp(time.time()),  # local receive time
               msg.img_idx)
        query = """
                INSERT INTO camera_feedback
                  (ds_time, lat, lon, alt_rel,
                   roll, pitch, yaw,
                   mavproxy_time, image_index)
                VALUES
                  (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                """
        # Note: unlike the other handlers this insert is not gated on
        # self.mpstate.airapi.
        cursor.execute(query, row)

    # #############
    #
    #
    #    GETS
    #
    #
    # #############
    @_db_connector
    def get_camera_feedback(self, cursor, req_time=None):
        """Fetch the camera_feedback row whose ds_time equals req_time.

        Returns the row as a dict (mavproxy_time rendered as a Unix
        timestamp string) or None when req_time is missing or unmatched.
        """
        if req_time is None:
            return None
        query = """
                SELECT
                  ds_time
                  , lat
                  , lon
                  , alt_rel
                  , roll
                  , pitch
                  , yaw
                  , mavproxy_time
                FROM 
                  camera_feedback
                WHERE
                  ds_time = %s
                """
        cursor.execute(query, (req_time,))
        row = cursor.fetchone()
        try:
            feedback = self.CAMERA_FEEDBACK(*row)._asdict()
            feedback['mavproxy_time'] = feedback['mavproxy_time'].strftime('%s')
            return feedback
        except TypeError as e:
            logger.error("get_camera_feedback:" + str(e) + str(row))
        return None


    @_db_connector
    def get_attitude(self, cursor, req_time=None):
        """Return attitude as a dict: live when req_time is None, otherwise
        interpolated from the attitude table near req_time (seconds)."""
        if req_time is None:
            return self.attitude._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                      time
                      , roll
                      , pitch
                      , yaw
                      , rollspeed
                      , pitchspeed
                      , yawspeed
                    FROM attitude
                    WHERE time > FROM_UNIXTIME(%s) AND time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        data = self.make_interpolated_query(cursor, req_time, query)
        if len(data) == 0:
            logger.debug("get_attitude: no data at time {}".format(req_time))
            return None
        try:
            return self.ATTITUDE(*data)._asdict()
        except TypeError as e:
            logger.error("get_attitude:" + str(e) + str(data))
            return None

    @_db_connector
    def get_param(self, cursor, req_time=None):
        """Parameter lookup is not implemented; always returns None."""
        pass

    @_db_connector
    def get_heartbeat(self, cursor, req_time=None):
        """Return True if a plane_link heartbeat is within HEARTBEAT_TIMEOUT.

        With req_time None, compares the in-memory heartbeat against the
        current wall clock. Otherwise looks up rows in the
        [req_time - TIME_VARIANCE, req_time] window; raises ValueError in
        AirAPI mode.
        """
        if req_time is None:
            data = abs(self.heartbeat[0] - time.time()) < HEARTBEAT_TIMEOUT
        elif not self.mpstate.airapi:
            # NOTE(review): DATEDIFF has day granularity, so for rows within
            # the same day this ORDER BY is effectively a no-op -- confirm
            # whether TIMESTAMPDIFF was intended.
            query = """
                    SELECT
                      time
                    FROM plane_link
                    WHERE time > FROM_UNIXTIME(%s) and time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """

            time_start = req_time - TIME_VARIANCE
            time_end = req_time
            cursor.execute(query, (time_start, time_end, req_time))

            closest = cursor.fetchall()
            if len(closest) == 0:
                data = False
            else:
                # NOTE(review): closest[-1] takes the row sorted LAST by the
                # ABS(DATEDIFF) ordering above; given the day-granularity
                # ordering the choice is effectively arbitrary -- verify.
                data = abs(time.mktime(closest[-1][0].timetuple()) - req_time) < HEARTBEAT_TIMEOUT
        else:
            raise ValueError("Database query not supported in AirAPI mode")

        return data

    @_db_connector
    def get_gps_status(self, cursor, req_time=None):
        """Return the GPS satellite count as a dict, live or near req_time.

        Returns 0 when no row falls in the +/- TIME_VARIANCE window.
        """
        if req_time is None:
            return self.GPSStatus._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , satellite_number
                    FROM gps_status
                    WHERE time > FROM_UNIXTIME(%s) and time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        window = (req_time - TIME_VARIANCE, req_time + TIME_VARIANCE, req_time)
        cursor.execute(query, window)

        row = cursor.fetchone()
        if row is None:
            logger.debug("get_gps_status: no data at time {}".format(req_time))
            return 0
        return self.GPS_STATUS(*row)._asdict()

    @_db_connector
    def get_flight_time(self, cursor, req_time=None):
        """Return flight-time state (time, time_start, is_flying) as a dict.

        Live when req_time is None; otherwise the nearest row within
        +/- TIME_VARIANCE, or 0 when none exists.
        """
        if req_time is None:
            return self.flight_time._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , time_start
                      , is_flying
                    FROM flight_time
                    WHERE time > FROM_UNIXTIME(%s) and time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """

        cursor.execute(query, (req_time - TIME_VARIANCE, req_time + TIME_VARIANCE, req_time))

        row = cursor.fetchone()
        if row is None:
            logger.debug("get_flight_time: no data at time {}".format(req_time))
            return 0
        # MySQL stores BOOLEAN as 0/1, so coerce is_flying back to a bool.
        return self.FLIGHT_TIME(*(row[:2] + (bool(row[2]),)))._asdict()

    @_db_connector
    def get_gps(self, cursor, req_time=None):
        """Return the global position as a dict, live or interpolated
        from global_position_int near req_time."""
        if req_time is None:
            return self.global_position_int._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , rel_alt
                      , asl_alt
                      , lat
                      , lon
                      , heading
                      , groundvx
                      , groundvy
                      , groundvz
                    FROM global_position_int
                    WHERE time > FROM_UNIXTIME(%s) AND time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        data = self.make_interpolated_query(cursor, req_time, query)
        if len(data) == 0:
            logger.debug("get_gps: no data at time {}".format(req_time))
            return None
        try:
            return self.GPS_INT(*data)._asdict()
        except TypeError as e:
            logger.error("get_gps: " + str(e))
            return None

    @_db_connector
    def get_vfr_hud(self, cursor, req_time=None):
        """Return VFR HUD data as a dict, live (req_time None) or
        interpolated from the vfr_hud table near req_time (seconds);
        None when unavailable."""
        if req_time is None:
            return self.vfr_hud._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , airspeedvx
                      , airspeedvy
                      , airspeedvz
                      , speed
                      , climb
                      , throttle
                    FROM vfr_hud
                    WHERE time > FROM_UNIXTIME(%s) AND time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        data = self.make_interpolated_query(cursor, req_time, query)

        if len(data) == 0:
            logger.debug("get_vfr_hud: no data at time {}".format(req_time))
            return None

        try:
            return self.VFR_HUD(*data)._asdict()
        except TypeError as e:
            logger.error("get_vfr_hud" + str(e))
            return None

    @_db_connector
    def get_signal(self, cursor, req_time=None):
        """Return signal strength as a dict, live or the nearest row in
        the +/- TIME_VARIANCE window around req_time."""
        if req_time is None:
            return self.signal._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , signal_strength
                    FROM signal_status
                    WHERE time > FROM_UNIXTIME(%s) and time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        cursor.execute(query, (req_time - TIME_VARIANCE, req_time + TIME_VARIANCE, req_time))

        row = cursor.fetchone()
        if row is None:
            logger.debug("get_signal : no data at time {}".format(req_time))
            return None
        return self.SIGNAL(*row)._asdict()

    @_db_connector
    def get_status_text(self, cursor, req_time=None):
        """Return the latest info/warning/error texts as a dict.

        With req_time None, uses the in-memory mav_info/mav_warning/mav_error
        caches; otherwise queries mav_message for the newest row before
        req_time (within TIME_HISTORY_MAX). Raises ValueError in AirAPI mode.
        """
        if req_time is None:
            return {'info_text': self.mav_info.text, 'warning_text': self.mav_warning.text, 'error_text': self.mav_error.text}
        elif not self.mpstate.airapi:
            # 0: emergency, 1: alert, 2: critical, 3: error, 4: warning, 5: notice, 6: info
            # NOTE(review): fetchone() below returns only the single newest
            # message, so at most one of info/warning/error is non-null per
            # call (and single-argument COALESCE is a no-op). Presumably the
            # intent was the latest message of EACH class -- confirm.
            query = """
                    SELECT
                        COALESCE(CASE WHEN severity = 6 THEN text END) as info
                      , COALESCE(CASE WHEN severity >= 4 AND severity <= 5 THEN text END) as warning
                      , COALESCE(CASE WHEN severity < 4 THEN text END) as error
                    FROM mav_message
                    WHERE time < FROM_UNIXTIME(%s)
                    AND   time > FROM_UNIXTIME(%s)
                    ORDER BY time desc
                    """

            cursor.execute(query, (req_time, req_time - TIME_HISTORY_MAX))
            data = cursor.fetchone()
            if data is None or len(data) < 3:
                logger.debug("get_status_text: bad data {} is null.".format(data))
                return

            return {'info_text': data[0], 'warning_text': data[1], 'error_text': data[2]}
        else:
            raise ValueError("Database query not supported in AirAPI mode")

    @_db_connector
    def get_sys_status(self, cursor, req_time=None):
        """Return battery state as a dict, live or interpolated from the
        battery table near req_time."""
        if req_time is None:
            return self.battery._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , batterypct      # percent
                      , batteryvoltage  # volts
                      , batterycurrent  # current
                    FROM battery
                    WHERE time > FROM_UNIXTIME(%s) AND time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        data = self.make_interpolated_query(cursor, req_time, query)

        if len(data) == 0:
            logger.debug("get_sys_status: no data at time {}".format(req_time))
            return None

        try:
            return self.BATTERY(*data)._asdict()
        except TypeError as e:
            logger.error("get_sys_status" + str(e))
            return None

    @_db_connector
    def get_wind(self, cursor, req_time=None):
        """Return wind components as a dict, live or interpolated from the
        wind table near req_time; None when unavailable."""
        if req_time is None:
            return self.wind._asdict()
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                        time
                      , windx
                      , windy
                      , windz
                    FROM wind
                    WHERE time > FROM_UNIXTIME(%s) AND time < FROM_UNIXTIME(%s)
                    ORDER BY ABS(DATEDIFF(time, FROM_UNIXTIME(%s)))
                    """
        data = self.make_interpolated_query(cursor, req_time, query)

        if len(data) == 0:
            logger.debug("get_wind: no data at time {}".format(req_time))
            return None

        try:
            return self.WIND(*data)._asdict()
        except TypeError as e:
            logger.error("get_wind error" + str(e))
            return None

    @_db_connector
    def get_waypoints(self, cursor, req_time=None):
        """Return waypoints, live (req_time None) or decoded from the JSON
        column of the waypoints table; [] when no row matches."""
        if req_time is not None and self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")
        try:
            if req_time is None:
                return self.mavwp_to_wps(self.wps)
            query = """
                        SELECT
                          json
                        FROM waypoints
                        WHERE time < FROM_UNIXTIME(%s)
                        AND   time > FROM_UNIXTIME(%s)
                        ORDER BY time desc;
                        """
            cursor.execute(query, (req_time, req_time - TIME_HISTORY_MAX))
            row = cursor.fetchone()
            if row is None:
                logger.debug('get_waypoints: no data at time {}'.format(req_time))
                return []
            return json.loads(row[0])
        except Exception as e:
            # Best-effort: log and fall through (implicitly returns None).
            logger.error("get_waypoints error {}".format(str(e)))

    @_db_connector
    def get_mode(self, cursor, req_time=None):
        """Return the flight-mode string, live or decoded from the newest
        mode-table row before req_time ("MAV" when none exists)."""
        if req_time is None:
            return self.master.flightmode
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                      mode
                    FROM mode
                    WHERE time < FROM_UNIXTIME(%s)
                    AND   time > FROM_UNIXTIME(%s)
                    ORDER by time desc;
                    """
        cursor.execute(query, (req_time, req_time - TIME_HISTORY_MAX))
        row = cursor.fetchone()  # single-column tuple, e.g. (0, )
        if row is None:
            return "MAV"
        return self.num2mode[row[0]]

    @_db_connector
    def get_interop_obstacles(self, cursor, req_time=None):
        """Return interop obstacles, live (req_time None) or from the DB.

        The historical form is {'time': ..., 'stationary_obstacles': [...]};
        returns [] when no row exists before req_time.
        """
        if req_time is None:
            return self.interop_instance.obstacles
        elif not self.mpstate.airapi:
            query = """
                    SELECT
                        time
                      , st_obstacles
                    FROM obstacles
                    WHERE time < FROM_UNIXTIME(%s)
                    AND   time > FROM_UNIXTIME(%s)
                    ORDER BY time desc;
                    """
            cursor.execute(query, (req_time, req_time - TIME_HISTORY_MAX))
            result = cursor.fetchone()

            if result is None:
                return []

            # Bug fix: the row is (time, st_obstacles) -- two columns. The
            # old code read json.loads(result[2]), which always raised
            # IndexError, and parsed an unused mv_obstacles from result[1].
            st_obstacles = json.loads(result[1])
        else:
            raise ValueError("Database query not supported in AirAPI mode")
        return {'time': result[0], 'stationary_obstacles': st_obstacles}

    @_db_connector
    def get_ekf(self, cursor, req_time=None):
        """EKF retrieval is not implemented."""
        raise NotImplementedError

    @_db_connector
    def get_hw_status(self, cursor, req_time=None):
        """Hardware-status retrieval is not implemented."""
        raise NotImplementedError

    @_db_connector
    def get_power_status(self, cursor, req_time=None):
        """Power-status retrieval is not implemented."""
        raise NotImplementedError

    @_db_connector
    def get_radio(self, cursor, req_time=None):
        """Radio-status retrieval is not implemented."""
        raise NotImplementedError

    @_db_connector
    def get_nav_controller(self, cursor, req_time=None):
        """Nav-controller retrieval is not implemented."""
        raise NotImplementedError

    @_db_connector
    def get_current_wp(self, cursor, req_time=None):
        """Return the current waypoint index, live or from the newest
        current_wp row before req_time; raises ValueError when absent."""
        if req_time is None:
            return self.current_waypoint_index
        if self.mpstate.airapi:
            raise ValueError("Database query not supported in AirAPI mode")

        query = """
                    SELECT
                      current_wp
                    FROM current_wp
                    WHERE time < FROM_UNIXTIME(%s)
                    AND   time > FROM_UNIXTIME(%s)
                    ORDER BY time desc
                    LIMIT 1;
                    """
        cursor.execute(query, (req_time, req_time - TIME_HISTORY_MAX))
        row = cursor.fetchone()

        if row is None:
            raise ValueError("No data for time: " + str(req_time))
        return row[0]

    def mavlink_packet(self, msg):
        """Route one incoming MAVLink message to its handler(s).

        SYSTEM_TIME additionally records the autopilot boot time once (and,
        in AirAPI mode, syncs the system clock via set-time.sh). All errors
        are caught and logged so MAVProxy does not swallow them silently.
        """
        try:   # use giant try catch so mavproxy doesn't eat this errors
            msgt = msg.get_type()  # message type

            if msgt == 'SYSTEM_TIME' and msg.time_unix_usec != 0:
                if self.autopilot_boot_time is None:
                    self.autopilot_boot_time = (msg.time_unix_usec/1000.0 - msg.time_boot_ms)/1000.0
                    new_time = round(msg.time_unix_usec / 1000000)
                    logger.info("Recorded autopilot boot time: " + str(datetime.fromtimestamp(new_time)))
                    if self.mpstate.airapi:
                        time_string = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(new_time))
                        logger.info("Syncing system time to Autopilot time ({})".format(time_string))
                        subprocess.call("(sudo /usr/local/bin/set-time.sh \"" + time_string + "\")", shell=True)

            # Dispatch table: message type -> handler(s) to invoke in order.
            dispatch = {
                'PARAM_VALUE': (self.handle_param,),
                'HEARTBEAT': (self.handle_heartbeat,),
                'GLOBAL_POSITION_INT': (self.handle_global_position_int,),
                'ATTITUDE': (self.handle_attitude,),
                'VFR_HUD': (self.handle_vfr_hud, self.handle_flight_time),
                'WIND': (self.handle_wind,),
                'STATUSTEXT': (self.handle_status_text,),
                'SYS_STATUS': (self.handle_sys_status,),
                'EKF_STATUS_REPORT': (self.handle_ekf,),
                'HWSTATUS': (self.handle_hw_status,),
                'POWER_STATUS': (self.handle_power_status,),
                'RADIO': (self.handle_radio,),
                'RADIO_STATUS': (self.handle_radio,),
                'RC_CHANNELS': (self.handle_signal,),
                'RC_CHANNELS_RAW': (self.handle_signal,),
                'NAV_CONTROLLER_OUTPUT': (self.handle_nav_controller,),
                'GPS_RAW_INT': (self.handle_gps_status,),
                'MISSION_CURRENT': (self.handle_current_wp,),
                'WAYPOINT_CURRENT': (self.handle_current_wp,),
                'CAMERA_FEEDBACK': (self.handle_camera_feedback,),
            }
            for handler in dispatch.get(msgt, ()):
                handler(msg)

            self.handle_waypoints(None)
            self.handle_mode(None)

        except Exception as e:
            exc_type, _, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            logger.error("mavlink_packet: " + str(e) + str(exc_type) + str(fname) + str(exc_tb.tb_lineno))
Ejemplo n.º 33
0
from mysql.connector import connect
from mysql.connector.pooling import MySQLConnectionPool

# Fixes: MySQLConnectionPool was previously used without being imported
# (only `connect` was), and the bare `global pool` statement at module
# scope was a no-op. Connector/Python also caps pool_size at 32, so the
# original pool_size=50 raised an error at construction time.
pool = MySQLConnectionPool(pool_name=None,
                           pool_size=32,
                           pool_reset_session=True,
                           user=db_user,
                           password=db_pass,
                           database=db_name,
                           host=db_host)


@contextmanager
def getconnection_mysql(buffered=True, raw=True):
    """Context manager yielding a pooled MySQL connection; closing it on
    exit returns it to the pool.

    NOTE(review): MySQLConnectionPool.get_connection() takes no arguments
    in Connector/Python, so passing buffered/raw here likely raises
    TypeError -- confirm against the installed connector version.
    """
    con = pool.get_connection(buffered=buffered, raw=raw)
    try:
        yield con
    finally:
        con.close()


@contextmanager
def getcursor_mysql(prepared=False, buffered=True, raw=True):
    """Context manager yielding a cursor on a pooled MySQL connection.

    Fixes: MySQLConnectionPool.get_connection() accepts no arguments in
    Connector/Python (the old call raised TypeError), so the buffered/raw
    options are now passed to cursor() where they belong; the cursor is
    also closed before the connection is returned to the pool.
    """
    con = pool.get_connection()
    cur = con.cursor(prepared=prepared, buffered=buffered, raw=raw)
    try:
        yield cur
    finally:
        try:
            cur.close()
        finally:
            con.close()


con = pool.get_connection()  # check a connection out of the pool (caller must close() it to return it)