Example #1
def mysql_execute(command, params=None):
    """
    Function to execute a sql statement on the mysql database. This function is
    called by the database_execute function when the mysql backend is set in
    the configuration file.
    @param command the sql command to execute
    @param params a list of tuple of values to substitute in command
    @returns a list of dictionaries representing the sql result
    """
    parser = ConfigParser()
    parser.read(SYNCINI_PATH)
    try:
        host = parser.get('database', 'hostname')
        user = parser.get('database', 'username')
        pawd = parser.get('database', 'password')
        dbse = parser.get('database', 'database')
        port = parser.getint('database', 'port')
        connection = mysql_connect(host=host, port=port, user=user,
                                   passwd=pawd, db=dbse)
        cursor = connection.cursor()
        cursor.execute(command, params)
        connection.commit()
        return cursor.fetchall()
    except MySQLError as mysqlerror:
        string = "MySQL Error: %d: %s" % (mysqlerror.args[0],
                                          mysqlerror.args[1])
        getLogger(__name__).debug(string, extra=get_sql_log_dict())
    finally:
        try:
            if connection:
                connection.close()
        except UnboundLocalError as error:
            getLogger(__name__).exception(error)
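
A minimal usage sketch for the function above, assuming a hypothetical users table in the configured database; params is handed straight to cursor.execute, so the driver performs the value substitution:

# Hypothetical table and values; passing params separately lets the MySQL
# driver escape them instead of formatting them into the SQL string.
rows = mysql_execute("SELECT id, name FROM users WHERE name = %s", ("alice",))
mysql_execute("INSERT INTO users (name) VALUES (%s)", ("bob",))
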
Example #2
def mysql_execute(command, params=None):
    """
    Function to execute a sql statement on the mysql database. This function is
    called by the database_execute function when the mysql backend is set in
    the configuration file.
    @param command the sql command to execute
    @param params a list of tuple of values to substitute in command
    @returns a list of dictionaries representing the sql result
    """
    getLogger("database").debug("mysql_execute(" + command + ", " + str(params)
                                + ")", extra=get_sql_log_dict())
    parser = lox_config.ConfigSingleton(module.NAME)
    try:
        host = parser.get('database', 'hostname')
        user = parser.get('database', 'username')
        pawd = parser.get('database', 'password')
        dbse = parser.get('database', 'database')
        port = parser.getint('database', 'port')
        connection = mysql_connect(host=host, port=port, user=user,
                                   passwd=pawd, db=dbse)
        cursor = connection.cursor()
        cursor.execute(command, params)
        connection.commit()
        return cursor.fetchall()
    except MySQLError as mysqlerror:
        getLogger(__name__).info(
            "MySQL Error: %d: %s" %
            (mysqlerror.args[0], mysqlerror.args[1]))
    finally:
        try:
            if connection:
                connection.close()
        except UnboundLocalError:
            pass
Example #3
def connect(user, passwd, host, port):
    """connect is a convenient shortcut to mysql.connector.connect. Also,
    it makes it reasonable to import mysql.connector in this file, so
    to make it self-complete as a template.
    Args:
        user: Specifies the MySQL user name.
        passwd : Specify the MySQL password.
        host : The host name or IP address.
        port : Specifies the port number that attempts to connect
            to the MySQL server.

    """
    return mysql_connect(user=user, passwd=passwd, host=host, port=port)
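
A brief usage sketch with placeholder credentials (all values below are hypothetical):

# Hypothetical credentials for a local MySQL server.
conn = connect(user="app_user", passwd="secret", host="127.0.0.1", port=3306)
cur = conn.cursor()
cur.execute("SELECT VERSION()")
print(cur.fetchone())
conn.close()
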
Example #4
 def create_connection(self, db_name=None):
   if self.db_type == IMPALA:
     connection_class = ImpalaDbConnection
     connection = impala_connect(host=self.host_name, port=self.port or 21050)
   elif self.db_type == POSTGRESQL:
     connection_class = PostgresqlDbConnection
     connection_args = {'user': self.user_name or 'postgres'}
     if self.password:
       connection_args['password'] = self.password
     if db_name:
       connection_args['database'] = db_name
     if self.host_name:
       connection_args['host'] = self.host_name
     if self.port:
       connection_args['port'] = self.port
     global postgresql_connect
     if not postgresql_connect:
       try:
         from psycopg2 import connect as postgresql_connect
       except:
         print('Error importing psycopg2. Please make sure it is installed. '
             'See the README for details.')
         raise
     connection = postgresql_connect(**connection_args)
     connection.autocommit = True
   elif self.db_type == MYSQL:
     connection_class = MySQLDbConnection
     connection_args = {'user': self.user_name or 'root'}
     if self.password:
       connection_args['passwd'] = self.password
     if db_name:
       connection_args['db'] = db_name
     if self.host_name:
       connection_args['host'] = self.host_name
     if self.port:
       connection_args['port'] = self.port
     global mysql_connect
     if not mysql_connect:
       try:
         from MySQLdb import connect as mysql_connect
       except:
         print('Error importing MySQLdb. Please make sure it is installed. '
             'See the README for details.')
         raise
     connection = mysql_connect(**connection_args)
   else:
     raise Exception('Unexpected database type: %s' % self.db_type)
   return connection_class(self, connection, db_name=db_name)
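
The global-plus-deferred-import pattern above keeps optional drivers from being hard dependencies: nothing is imported until the matching database type is actually requested. A standalone sketch of the same idea, assuming only that MySQLdb may be missing at runtime (the helper name is hypothetical):

mysql_connect = None  # filled in on first use

def _load_mysql_connect():
    """Import MySQLdb.connect lazily and cache it at module level."""
    global mysql_connect
    if mysql_connect is None:
        try:
            from MySQLdb import connect as mysql_connect
        except ImportError:
            print('Error importing MySQLdb. Please make sure it is installed.')
            raise
    return mysql_connect
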
Example #5
def mysql_execute(command, params=None):
    """
    Function to execute a sql statement on the mysql database. This function is
    called by the database_execute function when the mysql backend is set in
    the configuration file.

    :param command: the sql command to execute
    :param params: a list of tuple of values to substitute in command
    :returns: a list of dictionaries representing the sql result
    """
    getLogger("database").debug("mysql_execute(" + command + ", " +
                                str(params) + ")",
                                extra=get_sql_log_dict())
    try:
        host = config.get('database', 'hostname')
        user = config.get('database', 'username')
        pawd = config.get('database', 'password')
        dbse = config.get('database', 'database')
        port = config.getint('database', 'port')
        connection = mysql_connect(host=host,
                                   port=port,
                                   user=user,
                                   passwd=pawd,
                                   db=dbse)
        cursor = connection.cursor()
        cursor.execute(command, params)
        connection.commit()
        return cursor.fetchall()
    except MySQLError as mysqlerror:
        print("MySQL Error: %d: %s" % (mysqlerror.args[0], mysqlerror.args[1]))
    finally:
        try:
            if connection:
                connection.close()
        except UnboundLocalError:
            pass
Example #6
    def connect(self):
        self.close()

        if self.database_type == dba.DatabaseType["MySQL"]:
            import MySQLdb
            from MySQLdb import connect as mysql_connect
            from MySQLdb.cursors import Cursor as mysql_cursor, DictCursor as mysql_dict_cursor
            from warnings import filterwarnings
            filterwarnings("ignore", category = MySQLdb.Warning)

            self.db = mysql_connect(
                    host=self.host,
                    port=self.port,
                    user=self.username,
                    passwd=self.password,
                    db=self.database,
                    charset=self.charset,
                    use_unicode=self.use_unicode)

            if self.result_type == dba.ResultType["ListResult"]:
                self.cur = self.db.cursor(mysql_cursor)
            elif self.result_type == dba.ResultType["DictResult"]:
                self.cur = self.db.cursor(mysql_dict_cursor)
        elif self.database_type == dba.DatabaseType["PostgreSQL"]:
            from pgdb import connect as pgdb_connect

            self.db = pgdb_connect(
                    host="%s:%d" % (self.host, self.port),
                    user=self.username,
                    password=self.password,
                    database=self.database)

            if self.result_type == dba.ResultType["ListResult"]:
                self.cur = self.db.cursor()
            elif self.result_type == dba.ResultType["DictResult"]:
                self.cur = PGDB_DictCursor(self.db.cursor())
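
In the MySQL branch above, the ResultType check only selects which MySQLdb cursor class the connection hands back. A quick illustration of the difference, with hypothetical connection parameters:

from MySQLdb import connect as mysql_connect
from MySQLdb.cursors import DictCursor

db = mysql_connect(host="localhost", user="root", passwd="secret", db="test")
tuple_cur = db.cursor()            # ListResult: rows come back as tuples
dict_cur = db.cursor(DictCursor)   # DictResult: rows come back as dicts keyed by column name
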
Example #7
def main():
    """
    Connect to both databases and migrate data
    """

    parser = argparse.ArgumentParser()
    parser.add_argument('--user',
                        '-u',
                        dest='user',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB username')
    parser.add_argument('--password',
                        "-p",
                        dest='password',
                        action='store',
                        help='MySQL/MariaDB password')
    parser.add_argument('--host',
                        '-s',
                        dest='host',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB host')
    parser.add_argument('--port',
                        '-o',
                        dest='port',
                        action='store',
                        required=False,
                        type=int,
                        default=3306,
                        help='MySQL/MariaDB port. MySQL 3306 (default), MariaDB 3307')
    parser.add_argument('--database',
                        '-d',
                        dest='database',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB database name')
    parser.add_argument(
        '--count',
        '-c',
        dest='row_count',
        action='store',
        required=False,
        type=int,
        default=0,
        help=
        'If 0 (default), determine upper bound of number of rows by querying database, '
        'otherwise use this number (used for progress bar only)')

    args = parser.parse_args()

    # load InfluxDB configuration file (the one from Home Assistant) (without using !secrets)
    with open("influxdb.yaml") as config_file:
        influx_config = yaml.load(config_file, Loader=yaml.FullLoader)

    # validate and extend config
    schema = vol.Schema(INFLUX_SCHEMA, extra=vol.ALLOW_EXTRA)
    influx_config = schema(influx_config)

    # establish connection to InfluxDB
    influx = get_influx_connection(influx_config,
                                   test_write=True,
                                   test_read=True)
    converter = _generate_event_to_json(influx_config)

    # connect to MySQL/MariaDB database
    connection = mysql_connect(host=args.host,
                               port=args.port,
                               user=args.user,
                               password=args.password,
                               database=args.database,
                               cursorclass=cursors.SSCursor,
                               charset="utf8")
    cursor = connection.cursor()

    # untested: connect to SQLite file instead (you need to get rid of the first three `add_argument` calls above)
    #connection = sqlite3.connect('home_assistant_v2.db')

    if args.row_count == 0:
        # query number of rows in states table - this will be more than the number of rows we
        # are going to process, but at least it gives us some percentage and estimation
        cursor.execute("select COUNT(*) from states")
        total = cursor.fetchone()[0]
    else:
        total = args.row_count

    # select the values we are interested in
    cursor.execute(
        "select states.entity_id, states.state, states.attributes, events.event_type, events.time_fired from states, events where events.event_id = states.event_id"
    )

    # map to count names and number of measurements for each entity
    statistics = {}

    # convert each row, write to influxdb in batches
    batch_size_max = 512
    batch_size_cur = 0
    batch_json = []
    with tqdm(total=total, mininterval=1, unit=" rows",
              unit_scale=True) as progress_bar:
        for row in cursor:
            progress_bar.update(1)

            try:
                _entity_id = rename_entity_id(row[0])
                _state = row[1]
                _attributes_raw = row[2]
                _attributes = rename_friendly_name(json.loads(_attributes_raw))
                _event_type = row[3]
                _time_fired = row[4]
            except Exception as e:
                print("Failed extracting data from %s: %s.\nAttributes: %s" %
                      (row, e, _attributes_raw))
                continue

            try:
                # recreate state and event
                state = State(entity_id=_entity_id,
                              state=_state,
                              attributes=_attributes)
                event = Event(_event_type,
                              data={"new_state": state},
                              time_fired=_time_fired)
            except InvalidEntityFormatError:
                pass
            else:
                data = converter(event)
                if not data:
                    continue

                # collect statistics (remove this code block to speed up processing slightly)
                if "friendly_name" in _attributes:
                    friendly_name = _attributes["friendly_name"]

                    if _entity_id not in statistics:
                        statistics[_entity_id] = {friendly_name: 1}
                    elif friendly_name not in statistics[_entity_id]:
                        statistics[_entity_id][friendly_name] = 1
                        print(
                            "Found new name '%s' for entity '%s'. All names known so far: %s"
                            % (friendly_name, _entity_id,
                               statistics[_entity_id].keys()))
                        print(row)
                    else:
                        statistics[_entity_id][friendly_name] += 1

                batch_json.append(data)
                batch_size_cur += 1

                if batch_size_cur >= batch_size_max:
                    influx.write(batch_json)
                    batch_json = []
                    batch_size_cur = 0

    influx.write(batch_json)
    influx.close()

    # print statistics - ideally you have one friendly name per entity_id
    # you can use the output to see where the same sensor has had different
    # names, as well as which entities do not have lots of measurements and
    # thus could be ignored (add them to exclude/entities in the influxdb yaml)
    for entity in sorted(statistics.keys()):
        print(entity)
        for friendly_name in sorted(statistics[entity].keys()):
            count = statistics[entity][friendly_name]
            print("  - %s (%d)" % (friendly_name, count))
Example #8
 def create_connection(self, db_name=None):
   if self.db_type == HIVE:
     connection_class = HiveDbConnection
     connection = impala_connect(
         host=self.host_name,
         port=self.port,
         user=self.user_name,
         password=self.password,
         timeout=maxint,
         auth_mechanism='PLAIN')
     return HiveDbConnection(self, connection, user_name=self.user_name,
         user_pass=self.password, db_name=db_name, hdfs_host=self.hdfs_host,
         hdfs_port=self.hdfs_port)
   elif self.db_type == IMPALA:
     connection_class = ImpalaDbConnection
     connection = impala_connect(
         host=self.host_name,
         port=self.port or 21050,
         timeout=maxint)
   elif self.db_type == ORACLE:
     connection_class = OracleDbConnection
     connection_str = '%(user)s/%(password)s@%(host)s:%(port)s/%(service)s'
     connection_args = {
       'user': self.user_name or 'system',
       'password': self.password or 'oracle',
       'host': self.host_name or 'localhost',
       'port': self.port or 1521,
       'service': self.service or 'XE'}
     try:
       from cx_Oracle import connect as oracle_connect
     except:
       print('Error importing cx_Oracle. Please make sure it is installed. '
           'See the README for details.')
       raise
     connection = oracle_connect(connection_str % connection_args)
     connection.outputtypehandler = OracleDbConnection.type_converter
     connection.autocommit = True
   elif self.db_type == POSTGRESQL:
     connection_class = PostgresqlDbConnection
     connection_args = {'user': self.user_name or 'postgres'}
     if self.password:
       connection_args['password'] = self.password
     if db_name:
       connection_args['database'] = db_name
     if self.host_name:
       connection_args['host'] = self.host_name
     if self.port:
       connection_args['port'] = self.port
     connection = postgresql_connect(**connection_args)
     connection.autocommit = True
   elif self.db_type == MYSQL:
     connection_class = MySQLDbConnection
     connection_args = {'user': self.user_name or 'root'}
     if self.password:
       connection_args['passwd'] = self.password
     if db_name:
       connection_args['db'] = db_name
     if self.host_name:
       connection_args['host'] = self.host_name
     if self.port:
       connection_args['port'] = self.port
     try:
       from MySQLdb import connect as mysql_connect
     except:
       print('Error importing MySQLdb. Please make sure it is installed. '
           'See the README for details.')
       raise
     connection = mysql_connect(**connection_args)
   else:
     raise Exception('Unexpected database type: %s' % self.db_type)
   return connection_class(self, connection, db_name=db_name)
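
The ORACLE branch above builds the cx_Oracle connect string by interpolating connection_args into connection_str; with the fallback defaults it renders like this:

connection_str = '%(user)s/%(password)s@%(host)s:%(port)s/%(service)s'
connection_args = {'user': 'system', 'password': 'oracle',
                   'host': 'localhost', 'port': 1521, 'service': 'XE'}
print(connection_str % connection_args)  # system/oracle@localhost:1521/XE
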
Example #9
File: db.py Project: zs-2014/util
    def reconnect(self):
        if self.conn is not None:
            try:
                self.conn.ping(True)
                return True
            except OperationalError as e:
                # 2006: "MySQL server has gone away"; anything else is re-raised.
                if e.args[0] != 2006:
                    raise
            try:
                self.conn.ping()
                return True
            except OperationalError as e:
                if e.args[0] != 2006:
                    raise
        self.conn = mysql_connect(host=self.config['host'], port=self.config['port'], user=self.config['user'], 
                                  passwd=self.config['passwd'], db=self.config['db'], charset=self.config.get('charset', 'utf8'), 
                                  connect_timeout=self.config.get('timeout'))
        self.conn.autocommit(self.auto_commit)
        return True

    connect = reconnect 
                        
    def is_connected(self):
        return self.conn is not None

    def close(self):
        self.conn.close()
        self.conn = None

    def begin_transaction(self):
        if not self.auto_commit: