Example 1
 def _cursor( self, connection, kwargs ):
     if not connection: 
         kwargsKeys = kwargs.keys()
         if ( kwargsKeys.__contains__( 'port' ) and 
             kwargsKeys.__contains__( 'host' ) ):
             kwargs['dsn'] = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % ( 
                      kwargs.get( 'database' ),
                      kwargs.get( 'host' ),
                      kwargs.get( 'port' )
             )
         else:
             kwargs['dsn'] = kwargs.get( 'database' )
         
         # Setting AUTO COMMIT off on connection.
         conn_options = {Database.SQL_ATTR_AUTOCOMMIT : Database.SQL_AUTOCOMMIT_OFF}
         kwargs['conn_options'] = conn_options
         if kwargsKeys.__contains__( 'options' ):
             kwargs.update( kwargs.get( 'options' ) )
             del kwargs['options']
         if kwargsKeys.__contains__( 'port' ):
             del kwargs['port']
         
         pconnect_flag = True
         if kwargsKeys.__contains__( 'PCONNECT' ):
             pconnect_flag = kwargs['PCONNECT']
             del kwargs['PCONNECT']
             
         if pconnect_flag:
             connection = Database.pconnect( **kwargs )
         else:
             connection = Database.connect( **kwargs )
         connection.autocommit = connection.set_autocommit
         return connection, DB2CursorWrapper( connection )
     else:
         return DB2CursorWrapper( connection )
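For reference, a minimal standalone sketch of the keyword DSN that the wrapper above assembles, passed directly to ibm_db_dbi.connect() the way later examples do; the host, port, database and credentials below are placeholders, not values taken from any of these projects.

import ibm_db_dbi

# Placeholder connection details -- substitute real values.
dsn = ("DATABASE=SAMPLE;"
       "HOSTNAME=db2.example.com;"
       "PORT=50000;"
       "PROTOCOL=TCPIP;")

conn = ibm_db_dbi.connect(dsn, "db2user", "db2password")  # (dsn, user, password)
cur = conn.cursor()
cur.execute("SELECT 1 FROM SYSIBM.SYSDUMMY1")  # trivial test query
print(cur.fetchone())
conn.close()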
Example 2
    def connect(self):
        self.initConnection()

        try:
            database = "DRIVER={IBM DB2 ODBC DRIVER};DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port)
            self.connector = ibm_db_dbi.connect(database, self.user, self.password)
        except ibm_db_dbi.OperationalError, msg:
            raise sqlmapConnectionException, msg
Example 3
    def connect(self):
        self.initConnection()

        try:
            database = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port)
            self.connector = ibm_db_dbi.connect(database, self.user, self.password)
        except ibm_db_dbi.OperationalError, msg:
            raise SqlmapConnectionException(getSafeExString(msg))
Example 4
def db2(user, password, database, host='localhost', **kwargs):
    """Return a connection to IBM DB2. Adapter ibm_db_dbi is required."""
    import ibm_db_dbi
    
    if not password:
        password = _passwordPrompt()
    dbc = ibm_db_dbi.connect(dsn=database, user=user, password=password, database=database, host=host, **kwargs)
    
    return Connection(dbc, ibm_db_dbi, database, **kwargs)
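A short usage sketch of the helper above; the user name, database and host are placeholders, and Connection and _passwordPrompt are assumed to come from the surrounding library, which is not shown here.

# Hypothetical call; password=None triggers the interactive prompt.
conn = db2('dbuser', None, 'SAMPLE', host='db2.example.com')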
Example 5
def query_ibm_db():
    statement = request.forms.get('sql')

    conn = dbi.connect()
    cur = conn.cursor()
    cur.execute(statement)
    
    headers = [descr[0] for descr in cur.description]

    return template('query', headers=headers, rows=cur)
Example 6
def writeDataToExcel(args, workbook, sheetName):
    """ Write query output to Excel worksheet """
    try:
        conn = dbi.connect()
        cur = conn.cursor()
        cur.execute(args.c)
        headers = [descr[0] for descr in cur.description]
        format = workbook.add_format({'bold': args.b, 'italic': args.i})
        worksheet = workbook.add_worksheet(sheetName)
        worksheet.write_row('A1', headers, format)
        for i, row in enumerate(cur, start=1):
            worksheet.write_row(i, 0, row)
    except Exception as err:
        print('ERROR:  ' + str(err))
Example 7
    def connect():
        """ Connect to a DB2 database.

            All subclasses of DB2_Query will use this database for queries. 

        """
        
        if settings.DISABLE_REPORTING_DB:
            raise SIMSProblem("Reporting database access has been disabled in this deployment.")

        sims_user = settings.SIMS_USER
        sims_passwd = settings.SIMS_PASSWORD
        sims_db_name = settings.SIMS_DB_NAME
        sims_db_schema = settings.SIMS_DB_SCHEMA

        import ibm_db_dbi
        dbconn = ibm_db_dbi.connect(sims_db_name, sims_user, sims_passwd)
        cursor = dbconn.cursor()
        cursor.execute("SET SCHEMA "+sims_db_schema)
        DB2_Query.db = dbconn
Example 8
 def _get_conn(self):
     conn = None
     # Double the driver braces so str.format() keeps them as literal text.
     connection_string = 'DRIVER={{IBM DB2 ODBC DRIVER}};DATABASE={};HOSTNAME={};PORT={};PROTOCOL=TCPIP;UID={};PWD={};'.format(
         self.database, self.host, self.port, self.user, self.password)
     try:
         conn = ibm_db_dbi.connect(connection_string)
         conn.set_autocommit(True)
         if self.schema:
             conn.set_current_schema(self.schema)
     except:
         LOGGER.critical('Cannot connect to database: %(connection_string)s', {'connection_string': connection_string}, exc_info=1)
         try:
             raise
         finally:
             if conn is not None:
                 try:
                     conn.close()
                 except Exception:
                     LOGGER.exception('Cannot close database connection')
     else:
         return conn
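The doubled braces above matter: str.format() treats single braces as replacement fields, so the ODBC driver keyword has to be escaped. A quick check of the template, using placeholder values:

template = ('DRIVER={{IBM DB2 ODBC DRIVER}};DATABASE={};HOSTNAME={};'
            'PORT={};PROTOCOL=TCPIP;UID={};PWD={};')
# Doubled braces come out as literal braces after formatting.
print(template.format('SAMPLE', 'db2.example.com', 50000, 'dbuser', 'secret'))
# DRIVER={IBM DB2 ODBC DRIVER};DATABASE=SAMPLE;HOSTNAME=db2.example.com;PORT=50000;PROTOCOL=TCPIP;UID=dbuser;PWD=secret;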
Example 9
 def _get_conn(self):
     conn = None
     try:
         connection_string = 'DRIVER={IBM DB2 ODBC DRIVER};DATABASE=%s;HOSTNAME=%s;PORT=%s; PROTOCOL=TCPIP;UID=%s;PWD=%s;' % (
             self.database, self.host, self.port, self.user, self.password)
         conn = ibm_db_dbi.connect(connection_string)
         if self.schema:
             with contextlib.closing(NamedParameterCursor(conn.cursor())) as c:
                 c.execute('SET SCHEMA {}'.format(self.schema))
     except:
         LOGGER.critical('Cannot connect to database: %(connection_string)s', {'connection_string': connection_string}, exc_info=1)
         try:
             raise
         finally:
             if conn is not None:
                 try:
                     conn.close()
                 except:
                     LOGGER.exception('Cannot close database connection')
     else:
         return conn
Example 10
    def get_siebel_curs():

        config = ConfigParser.RawConfigParser()
        config.read('/home/fnikitin/projects/RandomWalker/rw.cfg')

        if dbConnector.siebel_curs is None:

            server = config.get('SIEBEL_DB','Server')
            port   = config.get('SIEBEL_DB','Port')
            user   = config.get('SIEBEL_DB','User')
            pwd    = config.get('SIEBEL_DB','Pass')
            name   = config.get('SIEBEL_DB','Name')

            siebel_conn = ibm_db_dbi.connect("DATABASE=" + name + ";\
                                              HOSTNAME=" + server + ";\
                                              PORT=" + port + ";\
                                              UID=" + user + ";\
                                              PWD=" + pwd + ";","","")
            dbConnector.siebel_curs = siebel_conn.cursor()
            dbConnector.siebel_curs.execute("set schema='OLAPADM'")

        return dbConnector.siebel_curs
Example 11
    def get_ads_curs():

        config = ConfigParser.RawConfigParser()
        config.read('/home/fnikitin/projects/RandomWalker/rw.cfg')

        if dbConnector.ads_dw_curs is None:

            server = config.get('ADS_DW_DB','Server')
            port   = config.get('ADS_DW_DB','Port')
            user   = config.get('ADS_DW_DB','User')
            pwd    = config.get('ADS_DW_DB','Pass')
            name   = config.get('ADS_DW_DB','Name')

            dw_conn = ibm_db_dbi.connect("DATABASE=" + name + ";\
                                          HOSTNAME=" + server + ";\
                                          PORT=" + port + ";\
                                          PROTOCOL=TCPIP;\
                                          UID=" + user + ";\
                                          PWD=" + pwd + ";","","")
            dbConnector.ads_dw_curs = dw_conn.cursor()
            dbConnector.ads_dw_curs.execute("set schema='ADS'")

        return dbConnector.ads_dw_curs
Example 12
 def get_new_connection(self, kwargs):
     kwargsKeys = kwargs.keys()
     if ( kwargsKeys.__contains__( 'port' ) and 
         kwargsKeys.__contains__( 'host' ) ):
         kwargs['dsn'] = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % ( 
                  kwargs.get( 'database' ),
                  kwargs.get( 'host' ),
                  kwargs.get( 'port' )
         )
     else:
         kwargs['dsn'] = kwargs.get( 'database' )
     
     # Before Django 1.6, autocommit was turned OFF
     if ( djangoVersion[0:2] >= ( 1, 6 )):
         conn_options = {Database.SQL_ATTR_AUTOCOMMIT : Database.SQL_AUTOCOMMIT_ON}
     else:
         conn_options = {Database.SQL_ATTR_AUTOCOMMIT : Database.SQL_AUTOCOMMIT_OFF}
     kwargs['conn_options'] = conn_options
     if kwargsKeys.__contains__( 'options' ):
         kwargs.update( kwargs.get( 'options' ) )
         del kwargs['options']
     if kwargsKeys.__contains__( 'port' ):
         del kwargs['port']
     
     pconnect_flag = False
     if kwargsKeys.__contains__( 'PCONNECT' ):
         pconnect_flag = kwargs['PCONNECT']
         del kwargs['PCONNECT']
         
     if pconnect_flag:
         connection = Database.pconnect( **kwargs )
     else:
         connection = Database.connect( **kwargs )
     connection.autocommit = connection.set_autocommit
     
     return connection
Example 13
def main():
    module = AnsibleModule(
        argument_spec=dict(
            product_id=dict(type='str'),
            fix_list=dict(type='list', elements='str', default=['*ALL']),
            fix_omit_list=dict(type='list', elements='str'),
            save_file_object=dict(type='str'),
            save_file_lib=dict(type='str', default='QGPL'),
            delayed_option=dict(type='str',
                                default='*NO',
                                choices=['*YES', '*NO']),
            temp_or_perm=dict(type='str',
                              default='*TEMP',
                              choices=['*TEMP', '*PERM']),
            operation=dict(type='str',
                           default='load_and_apply',
                           choices=[
                               'load_and_apply', 'load_only', 'apply_only',
                               'remove', 'query'
                           ]),
        ),
        required_if=[["operation", "apply_only", ["product_id"]],
                     ["operation", "remove", ["product_id"]],
                     [
                         "operation", "load_and_apply",
                         ["product_id", "save_file_object"]
                     ],
                     [
                         "operation", "load_only",
                         ["product_id", "save_file_object"]
                     ]],
        supports_check_mode=True,
    )

    if HAS_ITOOLKIT is False:
        module.fail_json(msg="itoolkit package is required")

    if HAS_IBM_DB is False:
        module.fail_json(msg="ibm_db package is required")

    product_id = module.params['product_id']
    ptf_list_to_select = module.params['fix_list']
    ptf_list_to_omit = module.params['fix_omit_list']
    save_file_object = module.params['save_file_object']
    save_file_lib = module.params['save_file_lib']
    delayed_option = module.params['delayed_option']
    temp_or_perm = module.params['temp_or_perm']
    operation = module.params['operation']

    if operation in ['load_and_apply', 'load_only', 'remove']:
        if product_id == '*ALL':
            module.fail_json(
                msg="product_id cannot be *ALL when operation is remove, load_and_apply or load_only."
            )

    startd = datetime.datetime.now()

    connection_id = None
    try:
        connection_id = dbi.connect()
    except Exception as e_db_connect:
        module.fail_json(msg="Exception when connecting to IBM i Db2. " +
                         str(e_db_connect))

    if operation in ['load_and_apply', 'load_only', 'apply_only']:
        operation_bool_map = {
            'load_and_apply': [False, False],
            'load_only': [True, False],
            'apply_only': [False, True]
        }
        # install single or a list of PTFs
        savf_obj = save_file_lib + "/" + save_file_object
        rc, out, err = install_ptf(connection_id, module, product_id,
                                   ptf_list_to_select, ptf_list_to_omit,
                                   "*SAVF", savf_obj, delayed_option,
                                   temp_or_perm,
                                   operation_bool_map[operation][0],
                                   operation_bool_map[operation][1])

        # Need to query the status of the PTF

    elif operation in ['remove']:
        rc, out, err = remove_ptf(connection_id,
                                  module,
                                  product_id,
                                  ptf_list_to_select,
                                  ptf_list_to_omit,
                                  temp_or_perm=temp_or_perm,
                                  delayed_option=delayed_option)
        # Need to query the status of the PTF

    # return the status of the ptf
    if ptf_list_to_select is not None:
        ptf_list, query_err = return_fix_information(connection_id, product_id,
                                                     ptf_list_to_select)
    else:
        module.fail_json(msg="PTF list contains no PTF.")

    if operation == "query":
        if query_err is not None:
            rc = IBMi_COMMAND_RC_ERROR
            err = query_err
        else:
            rc = IBMi_COMMAND_RC_SUCCESS

    # job_log, get_joblog_err = db2i_tools.get_job_log(connection_id, "*")

    if connection_id is not None:
        try:
            connection_id.close()
        except Exception as e_disconnect:
            module.log("ERROR: Unable to disconnect from the database. " +
                       str(e_disconnect))

    endd = datetime.datetime.now()
    delta = endd - startd

    if rc > 0:
        result_failed = dict(
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            stdout=out,
            stderr=err,
            rc=rc,
            # changed=True,
        )
        module.fail_json(msg='non-zero return code', **result_failed)
    else:
        result_success = dict(
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            ptf_list=ptf_list,
            rc=rc,
            # job_log=job_log,
            # changed=True,
        )
        module.exit_json(**result_success)
Example 14
import ibm_db_dbi
import pandas as pd

dir='/home/shared_folder/temp_bc/'
con = ibm_db_dbi.connect('DATABASE=wm;HOSTNAME=10.11.12.141;PORT=50000;PROTOCOL=TCPIP;UID=produser;PWD=d2a@ruStuC;','','')
con.set_autocommit(True)
cursor=con.cursor()
fd=open('{d}client.dat'.format(d=dir),'r')
client=fd.read().rstrip()
fd.close()
if client=='pxmca' or client=='hrpus' or client=='sofca':
    fd=open('{d}npe_final_framework.sql'.format(d=dir),'r')
elif client=='ncrca' or client=='accus' or client=='culci' or client=='ncrca_sm':
    fd=open('{d}npe_final_nwc.sql'.format(d=dir),'r')
else:
    fd=open('{d}npe_final.sql'.format(d=dir),'r')
sqlfile=fd.read()
fd.close()
sqlstring=sqlfile.split(';')
for sqlquery in sqlstring:
    query = ''.join(sqlquery).rstrip()
    if query:
#        print query
        cursor.execute(query.format(c=client))
con.close()
print "npe_final table has been updated"
Example 15
def query():
    cur = db2.connect().cursor()
    cur.execute(request.form.get('sql'))
    return render_template('query.html', cursor=cur)
Example 16
if __name__ == "__main__":

    parser = argparse.ArgumentParser(description='Display netstat information.')
    parser.add_argument('--limit', type=int,
        help='Only show X rows')
    parser.add_argument('--offset', type=int,
        help='Skip first X rows')
    parser.add_argument('--port', type=int,
        help='Look for only local port')
    args = parser.parse_args()

try:
    # The connect() with no parameters will connect to database *LOCAL. 
    # This flavor requires ibm_db 2.0.5.5, which you get via SI61963 and the 'pip3' command referenced earlier. 
    conn = dbi.connect()
    
    cur = conn.cursor()
    sql = '''
SELECT
        REMOTE_ADDRESS as RemoteAddr, REMOTE_PORT as RmtPort, REMOTE_PORT_NAME as RmtPortName,
        LOCAL_ADDRESS, LOCAL_PORT as Port, LOCAL_PORT_NAME as PortName,
        CONNECTION_TYPE as TYPE,
        TRIM(AUTHORIZATION_NAME) AS AUTH_NAME, JOB_NAME, SLIC_TASK_NAME
    FROM QSYS2.NETSTAT_JOB_INFO
    {0} -- WHERE CLAUSE
    ORDER BY LOCAL_PORT, LOCAL_ADDRESS, REMOTE_PORT,  REMOTE_ADDRESS
'''
    sql = sql.format("WHERE LOCAL_PORT = ?") if  args.port is not None else sql.format('')
    params = (args.port,) if args.port is not None else None
    if args.limit is not None:
Example 17
import ibm_db_dbi
from datetime import datetime
from datetime import date

today = date.today()

# SQL statements to query the database
dir()
con = ibm_db_dbi.connect(dsn=None,
                         user='******',
                         password='******',
                         database='DEV',
                         conn_options=None)
curs = con.cursor()

curs.execute(
    "select *  from filelib.haptempsv2 where hc_site_name = 'Booneville'")
rows = curs.fetchall()

curs.execute(
    "select distinct hc_bin_code from filelib.haptempsv2 where hc_site_name = 'Booneville' order by hc_bin_code "
)
bins = curs.fetchall()

curs.execute(
    "select distinct hc_read_date from filelib.haptempsv2 where hc_site_name = 'Booneville'"
)
dates = curs.fetchall()

# curs.execute("select distinct hc_read_date from filelib.haptempsv2 where hc_site_name = 'Booneville' and hc_read_date = '{}'").formate(today)
# todayDate = curs.fetchall()
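The connect() call above leaves conn_options as None; below is a sketch of passing connection attributes through it instead, using the ibm_db autocommit constants (the Database alias in Examples 1 and 12 appears to refer to the same module). The credentials and database name are placeholders.

import ibm_db
import ibm_db_dbi

# Ask the driver for autocommit at connect time (placeholder credentials).
conn_options = {ibm_db.SQL_ATTR_AUTOCOMMIT: ibm_db.SQL_AUTOCOMMIT_ON}
con = ibm_db_dbi.connect(dsn=None,
                         user='dbuser',
                         password='secret',
                         database='DEV',
                         conn_options=conn_options)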
Example 18
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import ibm_db_dbi as db2
from csv import writer, QUOTE_NONNUMERIC

conn = db2.connect()
cur = conn.cursor()
cur.execute(
    "select cusnum, init, lstnam, cdtlmt from qiws.qcustcdt where cdtlmt > 100"
)


def trim_col(s):
    return s.rstrip() if hasattr(s, 'rstrip') else s


with open('qcustcdt.csv', 'w', newline='') as file:
    csvf = writer(file, quoting=QUOTE_NONNUMERIC)
    for row in cur:
        csvf.writerow([trim_col(col) for col in row])
Example 19
returnCode = False
connectionID = None          # Will remain None if the connection attempt fails

# Display A Status Message Indicating An Attempt To Establish A Connection To A Db2 Database
# Is About To Be Made
print("\nConnecting to the \'" + dbName + "\' database ... ", end="")

# Construct The String That Will Be Used To Establish A Db2 Database Connection
connString = "ATTACH=FALSE"  # Attach To A Database; Not A Server
connString += ";DATABASE=" + dbName  # Required To Connect To A Database
connString += ";PROTOCOL=TCPIP"
connString += ";UID=" + userID
connString += ";PWD=" + passWord

# Attempt To Establish A Connection To The Database Specified
try:
    connectionID = ibm_db_dbi.connect(connString, '', '')
except Exception:
    pass

# If A Db2 Database Connection Could Not Be Established, Display An Error Message And Exit
if connectionID is None:
    print("\nERROR: Unable to connect to the \'" + dbName + "\' database.")
    print("Connection string used: " + connString + "\n")
    exit(-1)

# Otherwise, Complete The Status Message
else:
    print("Done!\n")

# Retrieve The Cursor Object That Was Created For The Connection Object
if connectionID is not None:
Example 20
import ibm_db_dbi as db
import sys

conn = db.connect(
    "DATABASE=tpcds;HOSTNAME=localhost;PORT=50001;UID=db2inst1;PWD=db2inst1-pwd;",
    "", "")

if len(sys.argv) < 2:
    print("Usage: query_db2.py sqlfile")
    sys.exit(-1)

query = open(sys.argv[1],
             'r').read().replace('\n', ' ').replace('SUBSTRING', 'SUBSTR')

cursor = conn.cursor()
cursor.execute(query)
for r in cursor.fetchall():
    print("\t".join([str(c).strip().replace('None', '') for c in r]))
Example 21
def main():
    module = AnsibleModule(
        argument_spec=dict(
            age=dict(default=None, type='str'),
            age_stamp=dict(default="ctime", choices=['ctime'], type='str'),
            object_type_list=dict(type='str', default='*ALL'),
            lib_name=dict(type='str', default='*ALLUSR'),
            object_name=dict(type='str', default='*ALL'),
            size=dict(default=None, type='str'),
            iasp_name=dict(type='str', default='*SYSBAS'),
            use_regex=dict(default=False, type='bool'),
        ),
        supports_check_mode=True,
    )

    if HAS_ITOOLKIT is False:
        module.fail_json(msg="itoolkit package is required")

    if HAS_IBM_DB is False:
        module.fail_json(msg="ibm_db package is required")

    input_age = module.params['age']
    input_age_stamp = module.params['age_stamp']
    input_object_type = module.params['object_type_list']
    input_iasp_name = module.params['iasp_name']
    input_size = module.params['size']
    input_lib = module.params['lib_name']
    input_obj_name = module.params['object_name']
    input_use_regex = module.params['use_regex']

    startd = datetime.datetime.now()

    connection_id = None

    if input_iasp_name.strip() != '*SYSBAS':
        try:
            connection_id = dbi.connect(database='{db_pattern}'.format(db_pattern=input_iasp_name))
        except Exception as e_db_connect:
            module.fail_json(msg="Exception when trying to use IASP. " + str(e_db_connect))
    else:
        try:
            connection_id = dbi.connect()
        except Exception as e_db_connect:
            module.fail_json(msg="Exception when connecting to IBM i Db2. " + str(e_db_connect))

    # generate age where stmt
    if input_age is None:
        # age = None
        sql_where_stmt_age = ''
    else:
        sql_where_stmt_age = age_where_stmt(input_age, input_age_stamp)
        if sql_where_stmt_age is None:
            module.fail_json(msg="failed to process age: " + input_age)

    # generate size where stmt
    if input_size is None:
        sql_where_stmt_size = ''
    else:
        sql_where_stmt_size = size_where_stmt(input_size)
        if sql_where_stmt_size is None:
            module.fail_json(msg="failed to process size: " + input_size)

    # get the version and release info
    release_info, err = db2i_tools.get_ibmi_release(connection_id)

    if release_info["version_release"] < 7.4:
        lib_name_label = "(SELECT SYSTEM_SCHEMA_NAME FROM QSYS2.SYSSCHEMAS WHERE SCHEMA_NAME = OBJLONGSCHEMA)"
    else:
        lib_name_label = "OBJLIB"

    if input_use_regex:
        obj_stats_expression = " SELECT OBJNAME, OBJTYPE, OBJOWNER, OBJDEFINER, OBJCREATED," \
                               " TEXT, " + lib_name_label + " AS OBJLIB, IASP_NUMBER, LAST_USED_TIMESTAMP, " \
                               " LAST_RESET_TIMESTAMP," \
                               " BIGINT(OBJSIZE) AS OBJSIZE, OBJATTRIBUTE, OBJLONGSCHEMA " \
                               " FROM TABLE (QSYS2.OBJECT_STATISTICS('" + input_lib + "','" + \
                               input_object_type + "','*ALL')) X "
        sql_where_stmt_regex = " AND REGEXP_LIKE(A.OBJNAME, '" + input_obj_name + "') "
    else:
        obj_stats_expression = " SELECT OBJNAME, OBJTYPE, OBJOWNER, OBJDEFINER, OBJCREATED," \
                               " TEXT, " + lib_name_label + " AS OBJLIB, IASP_NUMBER, LAST_USED_TIMESTAMP, " \
                               " LAST_RESET_TIMESTAMP," \
                               " BIGINT(OBJSIZE) AS OBJSIZE, OBJATTRIBUTE, OBJLONGSCHEMA " \
                               " FROM TABLE (QSYS2.OBJECT_STATISTICS('" + input_lib + "','" + \
                               input_object_type + "','" + input_obj_name + "')) X "
        sql_where_stmt_regex = ""

    sql = "select * from (" + obj_stats_expression + ") A WHERE 1 = 1 " + \
          sql_where_stmt_age + \
          sql_where_stmt_size + \
          sql_where_stmt_regex

    # rc, out, err = itoolkit_run_sql(sql, input_iasp_name)
    out_result_set, err = db2i_tools.ibm_dbi_sql_query(connection_id, sql)

    if err is not None:
        err = handle_db_exception(err)

    if connection_id is not None:
        try:
            connection_id.close()
        except Exception as e_disconnect:
            err = "ERROR: Unable to disconnect from the database. " + str(e_disconnect)

    endd = datetime.datetime.now()
    delta = endd - startd

    if err is not None:
        rc = IBMi_COMMAND_RC_ERROR
        rc_msg = interpret_return_code(rc)
        result_failed = dict(
            sql=sql,
            # size=input_size,
            # age=input_age,
            # age_stamp=input_age_stamp,
            stderr=err,
            rc=rc,
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            # changed=True,
        )
        module.fail_json(msg='non-zero return code: ' + rc_msg, **result_failed)
    else:
        out = []
        for result in out_result_set:
            result_map = {"OBJNAME": result[0], "OBJTYPE": result[1],
                          "OBJOWNER": result[2], "OBJDEFINER": result[3],
                          "OBJCREATED": result[4], "TEXT": result[5],
                          "OBJLIB": result[6], "IASP_NUMBER": result[7],
                          "LAST_USED_TIMESTAMP": result[8], "LAST_RESET_TIMESTAMP": result[9],
                          "OBJSIZE": result[10], "OBJATTRIBUTE": result[11], "OBJLONGSCHEMA": result[12]
                          }
            out.append(result_map)

        rc = IBMi_COMMAND_RC_SUCCESS
        result_success = dict(
            sql=sql,
            object_list=out,
            rc=rc,
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            # changed=True,
        )
        module.exit_json(**result_success)
Example 22
import time

from django import db
import ibm_db
import ibm_db_dbi
import ibm_db_django
import ibm_db_dlls
from pip._vendor.requests.packages.urllib3.connectionpool import xrange
import xlwt
from _csv import Error

#maintain/OCH2012ujm
#DB2INST1
conn = ibm_db_dbi.connect("PORT=50000;PROTOCOL=TCPIP;",
                          host="10.0.12.115",
                          database="TRENDYEC",
                          user="******",
                          password="******")
#conn = ibm_db_dbi.connect("DATABASE=DB2INST1;HOSTNAME=10.0.12.115;PORT=50000;PROTOCOL=TCPIP;UID=maintain;PWD=OCH2012ujm;", "", "")
#conn.set_autocommit(True)  # enable autocommit

cursor = conn.cursor()
#sql = "select orders_id from DB2INST1.orders where orders_id='11566785'"
#result = cursor.execute(sql)


def select_table(sql):
    #sql_select='select * from test'
    result = []
    try:
        cursor = conn.cursor()
Example 23
    def get_new_connection(self, kwargs):
        SchemaFlag = False
        kwargsKeys = kwargs.keys()
        if (kwargsKeys.__contains__('port')
                and kwargsKeys.__contains__('host')):
            kwargs[
                'dsn'] = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (
                    kwargs.get('database'), kwargs.get('host'),
                    kwargs.get('port'))
        else:
            kwargs['dsn'] = kwargs.get('database')

        if (kwargsKeys.__contains__('currentschema')):
            kwargs['dsn'] += "CurrentSchema=%s;" % (
                kwargs.get('currentschema'))
            currentschema = kwargs.get('currentschema')
            SchemaFlag = True
            del kwargs['currentschema']

        if (kwargsKeys.__contains__('security')):
            kwargs['dsn'] += "security=%s;" % (kwargs.get('security'))
            del kwargs['security']

        if (kwargsKeys.__contains__('sslclientkeystoredb')):
            kwargs['dsn'] += "SSLCLIENTKEYSTOREDB=%s;" % (
                kwargs.get('sslclientkeystoredb'))
            del kwargs['sslclientkeystoredb']

        if (kwargsKeys.__contains__('sslclientkeystoredbpassword')):
            kwargs['dsn'] += "SSLCLIENTKEYSTOREDBPASSWORD=%s;" % (
                kwargs.get('sslclientkeystoredbpassword'))
            del kwargs['sslclientkeystoredbpassword']

        if (kwargsKeys.__contains__('sslclientkeystash')):
            kwargs['dsn'] += "SSLCLIENTKEYSTASH=%s;" % (
                kwargs.get('sslclientkeystash'))
            del kwargs['sslclientkeystash']

        if (kwargsKeys.__contains__('sslservercertificate')):
            kwargs['dsn'] += "SSLSERVERCERTIFICATE=%s;" % (
                kwargs.get('sslservercertificate'))
            del kwargs['sslservercertificate']

        # Before Django 1.6, autocommit was turned OFF
        if (djangoVersion[0:2] >= (1, 6)):
            conn_options = {
                Database.SQL_ATTR_AUTOCOMMIT: Database.SQL_AUTOCOMMIT_ON
            }
        else:
            conn_options = {
                Database.SQL_ATTR_AUTOCOMMIT: Database.SQL_AUTOCOMMIT_OFF
            }
        kwargs['conn_options'] = conn_options
        if kwargsKeys.__contains__('options'):
            kwargs.update(kwargs.get('options'))
            del kwargs['options']
        if kwargsKeys.__contains__('port'):
            del kwargs['port']

        pconnect_flag = False
        if kwargsKeys.__contains__('PCONNECT'):
            pconnect_flag = kwargs['PCONNECT']
            del kwargs['PCONNECT']

        if pconnect_flag:
            connection = Database.pconnect(**kwargs)
        else:
            connection = Database.connect(**kwargs)
        connection.autocommit = connection.set_autocommit

        if SchemaFlag:
            schema = connection.set_current_schema(currentschema)

        return connection
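For illustration, a sketch of the DSN string that the keyword handling above produces when a current schema, SSL, and a server certificate are configured; the database, host, port, schema, and certificate path are placeholders.

# Assembled by get_new_connection() from the settings dict (placeholder values).
dsn = ("DATABASE=SAMPLE;HOSTNAME=db2.example.com;PORT=50001;PROTOCOL=TCPIP;"
       "CurrentSchema=MYSCHEMA;"
       "security=SSL;"
       "SSLSERVERCERTIFICATE=/path/to/server.arm;")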
Example 24
#https://github.com/altmanWang/IBM-DB2/blob/master/Insert.py

import csv
import io
from flask import Flask, render_template, request
import time

app = Flask(__name__)
import ibm_db_dbi

cnxn = ibm_db_dbi.connect("#####;", "", "")
if cnxn:
    print('database connected')


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/upload', methods=['POST', 'GET'])
def insert_table():

    cursor = cnxn.cursor()

    start_time = time.time()
    cursor.execute(
        "CREATE TABLE equake1(time varchar(50), latitude float(20), longitude float(50), depth float(50), mag float(50), magType varchar(50), nst int, gap int, dmin float(50), rms float(50),net varchar (50), id varchar(50), updated varchar(50), place varchar(50),type varchar(50),horizontal float(50), depthError float(50), magError float(50), magNst int,status varchar (50), locationSource varchar(50), magSource varchar(50))"
    )
    cnxn.commit()
Example 25
from urllib import request
import json
import sys
import ibm_db_dbi as db2

conn = db2.connect(dsn=None, user='******', password='******', database='*LOCAL', conn_options=None)
csr = conn.cursor()

date = None

if len(sys.argv) > 1:
  date = sys.argv[1]
else:
  date = 'latest'

# expecting url : https://api.exchangeratesapi.io/2010-01-12?base=USD
url = f'https://api.exchangeratesapi.io/{date}?base=USD'
print('Fetching data for ', url)
response = request.urlopen(url)
jsondata = response.read()

data = json.loads(jsondata)

date = data['date']
for k,v in data['rates'].items():
  print(date, k, v)
  csr.execute("insert into user05001.exrates values (?,?,?)",(date, k, v))

conn.commit()
Example 26
def query():
    cur = db2.connect().cursor()
    cur.execute(request.forms.get('sql'))
    return {'rows': cur}
Example 27
 def _connect(self, dsn, user, password):
   self.conn = ibm_db_dbi.connect(dsn, user, password)
Example 28
def history_data():
    graph1 = None
    conn = None
    try:
        str_conn = "DATABASE=BLUDB;HOSTNAME=dashdb-txn-sbox-yp-lon02-07.services.eu-gb.bluemix.net;PORT=50000;PROTOCOL=TCPIP;UID=bfr48077;PWD=3j4h-zwx77mzftr4"
        conn = ibm_db_dbi.connect(str_conn, " ", " ")
        df = pd.read_sql(sql='select *from dump_data',
                         con=conn,
                         parse_dates=True)
        df_ne = df.groupby(['REC']).mean()
        df_ne = df_ne.resample('H')
        df_ne = df_ne.interpolate(method='linear')
        df2_h = df_ne.resample('24H').sum()
        mig = make_subplots(rows=2,
                            cols=1,
                            specs=[[{"type": "scatter"}],
                                   [{"type": "bar"}]],
                            subplot_titles=('Power Prediction Plot',
                                            'Per Day Power Produced Plot'))
        mig.add_trace(
            go.Scatter(x=df_ne.index,
                       y=df_ne['Y_HAT'],
                       line=dict(color='red', width=4, dash='dot'),
                       name='Mean Predicted Power'), 1, 1)
        mig.add_trace(
            go.Scatter(x=df_ne.index,
                       y=df_ne['Y_LOWER'],
                       line=dict(color='royalblue', width=2, dash='dash'),
                       name='Lower Range Power'), 1, 1)
        mig.add_trace(
            go.Scatter(x=df_ne.index,
                       y=df_ne['Y_UPPER'],
                       fill='tonexty',
                       line=dict(color='royalblue', width=4, dash='dash'),
                       name='Upper Range Power'), 1, 1)
        mig.add_trace(go.Bar(x=df2_h.index,
                             y=df2_h['Y_HAT'],
                             text=df2_h['Y_HAT'],
                             name='Per Day Power Produced'),
                      row=2,
                      col=1)
        mig.update_yaxes(title_text="Power (KWh)", row=1, col=1)
        mig.update_yaxes(title_text="Power (KWh)", row=2, col=1)
        mig.update_xaxes(row=1, col=1, rangeslider_visible=True)
        mig.update_xaxes(row=2, col=1)
        mig.update_traces(texttemplate='%{text:.2s}',
                          textposition='outside',
                          row=2,
                          col=1)
        mig.update_layout(height=925)
        graph1 = mig.to_json()
    except Exception as ex:
        print(ex.args)

    finally:
        # Close the connection only if it was actually opened.
        if conn is not None:
            conn.close()
        return graph1
Example 29
 def connect(self):
     return ibm_db_dbi.connect("DSN=%s; HOSTNAME=%s; PORT=%s" %
                               (self.database, self.host, self.port),
                               user=self.username,
                               password=self.password)
Example 30
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from xlsxwriter import Workbook
import ibm_db_dbi as db2

cur = db2.connect().cursor()
cur.execute("select cusnum, lstnam, cdtlmt, baldue, cdtdue from qiws.qcustcdt")

headers = [desc[0] for desc in cur.description]

with Workbook('qcustcdt_format.xlsx') as workbook:
    fmt = workbook.add_format({'font_size': 20})

    hdr_fmt = workbook.add_format({'font_size': 20, 'align':'center', 'border':1})
    red_fmt = workbook.add_format({'font_size': 20, 'bg_color': '#FF0000'})

    ws = workbook.add_worksheet()

    ws.conditional_format("D2:D13", {'type': 'cell',
                                     'criteria': '>',
                                     'value': 'C2*0.5',
                                     'format': red_fmt})

    ws.set_column(0, len(headers)-1, 20)

    ws.write_row('A1', headers, hdr_fmt)
    ws.set_row(0, 22)

    for rownum, row in enumerate(cur, start=1):
        ws.write_row(rownum, 0, row)
        ws.set_row(rownum, 22, fmt)
Example 31
# coding=utf-8

import ibm_db_dbi

conn = ibm_db_dbi.connect(
    "DRIVER={IBM DB2 ODBC DRIVER};DATABASE=UACSDB0;HOSTNAME=10.25.101.8;PORT=50000;PROTOCOL=TCPIP;UID=UACSAPP;PWD=UACSAPP;",
    "", "")
if conn:
    conn.set_autocommit(True)
    print "connect db2 successed"


def count_stock_num(table_name, area_name):
    select_sql = "SELECT COUNT(*) FROM  %s  WHERE STOCK_NAME='%s'" % (
        table_name, area_name)
    # stmt = ibm_db_dbi.exec_immediate(conn, select_sql)
    # # row comes back as a dictionary
    # row = ibm_db_dbi.fetch_assoc(stmt)
    # Return the number of free coil positions in this stock area
    c = conn.cursor()
    c.execute(select_sql)
    row = c.fetchone()
    return row[0]
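count_stock_num interpolates area_name straight into the SQL text; for comparison, a sketch of the same lookup with a parameter marker for the value, as Example 25 does (the table name still has to be interpolated, since markers only bind values).

def count_stock_num_param(conn, table_name, area_name):
    # The value is bound with a '?' marker; identifiers such as the table name cannot be bound.
    select_sql = "SELECT COUNT(*) FROM %s WHERE STOCK_NAME = ?" % table_name
    c = conn.cursor()
    c.execute(select_sql, (area_name,))
    return c.fetchone()[0]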


# def read_stock_status(table_name,area_name):
#     stock_count = count_stock_num(table_name,area_name)
#     list = []
#     sql="SELECT * FROM %s " % (table_name)
#     stmt = ibm_db.exec_immediate(conn, sql)
#     r=ibm_db.fetch_both(stmt)
Example 32
 def _connect(self, dsn, user, password):
     self.conn = ibm_db_dbi.connect(dsn, user, password)
Example 33
import ibm_db_dbi
import pandas as pd
from docx import Document
from docx.shared import Mm

# Create Word Document Object
document = Document()
document.add_heading('Database Specification', 0)

# Set Printing Configurations For Pandas
pd.set_option('display.width', 1000)
pd.set_option('display.height', 1000)
pd.set_option('display.max_rows', 1000)
pd.set_option('display.max_columns', 1000)

conn = ibm_db_dbi.connect("DATABASE=;HOSTNAME=;PORT=;PROTOCOL=;UID=;PWD=;", "",
                          "")
cursor = conn.cursor()
cursor.execute('''
            SELECT
              T.TABNAME AS Table
            FROM
              SYSCAT.TABLES T
            WHERE T.TABSCHEMA = ''
              AND T.TYPE = 'T'
            ORDER BY T.TABNAME;''')

tableNames = cursor.fetchall()
for tableName in tableNames:
    print(tableName[0])
    df = pd.read_sql_query(
        '''
Example 34
import ibm_db_dbi

connection = ibm_db_dbi.connect('DATABASE=db_cra:INFORMIXSERVER=uccx1_uccx;HOSTNAME=198.18.133.19;PORT=1504;PROTOCOL=ONSOCTCP;UID=uccxhruser;PWD=C1sco12345;','','')

c = connection.cursor()
result = c.execute('SELECT * FROM systables')
print(result)
Example 35
#!/usr/bin/env python3 
from bottle import route, run, template, request
from string import capwords
import ibm_db_dbi as dbi

conn = dbi.connect(dsn=None, database='*LOCAL', \
                       user=None, password=None)
@route('/', method=('GET', 'POST'))
def root():
    reset = request.forms.get('reset') == 'true'
    reset_parm = 'YES' if reset else 'NO'
    sorting = request.forms.get('sorting') or '""'
    show_cols = (
        'JOB_NAME', 'AUTHORIZATION_NAME', 'JOB_TYPE', 'FUNCTION_TYPE',
        'FUNCTION', 'JOB_STATUS', 'ELAPSED_INTERACTION_COUNT',
        'ELAPSED_TOTAL_RESPONSE_TIME', 'ELAPSED_TOTAL_DISK_IO_COUNT',
        'ELAPSED_ASYNC_DISK_IO_COUNT', 'ELAPSED_SYNC_DISK_IO_COUNT',
        'ELAPSED_CPU_PERCENTAGE', 
        'ELAPSED_PAGE_FAULT_COUNT')
    hide_cols = ( 'ELAPSED_TIME', )
    all_cols = show_cols + hide_cols
   
    headers = [ titleize(col) for col in show_cols ]
    column_string = ', '.join(all_cols)
    
    query = "select %s from table(qsys2.active_job_info(RESET_STATISTICS => ?)) x" % column_string

    cur = conn.cursor()
    cur.execute(query, (reset_parm, ))
    
    elapsed_time = 0
Example 36
#!/usr/bin/env python3

import ibm_db_dbi as db2
from csv import writer, QUOTE_NONNUMERIC

query = "select cusnum, lstnam, init, cdtlmt from qiws.qcustcdt where cdtlmt > 100"


# Strip trailing spaces as applicable
def trim_col(s):
    return s.rstrip() if hasattr(s, 'rstrip') else s

conn = db2.connect()
cur = conn.cursor()
cur.execute(query)

with open('qcustcdt.csv', 'w', newline='') as file:
    csvf = writer(file, quoting=QUOTE_NONNUMERIC)
    for row in cur:
        csvf.writerow([trim_col(col) for col in row])
Example 37
# This application will not work on localhost
from itoolkit import *
from itoolkit.transport import DatabaseTransport
import ibm_db_dbi

itool = iToolKit()
conn = ibm_db_dbi.connect()
itransport = DatabaseTransport(conn)

itool.add(iCmd('addlible', 'ADDLIBLE PYSEIDEN'))

itool.add(
    iPgm('pypgm001c',
         'PYPGM001C').addParm(iData('RTNMSG', '50a', 'a')).addParm(
             iData('AMOUNT', '15p2', '33.33')))

itool.call(itransport)

pypgm001c = itool.dict_out('pypgm001c')

if 'success' in pypgm001c:
    print(pypgm001c['success'])
    print("Return parameter values:")
    print("RTNMSG: " + pypgm001c['RTNMSG'])
    print("AMOUNT: " + pypgm001c['AMOUNT'])
else:
    raise Exception("Program call error:" + pypgm001c['error'])
Example 38
    def _get_connection(self):
        self.connection_string = "DATABASE={};HOSTNAME={};PORT={};PROTOCOL=TCPIP;UID={};PWD={};".format(
            self.configuration["dbname"], self.configuration["host"], self.configuration["port"], self.configuration["user"], self.configuration["password"])
        connection = ibm_db_dbi.connect(self.connection_string, "", "")

        return connection
Example 39
import glob
import os

# Ensure this is aliased to a name not 'distros'
# since the module attribute 'distros'
# is a list of distros that are supported, not a sub-module
from cloudinit import distros as ds

from cloudinit import ssh_util
from cloudinit import util
from cloudinit import log as logging
from itoolkit import *
from itoolkit.transport import DatabaseTransport
import ibm_db_dbi as dbi

conn = dbi.connect()
itransport = DatabaseTransport(conn)
#frequency = PER_ALWAYS
LOG = logging.getLogger(__name__)

KEY_DIR_J = "/QOpenSys/QIBM/ProdData/SC1/OpenSSH/openssh-4.7p1/etc/"
KET_DIR_S = "/QOpenSys/QIBM/ProdData/SC1/OpenSSH/etc/"
V_V7R2M0 = "V7R2M0"

KEY_2_FILE = {
    "rsa_private": ("ssh_host_rsa_key", 0o600),
    "rsa_public": ("ssh_host_rsa_key.pub", 0o644),
    "dsa_private": ("ssh_host_dsa_key", 0o600),
    "dsa_public": ("ssh_host_dsa_key.pub", 0o644),
    "ecdsa_private": ("ssh_host_ecdsa_key", 0o600),
    "ecdsa_public": ("ssh_host_ecdsa_key.pub", 0o644),
Example 40
def set_polarity():
    conn = db.connect(DB_CONNECTION_STRING)
    cursor = conn.cursor()
    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'ak' '''
    cursor.execute(sql, )
    result1 = cursor.fetchall()
    print result1

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'at' '''
    cursor.execute(sql, )
    result3 = cursor.fetchall()
    print result3

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'ci' '''
    cursor.execute(sql, )
    result4 = cursor.fetchall()
    print result4

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'hv' '''
    cursor.execute(sql, )
    result5 = cursor.fetchall()
    print result5

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'id' '''
    cursor.execute(sql, )
    result6 = cursor.fetchall()
    print result6

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'mb' '''
    cursor.execute(sql, )
    result7 = cursor.fetchall()
    print result7

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'nc' '''
    cursor.execute(sql, )
    result8 = cursor.fetchall()
    print result8

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'nm' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'nn' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'pr' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'pt' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'se' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'us' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'uu' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9

    sql = '''SELECT "polarity","locationSource" FROM QUAKES WHERE "locationSource" = 'uw' '''
    cursor.execute(sql, )
    result9 = cursor.fetchall()
    print result9
Example 41
def main():
    module = AnsibleModule(
        argument_spec=dict(
            internet_address=dict(type='str', required=False),
            line_description=dict(type='str', required=False),
            vlan_id=dict(type='str', required=False),
            subnet_mask=dict(type='str', required=False),
            alias_name=dict(type='str', required=False),
            associated_local_interface=dict(type='str', required=False),
            type_of_service=dict(type='str',
                                 required=False,
                                 choices=[
                                     "*NORMAL", "*MINDELAY", "*MAXTHRPUT",
                                     "*MAXRLB", "*MINCOST"
                                 ]),
            max_transmission_unit=dict(type='str', required=False),
            auto_start=dict(type='str',
                            required=False,
                            choices=["*YES", "*NO"]),
            preferred_interface=dict(type='list',
                                     elements='str',
                                     required=False),
            text_description=dict(type='str', required=False),
            sec_to_wait=dict(type='int', default=0),
            extra_params=dict(type='str', required=False),
            state=dict(type='str',
                       default='present',
                       choices=["present", "absent", "inactive", "active"])),
        required_one_of=[["internet_address", "alias_name"]],
        supports_check_mode=True,
    )

    if HAS_ITOOLKIT is False:
        module.fail_json(msg="itoolkit package is required")

    if HAS_IBM_DB is False:
        module.fail_json(msg="ibm_db package is required")

    internet_address = module.params['internet_address']
    line_description = module.params['line_description']
    vlan_id = module.params['vlan_id']
    subnet_mask = module.params['subnet_mask']
    alias_name = module.params['alias_name']
    associated_local_interface = module.params['associated_local_interface']
    type_of_service = module.params['type_of_service']
    max_transmission_unit = module.params['max_transmission_unit']
    auto_start = module.params['auto_start']
    preferred_interface = module.params['preferred_interface']
    text_description = module.params['text_description']
    state = module.params['state']
    extra_params = module.params['extra_params']
    sec_to_wait = module.params['sec_to_wait']

    startd = datetime.datetime.now()

    connection_id = None
    try:
        connection_id = dbi.connect()
    except Exception as e_db_connect:
        module.fail_json(msg="Exception when connecting to IBM i Db2. " +
                         str(e_db_connect))

    only_query = False
    cl_command = ""

    if state == "present":
        # check options
        opt_vlan_id = "" if vlan_id is None else vlan_id
        opt_line_description = "" if line_description is None else "LIND(" + line_description + " " + opt_vlan_id + ") "
        opt_subnet_mask = "" if subnet_mask is None else "SUBNETMASK('" + subnet_mask + "') "
        opt_alias_name = "" if alias_name is None else "ALIASNAME(" + alias_name + ") "
        opt_associate = "" if associated_local_interface is None else "LCLIFC(" + associated_local_interface + ") "
        opt_type_of_service = "" if type_of_service is None else "TOS(" + type_of_service + ") "
        opt_max_transmission_unit = "" if max_transmission_unit is None else "MTU(" + max_transmission_unit + ") "
        opt_auto_start = "" if auto_start is None else "AUTOSTART(" + auto_start + ") "
        opt_preferred_ifc = "" if preferred_interface is None else "PREFIFC('" + "' '".join(
            preferred_interface) + "') "
        opt_text_desc = "" if text_description is None else "TEXT('" + text_description + "') "

        # options_without_alias_name = opt_line_description + opt_subnet_mask + \
        #                              opt_associate + opt_type_of_service + \
        #                              opt_max_transmission_unit + opt_auto_start + opt_preferred_ifc + opt_text_desc
        options_without_alias_name = "{0}{1}{2}{3}{4}{5}{6}{7}".format(
            opt_line_description, opt_subnet_mask, opt_associate,
            opt_type_of_service, opt_max_transmission_unit, opt_auto_start,
            opt_preferred_ifc, opt_text_desc)

        options = options_without_alias_name + opt_alias_name

        if (internet_address is not None) and (options == ""):
            # nothing to add or change means to query
            only_query = True
        elif (alias_name is not None) and (internet_address is None) and (
                options_without_alias_name == ""):
            only_query = True
        else:
            if internet_address is None:
                module.fail_json(
                    msg="Parameter internet_address is not specified.")

            # see if the ip address exists for present
            rs, query_err = return_interface_information(
                connection_id, internet_address, alias_name)
            present_operation = "QSYS/ADDTCPIFC" if len(
                rs) == 0 else "QSYS/CHGTCPIFC"

            cl_command = present_operation + " INTNETADR('" + internet_address + "') " + options

    elif state in ["absent", "active", "inactive"]:
        interface_action_map = {
            "absent": "QSYS/RMVTCPIFC",
            "active": "QSYS/STRTCPIFC",
            "inactive": "QSYS/ENDTCPIFC"
        }
        if internet_address is not None:
            cl_command = interface_action_map[
                state] + " INTNETADR('" + internet_address + "')"
        elif (alias_name is not None) and (alias_name != '*NONE'):
            cl_command = interface_action_map[
                state] + " ALIASNAME(" + alias_name + ")"
        else:
            module.fail_json(
                msg=
                "internet_address or alias_name must be specified when state is"
                " absent, active or inactive.")

        check_rs, query_err = return_interface_information(
            connection_id, internet_address, alias_name)
        if len(check_rs) == 0:
            if state == "absent":
                # we are trying to remove a non-existing interface
                only_query = True
        else:
            interface_status = check_rs[0]["INTERFACE_STATUS"]
            if interface_status == state.upper():
                # This means that the interface status is already what we want, skip the cl execution
                only_query = True
    else:
        module.fail_json(msg="Value for option state is not valid.")

    if extra_params is not None:
        cl_command = cl_command + extra_params

    is_changed = False
    if only_query:
        cl_command = ""
        out = None
        err = None
        rc = IBMi_COMMAND_RC_SUCCESS
    else:
        rc, out, err = itoolkit_run_command(connection_id, cl_command)
        if (rc == IBMi_COMMAND_RC_SUCCESS) and (state in ["present", "absent"
                                                          ]):
            is_changed = True

    if sec_to_wait > 0:
        time.sleep(sec_to_wait)

    rs, query_err = return_interface_information(connection_id,
                                                 internet_address, alias_name)

    if query_err is not None:
        rc = IBMi_COMMAND_RC_ERROR
        err = query_err

    if connection_id is not None:
        try:
            connection_id.close()
        except Exception as e_disconnect:
            module.log("ERROR: Unable to disconnect from the database. " +
                       str(e_disconnect))

    endd = datetime.datetime.now()
    delta = endd - startd

    rc_msg = interpret_return_code(rc)

    if rc != IBMi_COMMAND_RC_SUCCESS:
        result_failed = dict(
            # size=input_size,
            # age=input_age,
            # age_stamp=input_age_stamp,
            changed=is_changed,
            stderr=err,
            rc=rc,
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            stdout=out,
            cl_command=cl_command,
        )
        module.fail_json(msg='non-zero return code: ' + rc_msg,
                         **result_failed)
    else:
        result_success = dict(
            changed=is_changed,
            stdout=out,
            rc=rc,
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            interface_info=rs,
            cl_command=cl_command,
        )
        module.exit_json(**result_success)
Example 42
def main():
    module = AnsibleModule(
        argument_spec=dict(
            product_id=dict(type='list', elements='str', default=['*ALL']),
            virtual_image_name_list=dict(type='list', elements='str', default=['*ALL']),
            fix_omit_list=dict(type='list', elements='dict'),
            use_temp_path=dict(type='bool', default=True),
            src=dict(type='str', required=True),
            apply_type=dict(type='str', default='*DLYALL', choices=['*DLYALL', '*IMMDLY', '*IMMONLY']),
            hiper_only=dict(type='bool', default=False),
            rollback=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )

    if HAS_ITOOLKIT is False:
        module.fail_json(msg="itoolkit package is required")

    if HAS_IBM_DB is False:
        module.fail_json(msg="ibm_db package is required")

    product_id = module.params['product_id']
    fix_file_name_list = module.params['virtual_image_name_list']
    fix_omit_list = module.params['fix_omit_list']
    delayed_option = module.params['apply_type']
    path = module.params['src']
    use_temp_path = module.params['use_temp_path']
    hiper_only = module.params['hiper_only']
    rollback = module.params['rollback']

    startd = datetime.datetime.now()

    connection_id = None
    try:
        connection_id = dbi.connect()
    except Exception as e_db_connect:
        module.fail_json(msg="Exception when connecting to IBM i Db2. " + str(e_db_connect))

    catalog_name = generate_object_name(connection_id, "QUSRSYS", "*IMGCLG", "ANSIBCLG")
    dev_name = generate_object_name(connection_id, "QSYS", "*DEVD", "ANSIBOPT")

    if use_temp_path:

        with TemporaryDirectory() as tmp_dir:
            module.log("Creating temp dir: " + tmp_dir)
            if os.path.isdir(tmp_dir):
                if (fix_file_name_list == ["*ALL"]) or (fix_file_name_list is None):
                    # move all the objects to the target folder
                    for f in os.listdir(path):
                        source_file = os.path.join(path, f)
                        target_file = os.path.join(tmp_dir, f)
                        if os.path.isfile(source_file):
                            shutil.copy(source_file, tmp_dir)
                else:
                    # move specific file to the target
                    for fix_file_name in fix_file_name_list:
                        if os.path.isfile(fix_file_name):
                            source_file = os.path.join(path, fix_file_name)
                            shutil.copy(source_file, tmp_dir)

                rc, out, err = install_by_image_catalog(module, product_id, None, tmp_dir,
                                                        str(dev_name), str(catalog_name),
                                                        fix_omit_list, is_rollback=rollback,
                                                        delayed_option=delayed_option,
                                                        hiper_only=hiper_only)
            else:
                module.fail_json(msg="Failed creating temp dir.")
    else:
        if os.path.exists(path) is False:
            module.fail_json(msg="The src directory does not exist.")
        if os.path.isdir(path) is False:
            module.fail_json(msg="The value specified in src is not a valid directory.")
        rc, out, err = install_by_image_catalog(module, product_id, fix_file_name_list, path,
                                                dev_name, catalog_name, fix_omit_list,
                                                is_rollback=rollback,
                                                delayed_option=delayed_option,
                                                hiper_only=hiper_only)

    endd = datetime.datetime.now()
    delta = endd - startd
    out_ptf_list, query_err = return_fix_information(connection_id, product_id, str(startd), str(endd))

    if connection_id is not None:
        try:
            connection_id.close()
        except Exception as e_disconnect:
            err = "ERROR: Unable to disconnect from the database. " + str(e_disconnect)

    if rc > 0:
        result_failed = dict(
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            executed_commands=out,
            stderr=err,
            rc=rc,
            # changed=True,
        )
        module.fail_json(msg='Install from image catalog failed.', **result_failed)
    else:
        result_success = dict(
            start=str(startd),
            end=str(endd),
            delta=str(delta),
            rc=rc,
            # out=out,
            changed=True,
            need_action_ptf_list=out_ptf_list,
        )
        module.exit_json(**result_success)
Example no. 43
0
File: db2.py Project: zxct/redash
    def _get_connection(self):
        self.connection_string = "DATABASE={};HOSTNAME={};PORT={};PROTOCOL=TCPIP;UID={};PWD={};".format(
            self.configuration["dbname"], self.configuration["host"], self.configuration["port"], self.configuration["user"], self.configuration["password"])
        connection = ibm_db_dbi.connect(self.connection_string, "", "")

        return connection
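
The connection returned by _get_connection follows the Python DB-API, so the rest of the query runner can simply open a cursor on it. As a rough self-contained sketch of that usage (the connection string, table, and column names below are placeholders, not taken from the redash source):

import ibm_db_dbi

# Placeholder DSN, schema and table; only the cursor pattern matters here.
connection_string = "DATABASE=SAMPLE;HOSTNAME=localhost;PORT=50000;PROTOCOL=TCPIP;UID=db2inst1;PWD=secret;"
connection = ibm_db_dbi.connect(connection_string, "", "")
try:
    cursor = connection.cursor()
    cursor.execute("SELECT EMPNO, LASTNAME FROM EMPLOYEE FETCH FIRST 5 ROWS ONLY")
    columns = [description[0] for description in cursor.description]
    rows = cursor.fetchall()
    print(columns, rows)
finally:
    connection.close()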
Example no. 44
0
#!/usr/bin/env python3
from bottle import route, run, template, request
from string import capwords
import ibm_db_dbi as dbi

conn = dbi.connect(dsn=None, database='*LOCAL',
                   user=None, password=None)


@route('/', method=('GET', 'POST'))
def root():
    reset = request.forms.get('reset') == 'true'
    reset_parm = 'YES' if reset else 'NO'
    sorting = request.forms.get('sorting') or '""'
    show_cols = ('JOB_NAME', 'AUTHORIZATION_NAME', 'JOB_TYPE', 'FUNCTION_TYPE',
                 'FUNCTION', 'JOB_STATUS', 'ELAPSED_INTERACTION_COUNT',
                 'ELAPSED_TOTAL_RESPONSE_TIME', 'ELAPSED_TOTAL_DISK_IO_COUNT',
                 'ELAPSED_ASYNC_DISK_IO_COUNT', 'ELAPSED_SYNC_DISK_IO_COUNT',
                 'ELAPSED_CPU_PERCENTAGE', 'ELAPSED_PAGE_FAULT_COUNT')
    hide_cols = ('ELAPSED_TIME', )
    all_cols = show_cols + hide_cols

    headers = [titleize(col) for col in show_cols]
    column_string = ', '.join(all_cols)

    query = "select %s from table(qsys2.active_job_info(RESET_STATISTICS => ?)) x" % column_string

    cur = conn.cursor()
    cur.execute(query, (reset_parm, ))

    elapsed_time = 0
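
The route above relies on a titleize() helper that falls outside this excerpt; given the capwords import at the top of the script, a plausible sketch of it (an assumption, not the original code) is:

from string import capwords

def titleize(column_name):
    # Turn a column name such as 'ELAPSED_CPU_PERCENTAGE'
    # into a display header like 'Elapsed Cpu Percentage'.
    return capwords(column_name.replace('_', ' '))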
Example no. 45
0
import ibm_db_dbi
conn = ibm_db_dbi.connect(
    "DATABASE=AEDB1;HOSTNAME=172.16.151.78;PORT=50000;PROTOCOL=TCPIP;UID=db2admin;PWD=Pass1234;",
    "", "")
if conn:
    conn.set_autocommit(True)
    cursor = conn.cursor()
    sql = "insert into AEDB1.TEST1(id, name, jobrole)  values(100,'ravi','admin')"
    result = cursor.execute(sql)
    print(result)
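
For comparison, the same insert can be written with bind parameters; ibm_db_dbi accepts qmark-style '?' placeholders, which keeps the values out of the SQL text. A minimal variant reusing the connection string above (the row values are arbitrary):

import ibm_db_dbi

conn = ibm_db_dbi.connect(
    "DATABASE=AEDB1;HOSTNAME=172.16.151.78;PORT=50000;PROTOCOL=TCPIP;UID=db2admin;PWD=Pass1234;",
    "", "")
if conn:
    conn.set_autocommit(True)
    cursor = conn.cursor()
    # the '?' markers are bound to the tuple passed to execute()
    sql = "insert into AEDB1.TEST1(id, name, jobrole) values(?, ?, ?)"
    result = cursor.execute(sql, (101, 'ravi', 'admin'))
    print(result)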
Example no. 46
0
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart

import ibm_db_dbi
import pandas as pd

# 'day' and 'timestr' are set earlier in the (truncated) script: 'day' holds
# the current weekday name and 'timestr' a timestamp string used in the
# output file name.

#date_to_check = ""

if day == 'Thursday':
    date_to_check = datetime.today() - timedelta(days=3)

if day == "Monday":
    date_to_check = datetime.today() - timedelta(days=4)


#date_to_check = "2021-03-15"
date_to_check = date_to_check.strftime("%Y-%m-%d 00:00:00.0")

#date = time.strftime("%Y-%m-%d 00:00:00.0")

connection = ibm_db_dbi.connect()
sql = "sql query"

df = pd.read_sql(sql, connection)
df.to_excel('path'+timestr+'.xlsx')

filepath = 'path'+timestr+'.xlsx'

fromaddr = ""
toaddr = "" 

msg = MIMEMultipart()

cc = "x,y"
rcpt = cc.split(",")  + [toaddr]
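
The excerpt stops before the message is assembled and sent. A typical continuation, reusing the msg, filepath, timestr, fromaddr, toaddr, cc and rcpt variables defined above, might look like the following (the SMTP host and subject line are placeholders, not from the original script):

import os
import smtplib
from email import encoders
from email.mime.base import MIMEBase

msg['From'] = fromaddr
msg['To'] = toaddr
msg['Cc'] = cc
msg['Subject'] = 'Daily report ' + timestr

# attach the exported workbook
with open(filepath, 'rb') as attachment:
    part = MIMEBase('application', 'octet-stream')
    part.set_payload(attachment.read())
encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment',
                filename=os.path.basename(filepath))
msg.attach(part)

server = smtplib.SMTP('smtp.example.com', 25)
server.sendmail(fromaddr, rcpt, msg.as_string())
server.quit()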
Example no. 47
0
def connect(dsn, username=None, password=None):
    """Create a connection to the specified database.

    This utility method attempts to connect to the database named by dsn using
    the (optional) username and password provided. The method attempts to use a
    variety of connection frameworks (PyDB2, pyodbc, IBM's official DB2 driver,
    PythonWin's ODBC stuff and mxODBC) depending on the underlying platform.

    Note that the queries in the methods below are written to be agnostic to
    the quirks of the various connection frameworks (e.g. PythonWin's ODBC
    module doesn't correctly handle certain dates hence why all DATE and
    TIMESTAMP fields are CAST to CHAR in the queries below).
    """
    logging.info('Connecting to database "%s"' % dsn)
    # Try the "official" IBM DB2 Python driver
    try:
        import ibm_db
        import ibm_db_dbi
    except ImportError:
        pass
    else:
        logging.info('Using IBM DB2 Python driver')
        if username is not None:
            return ibm_db_dbi.connect(dsn, username, password)
        else:
            return ibm_db_dbi.connect(dsn)
    # Try the PyDB2 driver
    try:
        import DB2
    except ImportError:
        pass
    else:
        logging.info('Using PyDB2 driver')
        if username is not None:
            return DB2.connect(dsn, username, password)
        else:
            return DB2.connect(dsn)
    # Try the pyodbc driver
    try:
        import pyodbc
    except ImportError:
        pass
    else:
        logging.info('Using pyodbc driver')
        # XXX Check whether escaping/quoting is required
        # XXX Should there be a way to specify the driver name? Given that on
        # unixODBC the driver alias is specified in odbcinst.ini, and on
        # Windows with DB2 9+ one can have multiple DB2 ODBC drivers installed
        # with differentiating suffixes
        if username is not None:
            return pyodbc.connect(
                'driver=IBM DB2 ODBC DRIVER;dsn=%s;uid=%s;pwd=%s' %
                (dsn, username, password))
        else:
            return pyodbc.connect('driver=IBM DB2 ODBC DRIVER;dsn=%s' % dsn)
    # Try the PythonWin ODBC driver
    try:
        import dbi
        import odbc
    except ImportError:
        pass
    else:
        logging.info('Using PyWin32 odbc driver')
        if username is not None:
            # XXX Check whether escaping/quoting is required
            return odbc.odbc("%s/%s/%s" % (dsn, username, password))
        else:
            return odbc.odbc(dsn)
    raise ImportError(
        'Unable to find a suitable connection framework; please install PyDB2, pyodbc, PyWin32, or mxODBC'
    )
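
Whichever driver the fallback chain ends up importing, the object returned by connect() behaves like a DB-API connection, so the calling code stays the same across frameworks. A minimal usage sketch (the DSN and credentials are placeholders) might be:

# Placeholder DSN and credentials; any of the drivers tried above would do.
conn = connect('SAMPLE', username='db2inst1', password='secret')
cur = conn.cursor()
cur.execute(
    "SELECT TABSCHEMA, TABNAME FROM SYSCAT.TABLES "
    "FETCH FIRST 10 ROWS ONLY")
for schema, name in cur.fetchall():
    print(schema.strip(), name.strip())
conn.close()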