Example #1
def extract_from_db(dbs, qcode):
	results = []

	for db_name in dbs:
		print("reading "+db_name)
		db=_mysql.connect(host="localhost",user="******",
						passwd="1q2w3e4r5t",db=db_name)
		

		db.query("""SELECT * FROM jobs WHERE ID > 100000 AND Deleted='n'""")
		r=db.store_result()
		a = r.fetch_row(maxrows=0, how=1)
		for row in a:
			jid = row.get("ID").decode("utf-8")
			results.append({"DB": db_name, "ID": jid})

		add_app_details(db, results)
		add_cm_details(db, results)
		add_vetting_results(db, results, qcode)

	# filter results with missing data
	results = [i for i in results if ('question_'+qcode+'_pass' in i and 'Concrete' in i)]

	keys = results[0].keys()
	with open('all_results.csv', 'w', newline='')  as output_file:
		dict_writer = csv.DictWriter(output_file, keys)
		dict_writer.writeheader()
		dict_writer.writerows(results)
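
The snippet above relies on "import csv" and on the add_* helper functions from its original module. As a minimal self-contained sketch of the same fetch-and-export flow (the table, columns, and credentials here are hypothetical), note that fetch_row(maxrows=0, how=1) returns a tuple of dicts whose values are bytes and must be decoded:

import csv
from MySQLdb import _mysql

db = _mysql.connect(host="localhost", user="example_user",
                    passwd="example_pass", db="example_db")
db.query("SELECT ID, Status FROM jobs WHERE Deleted='n'")
rows = db.store_result().fetch_row(maxrows=0, how=1)  # tuple of dicts, bytes values
db.close()

decoded = [{k: (v.decode("utf-8") if v is not None else None) for k, v in row.items()}
           for row in rows]

if decoded:  # guard against an empty result set before reading [0].keys()
    with open("all_results.csv", "w", newline="") as output_file:
        writer = csv.DictWriter(output_file, fieldnames=decoded[0].keys())
        writer.writeheader()
        writer.writerows(decoded)
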
Example #2
def test_db_connection():
    db = _mysql.connect(host=HOST, user=USER, passwd=PASSWD, db=DB)
    db.query(
        "select * from INFORMATION_SCHEMA.COLUMNS where TABLE_NAME='botlog_dev';"
    )
    r = db.store_result()
    return r.fetch_row()
Example #3
def MySqlAdd(data):
    global db
    x = "INSERT INTO `pv` (`measured`, `power`, `energyDay`, `energyTotal`) VALUES ('" + str(
        datetime.now()) + "', '" + data['currentPower'] + "', '" + data[
            'dailyEnergy'] + "', '" + data['totalEnergy'] + "');"
    success = False
    trial = 0

    while success == False and trial < cfg.mysqlRetryCount:
        trial += 1
        try:
            if (db):
                print("already connected")
                pass
            else:
                db = _mysql.connect(host=cfg.mysqlHost,
                                    user=cfg.mysqlUser,
                                    passwd=cfg.mysqlPass,
                                    db=cfg.mysqlDbName,
                                    port=cfg.mysqlPort,
                                    connect_timeout=cfg.mysqlTimeout)

            db.query(x)
            success = True
        except:
            time.sleep(cfg.mysqlRetryDelay)
            print("mysql fail")
    return success
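
A hedged alternative sketch for the insert above: the same statement through the higher-level MySQLdb API, letting the driver quote the values via %s placeholders instead of string concatenation (the cfg.* settings are assumed to be the same ones used above; the function name is illustrative):

from datetime import datetime
import MySQLdb

def mysql_add_sketch(data, cfg):
    # Open a short-lived connection with the same settings as the snippet above.
    conn = MySQLdb.connect(host=cfg.mysqlHost, user=cfg.mysqlUser,
                           passwd=cfg.mysqlPass, db=cfg.mysqlDbName,
                           port=cfg.mysqlPort, connect_timeout=cfg.mysqlTimeout)
    try:
        cur = conn.cursor()
        # The driver converts datetime objects and quotes the string values.
        cur.execute(
            "INSERT INTO pv (measured, power, energyDay, energyTotal) "
            "VALUES (%s, %s, %s, %s)",
            (datetime.now(), data['currentPower'],
             data['dailyEnergy'], data['totalEnergy']))
        conn.commit()
        return True
    finally:
        conn.close()
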
Example #4
def Main():
    path = pathlib.Path(pathflag.path())
    db = _mysql.connect(host=FLAGS.host,
                        user=FLAGS.user,
                        passwd=FLAGS.pwd,
                        db=FLAGS.db)

    # First create the output directories. Fail if they already exist.
    (path / "ir").mkdir(parents=True)
    (path / "graphs").mkdir()
    (path / "train").mkdir()
    (path / "val").mkdir()
    (path / "test").mkdir()

    # Export the legacy IR database.
    export = ImportIrDatabase(path, db)
    progress.Run(export)

    # Import the classifyapp dataset.
    ImportClassifyAppDataset(pathlib.Path(FLAGS.classifyapp), path)

    # Add inst2vec encoding features to graphs.
    app.Log(1, "Encoding graphs with inst2vec")
    progress.Run(Inst2vecEncodeGraphs(path))

    app.Log(1, "Creating vocabularies")
    subprocess.check_call([str(CREATE_VOCAB), "--path", str(path)])

    app.Log(1, "Creating data flow analysis labels")
    subprocess.check_call([str(CREATE_LABELS), str(path)])
Example #5
def working(param):
    print(param)
    db=_mysql.connect(host="cap-au-sg-prd-05.securegateway.appdomain.cloud",port=15208,user="******",passwd="password123",db="testdev")
    db.query("""SELECT * FROM user""")
    r = db.store_result()
    user_result = r.fetch_row(maxrows=0,how=1)
    print("success load data from mysql")

    df = pd.DataFrame(user_result)
    print(df.head())

    pgserver = '49ec7436-5643-423b-b0e4-158df3ec8b98.bqfh4fpt0vhjh7rs4ot0.databases.appdomain.cloud'
    pguser = '******'
    pgpassword = param
    pgport = '31369'
    pgdb = 'ibmclouddb'

    pgconn = psycopg2.connect(user=pguser,password=pgpassword,host=pgserver,database=pgdb,port=pgport)
    pgcursor = pgconn.cursor()
    print(pgconn.get_dsn_parameters(),"\n")


    query_insert = """ INSERT INTO public.t_user(username,job,dob,country) values (%s,%s,%s,%s) """
    
    for index,row in df.iterrows():
        record = (row['username'].decode('utf-8'),row['occupation'].decode('utf-8'),row['dob'].decode('utf-8'),row['country'].decode('utf-8'))
        pgcursor.execute(query_insert,record)
        
    
    pgconn.commit()
    return "Hello mysql pandas pgsql full"
Example #6
 def mysql_brute(host, user=None, pwd=None, port=3306, timeout=1):
     """
     :param host: param user:
     :param pwd: param port: (Default value = None)
     :param Timeout: Default value = 2)
     :param user: param port: (Default value = None)
     :param port: 
     """
     try:
         # TODO give port to brute
         MYSQL_CONN = _mysql.connect(host=host,
                                     port=port,
                                     user=user,
                                     passwd=pwd,
                                     connect_timeout=int(timeout))
     except _mysql.Error as e:
         logger.info(f"mysql {e.args}")
         return
     else:
         MYSQL_CONN.close()
         logger.success("mysql success: {}:{}@{}:{}".format(
             user, pwd, host, port))
         return {
             'ip': host,
             'user': user,
             'password': pwd,
             'port': port,
             'serviceName': 'mysql'
         }
Example #7
 def connect(self):
     self.connection = _mysql.connect(port=self.port,
                                      host=self.host,
                                      db=self.database,
                                      user=self.user,
                                      passwd=self.password)
     return self.connection
Example #8
def ucsc_connect(build):
    """
    Open connection to UCSC MySQL database for a specified reference
    """
    conv = {MySQLdb.FIELD_TYPE.LONG: int}
    db = _mysql.connect(host='genome-mysql.cse.ucsc.edu',
                        user='******',
                        passwd='',
                        db=build,
                        conv=conv)
    return db
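
A usage sketch for the conv argument (the refGene query below is only illustrative): mapping FIELD_TYPE.LONG to int makes integer columns arrive as Python ints rather than bytes.

db = ucsc_connect("hg38")
db.query("SELECT chrom, txStart FROM refGene LIMIT 5")
for chrom, tx_start in db.store_result().fetch_row(maxrows=0):
    # chrom is still bytes (no converter registered for strings),
    # while txStart arrives as an int thanks to the LONG converter.
    print(chrom.decode("utf-8"), tx_start)
db.close()
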
Example #9
def get_last_bot_feedback_frame():
    chat_id = request.args.get('chatid')
    query = 'SELECT message_id, value, timestamp FROM botlog_dev WHERE conv_id = ' + single_preprop(
        chat_id) + "AND user_type = 'bot' ORDER BY timestamp DESC LIMIT 1;"
    db = _mysql.connect(host=HOST, user=USER, passwd=PASSWD, db=DB)
    db.query(query)
    r = db.store_result()
    try:
        return jsonify(r.fetch_row(how=1)[0])
    except:
        return 'EMPTY SET'
Example #10
def get_poll_results():
    chat_id = request.args.get('chatid', False)
    ts = request.args.get('ts', False)
    if not (chat_id and ts):
        return 'err'
    query = "SELECT message_id, value, timestamp FROM botlog_dev WHERE conv_id = " + single_preprop(
        chat_id) + " AND label = 'answer' AND timestamp >= " + single_preprop(
            ts) + ";"
    db = _mysql.connect(host=HOST, user=USER, passwd=PASSWD, db=DB)
    db.query(query)
    r = db.store_result()
    return jsonify(r.fetch_row(maxrows=0, how=1))
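
A hedged sketch of the same lookup through MySQLdb's cursor API with %s placeholders, which sidesteps the quoting and spacing pitfalls of manual concatenation (HOST, USER, PASSWD, and DB are assumed to be the module-level settings used above; the function name is illustrative):

import MySQLdb
import MySQLdb.cursors

def poll_results_sketch(chat_id, ts):
    conn = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DB)
    try:
        # DictCursor returns rows as dicts keyed by column name.
        cur = conn.cursor(MySQLdb.cursors.DictCursor)
        cur.execute(
            "SELECT message_id, value, timestamp FROM botlog_dev "
            "WHERE conv_id = %s AND label = 'answer' AND timestamp >= %s",
            (chat_id, ts))
        return cur.fetchall()
    finally:
        conn.close()
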
Example #11
def execSql(request):
  try:
    body_unicode = request.body.decode('utf-8')
    body = json.loads(body_unicode)
    sql = body['sql']
    sourceId = body['source_id']
    source = SourceDataBase.objects.get(source_id=sourceId)
    source = serializers.serialize('json', [source])
    source = json.loads(source)[0]['fields']
    host = source['host']
    username = source['username']
    port = source['port']
    password = pc.decrypt(source['password'])
    database = source['database']

    db=_mysql.connect(
      host=host,
      port=int(port),
      user=username,
      passwd=password,
      db=database,
      charset='utf8'
    )
    db.query(sql)
    data = db.store_result().fetch_row(maxrows=0, how=2)
    db.close()
    json_data = []
    for index in range(len(data)):
      row = data[index]
      json_data.append({})
      for key in row:
        if(key.find('.')>0):
          column = (key.split('.'))[1]
        else:
          column = key
        if isinstance(row[key], bytes):
          json_data[index][column] = row[key].decode('UTF-8', 'ignore')
        else:
          json_data[index][column] = row[key]
    response = {
      'code': 20000,
      'message': 'success',
      'data': json_data
    }
  except Exception as e:
    print(e)
    response = {
      'code': 10000,
      'message': str(e),
      'data': {}
    }
  return JsonResponse(response)
Example #12
    def __init__(self, host, user, passwd, db):
        self.mysql = MySQL_Connector.connect(host=host,
                                             user=user,
                                             passwd=passwd,
                                             db=db,
                                             charset="utf8")
        self.db = db

        self.engine = "InnoDB"
        self.charset = "utf8mb4"
        self.collate = "utf8mb4_unicode_ci"

        self._table = None
Example #13
   def __init__(self, path: pathlib.Path, db):
       self.path = path
       db = _mysql.connect(host=FLAGS.host,
                           user=FLAGS.user,
                           passwd=FLAGS.pwd,
                           db=FLAGS.db)
       db.query("""
 SELECT COUNT(*) FROM intermediate_representation
 WHERE compilation_succeeded=1
 AND source NOT LIKE 'poj-104:%'
 """)
       n = int(db.store_result().fetch_row()[0][0].decode("utf-8"))
       super(ImportIrDatabase, self).__init__("ir db", i=0, n=n, unit="irs")
Example #14
 def connect(self):
     """
     Connect to MySQL database using parameters from options.
     Save connection object into self.con.
     """
     logger.debug(
         f"Trying to connect: host={self.options['host']} port={self.options['port']}"
         f" dbname={self.options['dbname']}, user={self.options['user']} "
         f"password={self.options['password']}.")
     self.con = _mysql.connect(host=self.options['host'],
                               port=self.options['port'],
                               user=self.options['user'],
                               passwd=self.options['password'],
                               db=self.options['dbname'])
Example #15
def main():
    os.system('mkdir ' + CONFIG_TEMP_DIR)
    filename_zip = CONFIG_TEMP_DIR + 'padron_reducido_ruc.zip'
    os.system('wget ' + CONFIG_PADRON_REDUCIDO_URL + ' -O ' + filename_zip)
    os.system('unzip ' + filename_zip + ' -d ' + CONFIG_TEMP_DIR)
    os.system('rm ' + filename_zip)
    filename = CONFIG_TEMP_DIR + 'padron_reducido_ruc.txt'

    num_records = sum(1 for line in open(filename, 'r', encoding='ISO-8859-1'))
    num_records -= 1
    db=_mysql.connect(
        host=CONFIG_MYSQL_HOST,
        user=CONFIG_MYSQL_USER,
        passwd=CONFIG_MYSQL_PASS,
        db=CONFIG_MYSQL_DB,
    )
 
    query_params = "('%s','%s','%s','%s','%s','%s', NOW(), NOW())"

    i = 0
    n = 0
    query_block = ''
    domicilio = ''
    domicilio_lst = []
    with open(filename, 'r', encoding='ISO-8859-1') as f:
        f.readline()
        db.query(CONFIG_MYSQL_DROP_TABLE)
        db.query(CONFIG_MYSQL_CREATE_TABLE)
        for line in tqdm(f, total=num_records):
            i += 1
            n += 1
            l = line.replace("\\", "")
            l = l.replace("'", "\\'")
            lst = l.split('|')
            lst.pop()

            domicilio_lst = [lst[7], lst[8], lst[5], lst[6], lst[13], lst[11], lst[9], lst[10], lst[12], lst[14]]
            domicilio_lst = [w.replace('-', '') for w in domicilio_lst]
            domicilio = ' '.join(domicilio_lst).strip()
            domicilio = ' '.join(domicilio.split())
            query_block += (query_params % (lst[0], lst[1], lst[2], lst[3], lst[4], domicilio))
            if i < CONFIG_MYSQL_INSERT_GROUP and n < num_records: query_block += ','
            if i == CONFIG_MYSQL_INSERT_GROUP or n == num_records:
                db.query(CONFIG_MYSQL_INSERT_QUERY % query_block)
                i = 0
                query_block = ''
    os.system('rm ' + filename)
Example #16
def mysql_conn(retry):
    log = logging.getLogger(__name__)

    from time import sleep

    try:
        from MySQLdb import _mysql
    except ImportError as e:
        log.error(e)
        return False

    mainint = __salt__['pillar.get']('sensor:mainint',
                                     __salt__['pillar.get']('manager:mainint'))
    mainip = __salt__['grains.get']('ip_interfaces').get(mainint)[0]

    mysql_up = False
    for i in range(0, retry):
        log.debug(f'Connection attempt {i+1}')
        try:
            db = _mysql.connect(host=mainip,
                                user='******',
                                passwd=__salt__['pillar.get']('secrets:mysql'))
            log.debug(
                f'Connected to MySQL server on {mainip} after {i} attempts.')

            db.query("""SELECT 1;""")
            log.debug(
                f'Successfully completed query against MySQL server on {mainip}'
            )

            db.close()
            mysql_up = True
            break
        except _mysql.OperationalError as e:
            log.debug(e)
        except Exception as e:
            log.error('Unexpected error occurred.')
            log.error(e)
            break
        sleep(1)

    if not mysql_up:
        log.error(
            f'Could not connect to MySQL server on {mainip} after {retry} attempts.'
        )

    return mysql_up
Example #17
def get_users(request):
    dbinfo = DATABASES['default']
    db = _mysql.connect(host=dbinfo['HOST'],
                        user=dbinfo['USER'],
                        passwd=dbinfo['PASSWORD'],
                        db="dev_box")
    try:
        query = """SELECT first_name,last_name,email,last_login FROM auth_user ORDER BY last_login DESC"""
        db.query(query)
        qr = db.store_result()
        qr = qr.fetch_row(maxrows=0, how=1)
        db.close()
        return Response(data=qr, status=status.HTTP_200_OK)
    except Exception as e:
        # Some other error - most likely related to the DB connection/execution
        db.close()
        return Response(data=str(e),
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #18
def sourceTables(request, sourceId):
    json_data = []
    try:
        tables = SourceDataTable.objects.filter(database=sourceId)

        tables = serializers.serialize('json', tables)
        tables = json.loads(tables)
        for table in tables:
            json_data.append(table['fields'])

    except Exception as e:
        print('no linked tables before', e)

    source = SourceDataBase.objects.get(source_id=sourceId)
    source = serializers.serialize('json', [source])
    source = json.loads(source)[0]['fields']
    password = source['password'].encode(('utf-8'))
    host = source['host']
    username = source['username']
    port = source['port']
    password = pc.decrypt(password)
    database = source['database']

    db = _mysql.connect(host=host,
                        port=int(port),
                        user=username,
                        passwd=password,
                        db=database)
    db.query('show tables;')
    tables = db.store_result().fetch_row(maxrows=0, how=2)
    db.close()
    tables = list(tables)
    for i, table in enumerate(tables):
        tableName = list(table.values())[0].decode('utf-8')
        if next((x for x in json_data if x['table'] == tableName), None):
            print(tableName + ' linked')
        else:
            json_data.append({'table': tableName, 'status': 0})

    return JsonResponse({
        'code': 20000,
        'message': 'success',
        'data': json_data
    })
Example #19
def db_sample():
    con = _mysql.connect(user='******',
                         passwd='db password',
                         host='localhost',
                         db='sample',
                         charset='utf8')

    # The low-level _mysql connection object has no cursor();
    # use query()/store_result()/fetch_row() instead.
    sql = 'select id, body, post_code, created from posts'
    con.query(sql)

    rows = con.store_result().fetch_row(maxrows=0)

    for row in rows:
        print(row)

    con.close()
Example #20
def get_poll_since_bot():
    chat_id = request.args.get('chatid', False)
    if not chat_id:
        return 'err'
    query_last_bot = 'SELECT message_id, value, timestamp FROM botlog_dev WHERE conv_id = ' + single_preprop(
        chat_id) + "AND user_type = 'bot' ORDER BY timestamp DESC LIMIT 1;"

    db = _mysql.connect(host=HOST, user=USER, passwd=PASSWD, db=DB)
    db.query(query_last_bot)
    r = db.store_result()
    ts = r.fetch_row(maxrows=0, how=1)[0]['timestamp'].decode()
    query = "SELECT message_id, value, timestamp FROM botlog_dev WHERE conv_id = " + single_preprop(
        chat_id) + " AND label = 'answer' AND timestamp >= '" + ts + "';"
    db.query(query)
    r = db.store_result()
    try:
        return jsonify(r.fetch_row(maxrows=0, how=1))
    except:
        return 'EMPTY SET'
Example #21
  def get(self):
    self.response.headers['Content-Type'] = 'text/plain'

    if (os.getenv('SERVER_SOFTWARE') and
      os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
      db = _mysql.connect(
          unix_socket='/cloudsql/my_project:my_instance',
          user='******')
    else:
      db = MySQLdb.connect(host='localhost', user='******')

    db.query('SHOW VARIABLES')
    result = db.store_result()
    while True:
      row = result.fetch_row()
      if row:
        self.response.write('%s\n' % str(row[0]))
      else:
        break
Example #22
  def Run(self):
    with multiprocessing.Pool() as pool:
      # A counter used to produce a unique ID number for each exported file.
      n = 0
      # Run many smaller queries rather than one big query since MySQL
      # connections will die if hanging around for too long.
      batch_size = 512
      job_size = 16
      for j in range(0, self.ctx.n, batch_size):
        db = _mysql.connect(
          host=FLAGS.host, user=FLAGS.user, passwd=FLAGS.pwd, db=FLAGS.db
        )
        db.query(
          f"""\
SELECT
  source,
  source_language,
  type,
  binary_ir
FROM intermediate_representation
WHERE compilation_succeeded=1
AND source NOT LIKE 'poj-104:%'
LIMIT {batch_size}
OFFSET {j}
"""
        )

        results = db.store_result()
        rows = [
          (item, i)
          for i, item in enumerate(results.fetch_row(maxrows=0), start=n)
        ]
        # Update the exported file counter.
        n += len(rows)
        jobs = [
          (self.path, chunk) for chunk in labtypes.Chunkify(rows, job_size)
        ]

        for exported_count in pool.imap_unordered(_ProcessRows, jobs):
          self.ctx.i += exported_count

    self.ctx.i = self.ctx.n
Example #23
def working(param, passwd):
    print(param)
    db = _mysql.connect(host="cap-au-sg-prd-05.securegateway.appdomain.cloud",
                        port=15208,
                        user="******",
                        passwd="password123",
                        db="testdev")
    db.query("SELECT * FROM trx where trx_type = {0}".format(param))
    r = db.store_result()
    trx_result = r.fetch_row(maxrows=0, how=1)
    print("success load data from mysql")

    df = pd.DataFrame(trx_result)
    print(df.head())

    pgserver = '49ec7436-5643-423b-b0e4-158df3ec8b98.bqfh4fpt0vhjh7rs4ot0.databases.appdomain.cloud'
    pguser = '******'
    pgport = '31369'
    pgdb = 'ibmclouddb'
    pgpassword = passwd

    pgconn = psycopg2.connect(user=pguser,
                              password=pgpassword,
                              host=pgserver,
                              database=pgdb,
                              port=pgport)
    pgcursor = pgconn.cursor()
    print(pgconn.get_dsn_parameters(), "\n")

    query_insert = """ INSERT INTO public.t_trx(trx_id,username,item_id,qty,amount,trx_type) values (%s,%s,%s,%s,%s,%s) """

    for index, row in df.iterrows():
        record = (row['trx_id'].decode('utf-8'),
                  row['username'].decode('utf-8'),
                  row['item_id'].decode('utf-8'), row['qty'].decode('utf-8'),
                  row['amount'].decode('utf-8'),
                  row['trx_type'].decode('utf-8'))
        pgcursor.execute(query_insert, record)

    pgconn.commit()
    return "Success staging trx"
Example #24
def authenticate_user(request):
    token = request.data
    dbinfo = DATABASES['default']
    db = _mysql.connect(host=dbinfo['HOST'],
                        user=dbinfo['USER'],
                        passwd=dbinfo['PASSWORD'],
                        db="dev_box")
    try:
        # idinfo contains all of the information from the user being authenticated
        idinfo = id_token.verify_oauth2_token(
            token, requests.Request(),
            "91335092244-a8nui54bma999p0f0f61uklj8095v6cl.apps.googleusercontent.com"
        )
        firstName = idinfo['given_name']
        lastName = idinfo['family_name']
        email = idinfo['email']
        check = """SELECT * FROM auth_user WHERE email = "{email}" """.format(
            email=email)
        db.query(check)
        qr = db.store_result()
        qr = qr.fetch_row(maxrows=0, how=1)
        if (len(qr) > 0):
            query = """UPDATE auth_user SET last_login = CURRENT_TIMESTAMP WHERE email = "{email}" """.format(
                email=email)
            db.query(query)
        else:
            query = """INSERT INTO auth_user (first_name, last_name, email, last_login) VALUES ("{fn}", "{ln}", "{email}", CURRENT_TIMESTAMP)""".format(
                fn=firstName, ln=lastName, email=email)
            db.query(query)

        db.close()
        return Response(data=qr, status=status.HTTP_200_OK)
    except ValueError as e:
        # Invalid token
        db.close()
        return Response(data=str(e), status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        # Some other error - most likely related to the DB connection/execution
        db.close()
        return Response(data=str(e),
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #25
    def OnNovoFormulario(self):
        tex1 = '112223'  #self.cpfText.GetValue()
        tex2 = 'Rua Lobo'  #self.nomeText.GetValue()
        tex3 = '991032340'  #self.enderecoText.GetValue()
        """Data base connect"""
        db = _mysql.connect("127.0.0.1", "root", "", "origginal")
        print(db)
        """SQL INSERT INTO"""
        insert = (
            f"""INSERT INTO cliente (Nome, Endereco, Contato) VALUES ("{tex1}", "{tex2}", "{tex3}")"""
        )
        db.query(insert)
        """SQL DELETE"""
        # delete = (f"""delete from cliente where Endereco="{tex2}" """)
        """sql SELECT"""
        db.query("""SELECT * FROM cliente""")
        maxrows = 1
        r = db.store_result()
        print(r.fetch_row(maxrows))

        db.close()
Example #26
    def start_logging_sql(self, host, port, db, tb, log_interval):
        # Note: _mysql.connect() has no 'table' argument; the table name tb is used in the INSERT below.
        db = _mysql.connect(host=host, port=port, db=db, read_default_file=TempHumiditySensor.SQL_CREDS_PATH)

        print("Logging to SQL database started!")
        if self.mode != "periodic":
            self.set_mode("periodic", rate=1, acc="med")

        if TempHumiditySensor.IDX == 0:
            self.init_read()
            TempHumiditySensor.IDX += 1
        while True:
            sample = self.get_sample()
            temp = sample[0]
            rh = sample[1]
            dtime = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            f = open(TempHumiditySensor.LOGGING_PATH, "w")
            f.write(dtime + ',' + str(temp) + ',' + str(rh) + '\n')
            f.close()
            query_str = "INSERT INTO " + tb + " (time, temp, rh) VALUES (" + "'" + dtime + "'" + "," + str(temp) + "," + str(rh) + ")"
            db.query(query_str)
            time.sleep(log_interval)
Example #27
def sourceTables(request, sourceId):
    try:
        tables = SourceDataTable.objects.get(database=sourceId)
        tables = serializers.serialize('json', [tables])
        tables = json.loads(tables)
        json_data = []
        for table in tables:
            json_data.append(table['fields'])

    except:
        source = SourceDataBase.objects.get(source_id=sourceId)
        source = serializers.serialize('json', [source])
        source = json.loads(source)[0]['fields']
        password = source['password'].encode(('utf-8'))
        print(password)
        host = source['host']
        username = source['username']
        port = source['port']
        password = pc.decrypt(password)
        database = source['database']
        print(password)

        db = _mysql.connect(host=host,
                            port=int(port),
                            user=username,
                            passwd=password,
                            db=database,
                            charset='utf8')
        db.query('show tables;')
        tables = db.store_result().fetch_row(maxrows=0, how=2)
        db.close()
        json_data = list(tables[0].values())
        for i, table in enumerate(json_data):
            json_data[i] = {'table': table.decode('utf-8'), 'status': 0}

    return JsonResponse({
        'code': 20000,
        'message': 'success',
        'data': json_data
    })
Example #28
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", help="Output filename")
    parser.add_argument("-t", "--table", help="Database table to select")
    parser.add_argument("--maxsamples",
                        help="Maximum number of samples to fetch",
                        type=int,
                        default=1000)

    args = parser.parse_args()

    fpath = args.file
    samples = args.maxsamples
    table = args.table

    if fpath is None or table is None:
        print("Must provide file path [-f] and data table [-t]")
        sys.exit()

    db = _mysql.connect(host=host,
                        port=3306,
                        user=user,
                        passwd=passwd,
                        db=dbase)
    query_str = "SELECT * FROM " + table + " ORDER BY time DESC LIMIT " + str(
        samples)
    db.query(query_str)
    res = db.store_result()
    data = res.fetch_row(maxrows=0)

    with open(fpath, 'w') as file:
        for _tuple in data:
            file.write(_tuple[0].decode('utf-8'))
            file.write(",")
            file.write(_tuple[1].decode('utf-8'))
            file.write(",")
            file.write(_tuple[2].decode('utf-8'))
            file.write('\n')

    print("Data export complete!")
Example #29
def connect_to_mysql():
    """ Connect to the database """
    for var in MYSQL_ENV:
        if var not in os.environ or os.environ[var] == "":
            print(f"ERROR: Environment variable '{var}' is missing")
            return None

    my_conv = MySQLdb.converters.conversions.copy()
    my_conv[FIELD_TYPE.VARCHAR] = convert_string
    my_conv[FIELD_TYPE.CHAR] = convert_string
    my_conv[FIELD_TYPE.STRING] = convert_string
    my_conv[FIELD_TYPE.VAR_STRING] = convert_string
    sock = "/tmp/mysql.sock"
    host = None
    port = None
    if "MYSQL_CONNECT" in os.environ:
        conn = os.environ["MYSQL_CONNECT"]
        if conn[0] == "/":
            sock = conn
        else:
            host = conn
            port = 3306
            if conn.find(":") >= 0:
                svr = conn.split(":")
                host = svr[0]
                port = int(svr[1])

    return _mysql.connect(
        user=os.environ["MYSQL_USERNAME"],
        passwd=os.environ["MYSQL_PASSWORD"],
        unix_socket=sock,
        host=host,
        port=port,
        db=os.environ["MYSQL_DATABASE"],
        conv=my_conv,
        charset='utf8mb4',
        init_command='SET NAMES UTF8',
    )
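
A short usage sketch for the connector above, assuming the MYSQL_* environment variables are set: run a probe query and close the connection.

db = connect_to_mysql()
if db is not None:
    db.query("SELECT NOW()")
    print(db.store_result().fetch_row(maxrows=0))
    db.close()
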
Example #30
def check(c):

    global defaults_file

    # mysql server endpoint and credentials
    defaults_file = c.conf.get('defaults_file', '/opt/cmt/mysql.cnf')

    query = c.conf.get('query', '')
    columns = c.conf.get('columns', {})
    maxlines = c.conf.get('maxlines', 200)

    # -------------------------------------

    try:
        #db=_mysql.connect(host=host,user=user,passwd=password)
        db = _mysql.connect(read_default_file=defaults_file)
    except Exception as e:
        c.severity = cmt.SEVERITY_CRITICAL
        c.add_message(
            "mysql - can't connect with conf {}".format(defaults_file))
        debug("Error {}".format(e))
        return c

    # -------------------------------------

    try:
        db.query(query)
    except Exception as e:
        c.severity = cmt.SEVERITY_CRITICAL
        c.add_message("mysql - couldn't execute query ({})".format(e))
        debug("Error {}".format(e))
        return c

    try:
        lines = db.store_result().fetch_row(maxrows=0, how=1)
    except Exception as e:
        c.severity = cmt.SEVERITY_CRITICAL
        c.add_message("mysql - couldn't fetch result ({})".format(e))
        debug("Error {}".format(e))
        return c

    #  ( {'id': b'1', 'user': b'joe',  'age': b'33'},
    #    {'id': b'2', 'user': b'igor', 'age': b'23'},
    #    {'id': b'3', 'user': b'phil', 'age': b'42'}   )

    # print(lines)
    count = 0
    for line in lines:
        vars = {}
        count = count + 1
        if count >= maxlines:
            break

        for k, v in line.items():
            try:
                v = v.decode()
            except:
                pass
            try:
                k2 = columns[k]
            except:
                k2 = k
            # print(k2,v)
            vars[k2] = v
        c.multievent.append(vars)

    # create global event
    c.add_item(checkitem.CheckItem('mysqldata_count', count))

    c.add_message("{} - {} lines collected".format(c.check, count))
    return c
Example #31
#   - for each row in notification_settings:
#     - if member_id in oldsite_member:
#       - set preference for notification preference to false
# - orders -> 'oldsite_payment' (online orders), 'oldsite_payment_manual' (manual orders)
#   - ocvt_orders + order_items + order_items(ONLINE ORDERS)
#   - ocvt_manual_payments (MANUAL ORDERS)
#     - ASK DOUG
#     - Create webtools page to view all incomplete manual orders
#     - Email all incomplete people >= 2019
#     - Add note on myocvt page to email webmaster if any issues

import datetime
import sqlite3
from MySQLdb import _mysql

mdb = _mysql.connect('127.0.0.1', 'root', 'ocvt', 'ocvt')
scon = sqlite3.connect('dolabra-sqlite.sqlite3')
sc = scon.cursor()

## NEWS
mdb.query("SELECT * FROM ocvt_news")
rows = mdb.use_result()

while True:
    row = rows.fetch_row()
    if len(row) == 0:
        break

    # parse & decode
    create_datetime = row[0][1].decode('utf-8')
    title           = row[0][2].decode('utf-8')