def results(id):
    """Run the stored query with the given id and show or download its rows.

    Looks up the query row, executes its saved SQL, then either streams the
    full result set as a CSV attachment (when the 'download' request arg is
    set) or renders the first five rows.  Returns a plain-text message when
    no query row matches *id*.
    """
    select_stmt = "SELECT q.name, d.name as department, q.description, q.query " \
                  "FROM query q JOIN departments d ON q.department_id = d.id " \
                  "WHERE q.id=%s;"
    with RealDictConnection(dsn=local) as conn:
        with conn.cursor() as cursor:
            cursor.execute(select_stmt, (str(id), ))
            res = cursor.fetchone()
        if res is None:
            return 'Query with id %s does not exist!' % str(id)
        # NOTE(review): res['query'] is executed verbatim — this app is a
        # query runner by design, so stored queries must only ever come from
        # trusted administrators.
        # Reuse the same connection instead of opening a second one.
        with conn.cursor() as cursor:
            cursor.execute(res['query'])
            result = cursor.fetchall()
    # Guard against stored queries that return no rows: the original
    # result[0].keys() raised IndexError on an empty result set.
    header = result[0].keys() if result else []
    if request.args.get('download', '').strip():
        si = StringIO()
        writer = csv.writer(si)
        writer.writerow(header)
        writer.writerows([row.values() for row in result])
        output = make_response(si.getvalue())
        output.headers["Content-Disposition"] = "attachment; filename=%s.csv" \
            % str(res['name'])
        output.headers["Content-type"] = "text/csv"
        return output
    return render_template('results.html', details=res, rows=result[0:5],
                           id=id, header=header)
def edit(id):
    """Show, update, or delete the query row identified by *id*.

    Dispatches on the request arguments: 'save' persists the submitted
    description and query text, 'delete' removes the row; otherwise the
    current row is loaded and the edit form is rendered.
    """
    select_stmt = "SELECT q.name, d.name as department, q.query, q.description " \
                  "FROM query q JOIN departments d ON q.department_id = d.id WHERE q.id=%s;"
    update_stmt = "UPDATE query SET description=%s, query=%s WHERE id=%s;"
    delete_stmt = "DELETE FROM query WHERE id=%s;"
    if request.args.get('save', '').strip():
        new_description = request.args.get('description', '').strip()
        new_query = request.args.get('query', '').strip()
        with psycopg2.connect(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(update_stmt, (new_description, new_query, str(id)))
                conn.commit()
        return redirect(url_for('home'))
    if request.args.get('delete', '').strip():
        with psycopg2.connect(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(delete_stmt, (str(id), ))
                conn.commit()
        return redirect(url_for('home'))
    # Plain GET: load the row and render the edit form.
    with RealDictConnection(dsn=local) as conn:
        with conn.cursor() as cursor:
            cursor.execute(select_stmt, (str(id), ))
            row = cursor.fetchone()
    return render_template('edit.html', details=row)
def setUp(self):
    """Install the test schema/data and open a node database connection."""
    log = logging.getLogger("setUp")
    log.debug("installing schema and test data")
    _install_schema_and_test_data()
    log.debug("creating database connection")
    dsn = get_node_database_dsn(_node_name,
                                _database_password,
                                _database_host,
                                _database_port)
    self._connection = RealDictConnection(dsn)
    log.debug("setup done")
def get_rs(queries):
    """Execute each named query and return a dict of result sets.

    *queries* maps a result name to a dict with a 'query' string and an
    optional 'args' mapping of parameters.  All queries run inside a single
    serializable transaction so the result sets are mutually consistent.

    Returns {name: list-of-rows}.
    """
    rs = dict()
    connection = RealDictConnection(config.conn_string)
    try:
        connection.set_client_encoding('latin1')
        cursor = connection.cursor()
        try:
            cursor.execute('begin transaction isolation level serializable;')
            for query_nome, query in queries.items():
                cursor.execute(query['query'], query.get('args', {}))
                rs[query_nome] = cursor.fetchall()
            cursor.execute('commit;')
        finally:
            # Close the cursor even when a query raises.
            cursor.close()
    finally:
        # The original leaked the connection on any exception; always close.
        connection.close()
    return rs
def get_node_databases():
    """return a dict of database connections keyed by node name"""
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
    node_databases = dict()
    for name, host, port, password in zip(_node_names,
                                          _node_database_hosts,
                                          _node_database_ports,
                                          _node_database_passwords):
        # XXX: temporary expedient until boostrap is fixed
        host = 'localhost'
        node_databases[name] = RealDictConnection(
            get_node_database_dsn(name, password, host, port))
    return node_databases
def get_node_ids(node_name):
    """Return a bidirectional id<->name mapping for the cluster of *node_name*.

    Queries the central database for every node in the same cluster as
    *node_name* and returns a dict where each node id maps to its name and
    each name maps back to its id.
    """
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
    connection = RealDictConnection(get_central_database_dsn())
    try:
        cursor = connection.cursor()
        try:
            query = """select id, name from nimbusio_central.node
                where cluster_id = (select cluster_id from nimbusio_central.node
                                    where name = %s)"""
            cursor.execute(query, [node_name, ])
            # we assume node-name will never be the same as node-id
            node_dict = dict()
            for entry in cursor.fetchall():
                node_dict[entry["id"]] = entry["name"]
                node_dict[entry["name"]] = entry["id"]
        finally:
            cursor.close()
    finally:
        # The original leaked cursor and connection on any exception.
        connection.close()
    return node_dict
from datetime import datetime, timedelta
import re
from urllib.request import urlopen, Request
from psycopg2.extras import RealDictConnection
import os
import time
import xml.etree.ElementTree as ET
from dateutil.parser import parse

db = RealDictConnection(os.environ['DATABASE_URL'])

# Poll more aggressively in development.  .get() avoids a KeyError when
# FLASK_ENV is unset (anything other than 'development' means production,
# which matches the original behavior when the variable is present).
if os.environ.get('FLASK_ENV') == 'development':
    WAIT_MINUTES = int(os.environ.get('WAIT_MINUTES', 2))
    WAIT_SECONDS = 6
else:
    WAIT_MINUTES = int(os.environ.get('WAIT_MINUTES', 15))
    WAIT_SECONDS = 60


def download_feed(feed_url):
    """Fetch *feed_url* and return its parsed XML root, or None on failure.

    Failure covers network/HTTP errors, a non-200 status code, and
    responses whose Content-Type header is missing or not XML.
    """
    try:
        req = Request(feed_url, headers={'User-Agent': 'Mozilla/5.0'})
        f = urlopen(req)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any fetch error still yields None.
        return None
    with f:
        # `or ''` guards against a missing Content-Type header, which would
        # otherwise raise TypeError on the `in` test.
        if f.getcode() == 200 and 'xml' in (f.getheader('Content-Type') or ''):
            return ET.fromstring(f.read())
import psycopg2.extensions
from psycopg2.extras import RealDictConnection

# Return rows as dicts and decode text columns as unicode.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)

# NOTE(review): credentials are hard-coded here — move host/user/password
# into configuration or environment variables.
conn_string = "host=%s port=%s dbname=%s user=%s password=%s" % (
    "localhost4", "5432", "fahstats", "kakaostats", "aroeira")
connection = RealDictConnection(conn_string)
# Source data is latin1-encoded.
connection.set_client_encoding('latin1')
cursor = connection.cursor()

# Prepare the rename: raise work_mem for the heavy statements below, open a
# serializable transaction, create an empty work table shaped like
# donors_old, and move the current table aside as donors_old_back.
query = """\
set session work_mem = 2097152 ;
begin transaction isolation level serializable ;
create table donor_work_old (like donors_old including constraints including defaults) ;
alter table donors_old rename to donors_old_back ;"""
cursor.execute(query)

# Collect the distinct snapshot dates present in the renamed table.
query = """\
select distinct data from donors_old_back ;"""
cursor.execute(query)
rs = cursor.fetchall()
import sys sys.path.append("/fahstats/scripts/python") from setup import connStr from psycopg2.extras import RealDictConnection def fetchsome(cursor, arraysize=50000): while True: results = cursor.fetchmany(arraysize) if not results: break for result in results: yield result db = RealDictConnection(connStr["backend"]) cursor = db.cursor() query = """ select d1_serial as d0, d0_serial as d1 from select_two_serial_dates_x_days_last_batch(50) as tsd inner join datas as d on d.data_serial = tsd.d1_serial where d.data >= ( select max(data) from datas where have_data) - '8 weeks'::interval ;""" cursor.execute(query) serial_dates = cursor.fetchall() query = """ select distinct serial_date from team_active_members_history