def insert_transaction(txn):
    """Persist a transaction: header fields plus JSON payload/signature, with initial status "new"."""
    header = txn["header"]
    sql_args = (
        header["transaction_id"],
        header["create_ts"],
        header["transaction_ts"],
        header["business_unit"],
        header["family_of_business"],
        header["line_of_business"],
        Json(txn["payload"]),        # stored as JSON
        Json(txn["signature"]),      # stored as JSON
        header["owner"],
        header["transaction_type"],
        "new",                       # initial status
        header.get('actor', ''),     # optional header fields default to empty string
        header.get('entity', '')
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_INSERT, sql_args)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_all_replication(block_id, phase, origin_id):
    """Return verification records matching block_id and origin_id with phase
    lower than the given phase.

    Used for retrieving verification records at lower phases that match the
    higher-phase record in question.

    :param block_id: block to look up
    :param phase: exclusive upper bound on record phase
    :param origin_id: originating node id
    :return: list of formatted block verification records
    """
    # Fix: bind values as query parameters instead of concatenating them into
    # the SQL string (SQL injection / quoting bugs).
    query = SQL_GET_ALL
    query += """ WHERE block_id = %s AND phase < %s AND origin_id = %s ORDER BY block_id DESC """
    records = []
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, (block_id, phase, origin_id))
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                records.append(format_block_verification(result))
        cur.close()
        return records
    finally:
        get_connection_pool().putconn(conn)
def get_by_phase(phases, limit=None):
    """Yield nodes that provide services for the given phases (bitwise AND).

    e.g. 01001 (phase 1 and 4) will return all nodes that provide either
    phase 1, 4 or both.

    :param phases: integer phase bitmask
    :param limit: max rows to return; clamped to MAX_CONN_LIMIT
    """
    # Fix: bind phases/limit as query parameters instead of string
    # concatenation (SQL injection). BIT_LENGTH is a trusted module constant
    # so interpolating it into the cast is safe.
    query = SQL_GET_ALL
    query_params = []
    if phases:
        query += (" WHERE phases & %s::bit(" + str(BIT_LENGTH) + ") != 0::bit(" +
                  str(BIT_LENGTH) + ") AND connection_attempts IS NULL")
        query_params.append(phases)
    query += """ ORDER BY priority_level ASC, latency DESC """
    if not limit or limit > MAX_CONN_LIMIT:
        limit = MAX_CONN_LIMIT
    if limit:
        query += " LIMIT %s"
        query_params.append(limit)
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, query_params)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                yield format_node(result)
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def get_by_phase(phases, limit=None):
    """Yield nodes whose phase bitmask intersects the given phases mask.

    :param phases: integer phase bitmask
    :param limit: max rows to return; clamped to MAX_CONN_LIMIT
    """
    # Fixes: (1) values are bound as query parameters instead of string
    # concatenation (SQL injection); (2) removed the unused multi_param flag.
    # BIT_LENGTH is a trusted module constant, safe to interpolate.
    query = SQL_GET_ALL
    query_params = []
    if phases:
        query += (" WHERE phases & %s::bit(" + str(BIT_LENGTH) + ") != 0::bit(" +
                  str(BIT_LENGTH) + ") AND connection_attempts IS NULL")
        query_params.append(phases)
    query += """ ORDER BY priority_level ASC, latency DESC """
    if not limit or limit > MAX_CONN_LIMIT:
        limit = MAX_CONN_LIMIT
    if limit:
        query += " LIMIT %s"
        query_params.append(limit)
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, query_params)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                yield format_node(result)
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def insert_sc(sc, sc_class, sc_key):
    """Insert the given smart contract into the database with status "approved".

    :param sc: smart contract payload
    :param sc_class: type of smart contract being run
    :param sc_key: dictionary key
    """
    values = (
        str(uuid.uuid4()),               # uuid pk
        sc_class,
        sc['smart_contract'][sc_class],  # code to be run
        sc_key,                          # dictionary key
        sc['criteria'],                  # criteria (i.e. transaction type)
        sc['test'],                      # unit test
        sc['requirements'],              # required libraries
        sc['version'],                   # current version
        "approved"                       # sc status
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_all(limit=None, offset=None, **params):
    """Yield transactions matching the given filter params, newest first.

    Supported filters: block_id, transaction_type, owner, actor, entity.

    :param limit: max rows (defaults to 10 when falsy)
    :param offset: optional row offset

    Fixes: (1) filter values are bound as query parameters instead of string
    concatenation (SQL injection); (2) WHERE is only emitted when at least one
    supported filter is present, so unknown params no longer yield broken SQL.
    """
    query = SQL_GET_ALL
    clauses = [column + " = %(" + column + ")s"
               for column in ("block_id", "transaction_type", "owner", "actor", "entity")
               if column in params]
    if clauses:
        query += " WHERE " + " AND ".join(clauses)
    query += """ ORDER BY transaction_ts DESC """
    if not limit:
        limit = 10  # default page size when caller does not specify one
    params["limit"] = limit
    query += " LIMIT %(limit)s"
    if offset:
        params["offset"] = offset
        query += " OFFSET %(offset)s"
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, params)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                yield format_transaction(result)
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def get_unregistered_nodes(limit=None):
    """Yield all nodes that are currently unconnected."""
    # should possibly be based upon total unregistered/non-connected nodes and how often this is executed
    # TODO: base time interval based on number of attempts (fibonacci series?)
    query = SQL_GET_ALL
    query += """ WHERE last_conn_attempt_ts IS NULL OR last_conn_attempt_ts < NOW() - INTERVAL '7 days' """
    query += """ AND start_conn_ts IS NULL AND last_activity_ts < NOW() - INTERVAL '7 days' """
    query += """ ORDER BY priority_level ASC, last_conn_attempt_ts ASC, connection_attempts ASC """
    if not limit or limit > MAX_CONN_LIMIT:
        limit = MAX_CONN_LIMIT
    if limit:
        query += """ LIMIT """ + str(limit)
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(query)
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_node(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_records(block_id, origin_id, phase):
    """Return verification records matching block_id, origin_id and phase.

    :return: list of formatted block verification records
    """
    # Fix: bind values as query parameters instead of concatenating them into
    # the SQL string (SQL injection). This also removes the old '||'-style
    # quoting weirdness around origin_id noted in the previous TODO.
    query = SQL_GET_ALL
    query += """ WHERE block_id = %s AND origin_id = %s AND phase = %s"""
    records = []
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, (block_id, origin_id, phase))
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                records.append(format_block_verification(result))
        cur.close()
        return records
    finally:
        get_connection_pool().putconn(conn)
def insert_transfer(origin_id, transfer_to, verification_id):
    """Insert a transfer row (origin_id, transfer_to, verification_id)."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT_QUERY, (origin_id, transfer_to, verification_id))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def execute_db_args(sql_args, query_type):
    """Execute the given query with the given args on a pooled connection and commit."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(query_type, sql_args)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def fixate_block(start_ts_range, end_ts_range, block_id):
    """Run SQL_FIXATE_BLOCK for the given block over the transactions in the
    given timestamp range (the previous block's transactions)."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_FIXATE_BLOCK, (block_id, start_ts_range, end_ts_range))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def execute_db_args(sql_args, query_type):
    """Execute the given query with the given args on a pooled connection and commit."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(query_type, sql_args)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_all(limit=None, offset=None, **params):
    """Yield all verification records matching the given parameters.

    Supported filters (ignored when absent or falsy): block_id, phase,
    origin_id, min_block_id.

    :param limit: max rows (defaults to 10 when falsy)
    :param offset: optional row offset

    Fix: the OFFSET placeholder was written '$(offset)s' (shell style) instead
    of the psycopg2 pyformat '%(offset)s', so any non-None offset produced
    invalid SQL.
    """
    query = SQL_GET_ALL
    separator_needed = False
    if params:
        query += """ WHERE"""
    if "block_id" in params and params["block_id"]:
        query += """ block_id = %(block_id)s"""
        separator_needed = True
    if "phase" in params and params["phase"]:
        if separator_needed:
            query += """ AND """
        query += """ phase = %(phase)s"""
        separator_needed = True
    if "origin_id" in params and params["origin_id"]:
        if separator_needed:
            query += """ AND """
        query += """ origin_id = %(origin_id)s"""
        separator_needed = True
    if "min_block_id" in params and params["min_block_id"]:
        if separator_needed:
            query += """ AND """
        query += """ block_id >= %(min_block_id)s"""
        separator_needed = True
    if not limit:
        limit = 10
    if limit:
        params["limit"] = limit
        query += """ LIMIT %(limit)s"""
    if offset:
        params["offset"] = offset
        query += """ OFFSET %(offset)s"""
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, params)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                yield format_block_verification(result)
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def set_verification_sent(transfer_to, ver_id):
    """Mark the record matching the given 'transfer_to' and verification id as sent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_MARK_RECORD, (transfer_to, ver_id))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_prior_block(origin_id, phase):
    """Fetch the prior block verification record for origin_id/phase, formatted, or None."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_PRIOR_BLOCK, (origin_id, phase))
        row = cursor.fetchone()
        cursor.close()
        # preserve the falsy return (None) when no row was found
        return format_block_verification(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def get(verification_id):
    """Fetch a single verification record by id, formatted, or None when absent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (verification_id, ))
        row = cursor.fetchone()
        cursor.close()
        return format_block_verification(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def update_transaction(txn):
    """Update a transaction's status and block_id, keyed by its transaction_id."""
    header = txn["header"]
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_UPDATE, (header["status"], header["block_id"], str(header["transaction_id"])))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get(txid):
    """Fetch a single transaction by id, formatted, or None when absent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (txid,))
        row = cursor.fetchone()
        cursor.close()
        return format_transaction(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def get(node_id):
    """Query for the network node that matches the given node id; None when absent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (node_id, ))
        row = cursor.fetchone()
        cursor.close()
        return format_node(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def get(subscriber_id):
    """Query for the subscription matching the given subscriber_id; None when absent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (subscriber_id, ))
        row = cursor.fetchone()
        cursor.close()
        return format_subscriber(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def insert_node(node):
    """Insert the given network node and return its node_id."""
    node_id = node.node_id
    sql_args = (
        node_id,
        int(time.time()),  # creation time
        node.owner,
        node.host,
        node.port,
        node.phases,
        node.latency,
        node.pass_phrase
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_INSERT, sql_args)
        connection.commit()
        cursor.close()
        return node_id
    finally:
        get_connection_pool().putconn(connection)
def get(node_id):
    """Query for the network node that matches the given node id; None when absent."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (node_id,))
        row = cursor.fetchone()
        cursor.close()
        return format_node(row) if row else row
    finally:
        get_connection_pool().putconn(connection)
def insert_verification(verification_record):
    """Insert a block verification record; signature and verification_info are stored as JSON."""
    values = (
        str(uuid.uuid4()),  # pk
        verification_record['verification_ts'],
        verification_record["block_id"],
        psycopg2.extras.Json(verification_record["signature"]),
        verification_record["origin_id"],
        verification_record["phase"],
        psycopg2.extras.Json(verification_record["verification_info"])
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT_QUERY, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_transfer(origin_id, transfer_to, verification_id):
    """Insert a transfer row (origin_id, transfer_to, verification_id)."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT_QUERY, (origin_id, transfer_to, verification_id))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_subscription_txns(criteria, block_id=None):
    """Yield transactions that meet the given criteria, optionally restricted
    to a specific block_id.

    Supported criteria keys: transaction_type, actor, entity, owner.

    Fix: execute against a copy of `criteria` so the caller's dict is no
    longer mutated when block_id is appended for query execution.
    """
    query = SQL_GET_ALL
    query += """ WHERE """
    separator_needed = False
    query_params = dict(criteria)  # copy so the caller's dict stays untouched
    if "transaction_type" in criteria:
        query += """ transaction_type = %(transaction_type)s"""
        separator_needed = True
    if "actor" in criteria:
        if separator_needed:
            query += """ AND """
        query += """ actor = %(actor)s"""
        separator_needed = True
    if "entity" in criteria:
        if separator_needed:
            query += """ AND """
        query += """ entity = %(entity)s"""
        separator_needed = True
    if "owner" in criteria:
        if separator_needed:
            query += """ AND """
        query += """ owner = %(owner)s"""
        separator_needed = True
    if block_id:
        if separator_needed:
            query += """ AND """
        query += """ block_id = %(block_id)s"""
        query_params['block_id'] = block_id  # added only to the local copy
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, query_params)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                yield format_transaction(result)
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def update_transaction(txn):
    """Update a transaction's status and block_id, keyed by its transaction_id."""
    header = txn["header"]
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_UPDATE, (header["status"], header["block_id"], str(header["transaction_id"])))
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_all(transfer_to):
    """Yield all transfer records whose transfer_to matches the given id."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_ALL, (transfer_to, ))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_sub_response(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_pending_timestamp():
    """Return formatted pending-transaction records for SQL_GET_PENDING_QUERY.

    Fix: fetchmany() was called without a size, so it fell back to
    cursor.arraysize (1 by default in psycopg2), defeating the paging loop;
    pass DEFAULT_PAGE_SIZE like the other paginated readers in this module.
    """
    timestamps = []
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(SQL_GET_PENDING_QUERY)
        # fetch in pages to keep memory usage down
        while True:
            results = cur.fetchmany(DEFAULT_PAGE_SIZE)
            if not results:
                break
            for result in results:
                timestamps.append(format_pending_transaction(result))
        cur.close()
        return timestamps
    finally:
        get_connection_pool().putconn(conn)
def get_unsent_verification_records(node_transmit_id):
    """Yield validated records that have not already been sent back to the
    node with node_transmit_id or verification_id."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(GET_UNSENT_VERIFIED_RECORDS, (node_transmit_id, ))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_verification_record(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_transfer(transfer_to, transactions, verifications):
    """Insert a new transfer record containing lists of transactions and verifications.

    :param transfer_to: node_id to send transfer records to
    :param transactions: list of json formatted transactions
    :param verifications: list of json formatted verification records

    Fix: map() returns a lazy iterator on Python 3, which psycopg2 cannot
    adapt as a query parameter; materialize the Json-wrapped lists explicitly.
    """
    values = (
        transfer_to,
        [psycopg2.extras.Json(txn) for txn in transactions],
        [psycopg2.extras.Json(ver) for ver in verifications]
    )
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor()
        cur.execute(SQL_INSERT_QUERY, values)
        conn.commit()
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def get_all():
    """Yield all approved smart contracts."""
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_APPROVED)
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_sc(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_all(limit=None):
    """Yield subscriptions that have passed their due synchronization periods.

    :param limit: passed through to SQL_GET_ALL as its single parameter
    """
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_ALL, (limit, ))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_subscription(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_verification(verification_record):
    """Insert a block verification record; signature and verification_info are stored as JSON."""
    values = (
        str(uuid.uuid4()),  # pk
        verification_record["block_id"],
        verification_record["origin_id"],
        verification_record['verification_ts'],
        psycopg2.extras.Json(verification_record["signature"]),
        psycopg2.extras.Json(verification_record["verification_info"])
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT_QUERY, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_prior_block(origin_id, phase):
    """Return the most recent (highest block_id) verification record for the
    given origin_id and phase, formatted, or None when absent.
    """
    # Fix: bind values as query parameters instead of concatenating them into
    # the SQL string (SQL injection).
    query = SQL_GET_ALL
    query += """ WHERE origin_id = %s AND phase = %s ORDER BY block_id DESC LIMIT 1 """
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, (origin_id, phase))
        result = cur.fetchone()
        cur.close()
        if result:
            result = format_block_verification(result)
        return result
    finally:
        get_connection_pool().putconn(conn)
def get_by_phase_criteria(phase):
    """Return subscriptions whose phase criteria match the given phase."""
    subscriptions = []
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_ALL_BY_PHASE, (phase, ))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                subscriptions.append(format_subscriber(row))
        cursor.close()
        return subscriptions
    finally:
        get_connection_pool().putconn(connection)
def get_backlogs(block_id):
    """Return any backlog records for the given block id."""
    back_logs = []
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_ID, (block_id,))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                back_logs.append(format_backlog(row))
        cursor.close()
        return back_logs
    finally:
        get_connection_pool().putconn(connection)
def insert_backlog(client_id, block_id):
    """Insert a new backlog row.

    :param client_id: id of subscribing node
    :param block_id: block id of backlog
    """
    values = (
        str(uuid.uuid4()),  # transfer_id PK
        client_id,
        block_id
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT_QUERY, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_subscription(subscriber_id, criteria, phase_criteria, subscriber_public_key, create_ts):
    """Insert the given subscription into the database.

    :param subscriber_id: id of subscribing node
    :param criteria: json structured criteria to be met by the subscription node
    :param phase_criteria: subscriber requests data up to this phase
    :param subscriber_public_key: subscriber's public key
    :param create_ts: time subscription was created
    """
    values = (
        subscriber_id,
        psycopg2.extras.Json(criteria),  # criteria stored as JSON
        phase_criteria,
        subscriber_public_key,
        create_ts
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_transfer(transfer_to, transactions, verifications):
    """Insert a new transfer record containing lists of transactions and verifications.

    :param transfer_to: node_id to send transfer records to
    :param transactions: list of json formatted transactions
    :param verifications: list of json formatted verification records

    Fix: map() returns a lazy iterator on Python 3, which psycopg2 cannot
    adapt as a query parameter; materialize the Json-wrapped lists explicitly.
    """
    values = (
        transfer_to,
        [psycopg2.extras.Json(txn) for txn in transactions],
        [psycopg2.extras.Json(ver) for ver in verifications]
    )
    conn = get_connection_pool().getconn()
    try:
        cur = conn.cursor()
        cur.execute(SQL_INSERT_QUERY, values)
        conn.commit()
        cur.close()
    finally:
        get_connection_pool().putconn(conn)
def insert_subscription(subscription, subscription_id=None):
    """Insert the given subscription into the database with status "pending".

    :param subscription: dict describing the subscription node and criteria
    :param subscription_id: optional pk; a fresh uuid4 is generated when omitted
    """
    if not subscription_id:
        subscription_id = str(uuid.uuid4())
    values = (
        subscription_id,                                  # subscription uuid pk
        subscription['subscribed_node_id'],               # id of subscription node
        subscription['node_owner'],                       # owner of subscription node
        subscription['host'],                             # subscription node's host
        subscription['port'],                             # subscription node's port
        psycopg2.extras.Json(subscription['criteria']),   # criteria to be met by subscription
        subscription['create_ts'],                        # subscription creation time
        "pending"                                         # subscription status
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_records(**params):
    """Return verification records matching the given criteria.

    Supported filters: block_id, origin_id, phase (values bound as query
    parameters).
    """
    query = SQL_GET_ALL
    query += """ WHERE"""
    separator_needed = False
    if "block_id" in params:
        query += """ block_id = %(block_id)s"""
        separator_needed = True
    if "origin_id" in params:
        if separator_needed:
            query += """ AND """
        query += """ origin_id = %(origin_id)s"""
        separator_needed = True
    if "phase" in params:
        if separator_needed:
            query += """ AND """
        query += """ phase = %(phase)s"""
    records = []
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(query, params)
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                records.append(format_block_verification(row))
        cursor.close()
        return records
    finally:
        get_connection_pool().putconn(connection)
def insert_node(node):
    """Insert the given network node into the table and return its node_id."""
    node_id = node.node_id
    sql_args = (
        node_id,
        int(time.time()),  # node creation time
        node.owner,
        node.host,
        node.port,
        node.phases,       # phases provided in binary
        node.latency,
        node.pass_phrase   # used for Thrift network auth
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_INSERT, sql_args)
        connection.commit()
        cursor.close()
        return node_id
    finally:
        get_connection_pool().putconn(connection)
def get_all_replication(block_id, phase, origin_id):
    """Return verification records matching block_id and origin_id with phase
    lower than the given phase.

    Used for retrieving verification records at lower phases that match the
    higher-phase record in question.
    """
    records = []
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_ALL_REPLICATION, (block_id, phase, origin_id))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                records.append(format_block_verification(row))
        cursor.close()
        return records
    finally:
        get_connection_pool().putconn(connection)
def get_by_phase(phases, limit=None):
    """Yield nodes that provide services for the given phases (bitwise AND).

    e.g. 01001 (phase 1 and 4) will return all nodes that provide either
    phase 1, 4 or both.

    :param phases: integer phase bitmask
    :param limit: max rows to return; clamped to MAX_CONN_LIMIT
    """
    if not limit or limit > MAX_CONN_LIMIT:
        limit = MAX_CONN_LIMIT
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_BY_PHASE, (phases, BIT_LENGTH, BIT_LENGTH, limit))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_node(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def insert_subscription(subscriber_id, criteria, phase_criteria, subscriber_public_key, create_ts):
    """Insert the given subscription into the database.

    :param subscriber_id: id of subscribing node
    :param criteria: json structured criteria to be met by the subscription node
    :param phase_criteria: subscriber requests data up to this phase
    :param subscriber_public_key: subscriber's public key
    :param create_ts: time subscription was created
    """
    values = (
        subscriber_id,
        psycopg2.extras.Json(criteria),  # criteria stored as JSON
        phase_criteria,
        subscriber_public_key,
        create_ts
    )
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor()
        cursor.execute(SQL_INSERT, values)
        connection.commit()
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)
def get_unregistered_nodes(limit=None):
    """Yield all nodes that are currently unconnected.

    :param limit: max rows to return; clamped to MAX_CONN_LIMIT
    """
    # should possibly be based upon total unregistered/non-connected nodes and how often this is executed
    # TODO: base time interval based on number of attempts (fibonacci series?)
    if not limit or limit > MAX_CONN_LIMIT:
        limit = MAX_CONN_LIMIT
    connection = get_connection_pool().getconn()
    try:
        cursor = connection.cursor(get_cursor_name(), cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(SQL_GET_UNREGISTERED_NODES, (limit,))
        # page through results with fetchmany to keep memory usage down
        for page in iter(lambda: cursor.fetchmany(DEFAULT_PAGE_SIZE), []):
            for row in page:
                yield format_node(row)
        cursor.close()
    finally:
        get_connection_pool().putconn(connection)