def handle_requests(self, authentication, proxy_requests, route_name, container_ids, container_interface):
    """Persist the proxy mapping for this session key, replacing any old row.

    The delete-then-insert pair keeps at most one row per session key in
    the ``gxproxy2`` table.
    """
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        db = sqlite.connect(self.proxy_session_map)
        try:
            cursor = db.cursor()
            try:
                # First writer creates the table; later calls hit "already
                # exists" and fall through.
                cursor.execute('''CREATE TABLE gxproxy2 (key text PRIMARY KEY, host text, port integer, container_ids text, container_interface text)''')
            except Exception:
                pass
            cursor.execute('''DELETE FROM gxproxy2 WHERE key=?''', (session_key,))
            cursor.execute(
                '''INSERT INTO gxproxy2 (key, host, port, container_ids, container_interface) VALUES (?, ?, ?, ?, ?)''',
                (session_key, proxy_requests.host, proxy_requests.port, json.dumps(container_ids), container_interface),
            )
            db.commit()
        finally:
            db.close()
def set_meta(self, dataset, overwrite=True, **kwd):
    """Populate dataset metadata (table names, column names, row counts)
    by introspecting the SQLite file at ``dataset.file_name``.

    Per-table failures are logged and skipped so one unreadable table
    does not prevent metadata collection for the rest; any top-level
    failure is logged and leaves the metadata untouched.
    """
    try:
        tables = []
        columns = dict()
        rowcounts = dict()
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            tables_query = "SELECT name,sql FROM sqlite_master WHERE type='table' ORDER BY name"
            rslt = c.execute(tables_query).fetchall()
            for table, sql in rslt:
                tables.append(table)
                try:
                    # LIMIT 0 fetches no rows but still populates
                    # cursor.description with the column names.
                    col_query = 'SELECT * FROM %s LIMIT 0' % table
                    cur = conn.cursor().execute(col_query)
                    columns[table] = [col[0] for col in cur.description]
                except Exception as exc:
                    # Logger.warn is a deprecated alias; use warning().
                    log.warning('%s, set_meta Exception: %s', self, exc)
            for table in tables:
                try:
                    row_query = "SELECT count(*) FROM %s" % table
                    rowcounts[table] = c.execute(row_query).fetchone()[0]
                except Exception as exc:
                    log.warning('%s, set_meta Exception: %s', self, exc)
        finally:
            # The original leaked the connection; always release it.
            conn.close()
        dataset.metadata.tables = tables
        dataset.metadata.table_columns = columns
        dataset.metadata.table_row_count = rowcounts
    except Exception as exc:
        log.warning('%s, set_meta Exception: %s', self, exc)
def test_sqlite_exploits():
    # Not a test of Galaxy code per se -- probes ways a sqlite3 connection
    # could be abused. Background:
    # http://atta.cked.me/home/sqlite3injectioncheatsheet
    conn = sqlite.connect(":memory:")
    conn.execute("create TABLE FOO (foo1 text)")
    __assert_has_n_rows(conn, "select * from FOO", 0)
    # Unknown tables, stacked statements, and selecting over PRAGMA output
    # must all raise.
    for bad_query in [
        "select * from FOOX",
        "select * from FOO; select * from FOO",
        "select * from (PRAGMA database_list)",
    ]:
        __assert_query_errors(conn, bad_query)
    # Read-only subqueries are allowed; mutating subqueries are not.
    __assert_has_n_rows(conn, "select * from FOO where foo1 in (SELECT foo1 from FOO)", 0)
    __assert_query_errors(conn, "select * from FOO where foo1 in (INSERT INTO FOO VALUES ('bar')")
def handle_requests(self, authentication, proxy_requests, route_name, container_ids, container_interface):
    """Store (or replace) the proxy mapping row keyed by the session cookie."""
    key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            cursor = conn.cursor()
            try:
                # Table creation fails harmlessly once it already exists.
                cursor.execute('''CREATE TABLE gxproxy2 (key text PRIMARY KEY, host text, port integer, container_ids text, container_interface text)''')
            except Exception:
                pass
            # Delete-then-insert keeps exactly one row per key.
            delete_sql = '''DELETE FROM gxproxy2 WHERE key=?'''
            insert_sql = '''INSERT INTO gxproxy2 (key, host, port, container_ids, container_interface) VALUES (?, ?, ?, ?, ?)'''
            cursor.execute(delete_sql, (key,))
            cursor.execute(insert_sql, (key, proxy_requests.host, proxy_requests.port, json.dumps(container_ids), container_interface))
            conn.commit()
        finally:
            conn.close()
def set_meta(self, dataset, overwrite=True, **kwd):
    """Record table names, column names, and row counts from the SQLite
    file at ``dataset.file_name`` into the dataset's metadata.

    Failures on an individual table are logged and skipped; a top-level
    failure is logged and leaves metadata unset.
    """
    try:
        tables = []
        columns = dict()
        rowcounts = dict()
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            tables_query = "SELECT name,sql FROM sqlite_master WHERE type='table' ORDER BY name"
            rslt = c.execute(tables_query).fetchall()
            for table, sql in rslt:
                tables.append(table)
                try:
                    # LIMIT 0 returns no rows but still fills
                    # cursor.description with column metadata.
                    col_query = 'SELECT * FROM %s LIMIT 0' % table
                    cur = conn.cursor().execute(col_query)
                    columns[table] = [col[0] for col in cur.description]
                except Exception as exc:
                    # warn() is deprecated in favour of warning().
                    log.warning('%s, set_meta Exception: %s', self, exc)
            for table in tables:
                try:
                    row_query = "SELECT count(*) FROM %s" % table
                    rowcounts[table] = c.execute(row_query).fetchone()[0]
                except Exception as exc:
                    log.warning('%s, set_meta Exception: %s', self, exc)
        finally:
            # The original never closed the connection; do so here.
            conn.close()
        dataset.metadata.tables = tables
        dataset.metadata.table_columns = columns
        dataset.metadata.table_row_count = rowcounts
    except Exception as exc:
        log.warning('%s, set_meta Exception: %s', self, exc)
def __init__(self, source, query=None, headers=False, limit=sys.maxsize, **kwargs):
    """Open a sqlite connection over the source dataset for tabular access.

    ``limit`` caps the number of rows served; ``headers`` controls
    whether a header row is emitted.
    """
    # Stash the query options before delegating to the base provider.
    self.limit = limit
    self.headers = headers
    self.query = query
    self.connection = sqlite.connect(source.dataset.file_name)
    super(SQliteDataTableProvider, self).__init__(source, **kwargs)
def set_meta(self, dataset, overwrite=True, **kwd):
    """Set GeminiSQLite metadata: read the gemini schema version from the
    ``version`` table of the SQLite file, on top of base-class metadata.
    """
    super(GeminiSQLite, self).set_meta(dataset, overwrite=overwrite, **kwd)
    try:
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            tables_query = "SELECT version FROM version"
            result = c.execute(tables_query).fetchall()
            for version, in result:
                dataset.metadata.gemini_version = version
            # TODO: Can/should we detect even more attributes, such as use of PED file, what was input annotation type, etc.
        finally:
            # The original leaked the connection; always release it.
            conn.close()
    except Exception as e:
        # Logger.warn is a deprecated alias for warning().
        log.warning('%s, set_meta Exception: %s', self, e)
def set_meta(self, dataset, overwrite=True, **kwd):
    """Set GeminiSQLite metadata: read the gemini schema version from the
    ``version`` table of the SQLite file, on top of base-class metadata.
    """
    super(GeminiSQLite, self).set_meta(dataset, overwrite=overwrite, **kwd)
    try:
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            tables_query = "SELECT version FROM version"
            result = c.execute(tables_query).fetchall()
            for version, in result:
                dataset.metadata.gemini_version = version
            # TODO: Can/should we detect even more attributes, such as use of PED file, what was input annotation type, etc.
        finally:
            conn.close()
    except Exception as e:
        # 'except Exception, e' is Python 2 only (a syntax error on
        # Python 3); use the 'as' form, and warning() over deprecated warn().
        log.warning('%s, set_meta Exception: %s', self, e)
def update_requests(self, authentication, host=None, port=None):
    """Point an existing session key at a new host/port pair."""
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        db = sqlite.connect(self.proxy_session_map)
        try:
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE gxproxy2 SET host = ?, port = ? WHERE key = ?''',
                (host, port, session_key),
            )
            db.commit()
        finally:
            db.close()
def sniff(self, filename):
    """A GAFA file is a SQLite database containing every expected table."""
    if not super(GAFASQLite, self).sniff(filename):
        return False
    expected_tables = frozenset(['gene', 'gene_family', 'gene_family_member', 'meta', 'transcript'])
    conn = sqlite.connect(filename)
    cursor = conn.cursor()
    tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
    rows = cursor.execute(tables_query).fetchall()
    present_tables = frozenset(row[0] for row in rows)
    # Subset test: every expected table must be present.
    return expected_tables <= present_tables
def set_meta(self, dataset, overwrite=True, **kwd):
    """Set GAFA metadata: read the single schema version from the ``meta``
    table, on top of base-class metadata. Errors are logged, not raised.
    """
    super(GAFASQLite, self).set_meta(dataset, overwrite=overwrite, **kwd)
    try:
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            version_query = 'SELECT version FROM meta'
            results = c.execute(version_query).fetchall()
            if len(results) == 0:
                raise Exception('version not found in meta table')
            elif len(results) > 1:
                raise Exception('Multiple versions found in meta table')
            dataset.metadata.gafa_schema_version = results[0][0]
        finally:
            # The original leaked the connection; always release it.
            conn.close()
    except Exception as e:
        # Logger.warn is a deprecated alias for warning().
        log.warning("%s, set_meta Exception: %s", self, e)
def sniff(self, filename):
    """An MzSQlite file is a SQLite database containing every expected table.

    Returns True/False on a definitive answer; on error the exception is
    logged and the method falls through (returning None, which is falsy).
    """
    if super(MzSQlite, self).sniff(filename):
        mz_table_names = ["DBSequence", "Modification", "Peaks", "Peptide", "PeptideEvidence", "Score", "SearchDatabase", "Source", "SpectraData", "Spectrum", "SpectrumIdentification"]
        try:
            conn = sqlite.connect(filename)
            try:
                c = conn.cursor()
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                result = c.execute(tables_query).fetchall()
                # Materialize a set: under Python 3 a `map` iterator would be
                # exhausted by the first membership test, making later
                # `in` checks vacuously False.
                found_tables = set(row[0] for row in result)
            finally:
                conn.close()
            for table_name in mz_table_names:
                if table_name not in found_tables:
                    return False
            return True
        except Exception as e:
            # Python 3 'as' form replaces the py2-only 'except E, e'.
            log.warning('%s, sniff Exception: %s', self, e)
def sniff(self, filename):
    """A Gemini file is a SQLite database containing every expected table.

    Returns True/False on a definitive answer; on error the exception is
    logged and the method falls through (returning None, which is falsy).
    """
    if super(GeminiSQLite, self).sniff(filename):
        gemini_table_names = ["gene_detailed", "gene_summary", "resources", "sample_genotype_counts", "sample_genotypes", "samples", "variant_impacts", "variants", "version"]
        try:
            conn = sqlite.connect(filename)
            try:
                c = conn.cursor()
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                result = c.execute(tables_query).fetchall()
                # Materialize a set: under Python 3 a `map` iterator would be
                # exhausted by the first membership test.
                found_tables = set(row[0] for row in result)
            finally:
                conn.close()
            for table_name in gemini_table_names:
                if table_name not in found_tables:
                    return False
            return True
        except Exception as e:
            # Python 3 'as' form replaces the py2-only 'except E, e'.
            log.warning('%s, sniff Exception: %s', self, e)
def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
    """Record the secret for a host:port proxy target in the session map."""
    key = "%s:%s" % (proxy_requests.host, proxy_requests.port)
    secure_id = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            c = conn.cursor()
            try:
                # Create table (fails harmlessly once it exists).
                c.execute('''CREATE TABLE gxproxy (key text PRIMARY_KEY, secret text)''')
            except Exception:
                pass
            # Parameterized statement: the cookie value is attacker-influenced
            # text, so it must never be %-interpolated into the SQL.
            insert = '''INSERT INTO gxproxy (key, secret) VALUES (?, ?)'''
            c.execute(insert, (key, secure_id))
            conn.commit()
        finally:
            conn.close()
def handle_requests(self, authentication, proxy_requests):
    """Record the secret for a host:port proxy target in the session map."""
    key = "%s:%s" % (proxy_requests.host, proxy_requests.port)
    secure_id = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            c = conn.cursor()
            try:
                # Create table (fails harmlessly once it exists).
                c.execute('''CREATE TABLE gxproxy (key text PRIMARY_KEY, secret text)''')
            except Exception:
                pass
            # Parameterized statement: never %-interpolate the cookie value
            # (untrusted input) into the SQL text.
            insert = '''INSERT INTO gxproxy (key, secret) VALUES (?, ?)'''
            c.execute(insert, (key, secure_id))
            conn.commit()
        finally:
            conn.close()
def sniff(self, filename):
    """An MzSQlite file is a SQLite database containing every expected table.

    Returns True/False on a definitive answer; on error the exception is
    logged and the method falls through (returning None, which is falsy).
    """
    if super(MzSQlite, self).sniff(filename):
        mz_table_names = [
            "DBSequence", "Modification", "Peaks", "Peptide", "PeptideEvidence",
            "Score", "SearchDatabase", "Source", "SpectraData", "Spectrum",
            "SpectrumIdentification"
        ]
        try:
            conn = sqlite.connect(filename)
            try:
                c = conn.cursor()
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                result = c.execute(tables_query).fetchall()
                # Materialize a set: a `map` iterator would be exhausted by
                # the first membership test under Python 3.
                found_tables = set(row[0] for row in result)
            finally:
                conn.close()
            for table_name in mz_table_names:
                if table_name not in found_tables:
                    return False
            return True
        except Exception as e:
            # 'except Exception, e' is a syntax error on Python 3.
            log.warning('%s, sniff Exception: %s', self, e)
def set_meta(self, dataset, overwrite=True, **kwd):
    """Record table names, column lists (parsed from the CREATE statement),
    and row counts from the SQLite file into the dataset's metadata.

    Any failure is logged and leaves the metadata unset.
    """
    try:
        tables = []
        columns = dict()
        rowcounts = dict()
        conn = sqlite.connect(dataset.file_name)
        try:
            c = conn.cursor()
            tables_query = "SELECT name,sql FROM sqlite_master WHERE type='table' ORDER BY name"
            rslt = c.execute(tables_query).fetchall()
            for table, sql in rslt:
                tables.append(table)
                # Raw strings: '\(' in a plain literal is an invalid escape
                # sequence (a warning, later an error, on modern Python).
                columns[table] = re.sub(r'^.*\((.*)\)$', r'\1', sql).split(',')
            for table in tables:
                row_query = "SELECT count(*) FROM %s" % table
                rowcounts[table] = c.execute(row_query).fetchone()[0]
        finally:
            # The original leaked the connection; always release it.
            conn.close()
        dataset.metadata.tables = tables
        dataset.metadata.table_columns = columns
        dataset.metadata.table_row_count = rowcounts
    except Exception as exc:
        # py3 'as' form replaces py2-only 'except E, exc'; warning() over
        # the deprecated warn() alias.
        log.warning('%s, set_meta Exception: %s', self, exc)
def test_sqlite_exploits():
    # Experiments with abusing sqlite3 connections rather than a test of
    # Galaxy code proper. Background:
    # http://atta.cked.me/home/sqlite3injectioncheatsheet
    conn = sqlite.connect(":memory:")
    conn.execute("create TABLE FOO (foo1 text)")
    __assert_has_n_rows(conn, "select * from FOO", 0)
    # Unknown table must error.
    __assert_query_errors(conn, "select * from FOOX")
    # Stacked statements must be rejected...
    __assert_query_errors(conn, "select * from FOO; select * from FOO")
    # ...as must selecting over PRAGMA output.
    __assert_query_errors(conn, "select * from (PRAGMA database_list)")
    # Read-only subqueries are fine; mutating subqueries are not.
    __assert_has_n_rows(conn, "select * from FOO where foo1 in (SELECT foo1 from FOO)", 0)
    __assert_query_errors(conn, "select * from FOO where foo1 in (INSERT INTO FOO VALUES ('bar')")
def fetch_requests(self, authentication):
    """Return the ProxyMapping stored for this session key, or None if the
    key is unknown (logged as a warning)."""
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        db = sqlite.connect(self.proxy_session_map)
        try:
            cursor = db.cursor()
            cursor.execute(
                '''SELECT host, port, container_ids, container_interface FROM gxproxy2 WHERE key=?''',
                (session_key,),
            )
            row = cursor.fetchone()
            # fetchone() yields None for an unknown key.
            if row is None:
                log.warning('fetch_requests(): invalid key: %s', session_key)
                return None
            host, port, container_ids, container_interface = row
            return ProxyMapping(
                host=host,
                port=port,
                container_ids=json.loads(container_ids),
                container_interface=container_interface,
            )
        finally:
            db.close()
def fetch_requests(self, authentication):
    """Look up the proxy mapping for this session key.

    Returns a ProxyMapping, or None (with a warning) when no row exists.
    """
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        db = sqlite.connect(self.proxy_session_map)
        try:
            cursor = db.cursor()
            query = '''SELECT host, port, container_ids, container_interface FROM gxproxy2 WHERE key=?'''
            cursor.execute(query, (session_key,))
            try:
                # Unpacking raises TypeError when fetchone() returns None.
                host, port, container_ids, container_interface = cursor.fetchone()
            except TypeError:
                log.warning('fetch_requests(): invalid key: %s', session_key)
                return None
            return ProxyMapping(
                host=host,
                port=port,
                container_ids=json.loads(container_ids),
                container_interface=container_interface,
            )
        finally:
            db.close()
def __init__(self, source, query=None, headers=False, limit=sys.maxsize, **kwargs):
    """Open a sqlite connection over the source dataset for tabular access.

    ``limit`` caps the number of rows served; ``headers`` controls whether
    a header row is emitted.
    """
    # sys.maxint was removed in Python 3; maxsize is the portable default.
    self.query = query
    self.headers = headers
    self.limit = limit
    self.connection = sqlite.connect(source.dataset.file_name)
    super(SQliteDataTableProvider, self).__init__(source, **kwargs)
def __init__(self, source, query=None, **kwargs):
    """Open a sqlite connection over the source dataset for dict-style access."""
    self.query = query
    # Connect before delegating so the provider is usable immediately.
    self.connection = sqlite.connect(source.dataset.file_name)
    super(SQliteDataDictProvider, self).__init__(source, **kwargs)
def __init__(self, source, query=None, **kwargs):
    """Open a sqlite connection over the source dataset for dict-style access."""
    self.query = query
    # Connect before delegating so the provider is usable immediately.
    self.connection = sqlite.connect(source.dataset.file_name)
    super().__init__(source, **kwargs)
def __init__(self, source, query=None, **kwargs):
    """Open a sqlite connection over the source dataset for dict-style access."""
    # Record the optional query, then wire up the database connection.
    self.query = query
    self.connection = sqlite.connect(source.dataset.file_name)
    super(SQliteDataDictProvider, self).__init__(source, **kwargs)