def test_interactive_new(self):
    runner = CliRunner()
    pg_i = query_runners.keys().index('pg') + 1
    result = runner.invoke(
        new, input="test\n%s\n\n\nexample.com\n\ntestdb\n" % (pg_i,))
    self.assertFalse(result.exception)
    self.assertEqual(result.exit_code, 0)
    self.assertEqual(DataSource.select().count(), 1)
    ds = DataSource.select().first()
    self.assertEqual(ds.name, 'test')
    self.assertEqual(ds.type, 'pg')
    self.assertEqual(ds.options['dbname'], 'testdb')
def test_options_new(self):
    runner = CliRunner()
    result = runner.invoke(
        new, ['test', '--options', '{"host": "example.com", "dbname": "testdb"}',
              '--type', 'pg'])
    self.assertFalse(result.exception)
    self.assertEqual(result.exit_code, 0)
    self.assertEqual(DataSource.select().count(), 1)
    ds = DataSource.select().first()
    self.assertEqual(ds.name, 'test')
    self.assertEqual(ds.type, 'pg')
    self.assertEqual(ds.options['host'], 'example.com')
    self.assertEqual(ds.options['dbname'], 'testdb')
def test_pause_sets_reason(self):
    admin = self.factory.create_admin()
    rv = self.make_request(
        "post",
        "/api/data_sources/{}/pause".format(self.factory.data_source.id),
        user=admin,
        data={"reason": "testing"},
    )
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(DataSource.get_by_id(self.factory.data_source.id).paused, True)
    self.assertEqual(DataSource.get_by_id(self.factory.data_source.id).pause_reason, "testing")

    rv = self.make_request(
        "post",
        "/api/data_sources/{}/pause?reason=test".format(self.factory.data_source.id),
        user=admin,
    )
    self.assertEqual(DataSource.get_by_id(self.factory.data_source.id).pause_reason, "test")
def test_resumes_data_source(self):
    admin = self.factory.create_admin()
    self.factory.data_source.pause()
    self.factory.data_source.save()
    rv = self.make_request("delete", "/api/data_sources/{}/pause".format(self.factory.data_source.id), user=admin)
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(DataSource.get_by_id(self.factory.data_source.id).paused, False)
def test_adds_data_source_to_default_group(self):
    data_source = DataSource.create_with_group(
        org=self.factory.org, name='test',
        options=ConfigurationContainer.from_json('{"dbname": "test"}'), type='pg')
    self.assertIn(self.factory.org.default_group.id, data_source.groups)
def test_bad_type_new(self):
    runner = CliRunner()
    result = runner.invoke(
        new, ['test', '--type', 'wrong'])
    self.assertTrue(result.exception)
    self.assertEqual(result.exit_code, 1)
    self.assertIn('not supported', result.output)
    self.assertEqual(DataSource.select().count(), 0)
def test_pauses_data_source(self):
    admin = self.factory.create_admin()
    rv = self.make_request('post', '/api/data_sources/{}/pause'.format(
        self.factory.data_source.id), user=admin)
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(
        DataSource.get_by_id(self.factory.data_source.id).paused, True)
def test_options_edit(self):
    self.factory.create_data_source(
        name='test1', type='sqlite',
        options=ConfigurationContainer({"dbpath": "/tmp/test.db"}))
    runner = CliRunner()
    result = runner.invoke(
        edit, ['test1', '--options', '{"host": "example.com", "dbname": "testdb"}',
               '--name', 'test2', '--type', 'pg'])
    self.assertFalse(result.exception)
    self.assertEqual(result.exit_code, 0)
    self.assertEqual(DataSource.select().count(), 1)
    ds = DataSource.select().first()
    self.assertEqual(ds.name, 'test2')
    self.assertEqual(ds.type, 'pg')
    self.assertEqual(ds.options['host'], 'example.com')
    self.assertEqual(ds.options['dbname'], 'testdb')
def test_can_delete_group_with_data_sources(self):
    group = self.factory.create_group()
    data_source = self.factory.create_data_source(group=group)

    response = self.make_request('delete', '/api/groups/{}'.format(group.id),
                                 user=self.factory.create_admin())

    self.assertEqual(response.status_code, 200)
    self.assertEqual(data_source, DataSource.get_by_id(data_source.id))
def test_bad_options_new(self):
    runner = CliRunner()
    result = runner.invoke(
        new, ['test', '--options', '{"host": 12345, "dbname": "testdb"}',
              '--type', 'pg'])
    self.assertTrue(result.exception)
    self.assertEqual(result.exit_code, 1)
    self.assertIn('invalid configuration', result.output)
    self.assertEqual(DataSource.select().count(), 0)
def test_connection_bad_delete(self):
    self.factory.create_data_source(
        name='test1', type='sqlite',
        options=ConfigurationContainer({"dbpath": "/tmp/test.db"}))
    runner = CliRunner()
    result = runner.invoke(delete_ds, ['wrong'])
    self.assertTrue(result.exception)
    self.assertEqual(result.exit_code, 1)
    self.assertIn("Couldn't find", result.output)
    self.assertEqual(DataSource.select().count(), 1)
def test_resumes_data_source(self):
    admin = self.factory.create_admin()
    self.factory.data_source.pause()
    self.factory.data_source.save()
    rv = self.make_request('delete', '/api/data_sources/{}/pause'.format(
        self.factory.data_source.id), user=admin)
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(
        DataSource.get_by_id(self.factory.data_source.id).paused, False)
def test_pause_sets_reason(self):
    admin = self.factory.create_admin()
    rv = self.make_request('post', '/api/data_sources/{}/pause'.format(
        self.factory.data_source.id), user=admin, data={'reason': 'testing'})
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(
        DataSource.get_by_id(self.factory.data_source.id).paused, True)
    self.assertEqual(
        DataSource.get_by_id(self.factory.data_source.id).pause_reason, 'testing')

    rv = self.make_request('post', '/api/data_sources/{}/pause?reason=test'.format(
        self.factory.data_source.id), user=admin)
    self.assertEqual(
        DataSource.get_by_id(self.factory.data_source.id).pause_reason, 'test')
def test_bad_type_edit(self):
    self.factory.create_data_source(
        name='test1', type='sqlite',
        options=ConfigurationContainer({"dbpath": "/tmp/test.db"}))
    runner = CliRunner()
    result = runner.invoke(
        edit, ['test', '--type', 'wrong'])
    self.assertTrue(result.exception)
    self.assertEqual(result.exit_code, 1)
    self.assertIn('not supported', result.output)
    ds = DataSource.select().first()
    self.assertEqual(ds.type, 'sqlite')
def test_updates_data_source(self):
    admin = self.factory.create_admin()
    new_name = "New Name"
    new_options = {"dbname": "newdb"}
    rv = self.make_request(
        "post", self.path,
        data={"name": new_name, "type": "pg", "options": new_options},
        user=admin,
    )

    self.assertEqual(rv.status_code, 200)

    data_source = DataSource.get_by_id(self.factory.data_source.id)

    self.assertEqual(data_source.name, new_name)
    self.assertEqual(data_source.options.to_dict(), new_options)
def test_updates_data_source(self):
    admin = self.factory.create_admin()
    new_name = 'New Name'
    new_options = {"dbname": "newdb"}
    rv = self.make_request('post', self.path,
                           data={'name': new_name, 'type': 'pg', 'options': new_options},
                           user=admin)

    self.assertEqual(rv.status_code, 200)

    data_source = DataSource.get_by_id(self.factory.data_source.id)

    self.assertEqual(data_source.name, new_name)
    self.assertEqual(data_source.options.to_dict(), new_options)
def test_bad_options_edit(self):
    ds = self.factory.create_data_source(
        name='test1', type='sqlite',
        options=ConfigurationContainer({"dbpath": "/tmp/test.db"}))
    runner = CliRunner()
    result = runner.invoke(
        new, ['test', '--options', '{"host": 12345, "dbname": "testdb"}',
              '--type', 'pg'])
    self.assertTrue(result.exception)
    self.assertEqual(result.exit_code, 1)
    self.assertIn('invalid configuration', result.output)
    ds = DataSource.select().first()
    self.assertEqual(ds.type, 'sqlite')
    self.assertEqual(ds.options._config, {"dbpath": "/tmp/test.db"})
# Migrate BigQuery data sources from P12 service-account keys to
# base64-encoded JSON key files.
from base64 import b64encode
import json
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto

    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())


if __name__ == '__main__':
    for ds in DataSource.all():
        if ds.type == 'bigquery':
            options = json.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(
                    json.dumps({
                        'client_email': options['serviceAccount'],
# Migrate BigQuery data sources from P12 service-account keys to
# base64-encoded JSON key files.
from base64 import b64encode
import json
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto

    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())


if __name__ == '__main__':
    for ds in DataSource.select():
        if ds.type == 'bigquery':
            options = json.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(json.dumps({
                    'client_email': options['serviceAccount'],
                    'private_key': convert_p12_to_pem(options['privateKey'])
                }))
            }

            ds.options = json.dumps(new_options)
# Migrate BigQuery data sources from P12 service-account keys to
# base64-encoded JSON key files.
from base64 import b64encode
import simplejson
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto

    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())


if __name__ == '__main__':
    for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):
        if ds.type == 'bigquery':
            options = simplejson.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(
                    simplejson.dumps({
                        'client_email': options['serviceAccount'],
        configuration = {
            "url": old_config["url"]
        }

        if "verify" in old_config:
            configuration['verify'] = old_config['verify']

        if "auth" in old_config:
            configuration['username'], configuration['password'] = old_config["auth"]

        data_source.options = json.dumps(configuration)
    elif data_source.type == 'url':
        data_source.options = json.dumps({"url": data_source.options})
    elif data_source.type == 'script':
        data_source.options = json.dumps({"path": data_source.options})
    elif data_source.type == 'mongo':
        data_source.type = 'mongodb'
    else:
        print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)

    print "[%s] New options: %s" % (data_source.name, data_source.options)

    data_source.save()


if __name__ == '__main__':
    for data_source in DataSource.select():
        update(data_source)
def test_pauses_data_source(self):
    admin = self.factory.create_admin()
    rv = self.make_request("post", "/api/data_sources/{}/pause".format(self.factory.data_source.id), user=admin)
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(DataSource.get_by_id(self.factory.data_source.id).paused, True)
__author__ = "lior" from redash.models import DataSource if __name__ == "__main__": for ds in DataSource.select(DataSource.id, DataSource.type): if ds.type == "elasticsearch": ds.type = "kibana" ds.save(only=ds.dirty_fields)
        configuration = {
            "url": old_config["url"]
        }

        if "verify" in old_config:
            configuration['verify'] = old_config['verify']

        if "auth" in old_config:
            configuration['username'], configuration['password'] = old_config["auth"]

        data_source.options = json.dumps(configuration)
    elif data_source.type == 'url':
        data_source.options = json.dumps({"url": data_source.options})
    elif data_source.type == 'script':
        data_source.options = json.dumps({"path": data_source.options})
    elif data_source.type == 'mongo':
        data_source.type = 'mongodb'
    else:
        print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)

    print "[%s] New options: %s" % (data_source.name, data_source.options)

    data_source.save()


if __name__ == '__main__':
    for data_source in DataSource.all():
        update(data_source)
with db.database.transaction():
    # Add type to groups
    migrate(migrator.add_column('groups', 'type', Group.type))

    for name in ['default', 'admin']:
        group = Group.get(Group.name == name)
        group.type = Group.BUILTIN_GROUP
        group.save()

    # Create association table between data sources and groups
    DataSourceGroup.create_table()

    # add default to existing data source:
    default_org = Organization.get_by_id(1)
    default_group = Group.get(Group.name == "default")

    for ds in DataSource.all(default_org):
        DataSourceGroup.create(data_source=ds, group=default_group)

    # change the groups list on a user object to be an ids list
    migrate(migrator.rename_column('users', 'groups', 'old_groups'))
    migrate(migrator.add_column('users', 'groups', User.groups))

    group_map = dict(map(lambda g: (g.name, g.id), Group.select()))
    user_map = defaultdict(list)

    for user in User.select(User, peewee.SQL('old_groups')):
        group_ids = [group_map[group] for group in user.old_groups]
        user.update_instance(groups=group_ids)

    migrate(migrator.drop_column('users', 'old_groups'))
def test_adds_data_source_to_default_group(self):
    data_source = DataSource.create_with_group(
        org=self.factory.org, name='test',
        options=ConfigurationContainer.from_json('{"dbname": "test"}'), type='pg')
    self.assertIn(self.factory.org.default_group.id, data_source.groups)
def test_adds_data_source_to_default_group(self):
    data_source = DataSource.create_with_group(org=self.factory.org, name='test',
                                               options='{}', type='pg')
    self.assertIn(self.factory.org.default_group.id, data_source.groups)
__author__ = 'lior'

# Rename data sources of type 'elasticsearch' to 'kibana'.
from redash.models import DataSource

if __name__ == '__main__':
    for ds in DataSource.select(DataSource.id, DataSource.type):
        if ds.type == 'elasticsearch':
            ds.type = 'kibana'
            ds.save(only=ds.dirty_fields)
if "verify" in old_config: configuration['verify'] = old_config['verify'] if "auth" in old_config: configuration['username'], configuration['password'] = old_config[ "auth"] data_source.options = json.dumps(configuration) elif data_source.type == 'url': data_source.options = json.dumps({"url": data_source.options}) elif data_source.type == 'script': data_source.options = json.dumps({"path": data_source.options}) elif data_source.type == 'mongo': data_source.type = 'mongodb' else: print("[%s] No need to convert type of: %s" % (data_source.name, data_source.type)) print("[%s] New options: %s" % (data_source.name, data_source.options)) data_source.save(only=data_source.dirty_fields) if __name__ == '__main__': for data_source in DataSource.select(DataSource.id, DataSource.name, DataSource.type, DataSource.options): update(data_source)
__author__ = 'lior'

# Rename data sources of type 'elasticsearch' to 'kibana'.
from redash.models import DataSource

if __name__ == '__main__':
    for ds in DataSource.select():
        if ds.type == 'elasticsearch':
            ds.type = 'kibana'
            ds.save()
# Migrate BigQuery data sources from P12 service-account keys to
# base64-encoded JSON key files.
from base64 import b64encode
import json
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto

    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())


if __name__ == '__main__':
    for ds in DataSource.all():
        if ds.type == 'bigquery':
            options = json.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(json.dumps({
                    'client_email': options['serviceAccount'],
                    'private_key': convert_p12_to_pem(options['privateKey'])
                }))
            }

            ds.options = json.dumps(new_options)
configuration = {"url": old_config["url"]} if "verify" in old_config: configuration['verify'] = old_config['verify'] if "auth" in old_config: configuration['username'], configuration['password'] = old_config[ "auth"] data_source.options = json.dumps(configuration) elif data_source.type == 'url': data_source.options = json.dumps({"url": data_source.options}) elif data_source.type == 'script': data_source.options = json.dumps({"path": data_source.options}) elif data_source.type == 'mongo': data_source.type = 'mongodb' else: print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type) print "[%s] New options: %s" % (data_source.name, data_source.options) data_source.save() if __name__ == '__main__': for data_source in DataSource.select(): update(data_source)
migrate(
    migrator.add_column('groups', 'type', Group.type)
)

for name in ['default', 'admin']:
    group = Group.get(Group.name == name)
    group.type = Group.BUILTIN_GROUP
    group.save()

# Create association table between data sources and groups
DataSourceGroup.create_table()

# add default to existing data source:
default_org = Organization.get_by_id(1)
default_group = Group.get(Group.name == "default")

for ds in DataSource.all(default_org):
    DataSourceGroup.create(data_source=ds, group=default_group)

# change the groups list on a user object to be an ids list
migrate(
    migrator.rename_column('users', 'groups', 'old_groups'),
)
migrate(migrator.add_column('users', 'groups', User.groups))

group_map = dict(map(lambda g: (g.name, g.id), Group.select()))
user_map = defaultdict(list)

for user in User.select(User, peewee.SQL('old_groups')):
    group_ids = [group_map[group] for group in user.old_groups]
    user.update_instance(groups=group_ids)
# Migrate BigQuery data sources from P12 service-account keys to
# base64-encoded JSON key files.
from base64 import b64encode
import simplejson
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto

    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())


if __name__ == '__main__':
    for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):
        if ds.type == 'bigquery':
            options = simplejson.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(simplejson.dumps({
                    'client_email': options['serviceAccount'],
                    'private_key': convert_p12_to_pem(options['privateKey'])
                }))
            }

            ds.options = simplejson.dumps(new_options)
"url": old_config["url"] } if "verify" in old_config: configuration['verify'] = old_config['verify'] if "auth" in old_config: configuration['username'], configuration['password'] = old_config["auth"] data_source.options = json.dumps(configuration) elif data_source.type == 'url': data_source.options = json.dumps({"url": data_source.options}) elif data_source.type == 'script': data_source.options = json.dumps({"path": data_source.options}) elif data_source.type == 'mongo': data_source.type = 'mongodb' else: print("[%s] No need to convert type of: %s" % (data_source.name, data_source.type)) print("[%s] New options: %s" % (data_source.name, data_source.options)) data_source.save(only=data_source.dirty_fields) if __name__ == '__main__': for data_source in DataSource.select(DataSource.id, DataSource.name, DataSource.type, DataSource.options): update(data_source)