def git_repo(self):
    return Git(self.conf_base)
class CannulaTestCase(TransactionTestCase):

    def setUp(self):
        super(CannulaTestCase, self).setUp()
        self.base_dir = conf.CANNULA_BASE
        if not os.path.isdir(self.base_dir):
            os.makedirs(self.base_dir)
        logging.info("Running Setup")
        self.dummy_project = os.path.join(self.base_dir, 'dummy')
        # TODO: Override any thing else?
        try:
            from cannula.api import api
            self.api = api
            # create an admin user
            self.api.users.create('abby', password="******", email='*****@*****.**',
                first_name="Abby", last_name="Admin", is_admin=True)
            self.api.users.create('jim', password="******", email='*****@*****.**',
                first_name="Jim", last_name="User", is_admin=False)
            # Copy the test git project to base_dir
            shutil.copytree(os.path.join(DATA_DIR, 'dummy'), self.dummy_project)
            from cannula.git import Git
            self.dummy = Git(self.dummy_project)
            self.dummy.init()
            self.dummy.add_all()
            self.dummy.commit('initial commit')
            # Write out base supervisor and proxy configs
            self.api.proc.write_main_conf(commit=True)
            self.api.proxy.write_main_conf(commit=True)
            self.api.proc.startup()
        except:
            logging.exception('Setup Failed')
            shutil.rmtree(self.base_dir)
            self.fail("Problem setting up testcase")

    def test_projects(self):
        from cannula.apis import PermissionError
        g1 = self.api.groups.create('testy', 'abby')
        p1 = self.api.projects.create(name='test', user='******', group=g1)
        self.assertRaises(PermissionError, self.api.projects.create,
            name='test2', user='******', group='testy')
        self.assertRaises(ValidationError, self.api.projects.create,
            name='bad name', user='******', group=g1)
        self.assertRaises(ValidationError, self.api.projects.create,
            name='*bad name', user='******', group=g1)
        self.assertEqual(p1.get_absolute_url(), '/testy/test/')
        self.api.projects.initialize('test', user='******')
        self.assertTrue(os.path.isdir(p1.repo_dir))
        self.assertTrue(os.path.isdir(p1.project_dir))
        # Delete
        self.assertRaises(PermissionError, self.api.projects.delete, p1, 'jim')
        self.api.projects.delete(p1, 'abby')

    def test_groups(self):
        from cannula.apis import PermissionError, DuplicateObject
        g1 = self.api.groups.create('test', 'abby')
        self.assertRaises(PermissionError, self.api.groups.create, 'test', 'jim')
        self.assertEqual(g1.get_absolute_url(), '/test/')
        self.assertRaises(DuplicateObject, self.api.groups.create, 'test', 'abby')
        self.assertRaises(ValidationError, self.api.groups.create, 'bad name', 'abby')

    def test_users(self):
        from cannula.apis import DuplicateObject
        self.assertRaises(DuplicateObject, self.api.users.create, 'jim')
        ted = self.api.users.create('ted', password='******')
        self.assertTrue(ted.check_password('lkjh'))
        self.assertFalse(ted.check_password('lskjlskj'))
        self.assertEqual(unicode(ted), 'ted')
        jim = self.api.users.get('jim')
        self.assertEqual(unicode(jim), 'jim')
        self.assertEqual(jim.get_full_name(), 'Jim User')

    def test_permissions(self):
        from cannula.apis import PermissionError
        g1 = self.api.groups.create('test', 'abby')
        self.assertRaises(PermissionError, self.api.groups.delete, g1, 'jim')

    def test_keys(self):
        self.assertRaises(ValidationError, self.api.keys.create, 'jim', 'beans', 'bad_key')
        self.assertRaises(ValidationError, self.api.keys.create, 'jim', 'beans', 'ssh-rsa')
        ssh_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD7bwOVC/d8JI4VS4OD/eJbKIUMJSKamCTz0jSe1dV4FHBioT7+r8HyZwgSKO/iCsc9jnjD5dlMAVhMqWxPeuxgfpd6IT7b8x9XMLKhV8/RORrRsqaT7yVCDgL8tfU0rpNmVUrGJVvRJtqijCNVMmGsVwekZBby3qBP+JIlbCBR2OgJRxhAfySuBqeaZpH2Aefzo9YoXW86LV07LRX0qkf0kCpOc+IR/7hYvgVeNBBKJghc/1B5RPRhts26mKesiPm1l+iwbpoqLV5QMGWadM6Iee4jgYgRA7nOPKHqRbwRrvaEJM/Wh2O9oLflCNa0j9n7D/YtehUYJZ3RjL7lBXcb jim@localhost'
        key = self.api.keys.create('jim', 'beans', ssh_key)
        self.assertEqual(key.ssh_key, ssh_key)

    def test_deploy(self):
        from cannula.utils import shell, call_subprocess
        # Fake a remote push
        g1 = self.api.groups.create('testy', 'abby')
        p1 = self.api.projects.create(name='test', user='******', group=g1)
        self.api.projects.initialize(p1, user='******')
        self.assertTrue(os.path.isfile(p1.post_receive))
        # Persist the data in the test db so that external commands (git) can
        # see the data as well.
        transaction.commit()
        cmd = "%s push %s master" % (conf.CANNULA_GIT_CMD, p1.repo_dir)
        _, cannula_cmd = shell('which cannulactl')
        self.assertTrue(os.path.isfile('/tmp/cannula_test.db'))
        env = {
            'C_USER': '******',
            'DJANGO_SETTINGS_MODULE': 'cannula.test_settings',
            'CANNULA_BASE': self.base_dir,
            'CANNULA_CMD': cannula_cmd.strip(),
            'REPO': 'testy/test.git',
        }
        # call_subprocess(cmd, cwd=self.dummy_project, env=env)
        yaml_file = os.path.join(p1.project_dir, 'app.yaml')
        self.assertTrue(os.path.isfile(yaml_file))
        #self.api.deploy.deploy(p1, 'abby', 'initial commit', 'blah')

    def tearDown(self):
        super(CannulaTestCase, self).tearDown()
        self.api.proc.shutdown()
        shutil.rmtree(self.base_dir)
def deploy(self, project, user, oldrev='old', newrev='new'):
    user = api.users.get(user)
    project = api.projects.get(project)
    if not os.path.isfile(project.appconfig):
        raise ApiError("Project missing app.yaml file!")
    # Attempt to get an exclusive lock on the project
    # file. A way to ensure only one process can deploy
    # at any single time. This is hard because deployment is
    # triggered by a git push (which is just an ssh connection)
    with DeployLock(project, user):
        with open(project.appconfig) as f:
            # Store the configuration for this project in git repo
            # so that we can roll back to previous states
            conf_dir = Git(project.conf_dir)
            if not os.path.isdir(project.conf_dir):
                os.makedirs(project.conf_dir)
                conf_dir.init()
                # Add an initial commit, just to make a rollback point.
                open(project.deployconfig, 'a')
                conf_dir.add_all()
                conf_dir.commit("Initial Commit")

            # Copy the project app.yaml to the conf_dir
            shutil.copy(project.appconfig, project.deployconfig)

            # read in the application configuration
            app = yaml.load(f.read())

            # setup any runtime specific things here
            try:
                runtime = import_object(app.get('runtime'))
            except ImportError:
                raise ApiError("Unsupported runtime!")

            # runtime bootstrap, setup project environment here
            runtime.bootstrap(project, app)

            # Simple counter to make unique names for each handler
            # and keep them in order
            handler_position = 0
            sections = []
            for handler in app.get('handlers', []):
                if handler.get('worker'):
                    # Setup worker
                    name = '%s_%d' % (project.name, handler_position)
                    # defaults are special, they reference another
                    # section in the app.yaml
                    defaults = handler.pop('defaults', None)
                    if defaults:
                        handler_defaults = app.get(defaults, {})
                        handler.update(handler_defaults)
                    handle = Handler(name, project, **handler)
                    # write out bash start up scripts
                    handle.write_startup_script()
                    # add handler to vhost_sections
                    sections.append(handle)
                    handler_position += 1
                else:
                    # Just pass the dictionary to the proxy vhosts
                    sections.append(handler)

            # Write out the proxy file to serve this app
            ctx = {
                'sections': sections,
                'domain': app.get('domain', 'localhost'),
                'runtime': app.get('runtime', 'python'),
                'port': app.get('port', 80),
                'project_conf_dir': project.conf_dir,
                'conf_dir': os.path.join(conf.CANNULA_BASE, 'config'),
                'project': project,
            }
            api.proxy.write_vhost_conf(project, ctx)
            api.proc.write_project_conf(project, ctx)

            # Check if any files changed and check if still valid
            conf_dir.add_all()
            _, changed = conf_dir.status()
            logging.debug(changed)
            if re.search('vhost.conf', changed):
                # Vhost file is either new or changed which will require
                # our proxy server to reload its configuration files.
                try:
                    api.proxy.restart()
                except:
                    logging.exception("Error restarting proxy")
                    conf_dir.reset()
                    raise ApiError("Deployment failed")
            if re.search('supervisor.conf', changed):
                try:
                    api.proc.reread()
                except:
                    logging.exception("Error reading supervisor configs")
                    conf_dir.reset()
                    raise ApiError("Deployment failed")

            # Add the project
            api.proc.reread(stderr=True)
            api.proc.add_project(project.name)

            # Restart the project
            try:
                api.proc.restart(project.name, stderr=True)
            except:
                logging.exception("Error restarting project")
                conf_dir.reset()
                raise ApiError("Deployment failed")

            # Current revision of conf directory
            conf_oldrev = conf_dir.head()
            if changed:
                # Commit config changes
                conf_dir.commit("Configuration: %s" % datetime.datetime.now().ctime())
            # new revision of conf directory
            conf_newrev = conf_dir.head()

            if oldrev is None:
                oldrev = "Initial Commit"

            self._create(project, user, oldrev, newrev, conf_oldrev, conf_newrev)
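
The app.yaml structure that deploy() consumes is only implied by the handler loop above. The sketch below shows the kind of dict yaml.load() would need to return for that loop to work: the key names ('runtime', 'handlers', 'worker', 'defaults', 'domain', 'port') come from the code above, while the concrete values (the dotted runtime path, 'gunicorn', the port numbers, the static-file section) are illustrative assumptions, not taken from the project.

# Illustrative sketch only: a parsed app.yaml as the deploy() handler loop
# expects it. All values below are assumed, not from the cannula source.
app = {
    'runtime': 'cannula.runtimes.python',  # dotted path handed to import_object (assumed name)
    'domain': 'example.com',
    'port': 80,
    # A named section that handlers can pull in via their 'defaults' key.
    'worker_defaults': {
        'port': 8000,  # merged into any handler that sets defaults: worker_defaults
    },
    'handlers': [
        # Truthy 'worker': merged with 'worker_defaults', wrapped in
        # Handler(name, project, **handler), and given a startup script.
        {'worker': 'gunicorn', 'defaults': 'worker_defaults'},
        # No 'worker': passed straight through as a proxy vhost section.
        {'url': '/static/', 'static_dir': 'static'},
    ],
}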