def _run_web_container(self, port, command, address='127.0.0.1', log_syslog=False, datapusher=True):
    """
    Start web container on port with command

    :param port: host port to publish the container's port 5000 on
    :param command: command (list) to run inside the web container
    :param address: host interface to bind (ignored under boot2docker)
    :param log_syslog: if True, send container logs to the host's syslog
    :param datapusher: if True, require a running datapusher container
        and link it in
    :raises DatacatsError: if datapusher is required but not running, or
        if a web container with this name already exists
    """
    if is_boot2docker():
        # boot2docker: the venv lives in a data container, not on the
        # host filesystem, so mount it with --volumes-from instead
        ro = {}
        volumes_from = self._get_container_name('venv')
    else:
        ro = {self.datadir + '/venv': '/usr/lib/ckan'}
        volumes_from = None
    links = {
        self._get_container_name('solr'): 'solr',
        self._get_container_name('postgres'): 'db'
    }
    links.update({self._get_container_name(container): container
                  for container in self.extra_containers})
    if datapusher:
        if 'datapusher' not in self.containers_running():
            # surface the datapusher container's logs as the error text
            raise DatacatsError(container_logs(
                self._get_container_name('datapusher'), "all", False, False))
        links[self._get_container_name('datapusher')] = 'datapusher'
    try:
        run_container(
            name=self._get_container_name('web'),
            image='datacats/web',
            rw={self.sitedir + '/files': '/var/www/storage',
                self.sitedir + '/run/development.ini':
                    '/project/development.ini'},
            ro=dict({
                self.target: '/project/',
                scripts.get_script_path('web.sh'): '/scripts/web.sh',
                scripts.get_script_path('adjust_devini.py'):
                    '/scripts/adjust_devini.py'},
                **ro),
            links=links,
            volumes_from=volumes_from,
            command=command,
            port_bindings={
                5000: port if is_boot2docker() else (address, port)},
            log_syslog=log_syslog
        )
    except APIError as e:
        if '409' in str(e):
            # HTTP 409 conflict: a container with this name already exists
            raise DatacatsError('Web container already running. '
                                'Please stop_web before running.')
        else:
            raise
def start_ckan(self, production=False, log_syslog=False, paster_reload=True, interactive=False):
    """
    Start the apache server or paster serve

    :param log_syslog: A flag to redirect all container logs to host's syslog
    :param production: True for apache, False for paster serve + debug on
    :param paster_reload: Instruct paster to watch for file changes
    :param interactive: passed through to _run_web_container
    """
    self.stop_ckan()
    address = self.address or '127.0.0.1'
    port = self.port
    # in prod we always use log_syslog driver
    log_syslog = True if self.always_prod else log_syslog
    production = production or self.always_prod
    # We only override the site URL with the docker URL on three conditions
    override_site_url = (self.address is None
                         and not is_boot2docker()
                         and not self.site_url)
    command = ['/scripts/web.sh', str(production),
               str(override_site_url), str(paster_reload)]
    # XXX nasty hack, remove this once we have a lessc command
    # for users (not just for building our preload image)
    if not production:
        css = self.target + '/ckan/ckan/public/base/css'
        if not exists(css + '/main.debug.css'):
            from shutil import copyfile
            copyfile(css + '/main.css', css + '/main.debug.css')
    ro = {
        self.target: '/project',
        scripts.get_script_path('datapusher.sh'): '/scripts/datapusher.sh'
    }
    if not is_boot2docker():
        ro[self.datadir + '/venv'] = '/usr/lib/ckan'
    datapusher = self.needs_datapusher()
    if datapusher:
        # datapusher must be up before the web container links to it
        run_container(
            self._get_container_name('datapusher'),
            'datacats/web',
            '/scripts/datapusher.sh',
            ro=ro,
            volumes_from=(self._get_container_name('venv')
                          if is_boot2docker() else None),
            log_syslog=log_syslog)
    # retry on successive ports until one is free
    while True:
        self._create_run_ini(port, production)
        try:
            self._run_web_container(port, command, address,
                                    log_syslog=log_syslog,
                                    datapusher=datapusher,
                                    interactive=interactive)
            if not is_boot2docker():
                self.address = address
        except PortAllocatedError:
            # port taken: advance and regenerate the ini for the new port
            port = self._next_port(port)
            continue
        break
def install_postgis_sql(self):
    """Run the PostGIS installation script against this site's database."""
    container_path = '/scripts/install_postgis.sh'
    host_script = scripts.get_script_path('install_postgis.sh')
    db_links = {self._get_container_name('postgres'): 'db'}
    web_command(
        container_path,
        image='datacats/postgres',
        ro={host_script: container_path},
        links=db_links,
    )
def compile_less(self):
    """Run the lessc container, yield its log lines, then remove it."""
    container = run_container(
        name=self._get_container_name('lessc'),
        image='datacats/lessc',
        rw={self.target: '/project/target'},
        ro={scripts.get_script_path('compile_less.sh'):
            '/project/compile_less.sh'})
    log_stream = container_logs(container['Id'], "all", True, False)
    for line in log_stream:
        yield line
    remove_container(container)
def _two_to_one(datadir):
    """After this command, your environment will be converted to format
    version {} and will not work with Datacats versions beyond and
    including 1.0.0. This format version doesn't support multiple sites,
    and after this only your "primary" site will be usable, though other
    sites will be maintained if you wish to do a migration back to a
    version which supports multisite.

    Would you like to continue the migration? (y/n) [n]:"""
    # NOTE(review): the docstring doubles as the user-facing migration
    # prompt (formatted with the target version number) — keep its text
    # stable.
    _, env_name = _split_path(datadir)
    print 'Making sure that containers are stopped...'
    # New-style names
    remove_container('datacats_web_{}_primary'.format(env_name))
    remove_container('datacats_postgres_{}_primary'.format(env_name))
    remove_container('datacats_solr_{}_primary'.format(env_name))
    print 'Doing conversion...'
    # drop the version marker: format 1 is indicated by its absence
    if exists(path_join(datadir, '.version')):
        os.remove(path_join(datadir, '.version'))
    to_move = (['files', 'passwords.ini', 'run', 'solr'] +
               (['postgres'] if not is_boot2docker() else []))
    # move the primary site's data back up to the top of the datadir
    web_command(
        command=['/scripts/migrate.sh',
                 '/project/data/sites/primary',
                 '/project/data'] + to_move,
        ro={scripts.get_script_path('migrate.sh'): '/scripts/migrate.sh'},
        rw={datadir: '/project/data'}
    )
    pgdata_name = 'datacats_pgdata_{}_primary'.format(env_name)
    if is_boot2docker() and inspect_container(pgdata_name):
        # strip the per-site suffix from the pgdata container name
        rename_container(pgdata_name, 'datacats_pgdata_{}'.format(env_name))
    print 'Doing cleanup...'
    with open(path_join(datadir, 'project-dir')) as pd:
        datacats_env_location = path_join(pd.read(), '.datacats-environment')
    cp = SafeConfigParser()
    cp.read(datacats_env_location)
    # We need to move the port OUT of site_primary section and INTO datacats
    cp.set('datacats', 'port', cp.get('site_primary', 'port'))
    cp.remove_section('site_primary')
    with open(datacats_env_location, 'w') as config:
        cp.write(config)
    cp = SafeConfigParser()
    cp.read(path_join(datadir, 'passwords.ini'))
    # This isn't needed in this version
    cp.remove_option('passwords', 'beaker_session_secret')
    with open(path_join(datadir, 'passwords.ini'), 'w') as config:
        cp.write(config)
def create_ckan_ini(self):
    """
    Use make-config to generate an initial development.ini file
    """
    run_as_user = scripts.get_script_path('run_as_user.sh')
    make_config_cmd = ('/scripts/run_as_user.sh /usr/lib/ckan/bin/paster make-config'
                       ' ckan /project/development.ini')
    self.run_command(
        command=make_config_cmd,
        rw_project=True,
        ro={run_as_user: '/scripts/run_as_user.sh'},
    )
def check_connectivity():
    """Run the connectivity-check script in a throwaway web container."""
    mount_point = '/project/check_connectivity.sh'
    host_script = get_script_path('check_connectivity.sh')
    container = run_container(
        None,
        'datacats/web',
        mount_point,
        ro={host_script: mount_point},
        detach=False)
    return collect_logs(container['Id'])
def clean_virtualenv(self):
    """
    Empty our virtualenv so that new (or older) dependencies may be
    installed
    """
    clean_script = scripts.get_script_path('clean_virtualenv.sh')
    self.user_run_script(script=clean_script, args=[], rw_venv=True)
def remote_server_command(command, environment, user_profile, **kwargs):
    """
    Wraps web_command function with docker bindings needed to connect
    to a remote server (such as datacats.com) and run commands there
    (for example, when you want to copy your catalog to that server).

    The files bound into the docker image include the user's ssh
    credentials:
        ssh_config file,
        rsa and rsa.pub user keys
        known_hosts with public keys of the remote server (if known)

    The **kwargs (keyword arguments) are passed on to the web_command
    call intact, see the web_command's doc string for details.

    :raises WebCommandError: if the command fails on the remote server
    """
    if environment.remote_server_key:
        # Write the key to a named temp file; the object must stay alive
        # (keeping the file on disk) until web_command below has run.
        temp = tempfile.NamedTemporaryFile(mode="wb")
        temp.write(environment.remote_server_key)
        # flush so the key is actually on disk before docker mounts it
        # by name (seek alone only happens to flush on buffered files)
        temp.flush()
        temp.seek(0)
        known_hosts = temp.name
    else:
        known_hosts = get_script_path('known_hosts')
    binds = {
        user_profile.profiledir + '/id_rsa': '/root/.ssh/id_rsa',
        known_hosts: '/root/.ssh/known_hosts',
        get_script_path('ssh_config'): '/etc/ssh/ssh_config'
    }
    # pop() instead of get()+del: the flag is removed from kwargs even
    # when passed as a falsy value, so it never leaks into web_command
    if kwargs.pop("include_project_dir", None):
        binds[environment.target] = '/project'
    kwargs["ro"] = binds
    try:
        web_command(command, **kwargs)
    except WebCommandError as e:
        e.user_description = 'Sending a command to remote server failed'
        # bare raise preserves the original traceback ("raise e" would
        # reset it under Python 2)
        raise
def user_run_script(self, script, args, db_links=False, rw_venv=False,
                    rw_project=False, rw=None, ro=None, stream_output=None):
    """Run a host script inside the container via run_as_user.sh."""
    ro_binds = dict(ro or {})
    ro_binds[scripts.get_script_path('run_as_user.sh')] = '/scripts/run_as_user.sh'
    ro_binds[script] = '/scripts/run.sh'
    return self.run_command(
        command=['/scripts/run_as_user.sh', '/scripts/run.sh'] + args,
        db_links=db_links,
        rw_venv=rw_venv,
        rw_project=rw_project,
        rw=rw,
        ro=ro_binds,
        stream_output=stream_output
    )
def install_package_develop(self, psrc, stream_output=None):
    """
    Install a src package in place (setup.py develop)

    :param psrc: name of directory under project directory
    """
    package_dir = self.target + '/' + psrc
    assert isdir(package_dir), package_dir
    # nothing to do for directories without a setup.py
    if not exists(package_dir + '/setup.py'):
        return
    return self.user_run_script(
        script=scripts.get_script_path('install_package.sh'),
        args=['/project/' + psrc],
        rw_venv=True,
        rw_project=True,
        stream_output=stream_output)
def install_package_develop(self, psrc, stream_output=None):
    """
    Install a src package in place (setup.py develop)

    :param psrc: name of directory under project directory
    """
    pkg_path = self.target + '/' + psrc
    assert isdir(pkg_path), pkg_path
    setup_py = pkg_path + '/setup.py'
    if not exists(setup_py):
        # not a python package; skip silently
        return
    install_script = scripts.get_script_path('install_package.sh')
    return self.user_run_script(
        script=install_script,
        args=['/project/' + psrc],
        rw_venv=True,
        rw_project=True,
        stream_output=stream_output
    )
def purge_data(self, which_sites=None, never_delete=False):
    """
    Remove uploaded files, postgres db, solr index, venv

    :param which_sites: site names to purge; defaults to all sites
    :param never_delete: if True, keep the venv and the datadir itself
        even when the last site is removed
    """
    # Default to the set of all sites
    if not which_sites:
        which_sites = self.sites
    # BUG FIX: iterate over a copy -- the loop below removes entries
    # from self.sites, and when which_sites *is* self.sites, mutating
    # the list during iteration silently skips every other site.
    which_sites = list(which_sites)
    datadirs = []
    boot2docker = is_boot2docker()
    if which_sites:
        if self.target:
            cp = SafeConfigParser()
            cp.read([self.target + '/.datacats-environment'])
        for site in which_sites:
            if boot2docker:
                # boot2docker keeps postgres data in a container
                remove_container(self._get_container_name('pgdata'))
            else:
                datadirs += [site + '/postgres']
            # Always rm the site dir & solr & files
            datadirs += [site, site + '/files', site + '/solr']
            if self.target:
                cp.remove_section('site_' + site)
            self.sites.remove(site)
        if self.target:
            with open(self.target + '/.datacats-environment', 'w') as conf:
                cp.write(conf)
    datadirs = ['sites/' + datadir for datadir in datadirs]
    if not self.sites and not never_delete:
        # last site gone: the shared venv goes too
        datadirs.append('venv')
    web_command(
        command=['/scripts/purge.sh'] +
                ['/project/data/' + d for d in datadirs],
        ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'},
        rw={self.datadir: '/project/data'},
    )
    if not self.sites and not never_delete:
        shutil.rmtree(self.datadir)
def install_package_requirements(self, psrc, stream_output=None):
    """
    Install from requirements.txt file found in psrc

    :param psrc: name of directory in environment directory
    """
    package_dir = self.target + '/' + psrc
    assert isdir(package_dir), package_dir
    # prefer requirements.txt, fall back to pip-requirements.txt
    reqname = None
    for candidate in ('/requirements.txt', '/pip-requirements.txt'):
        if exists(package_dir + candidate):
            reqname = candidate
            break
    if reqname is None:
        return
    return self.user_run_script(
        script=scripts.get_script_path('install_reqs.sh'),
        args=['/project/' + psrc + reqname],
        rw_venv=True,
        rw_project=True,
        stream_output=stream_output)
def create_admin_set_password(self, password):
    """
    create 'admin' account with given password

    :param password: plaintext password for the new admin account
    """
    admin_json = self.sitedir + '/run/admin.json'
    with open(admin_json, 'w') as out:
        json.dump(
            {
                'name': 'admin',
                'email': 'none',
                'password': password,
                'sysadmin': True
            },
            out)
    try:
        self.user_run_script(
            script=scripts.get_script_path('update_add_admin.sh'),
            args=[],
            db_links=True,
            ro={admin_json: '/input/admin.json'},
        )
    finally:
        # BUG FIX: always remove the file -- never leave the plaintext
        # password on disk, even if the admin script fails
        remove(admin_json)
def install_package_requirements(self, psrc, stream_output=None):
    """
    Install from requirements.txt file found in psrc

    :param psrc: name of directory in environment directory
    """
    pkg = self.target + '/' + psrc
    assert isdir(pkg), pkg
    req_file = '/requirements.txt'
    if not exists(pkg + req_file):
        # fall back to the alternate conventional name
        req_file = '/pip-requirements.txt'
        if not exists(pkg + req_file):
            return
    reqs_script = scripts.get_script_path('install_reqs.sh')
    return self.user_run_script(
        script=reqs_script,
        args=['/project/' + psrc + req_file],
        rw_venv=True,
        rw_project=True,
        stream_output=stream_output
    )
def create_admin_set_password(self, password):
    """
    create 'admin' account with given password

    :param password: plaintext password for the new admin account
    """
    admin_json = self.sitedir + '/run/admin.json'
    with open(admin_json, 'w') as out:
        json.dump({
            'name': 'admin',
            'email': 'none',
            'password': password,
            'sysadmin': True}, out)
    try:
        self.user_run_script(
            script=scripts.get_script_path('update_add_admin.sh'),
            args=[],
            db_links=True,
            ro={
                admin_json: '/input/admin.json'
            },
        )
    finally:
        # BUG FIX: remove in a finally block so the plaintext password
        # file is never left behind when the admin script fails
        remove(admin_json)
def purge_data(self, which_sites=None, never_delete=False): """ Remove uploaded files, postgres db, solr index, venv """ # Default to the set of all sites if not exists(self.datadir + '/.version'): format_version = 1 else: with open(self.datadir + '/.version') as f: format_version = int(f.read().strip()) if format_version == 1: print 'WARNING: Defaulting to old purge for version 1.' datadirs = ['files', 'solr'] if is_boot2docker(): remove_container('datacats_pgdata_{}'.format(self.name)) remove_container('datacats_venv_{}'.format(self.name)) else: datadirs += ['postgres', 'venv'] web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) shutil.rmtree(self.datadir) elif format_version == 2: if not which_sites: which_sites = self.sites datadirs = [] boot2docker = is_boot2docker() if which_sites: if self.target: cp = SafeConfigParser() cp.read([self.target + '/.datacats-environment']) for site in which_sites: if boot2docker: remove_container(self._get_container_name('pgdata')) else: datadirs += [site + '/postgres'] # Always rm the site dir & solr & files datadirs += [site, site + '/files', site + '/solr'] if self.target: cp.remove_section('site_' + site) self.sites.remove(site) if self.target: with open(self.target + '/.datacats-environment', 'w') as conf: cp.write(conf) datadirs = ['sites/' + datadir for datadir in datadirs] if not self.sites and not never_delete: datadirs.append('venv') web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) if not self.sites and not never_delete: shutil.rmtree(self.datadir) else: raise DatacatsError( 'Unknown format version {}'.format(format_version))
def interactive_shell(self, command=None, paster=False, detach=False):
    """
    launch interactive shell session with all writable volumes

    :param command: list of strings to execute instead of bash
    :param paster: if True, run the command through paster with the
        site's development.ini
    :param detach: if True, run the docker container in the background
    """
    if not exists(self.target + '/.bash_profile'):
        # this file is required for activating the virtualenv
        self.create_bash_profile()
    if not command:
        command = []
    use_tty = sys.stdin.isatty() and sys.stdout.isatty()
    background = environ.get('CIRCLECI', False) or detach
    if is_boot2docker():
        venv_volumes = ['--volumes-from', self._get_container_name('venv')]
    else:
        venv_volumes = ['-v', self.datadir + '/venv:/usr/lib/ckan:rw']
    self._create_run_ini(self.port, production=False, output='run.ini')
    self._create_run_ini(self.port, production=True, output='test.ini',
                         source='ckan/test-core.ini', override_site_url=False)
    script = scripts.get_script_path('shell.sh')
    if paster:
        script = scripts.get_script_path('paster.sh')
        if command and command != ['help'] and command != ['--help']:
            command += ['--config=/project/development.ini']
        command = [self.extension_dir] + command
    proxy_settings = self._proxy_settings()
    if proxy_settings:
        venv_volumes += [
            '-v',
            self.sitedir + '/run/proxy-environment:/etc/environment:ro'
        ]
    links = {
        self._get_container_name('solr'): 'solr',
        self._get_container_name('postgres'): 'db'
    }
    links.update({
        self._get_container_name(container): container
        for container in self.extra_containers
    })
    link_params = []
    for link in links:
        link_params.append('--link')
        link_params.append(link + ':' + links[link])
    if 'datapusher' in self.containers_running():
        link_params.append('--link')
        link_params.append(
            self._get_container_name('datapusher') + ':datapusher')
    # FIXME: consider switching this to dockerpty
    # using subprocess for docker client's interactive session
    # BUG FIX: previously an empty-string argument ('' when stdin/stdout
    # was not a tty) was passed to the docker CLI, which treats it as a
    # positional argument; only include '-t' when we have a tty.
    return subprocess.call(
        [
            DOCKER_EXE, 'run',
        ] + (['--rm'] if not background else []) +
        (['-t'] if use_tty else []) + [
            '-d' if detach else '-i',
        ] + venv_volumes + [
            '-v', self.target + ':/project:rw',
            '-v', self.sitedir + '/files:/var/www/storage:rw',
            '-v', script + ':/scripts/shell.sh:ro',
            '-v', scripts.get_script_path('paster_cd.sh') +
            ':/scripts/paster_cd.sh:ro',
            '-v', self.sitedir + '/run/run.ini:/project/development.ini:ro',
            '-v', self.sitedir + '/run/test.ini:/project/ckan/test-core.ini:ro'
        ] + link_params +
        ['--hostname', self.name, 'datacats/web', '/scripts/shell.sh'] +
        command)
def _run_web_container(self, port, command, address, log_syslog=False, datapusher=True, interactive=False):
    """
    Start web container on port with command

    :param port: host port to publish the container's port 5000 on
    :param command: command (list) to run inside the web container
    :param address: host interface to bind (ignored under boot2docker)
    :param log_syslog: if True, send container logs to the host's syslog
    :param datapusher: if True, require and link a running datapusher
    :param interactive: if True, run docker in the foreground through
        subprocess instead of via the API client
    :raises DatacatsError: if datapusher is required but not running, or
        if a web container with this name already exists
    """
    if is_boot2docker():
        # boot2docker: venv lives in a data container, mounted below
        ro = {}
        volumes_from = self._get_container_name('venv')
    else:
        ro = {self.datadir + '/venv': '/usr/lib/ckan'}
        volumes_from = None
    links = {
        self._get_container_name('solr'): 'solr',
        self._get_container_name('postgres'): 'db'
    }
    links.update({
        self._get_container_name(container): container
        for container in self.extra_containers
    })
    if datapusher:
        if 'datapusher' not in self.containers_running():
            # surface the datapusher container's logs as the error text
            raise DatacatsError(
                container_logs(self._get_container_name('datapusher'),
                               "all", False, False))
        links[self._get_container_name('datapusher')] = 'datapusher'
    ro = dict(
        {
            self.target: '/project/',
            scripts.get_script_path('web.sh'): '/scripts/web.sh',
            scripts.get_script_path('adjust_devini.py'):
                '/scripts/adjust_devini.py'
        },
        **ro)
    rw = {
        self.sitedir + '/files': '/var/www/storage',
        self.sitedir + '/run/development.ini': '/project/development.ini'
    }
    try:
        if not interactive:
            run_container(name=self._get_container_name('web'),
                          image='datacats/web',
                          rw=rw,
                          ro=ro,
                          links=links,
                          volumes_from=volumes_from,
                          command=command,
                          port_bindings={
                              5000: port if is_boot2docker()
                              else (address, port)
                          },
                          log_syslog=log_syslog)
        else:
            # FIXME: share more code with interactive_shell
            if is_boot2docker():
                switches = [
                    '--volumes-from', self._get_container_name('pgdata'),
                    '--volumes-from', self._get_container_name('venv')
                ]
            else:
                switches = []
            # translate the ro/rw dicts into docker CLI --volume switches
            switches += [
                '--volume={}:{}:ro'.format(vol, ro[vol]) for vol in ro
            ]
            switches += [
                '--volume={}:{}'.format(vol, rw[vol]) for vol in rw
            ]
            links = [
                '--link={}:{}'.format(link, links[link]) for link in links
            ]
            args = ['docker', 'run', '-it',
                    '--name', self._get_container_name('web'),
                    '-p', '{}:5000'.format(port) if is_boot2docker()
                    else '{}:{}:5000'.format(address, port)] + \
                switches + links + ['datacats/web', ] + command
            subprocess.call(args)
    except APIError as e:
        if '409' in str(e):
            # HTTP 409 conflict: a container with this name already exists
            raise DatacatsError('Web container already running. '
                                'Please stop_web before running.')
        else:
            raise
def _run_web_container(self, port, command, address, log_syslog=False, datapusher=True, interactive=False):
    """
    Start web container on port with command

    :param port: host port to publish the container's port 5000 on
    :param command: command (list) to run inside the web container
    :param address: host interface to bind (ignored under boot2docker)
    :param log_syslog: if True, send container logs to the host's syslog
    :param datapusher: if True, require and link a running datapusher
    :param interactive: if True, run docker in the foreground through
        subprocess instead of via the API client
    :raises DatacatsError: if datapusher is required but not running, or
        if a web container with this name already exists
    """
    if is_boot2docker():
        # boot2docker: venv lives in a data container, mounted below
        ro = {}
        volumes_from = self._get_container_name('venv')
    else:
        ro = {self.datadir + '/venv': '/usr/lib/ckan'}
        volumes_from = None
    links = {
        self._get_container_name('solr'): 'solr',
        self._get_container_name('postgres'): 'db'
    }
    links.update({self._get_container_name(container): container
                  for container in self.extra_containers})
    if datapusher:
        if 'datapusher' not in self.containers_running():
            # surface the datapusher container's logs as the error text
            raise DatacatsError(container_logs(
                self._get_container_name('datapusher'), "all", False, False))
        links[self._get_container_name('datapusher')] = 'datapusher'
    ro = dict({
        self.target: '/project/',
        scripts.get_script_path('web.sh'): '/scripts/web.sh',
        scripts.get_script_path('adjust_devini.py'):
            '/scripts/adjust_devini.py'},
        **ro)
    rw = {
        self.sitedir + '/files': '/var/www/storage',
        self.sitedir + '/run/development.ini': '/project/development.ini'
    }
    try:
        if not interactive:
            run_container(
                name=self._get_container_name('web'),
                image='datacats/web',
                rw=rw,
                ro=ro,
                links=links,
                volumes_from=volumes_from,
                command=command,
                port_bindings={
                    5000: port if is_boot2docker() else (address, port)},
                log_syslog=log_syslog
            )
        else:
            # FIXME: share more code with interactive_shell
            if is_boot2docker():
                switches = ['--volumes-from',
                            self._get_container_name('pgdata'),
                            '--volumes-from',
                            self._get_container_name('venv')]
            else:
                switches = []
            # translate the ro/rw dicts into docker CLI --volume switches
            switches += ['--volume={}:{}:ro'.format(vol, ro[vol])
                         for vol in ro]
            switches += ['--volume={}:{}'.format(vol, rw[vol])
                         for vol in rw]
            links = ['--link={}:{}'.format(link, links[link])
                     for link in links]
            args = ['docker', 'run', '-it',
                    '--name', self._get_container_name('web'),
                    '-p', '{}:5000'.format(port) if is_boot2docker()
                    else '{}:{}:5000'.format(address, port)] + \
                switches + links + ['datacats/web', ] + command
            subprocess.call(args)
    except APIError as e:
        if '409' in str(e):
            # HTTP 409 conflict: a container with this name already exists
            raise DatacatsError('Web container already running. '
                                'Please stop_web before running.')
        else:
            raise
def purge_data(self, which_sites=None, never_delete=False): """ Remove uploaded files, postgres db, solr index, venv """ # Default to the set of all sites if not exists(self.datadir + '/.version'): format_version = 1 else: with open(self.datadir + '/.version') as f: format_version = int(f.read().strip()) if format_version == 1: print 'WARNING: Defaulting to old purge for version 1.' datadirs = ['files', 'solr'] if is_boot2docker(): remove_container('datacats_pgdata_{}'.format(self.name)) remove_container('datacats_venv_{}'.format(self.name)) else: datadirs += ['postgres', 'venv'] web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) shutil.rmtree(self.datadir) elif format_version == 2: if not which_sites: which_sites = self.sites datadirs = [] boot2docker = is_boot2docker() if which_sites: if self.target: cp = SafeConfigParser() cp.read([self.target + '/.datacats-environment']) for site in which_sites: if boot2docker: remove_container(self._get_container_name('pgdata')) else: datadirs += [site + '/postgres'] # Always rm the site dir & solr & files datadirs += [site, site + '/files', site + '/solr'] if self.target: cp.remove_section('site_' + site) self.sites.remove(site) if self.target: with open(self.target + '/.datacats-environment', 'w') as conf: cp.write(conf) datadirs = ['sites/' + datadir for datadir in datadirs] if not self.sites and not never_delete: datadirs.append('venv') web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) if not self.sites and not never_delete: shutil.rmtree(self.datadir) else: raise DatacatsError('Unknown format version {}'.format(format_version))
def _one_to_two(datadir):
    """After this command, your environment will be converted to format
    version {}. and will only work with datacats version exceeding and
    including 1.0.0. This migration is necessary to support multiple
    sites within the same environment. Your current site will be kept
    and will be named "primary".

    Would you like to continue the migration? (y/n) [n]:"""
    # NOTE(review): the docstring doubles as the user-facing migration
    # prompt (formatted with the target version number) — keep its text
    # stable.
    new_site_name = 'primary'
    split = _split_path(datadir)
    print 'Making sure that containers are stopped...'
    env_name = split[1]
    # Old-style names on purpose! We need to stop old containers!
    remove_container('datacats_web_' + env_name)
    remove_container('datacats_solr_' + env_name)
    remove_container('datacats_postgres_' + env_name)
    print 'Doing conversion...'
    # Begin the actual conversion
    to_move = (['files', 'passwords.ini', 'run', 'solr'] +
               (['postgres'] if not is_boot2docker() else []))
    # Make a primary site
    site_path = path_join(datadir, 'sites', new_site_name)
    if not exists(site_path):
        makedirs(site_path)
    web_command(
        command=['/scripts/migrate.sh',
                 '/project/data',
                 '/project/data/sites/' + new_site_name] + to_move,
        ro={scripts.get_script_path('migrate.sh'): '/scripts/migrate.sh'},
        rw={datadir: '/project/data'},
        clean_up=True
    )
    if is_boot2docker():
        # pgdata containers gain a per-site suffix in the new layout
        rename_container('datacats_pgdata_' + env_name,
                         'datacats_pgdata_' + env_name + '_' + new_site_name)
    # Lastly, grab the project directory and update the ini file
    with open(path_join(datadir, 'project-dir')) as pd:
        project = pd.read()
    cp = SafeConfigParser()
    config_loc = path_join(project, '.datacats-environment')
    cp.read([config_loc])
    new_section = 'site_' + new_site_name
    cp.add_section(new_section)
    # Ports need to be moved into the new section
    port = cp.get('datacats', 'port')
    cp.remove_option('datacats', 'port')
    cp.set(new_section, 'port', port)
    with open(config_loc, 'w') as config:
        cp.write(config)
    # Make a session secret for it (make it per-site)
    cp = SafeConfigParser()
    config_loc = path_join(site_path, 'passwords.ini')
    cp.read([config_loc])
    # Generate a new secret
    cp.set('passwords', 'beaker_session_secret', generate_password())
    with open(config_loc, 'w') as config:
        cp.write(config)
    with open(path_join(datadir, '.version'), 'w') as f:
        f.write('2')
def install_extra(self):
    """Install extra packages into the venv via install_extra_packages.sh."""
    extra_script = scripts.get_script_path('install_extra_packages.sh')
    self.user_run_script(
        script=extra_script,
        args=[],
        rw_venv=True
    )
def check_connectivity():
    """Verify network connectivity from within a datacats/web container."""
    target = '/project/check_connectivity.sh'
    c = run_container(None, 'datacats/web', target,
                      ro={get_script_path('check_connectivity.sh'): target},
                      detach=False)
    return collect_logs(c['Id'])
def install_extra(self):
    """Run install_extra_packages.sh with a writable virtualenv."""
    self.user_run_script(
        script=scripts.get_script_path('install_extra_packages.sh'),
        args=[],
        rw_venv=True,
    )
def interactive_shell(self, command=None, paster=False, detach=False):
    """
    launch interactive shell session with all writable volumes

    :param command: list of strings to execute instead of bash
    :param paster: if True, run the command through paster with the
        site's development.ini
    :param detach: if True, run the docker container in the background
    """
    if not exists(self.target + '/.bash_profile'):
        # this file is required for activating the virtualenv
        self.create_bash_profile()
    if not command:
        command = []
    use_tty = sys.stdin.isatty() and sys.stdout.isatty()
    background = environ.get('CIRCLECI', False) or detach
    if is_boot2docker():
        venv_volumes = ['--volumes-from', self._get_container_name('venv')]
    else:
        venv_volumes = ['-v', self.datadir + '/venv:/usr/lib/ckan:rw']
    self._create_run_ini(self.port, production=False, output='run.ini')
    self._create_run_ini(self.port, production=True, output='test.ini',
                         source='ckan/test-core.ini', override_site_url=False)
    script = scripts.get_script_path('shell.sh')
    if paster:
        script = scripts.get_script_path('paster.sh')
        if command and command != ['help'] and command != ['--help']:
            command += ['--config=/project/development.ini']
        command = [self.extension_dir] + command
    proxy_settings = self._proxy_settings()
    if proxy_settings:
        venv_volumes += ['-v', self.sitedir +
                         '/run/proxy-environment:/etc/environment:ro']
    links = {self._get_container_name('solr'): 'solr',
             self._get_container_name('postgres'): 'db'}
    links.update({self._get_container_name(container): container
                  for container in self.extra_containers})
    link_params = []
    for link in links:
        link_params.append('--link')
        link_params.append(link + ':' + links[link])
    # FIXME: consider switching this to dockerpty
    # using subprocess for docker client's interactive session
    # BUG FIX: previously an empty-string argument ('' when stdin/stdout
    # was not a tty) was passed to the docker CLI, which treats it as a
    # positional argument; only include '-t' when we have a tty.
    return subprocess.call([
        DOCKER_EXE, 'run',
    ] + (['--rm'] if not background else []) +
        (['-t'] if use_tty else []) + [
        '-d' if detach else '-i',
    ] + venv_volumes + [
        '-v', self.target + ':/project:rw',
        '-v', self.sitedir + '/files:/var/www/storage:rw',
        '-v', script + ':/scripts/shell.sh:ro',
        '-v', scripts.get_script_path('paster_cd.sh') +
        ':/scripts/paster_cd.sh:ro',
        '-v', self.sitedir + '/run/run.ini:/project/development.ini:ro',
        '-v', self.sitedir + '/run/test.ini:/project/ckan/test-core.ini:ro'] +
        link_params +
        (['--link', self._get_container_name('datapusher') + ':datapusher']
         if self.needs_datapusher() else []) +
        ['--hostname', self.name, 'datacats/web', '/scripts/shell.sh'] +
        command)
def _run_web_container(self, port, command, address='127.0.0.1', log_syslog=False, datapusher=True):
    """
    Start web container on port with command

    :param port: host port to publish the container's port 5000 on
    :param command: command (list) to run inside the web container
    :param address: host interface to bind (ignored under boot2docker)
    :param log_syslog: if True, send container logs to the host's syslog
    :param datapusher: if True, require a running datapusher container
        and link it in
    :raises DatacatsError: if datapusher is required but not running, or
        if a web container with this name already exists
    """
    if is_boot2docker():
        # boot2docker: the venv lives in a data container, not on the
        # host filesystem, so mount it with --volumes-from instead
        ro = {}
        volumes_from = self._get_container_name('venv')
    else:
        ro = {self.datadir + '/venv': '/usr/lib/ckan'}
        volumes_from = None
    links = {
        self._get_container_name('solr'): 'solr',
        self._get_container_name('postgres'): 'db'
    }
    links.update({
        self._get_container_name(container): container
        for container in self.extra_containers
    })
    if datapusher:
        if 'datapusher' not in self.containers_running():
            # surface the datapusher container's logs as the error text
            raise DatacatsError(
                container_logs(self._get_container_name('datapusher'),
                               "all", False, False))
        links[self._get_container_name('datapusher')] = 'datapusher'
    try:
        run_container(name=self._get_container_name('web'),
                      image='datacats/web',
                      rw={
                          self.sitedir + '/files': '/var/www/storage',
                          self.sitedir + '/run/development.ini':
                              '/project/development.ini'
                      },
                      ro=dict(
                          {
                              self.target: '/project/',
                              scripts.get_script_path('web.sh'):
                                  '/scripts/web.sh',
                              scripts.get_script_path('adjust_devini.py'):
                                  '/scripts/adjust_devini.py'
                          },
                          **ro),
                      links=links,
                      volumes_from=volumes_from,
                      command=command,
                      port_bindings={
                          5000: port if is_boot2docker() else (address, port)
                      },
                      log_syslog=log_syslog)
    except APIError as e:
        if '409' in str(e):
            # HTTP 409 conflict: a container with this name already exists
            raise DatacatsError('Web container already running. '
                                'Please stop_web before running.')
        else:
            raise