def _remote_dump(host, appid, output, sudo=False):
    # XXX generate unique/portable file name
    from datetime import date
    filename = '%s-%s.tgz' % (appid, date.today().strftime('%Y-%m-%d'))
    dmpcmd = 'cubicweb-ctl db-dump -o /tmp/%s %s' % (filename, appid)
    if sudo:
        dmpcmd = 'sudo %s' % (dmpcmd)
    dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd)
    print(dmpcmd)
    if os.system(dmpcmd):
        raise ExecutionError('Error while dumping the database')
    if output is None:
        output = filename
    cmd = 'scp %s:/tmp/%s %s' % (host, filename, output)
    print(cmd)
    if os.system(cmd):
        raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename)
    rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename)
    print(rmcmd)
    if os.system(rmcmd) and not ASK.confirm(
            'An error occurred while deleting remote dump at /tmp/%s. '
            'Continue anyway?' % filename):
        raise ExecutionError('Error while deleting remote dump at /tmp/%s' % filename)
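
# Usage sketch for _remote_dump (the host and instance id below are
# hypothetical; `os`, `ASK` and `ExecutionError` are assumed to be available
# at module level, as in the code above). Since it shells out to ssh/scp, it
# only succeeds against a reachable host with cubicweb-ctl installed.
def _example_remote_dump():
    # dump 'myinstance' on dbhost.example.com to /tmp/myinstance-<date>.tgz,
    # copy the archive into the current directory, then delete the remote file
    _remote_dump('dbhost.example.com', 'myinstance', output=None, sudo=True)
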
def run(self, args):
    """run the command with its specific arguments"""
    appid = args[0]
    configs = [cwcfg.config_for(appid, configname)
               for configname in cwcfg.possible_configurations(appid)]
    if not configs:
        raise ExecutionError('unable to guess configuration for %s' % appid)
    for config in configs:
        helper = self.config_helper(config, required=False)
        if helper:
            helper.cleanup()
        # remove home
        rm(config.apphome)
        # remove instance data directory
        try:
            rm(config.appdatahome)
        except OSError as ex:
            import errno
            if ex.errno != errno.ENOENT:
                raise
    confignames = ', '.join([config.name for config in configs])
    print('-> instance %s (%s) deleted.' % (appid, confignames))
def read_config(config_file, raise_if_unreadable=False):
    """read some simple configuration from `config_file` and return it as a
    dictionary. If `raise_if_unreadable` is false (the default), an empty
    dictionary will be returned if the file is nonexistent or unreadable,
    else :exc:`ExecutionError` will be raised.
    """
    from logilab.common.fileutils import lines
    config = current = {}
    try:
        for line in lines(config_file, comments='#'):
            try:
                option, value = line.split('=', 1)
            except ValueError:
                option = line.strip().lower()
                if option[0] == '[':
                    # start a section
                    section = option[1:-1]
                    assert section not in config, \
                        'Section %s is defined more than once' % section
                    config[section] = current = {}
                    continue
                sys.stderr.write('ignoring malformed line\n%r\n' % line)
                continue
            option = option.strip().replace(' ', '_')
            value = option_value_from_env(option, value.strip())
            current[option] = value or None
    except IOError as ex:
        if raise_if_unreadable:
            raise ExecutionError('%s. Are you logged in with the correct user '
                                 'to use this instance?' % ex)
        else:
            warning('missing or unreadable configuration file %s (%s)',
                    config_file, ex)
    return config
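
# Runnable sketch for read_config (assumes the function above is importable in
# the current namespace and that no CW_* environment variable overrides the
# options, since option_value_from_env may substitute values taken from the
# environment). It demonstrates the "[SECTION]" / "option=value" format parsed
# above.
def _example_read_config():
    import os
    import tempfile
    content = '[MAIN]\n# comment lines are skipped\ndb-driver=postgres\ndb-host=\n'
    with tempfile.NamedTemporaryFile('w', suffix='.conf', delete=False) as fobj:
        fobj.write(content)
        path = fobj.name
    try:
        conf = read_config(path)
        # section names are lowercased, empty values are stored as None
        assert conf == {'main': {'db-driver': 'postgres', 'db-host': None}}
    finally:
        os.unlink(path)
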
def get_versions(self, checkversions=False):
    """Return a dictionary mapping each cube used by this instance to its
    version, including the cubicweb version.

    This is a public method, not requiring a session id.
    """
    from logilab.common.changelog import Version
    vcconf = {}
    with self.internal_cnx() as cnx:
        for pk, version in cnx.execute(
                'Any K,V WHERE P is CWProperty, P value V, P pkey K, '
                'P pkey ~="system.version.%"', build_descr=False):
            cube = pk.split('.')[-1]
            # XXX cubicweb migration
            if cube in CW_MIGRATION_MAP:
                cube = CW_MIGRATION_MAP[cube]
            version = Version(version)
            vcconf[cube] = version
            if checkversions:
                if cube != 'cubicweb':
                    fsversion = self.config.cube_version(cube)
                else:
                    fsversion = self.config.cubicweb_version()
                if version < fsversion:
                    msg = ('instance has %s version %s but %s '
                           'is installed. Run "cubicweb-ctl upgrade %s".')
                    raise ExecutionError(msg % (cube, version, fsversion,
                                                self.config.appid))
    return vcconf
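
# Sketch of how the result is typically consumed (`repo` stands for an
# already-initialised repository instance exposing the method above; the cube
# names and versions in the comment are invented for illustration).
def _example_get_versions(repo):
    versions = repo.get_versions(checkversions=True)
    # e.g. {'cubicweb': Version('3.24.0'), 'blog': Version('1.10.0')}
    for cube, version in sorted(versions.items()):
        print('%s %s' % (cube, version))
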
def cleanup(self):
    """remove instance's configuration and database"""
    source = self.config.system_source_config
    for msg, step, default in self._cleanup_steps(source):
        if ASK.confirm(msg, default_is_yes=default):
            try:
                step(source)
            except Exception as exc:
                print('ERROR', exc)
                if ASK.confirm('An error occurred. Continue anyway?',
                               default_is_yes=False):
                    continue
                raise ExecutionError(str(exc))
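
# Hedged sketch of the (message, step, default) triples that cleanup() expects
# from self._cleanup_steps(); the messages and lambdas below are invented for
# illustration of the contract only.
def _example_cleanup_steps(source):
    # each entry: (confirmation message, callable taking the source config,
    #              whether "yes" is the default answer)
    return [
        ('Delete instance database?', lambda src: print('dropping db', src), True),
        ('Delete instance user?', lambda src: print('dropping user', src), False),
    ]
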
def wait_process_end(pid, maxtry=10, waittime=1):
    """wait for a process to actually die"""
    import signal
    from time import sleep
    nbtry = 0
    while nbtry < maxtry:
        try:
            kill(pid, signal.SIGUSR1)
        except (OSError, AttributeError):  # XXX win32
            break
        nbtry += 1
        sleep(waittime)
    else:
        raise ExecutionError('can\'t kill process %s' % pid)
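
# Runnable sketch for wait_process_end (assumes a POSIX system with a `sleep`
# binary and the module-level `kill` used above, i.e. os.kill): stop a helper
# process, reap it, then confirm its PID is really gone.
def _example_wait_process_end():
    import subprocess
    proc = subprocess.Popen(['sleep', '60'])
    proc.terminate()
    proc.wait()                 # reap the child so the PID disappears
    wait_process_end(proc.pid)  # returns once kill() raises OSError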
def run(self, args):
    """run the command with its specific arguments"""
    if args:
        cubes = [DevConfiguration.cube_dir(cube) for cube in args]
    else:
        cubes = [DevConfiguration.cube_dir(cube)
                 for cube in DevConfiguration.available_cubes()]
        cubes = [cubepath for cubepath in cubes
                 if osp.exists(osp.join(cubepath, 'i18n'))]
    if not update_cubes_catalogs(cubes):
        raise ExecutionError("update cubes i18n catalog failed")
def run(self, args):
    from cubicweb import repoapi
    from cubicweb.cwctl import init_cmdline_log_threshold
    config = ServerConfiguration.config_for(args[0])
    config.global_set_option('log-file', None)
    config.log_format = '%(levelname)s %(name)s: %(message)s'
    init_cmdline_log_threshold(config, self['loglevel'])
    repo = repoapi.get_repository(config=config)
    repo.hm.call_hooks('server_maintenance', repo=repo)
    errors = False
    with repo.internal_cnx() as cnx:
        sources = []
        if len(args) >= 2:
            for name in args[1:]:
                try:
                    source = repo.source_by_uri(name)
                except ValueError:
                    cnx.error('no source named %r' % name)
                    errors = True
                else:
                    sources.append(source)
        else:
            for uri, source in repo.sources_by_uri.items():
                if (uri != 'system'
                        and repo.config.source_enabled(source)
                        and source.config['synchronize']):
                    sources.append(source)
        for source in sources:
            try:
                stats = source.pull_data(cnx, force=self['force'],
                                         raise_on_error=True)
            except Exception:
                cnx.exception('while trying to update source %s', source)
                errors = True
            else:
                for key, val in stats.items():
                    if val:
                        print(key, ':', val)
    if errors:
        raise ExecutionError('Not all sources were synced')
def daemonize(self, pid_file):
    pid = live_pidfile(pid_file)
    if pid:
        raise ExecutionError(
            "Daemon is already running (PID: %s from PID file %s)"
            % (pid, pid_file))
    self.debug('Entering daemon mode')
    pid = os.fork()
    if pid:
        # The forked process also has a handle on resources, so we
        # *don't* want proper termination of the process, we just
        # want to exit quick (which os._exit() does)
        os._exit(0)
    # Make this the session leader
    os.setsid()
    # Fork again for good measure!
    pid = os.fork()
    if pid:
        os._exit(0)
    # @@: Should we set the umask and cwd now?
    import resource  # Resource usage information.
    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if (maxfd == resource.RLIM_INFINITY):
        maxfd = MAXFD
    # Iterate through and close all file descriptors.
    for fd in range(0, maxfd):
        try:
            os.close(fd)
        except OSError:
            # ERROR, fd wasn't open to begin with (ignored)
            pass
    if (hasattr(os, "devnull")):
        REDIRECT_TO = os.devnull
    else:
        REDIRECT_TO = "/dev/null"
    os.open(REDIRECT_TO, os.O_RDWR)  # standard input (0)
    # Duplicate standard input to standard output and standard error.
    os.dup2(0, 1)  # standard output (1)
    os.dup2(0, 2)  # standard error (2)
def generate_static_dir(self, config, dest=None, ask_clean=False, repo=None):
    if not dest:
        dest = config['staticdir-path']
    if not dest:
        dest = osp.join(config.appdatahome, 'data')
    if osp.exists(dest):
        if (config.verbosity
                and (not ask_clean
                     or not (config.verbosity
                             and ASK.confirm('Remove existing data directory %s?' % dest)))):
            raise ExecutionError('Directory %s already exists. '
                                 'Remove it first.' % dest)
        rmtreecontent(dest)
    config.quick_start = True  # notify this is not a regular start
    # list all resources (no matter their order)
    resources = set()
    for datadir in self._datadirs(config, repo=repo):
        for dirpath, dirnames, filenames in os.walk(datadir):
            rel_dirpath = dirpath[len(datadir) + 1:]
            resources.update(osp.join(rel_dirpath, f) for f in filenames)
    # locate resources and copy them to destination
    for resource in resources:
        dest_resource = osp.join(dest, resource)
        dirname = osp.dirname(dest_resource)
        if not osp.isdir(dirname):
            os.makedirs(dirname)
        resource_dir, resource_path = config.locate_resource(resource)
        copy(osp.join(resource_dir, resource_path), dest_resource)
    # handle md5 version subdirectory
    linkdir(dest, osp.join(dest, config.instance_md5_version()))
    # ensure generated files are owned by configured uid
    config.ensure_uid_directory(dest)
    print('You can use the Apache rewrite rule below:\n'
          'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
try:
    add_cube('geocoding', update_database=False)
except ConfigurationError:
    if not confirm('In cubicweb 3.17 geocoding views have been moved to the '
                   'geocoding cube, which is not installed. Continue anyway?'):
        raise

if applcubicwebversion <= (3, 14, 0) and cubicwebversion >= (3, 14, 0):
    if 'require_permission' in schema and 'localperms' not in repo.config.cubes():
        from cubicweb import ExecutionError
        try:
            add_cube('localperms', update_database=False)
        except ConfigurationError:
            raise ExecutionError('In cubicweb 3.14, CWPermission and related stuff '
                                 'has been moved to cube localperms. Install it first.')

if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0):
    CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
                       ask_confirm=False))
    _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'CWGroup')
    _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'RQLExpression')
    rql('SET X update_permission Y WHERE X is CWAttribute, X add_permission Y')
    drop_relation_definition('CWAttribute', 'delete_permission', 'CWGroup')
    drop_relation_definition('CWAttribute', 'delete_permission', 'RQLExpression')
def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs):
    """execute a migration script in interactive mode

    Display the migration script path, ask for confirmation and execute it
    if confirmed.

    Allowed input file formats for migration scripts:
    - `python` (.py)
    - `sql` (.sql)
    - `doctest` (.txt or .rst)

    .. warning:: sql migration scripts are not available in web-only instances

    You can pass script parameters using a double dash (--) on the command
    line.

    Context environment can have these variables defined:
    - __name__ : determined by the funcname parameter
    - __file__ : the name of the script, if it exists
    - __args__ : script arguments coming from the command line

    :param migrscript: name of the script
    :param funcname: defines __name__ inside the shell (or use __main__)
    :param args: optional arguments for funcname
    :keyword scriptargs: optional arguments of the script
    """
    ftypes = {'python': ('.py',),
              'doctest': ('.txt', '.rst'),
              'sql': ('.sql',)}
    # sql migration scripts are not available in web-only instances
    if not hasattr(self, "session"):
        ftypes.pop('sql')
    migrscript = os.path.normpath(migrscript)
    for (script_mode, ftype) in ftypes.items():
        if migrscript.endswith(ftype):
            break
    else:
        ftypes = ', '.join(chain(*ftypes.values()))
        msg = 'ignoring %s, not a valid script extension (%s)'
        raise ExecutionError(msg % (migrscript, ftypes))
    if not self.execscript_confirm(migrscript):
        return
    scriptlocals = self._create_context().copy()
    scriptlocals.update({'__file__': migrscript,
                         '__args__': kwargs.pop("scriptargs", [])})
    self._context_stack.append(scriptlocals)
    # make sure the script context is popped even when a branch returns early
    try:
        if script_mode == 'python':
            if funcname is None:
                pyname = '__main__'
            else:
                pyname = splitext(basename(migrscript))[0]
            scriptlocals['__name__'] = pyname
            with open(migrscript, 'rb') as fobj:
                fcontent = fobj.read()
            code = compile(fcontent, migrscript, 'exec')
            exec(code, scriptlocals)
            if funcname is not None:
                try:
                    func = scriptlocals[funcname]
                    self.info('found %s in locals', funcname)
                    assert callable(func), '%s (%s) is not callable' % (func, funcname)
                except KeyError:
                    self.critical('no %s in script %s', funcname, migrscript)
                    return None
                return func(*args, **kwargs)
        elif script_mode == 'sql':
            from cubicweb.server.sqlutils import sqlexec
            sqlexec(open(migrscript).read(), self.session.system_sql)
            self.commit()
        else:  # script_mode == 'doctest'
            import doctest
            return doctest.testfile(migrscript, module_relative=False,
                                    optionflags=doctest.ELLIPSIS,
                                    # verbose mode when user input is expected
                                    verbose=self.verbosity == 2,
                                    report=True, encoding='utf-8',
                                    globs=scriptlocals)
    finally:
        self._context_stack.pop()
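
# Illustrative caller sketch for cmd_process_script (the script paths and the
# `update` function name are hypothetical; `mih` stands for an existing
# migration handler exposing the method above).
def _example_process_script(mih):
    # run a plain python migration script, passing it two script arguments
    # (exposed to the script as __args__)
    mih.cmd_process_script('migration/postcreate.py', scriptargs=['foo', 'bar'])
    # run a script, then call its `update` function with one positional argument
    mih.cmd_process_script('migration/fix_categories.py', 'update', 'new label')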