def main(key=None, keep_first=None, keep_last=None):
    """Deduplicate CSV rows read from stdin, writing the result to stdout.

    Rows are grouped by the column(s) named in *key*; within each group
    either the first or last row (ordered by --keep-first / --keep-last
    columns) is kept. Exactly one of keep_first / keep_last must be given.
    """
    use_first = keep_first is not None
    use_last = keep_last is not None
    if not (use_first or use_last):
        raise argh.CommandError(
            "Must specify either --keep-first or --keep-last.")
    if use_first and use_last:
        raise argh.CommandError(
            "Must specify either --keep-first or --keep-last but not both.")
    reader = csv.reader(sys.stdin)
    header = next(reader)
    group_key = make_key(key, header)
    order_key = make_key(keep_first if use_first else keep_last, header)
    writer = csv.writer(sys.stdout)
    writer.writerow(header)
    # groupby requires its input sorted by the same key.
    for _, group in it.groupby(sorted(reader, key=group_key), group_key):
        ranked = sorted(group, key=order_key)
        writer.writerow(ranked[0] if use_first else ranked[-1])
class GpkgReader(GeoJsonReader):
    """Reader for GeoPackage files via the fiona 'GPKG' driver.

    The source may be 'name.gpkg' (the layer is auto-detected) or
    'name.gpkg:layer_name' to select a layer explicitly.
    """
    fiona_driver = 'GPKG'

    def __str__(self):
        return f'GpkgReader of \'{self.source}\' ({self.geometry_filter}, {self.chunk_size})'

    # repr had an identical, duplicated body; alias keeps them in sync.
    __repr__ = __str__

    def __init__(self, source, geometry_filter=None, chunk_size=10_000, skip=0, **kwargs):
        super().__init__(source, geometry_filter, chunk_size, skip, **kwargs)
        import fiona
        layername = None
        if '.gpkg:' in self.source:
            try:
                # rsplit on the LAST colon so paths that themselves contain
                # a colon (e.g. 'C:\\data\\x.gpkg:layer') still parse.
                self.source, layername = self.source.rsplit(':', 1)
            except ValueError:
                raise argh.CommandError('File name should be name.gpkg or name.gpkg:layer_name. Got "%s" instead.' % self.source)
        else:
            try:
                layers = fiona.listlayers(self.source)
            except ValueError:
                raise argh.CommandError('Fiona driver can\'t read layers from file %s' % self.source)
            if len(layers) == 1:
                layername = layers[0]
            else:
                # Fall back to the file's base name as the default layer.
                layername = os.path.splitext(os.path.basename(self.source))[0]
                if layername not in layers:
                    raise argh.CommandError('Can\'t detect default layer in %s. Layers available are: %s' % (self.source, ', '.join(layers)))
        self.layername = layername
def script(configuration, script_path, script_args):
    """Run a script managed by claw with the provided configuration as context."""
    conf = Configuration(configuration)
    if not conf.exists():
        raise NO_INIT
    # Resolve a bare script name by searching the configured script dirs,
    # trying both the literal name and name + '.py'.
    if not os.path.isfile(script_path):
        for scripts_dir in settings.scripts:
            possible_script_path = scripts_dir / script_path
            possible_script_path2 = scripts_dir / '{0}.py'.format(script_path)
            if possible_script_path.isfile():
                script_path = possible_script_path
                break
            if possible_script_path2.isfile():
                script_path = possible_script_path2
                break
        else:
            # for/else: no candidate matched in any scripts dir.
            raise argh.CommandError('Could not locate {0}'.format(script_path))
    # Execute the script in a prepared globals dict (Python 2 execfile).
    exec_globs = exec_env.exec_globals(script_path)
    execfile(script_path, exec_globs)
    # The first CLI arg may name the function to run; default to 'script'.
    if script_args and script_args[0] in exec_globs:
        func = script_args[0]
        script_args = script_args[1:]
    else:
        func = 'script'
    script_func = exec_globs.get(func)
    if not script_func:
        raise argh.CommandError('Cannot find a function to execute. Did you '
                                'add a default "script" function?')
    # Expose the configuration to the script for the duration of the call.
    try:
        current_configuration.set(conf)
        argh.dispatch_command(script_func, argv=script_args)
    finally:
        current_configuration.clear()
def sync_repos(feature_name):
    """Refresh the feature's repo list from the repos whose branch exists."""
    feature_name = feature_name or features.active_feature
    if not feature_name:
        raise argh.CommandError('No feature is currently active.')
    if not features.exists(feature_name):
        raise argh.CommandError('No such feature: {}'.format(feature_name))
    branch = features.load()[feature_name]['branch']
    # Probe every repo for the feature branch and keep the ones that have it.
    probe = _command(workflow='check_branch_exists',
                     parameters={'branch': branch})
    found_repos = sorted(_call(probe))
    with features.update_feature(feature_name) as feature:
        feature['repos'] = found_repos
def finish(feature_name):
    """Finish a feature: remove its branch from every tracked repo, then forget it."""
    feature_name = feature_name or features.active_feature
    if not feature_name:
        raise argh.CommandError('No feature is currently active.')
    if not features.exists(feature_name):
        raise argh.CommandError('No such feature: {}'.format(feature_name))
    repos = features.load()[feature_name].get('repos', [])
    for repo in repos:
        try:
            remove_repo(repo, feature_name, force=False)
        except RuntimeError as e:
            # Best-effort: report the failure and continue with the rest.
            # Single-argument call prints the same text under Python 2 and 3;
            # the original `print str(e), 'Skipping.'` was Python 2-only syntax.
            print('{0} Skipping.'.format(e))
    with features.update() as _features:
        _features.pop(feature_name, None)
    features.active_feature = None
def save(self, docker_host, ssh_key_path, clean_image_docker_tag,
         manager_image_docker_tag, source_root, workdir, reset, debug_ip,
         manager_image_url, manager_image_commit_sha_url):
    """Write docl's config.yaml, refusing to overwrite unless *reset* is set."""
    if not self.conf_dir.exists():
        self.conf_dir.mkdir()
    conf = self.conf_dir / 'config.yaml'
    if conf.exists() and not reset:
        raise argh.CommandError('Already initialized. '
                                'Run "docl init --reset"')
    # Default the work dir to a directory next to the config file.
    workdir = path(workdir or self.conf_dir / 'work').expanduser().abspath()
    settings = {
        'ssh_key_path': str(ssh_key_path),
        'docker_host': docker_host,
        'clean_image_docker_tag': clean_image_docker_tag,
        'manager_image_docker_tag': manager_image_docker_tag,
        'source_root': source_root,
        'workdir': str(workdir),
        'services': constants.SERVICES,
        'expose': constants.EXPOSE,
        'publish': constants.PUBLISH,
        'container_hostname': constants.HOSTNAME,
        'package_dir': constants.PACKAGE_DIR,
        'package_services': constants.PACKAGE_SERVICES,
        'env_packages': constants.ENV_PACKAGES,
        'resources': constants.RESOURCES,
        'agent_package_path': constants.AGENT_PACKAGE_PATH,
        'manager_image_url': manager_image_url,
        'manager_image_commit_sha_url': manager_image_commit_sha_url,
        'debug_ip': debug_ip,
    }
    conf.write_text(yaml.safe_dump(settings, default_flow_style=False))
def init(manager_image_url=constants.MANAGER_IMAGE_URL,
         manager_image_docker_tag=constants.MANAGER_IMAGE_DOCKER_TAG,
         manager_image_commit_sha_url=constants.MANAGER_IMAGE_COMMIT_SHA_URL,
         docker_host=constants.DOCKER_HOST,
         ssh_key_path=constants.SSH_KEY,
         clean_image_docker_tag=constants.CLEAN_IMAGE_DOCKER_TAG,
         source_root=constants.SOURCE_ROOT,
         workdir=None,
         reset=False,
         debug_ip=None):
    """Initialize the docl configuration and the work directory.

    Validates that the SSH key exists, persists every setting via
    configuration.save(), then prepares the work dir.
    """
    key_file = path(ssh_key_path).expanduser()
    if not key_file.isfile():
        raise argh.CommandError(
            'You need to create a key (see man ssh-keygen) first')
    configuration.save(
        docker_host=docker_host,
        ssh_key_path=key_file.abspath(),
        clean_image_docker_tag=clean_image_docker_tag,
        manager_image_docker_tag=manager_image_docker_tag,
        source_root=source_root,
        workdir=workdir,
        reset=reset,
        debug_ip=debug_ip,
        manager_image_url=manager_image_url,
        manager_image_commit_sha_url=manager_image_commit_sha_url)
    logger.info('Configuration is saved to {}. Feel free to change it to your '
                'liking.'.format(configuration.conf_path))
    work.init()
def generate_script(script_path, reset=False):
    """Generate a scaffold script."""
    if os.path.exists(script_path) and not reset:
        raise argh.CommandError('{0} already exists'.format(script_path))
    template = resources.get('templates/script.template.py')
    with open(script_path, 'w') as f:
        f.write(template)
    # Add execute permission for user, group and other.
    mode = os.stat(script_path).st_mode
    os.chmod(script_path, mode | 0o111)
def checkout(name):
    """Activate feature *name* and check out its branch across repos."""
    # A missing (or empty) feature entry is treated as unknown.
    if not features.load().get(name):
        raise argh.CommandError('No such feature: {}'.format(name))
    features.active_feature = name
    _call(ctx.user_commands['git.checkout'], branch=name)
def init(simple_manager_blueprint_path=None,
         docker_host=constants.DOCKER_HOST,
         ssh_key_path=constants.SSH_KEY,
         clean_image_docker_tag=constants.CLEAN_IMAGE_DOCKER_TAG,
         manager_image_docker_tag=constants.MANAGER_IMAGE_DOCKER_TAG,
         source_root=constants.SOURCE_ROOT,
         workdir=None,
         reset=False):
    """Initialize the docl configuration from a simple manager blueprint.

    Raises argh.CommandError when the blueprint path is missing/omitted or
    when the SSH key does not exist.
    """
    # Guard first: path(None) would raise a TypeError before the intended
    # CommandError message could ever be produced.
    if not simple_manager_blueprint_path:
        raise argh.CommandError('You must specify a path '
                                'to a simple manager blueprint')
    ssh_key_path = path(ssh_key_path).expanduser()
    simple_manager_blueprint_path = path(
        simple_manager_blueprint_path).expanduser()
    required_files = {
        simple_manager_blueprint_path: 'You must specify a path '
                                       'to a simple manager blueprint',
        ssh_key_path: 'You need to create a key (see man ssh-keygen) first',
    }
    for required_file, message in required_files.items():
        if not required_file.isfile():
            raise argh.CommandError(message)
    configuration.save(
        docker_host=docker_host,
        simple_manager_blueprint_path=simple_manager_blueprint_path.abspath(),
        ssh_key_path=ssh_key_path.abspath(),
        clean_image_docker_tag=clean_image_docker_tag,
        manager_image_docker_tag=manager_image_docker_tag,
        source_root=source_root,
        workdir=workdir,
        reset=reset)
    logger.info('Configuration is saved to {}. Feel free to change it to your '
                'liking.'.format(configuration.conf_path))
    work.init()
def init(suites_yaml=None, claw_home=None, reset=False):
    """Initialize a claw environment."""
    if settings.settings_path.exists() and not reset:
        raise INIT_EXISTS
    if not suites_yaml:
        # Default to the suites.yaml bundled with cosmo_tester.
        system_tests_dir = os.path.dirname(
            os.path.dirname(cosmo_tester.__file__))
        suites_yaml = os.path.join(system_tests_dir, 'suites', 'suites',
                                   'suites.yaml')
    suites_yaml = os.path.expanduser(suites_yaml)
    if not os.path.exists(suites_yaml):
        raise argh.CommandError(
            'suites.yaml not found at {0}'.format(suites_yaml))
    claw_home = claw_home or os.getcwd()
    settings.write_settings(claw_home, suites_yaml)
    # Materialize the template files in the new claw home.
    templates = (
        (settings.user_suites_yaml, 'templates/suites.template.yaml'),
        (settings.blueprints_yaml, 'templates/blueprints.template.yaml'),
        (settings.claw_home / '.gitignore', 'templates/gitignore.template'),
    )
    for target, template in templates:
        target.write_text(resources.get(template))
    settings.configurations.mkdir_p()
    settings.default_scripts_dir.mkdir_p()
    generate_script(settings.default_scripts_dir / 'example-script.py',
                    reset=True)
def remove_repo(repo, feature_name, force):
    """Delete the feature's branch in *repo* and stop tracking the repo."""
    feature_name = feature_name or features.active_feature
    if not feature_name:
        raise argh.CommandError('No feature is currently active.')
    if not features.exists(feature_name):
        raise argh.CommandError('No such feature: {}'.format(feature_name))
    branch = features.load()[feature_name]['branch']
    delete_branch = _git_command(operation='git.delete_branch',
                                 repo=repo,
                                 branch=branch,
                                 force=force)
    _call(delete_branch)
    with features.update_feature(feature_name) as feature:
        tracked = feature.get('repos', [])
        if repo in tracked:
            tracked.remove(repo)
            feature['repos'] = tracked
def clear(force=False):
    """Remove the cache directory and clean the work directory.

    Requires --force as an explicit confirmation; raises otherwise.
    """
    if not force:
        # Fixed typo in the original message ("diretory").
        raise argh.CommandError('clear will remove the cache directory and '
                                'clean the work directory. pass --force if '
                                'this is indeed what you intend to do')
    cache.clear()
    work.clear()
def build_job(self, job, parameters=None):
    """Queue a parameterized Jenkins build for *job*; raise unless queued."""
    response = self._raw_query(
        resource='job/{}/buildWithParameters'.format(job),
        method='POST',
        data=parameters)
    # Jenkins answers 201 Created when the build is successfully queued.
    if response.status_code != 201:
        raise argh.CommandError(
            'Failed building job: {} [status={}, parameters={}]'.format(
                job, response.status_code, parameters))
def _retry(func, *args, **kwargs):
    """Call func(*args, **kwargs), retrying on sh.ErrorReturnCode.

    Retries up to 300 times with a 0.1s pause between attempts (~30s
    total). Raises argh.CommandError when every attempt fails.
    """
    for _ in range(300):
        try:
            func(*args, **kwargs)
            break
        except sh.ErrorReturnCode:
            sleep(0.1)
    else:
        # The original raised a message-less CommandError; give the user
        # some context instead of an empty error.
        raise argh.CommandError('Command still failing after repeated retries')
def _connect_postgres(path_string):
    """Split 'postgresql://...#table_or_query' and return (engine, query part)."""
    from sqlalchemy import create_engine
    if '#' not in path_string:
        raise argh.CommandError('Use this format to read from sql: postgresql://[user[:password]@]hostname[:port]/<db_name>#<table_name or query>.')
    # Split at the FIRST '#': everything before is the DB URL.
    url, _, table_or_query = path_string.partition('#')
    return create_engine(url), table_or_query
def cp(source, target, container_id=None):
    """Copy a file between the host and the container.

    The container-side path is marked with a leading ':'; it is expanded
    to '<container_id>:<path>' before handing off to docker cp.
    """
    container_id = container_id or work.last_container_id
    if not (source.startswith(':') or target.startswith(':')):
        raise argh.CommandError('Either source or target should be prefixed '
                                'with : to denote the container.')
    if source.startswith(':'):
        source = '{}{}'.format(container_id, source)
    else:
        target = '{}{}'.format(container_id, target)
    quiet_docker.cp(source, target)
def command_error(fmt, *args, **kwargs):
    """Raise a CommandError with a formatted message.

    Args:
        fmt (str): format string; positional/keyword args fill its fields

    Raises:
        CommandError: always
    """
    message = fmt.format(*args, **kwargs)
    raise argh.CommandError(message)
def add_repo(repo, feature_name):
    """Track *repo* under the feature and create/check out its branch."""
    feature_name = feature_name or features.active_feature
    if not feature_name:
        raise argh.CommandError('No feature is currently active.')
    if not features.exists(feature_name):
        raise argh.CommandError('No such feature: {}'.format(feature_name))
    with features.update_feature(feature_name) as feature:
        branch = feature['branch']
        # New branches fork from the feature's base (default: master).
        base = feature.get('base', 'master')
        repos = feature.get('repos', [])
        if repo not in repos:
            repos.append(repo)
            feature['repos'] = repos
        # Create the feature branch in the repo, then switch to it.
        _call(
            _git_command(operation='git.create_branch',
                         repo=repo,
                         branch=branch,
                         base=base))
        _call(
            _git_command(operation='git.checkout',
                         repo=repo,
                         branch=feature_name))
def _extract_build_parameters(build): actions = build['actions'] for action in actions: action_parameters = action.get('parameters') if not action_parameters: continue if not any([parameter.get('name') == 'system_tests_branch' for parameter in action_parameters]): continue return {p['name']: p['value'] for p in action_parameters} else: raise argh.CommandError('Invalid build {}'.format(build['build']))
def _raw_query(resource, method='GET', data=None):
    """Issue an authenticated HTTP request against the Jenkins base URL.

    Returns the requests Response; raises argh.CommandError on 404.
    """
    url = '{}/{}/{}'.format(configuration.jenkins_base_url,
                            configuration.jenkins_system_tests_base,
                            resource)
    response = requests.request(method, url,
                                auth=(configuration.jenkins_username,
                                      configuration.jenkins_password),
                                data=data)
    if response.status_code == 404:
        # The original message had no '{}' placeholder, so the resource
        # name was silently dropped from the error.
        raise argh.CommandError(
            'Resource not found: {}. (404)'.format(resource))
    return response
def _fetch_builds(job, build_numbers):
    """Expand build specs ('5' or '3-7') and fetch each referenced build.

    Returns a list of (number_str, build) pairs in ascending numeric order,
    deduplicated. Raises argh.CommandError for a spec with more than one '-'.
    """
    numbers = set()
    for build in build_numbers:
        split = build.split('-')
        if len(split) > 2:
            raise argh.CommandError('Illegal build range: {}'.format(build))
        elif len(split) == 1:
            # Store ints so a single '5' and a range covering 5 deduplicate
            # (the original mixed str and int members, fetching twice).
            numbers.add(int(build))
        else:
            start, stop = int(split[0]), int(split[1])
            numbers.update(range(start, stop + 1))
    return [(str(n), jenkins.fetch_build(job, str(n)))
            for n in sorted(numbers)]
def status(configuration):
    """See the status of an environment specified by a configuration."""
    conf = Configuration(configuration)
    if not conf.exists():
        raise NO_INIT
    manager_ip = conf.handler_configuration.get('manager_ip')
    if not manager_ip:
        raise NO_BOOTSTRAP
    # Probe the manager REST API; an unreachable manager surfaces as a
    # connection error which we turn into a CommandError.
    try:
        manager_version = conf.client.manager.get_version()['version']
        conf.logger.info('[{0}] Running ({1})'.format(manager_ip,
                                                      manager_version))
    except requests.exceptions.ConnectionError:
        raise argh.CommandError('[{0}] Not reachable'.format(manager_ip))
def save(self, repo_root_dir, reset=False):
    """Persist the jit configuration, refusing to overwrite unless *reset*."""
    if not self.conf_dir.exists():
        self.conf_dir.mkdir()
    conf_file = self.conf_dir / 'config.yaml'
    if conf_file.exists() and not reset:
        raise argh.CommandError('Already initialized. '
                                'Run "jit init --reset"')
    content = yaml.safe_dump({'repo_root_dir': repo_root_dir},
                             default_flow_style=False)
    conf_file.write_text(content)
def _gen_func(script_path):
    """Build an argh command that runs *script_path* via script().

    The command name is the file's base name with underscores mapped to
    dashes; a duplicate name (tracked in the enclosing-scope `names`)
    raises a CommandError.
    """
    name = script_path.basename()[:-len('.py')].replace('_', '-')
    if name in names:
        raise argh.CommandError('Name conflict: Found two commands named '
                                '"{0}".'.format(name))

    @command
    @arg('configuration', completer=completion.existing_configurations)
    @arg('script_args', nargs='...')
    @argh.named(name)
    def func(configuration, script_args):
        """Script based command."""
        return script(configuration, script_path, script_args)
    return func
def func(args):
    """Configure the environment named args.name and run post-create hooks."""
    # Refuse to silently re-point an already-configured environment at a
    # new storage dir unless --reset was passed.
    if (self.user_config.current == args.name and
            self.user_config.storage_dir and
            not args.reset):
        raise argh.CommandError('storage dir already configured. pass '
                                '--reset to override.')
    storage_dir = args.storage_dir or os.getcwd()
    self.user_config.current = args.name
    self.user_config.editable = args.editable
    self.user_config.storage_dir = storage_dir
    self.user_config.storage_dir.mkdir_p()
    self._create_inputs(args, env_create.get('inputs', {}))
    self.user_config.macros_path.touch()
    # Optional project hook invoked after environment creation, receiving
    # all CLI args as keywords.
    after_env_create_func = self.config.hooks.after_env_create
    if after_env_create_func:
        after_env_create = module.load_attribute(after_env_create_func)
        after_env_create(self, **vars(args))
def init_handler(self, df=None):
    """Open the fiona GPKG writer for self.target.

    Target may be 'name.gpkg' or 'name.gpkg:layer_name'; without an
    explicit layer, the file's base name is used. An existing layer with
    the same name is removed before writing.
    """
    import fiona
    schema = self._get_schema(df)
    layername = None
    if '.gpkg:' in self.target:
        try:
            # rsplit on the LAST colon so paths containing a colon
            # (e.g. 'C:\\data\\x.gpkg:layer') still parse.
            self.filename, layername = self.target.rsplit(':', 1)
        except ValueError:
            raise argh.CommandError('File name should be name.gpkg or name.gpkg:layer_name. Got "%s" instead.' % self.target)
    else:
        self.filename = self.target
        layername = os.path.splitext(os.path.basename(self.target))[0]
    # Instead of self._cleanup_target(), delete the existing fiona layer.
    if os.path.exists(self.filename) and layername in fiona.listlayers(self.filename):
        fiona.remove(self.filename, self.fiona_driver, layername)
    crs = df.crs if df is not None else None
    self._handler = fiona.open(self.filename, 'w', crs=crs,
                               driver=self.fiona_driver, schema=schema,
                               layer=layername)
def build(job, branch=None, descriptor=None, source=None):
    """Queue a Jenkins build, optionally seeding parameters from *source*.

    Source may be a YAML file path or an existing build number whose
    parameters are reused; branch/descriptor override individual values.
    """
    parameters = {}
    if source:
        source_path = path(source).expanduser()
        if source_path.exists():
            # A file source: load the parameters straight from YAML.
            parameters = yaml.safe_load(source_path.text())
        else:
            # Otherwise the source must be an existing build number.
            try:
                source = int(source)
            except ValueError:
                raise argh.CommandError('Invalid source: {}'.format(source))
            fetched_build = jenkins.fetch_build(job, source)
            parameters = _extract_build_parameters(fetched_build['build'])
    if branch:
        parameters['system_tests_branch'] = branch
    if descriptor:
        parameters['system_tests_descriptor'] = descriptor
    jenkins.build_job(job, parameters=parameters)
    # Single-argument call prints identically under Python 2 and 3;
    # the original bare print statement was Python 2-only syntax.
    print('Build successfully queued [job={}, parameters={}]'.format(
        job, parameters))
def rectangify(INPUT, convert=COLS_MODE, out=None, marker='---', items=None):
    """create CSV tables from a list of columns/rows"""
    with smart_open(INPUT, 'r') as inpfile:
        lines = inpfile.read().splitlines()
    if items:
        # Fixed item count per record: inject synthetic markers instead.
        item_num = int(items)
        marker = make_magic_marker()
        insert_markers(lines, item_num, marker)
    # Ensure the list starts with a marker. Guard the empty-input case:
    # the original `lines[0]` raised IndexError on an empty file.
    if not lines or lines[0] != marker:
        lines.insert(0, marker)
    if convert == COLS_MODE:
        from_colslist(lines, out, marker)
    elif convert == ROWS_MODE:
        from_rowslist(lines, out, marker)
    else:
        raise argh.CommandError(
            "Convert mode '{0}' not available".format(convert))
def _init_command(self, reset=False):
    """Initialize the local environment for this configuration.

    Copies the blueprint into a temp dir, lets the optional before_init
    hook rewrite the parsed blueprint, runs local.init_env, and (in
    editable mode) symlinks the stored resources back to the source
    blueprint dir.
    """
    local_dir = self.user_config.storage_dir / self._name
    if local_dir.exists():
        if reset:
            shutil.rmtree(local_dir)
        else:
            raise argh.CommandError('Already initialized, pass --reset '
                                    'to re-initialize.')
    inputs = self.user_config.inputs
    temp_dir = path(tempfile.mkdtemp(
        prefix='{}-blueprint-dir-'.format(self.config.name)))
    blueprint_dir = temp_dir / 'blueprint'
    try:
        shutil.copytree(self.config.blueprint_dir, blueprint_dir)
        # NOTE(review): blueprint_dir is appended to sys.path but never
        # removed, and it points into a temp dir deleted in the finally
        # block — presumably imports from it are only needed while
        # init_env runs; confirm.
        sys.path.append(blueprint_dir)
        blueprint_path = (blueprint_dir /
                          self.config.blueprint_path.basename())
        before_init_func = self.config.hooks.before_init
        if before_init_func:
            # Let the hook mutate the parsed blueprint, then persist it
            # before init_env reads the file.
            blueprint = yaml.safe_load(blueprint_path.text())
            before_init = module.load_attribute(before_init_func)
            before_init(blueprint=blueprint, inputs=inputs, loader=self)
            blueprint_path.write_text(yaml.safe_dump(blueprint))
        local.init_env(blueprint_path=blueprint_path,
                       inputs=inputs,
                       name=self._name,
                       storage=self._storage(),
                       ignored_modules=self.config.ignored_modules)
    finally:
        # Always drop the temp copy, even when init fails.
        shutil.rmtree(temp_dir, ignore_errors=True)
    if self.user_config.editable:
        # Replace the copied resources with a symlink to the live
        # blueprint dir so edits take effect without re-initializing.
        resources_path = (self.user_config.storage_dir / self._name /
                          'resources')
        shutil.rmtree(resources_path, ignore_errors=True)
        os.symlink(self.config.blueprint_dir, resources_path)