def get_fixuid_services(self, include_missing_fixuid=False) -> Iterable[BuildServiceDef]:
    """
    Yield services whose build context contains a fixuid configuration.

    NOTE(review): the historical docstring mentioned fixuid.tar.gz, but the
    code below checks for a "fixuid.yml" file in the build context — confirm
    which artifact is the real marker.

    :param include_missing_fixuid: when True, also yield services whose build
        context has no fixuid.yml file.
    :return: generator of BuildServiceDef (build context, dockerfile name).
    """
    if "services" not in self.docker_compose_config:
        return
    for _, service in self.docker_compose_config.get("services").items():
        # Only services built from a local context are relevant.
        if "build" not in service.keys():
            continue
        if isinstance(service["build"], dict):
            # Long syntax: build.context (and optionally build.dockerfile).
            build_context = Dotty(service).get("build.context")
        elif isinstance(service["build"], str):
            # Short syntax: the string is the context directory itself.
            build_context = service["build"]
        else:
            continue
        if not include_missing_fixuid and not os.path.exists(
                os.path.join(build_context, "fixuid.yml")):
            continue
        dockerfile = Dotty(service).get("build.dockerfile", "Dockerfile")
        yield BuildServiceDef(build_context, dockerfile)
def _configure_defaults(self, feature_config: Dotty):
    """
    Compute default values for the symlinks feature configuration.

    "suffixes" defaults to the available environment suffixes starting at the
    current environment (or the last available one when no current environment
    is set). "includes" defaults to patterns built from those suffixes.

    :param feature_config: mutable feature configuration to fill in.
    :raise FeatureConfigurationError: when neither symlinks.suffixes nor
        core.env.available is defined.
    """
    suffixes = feature_config.get('suffixes')
    if not suffixes:
        try:
            env_available = config.data["core.env.available"]
        except KeyError as err:
            raise FeatureConfigurationError(
                self,
                "core.env.available or symlinks.suffixes should be defined.") \
                from err
        available_suffixes = ["." + env for env in env_available]
        env_current = config.data.get("core.env.current")
        if not env_current:
            # No explicit current environment: assume the last available one.
            current_suffix = available_suffixes[-1]
        else:
            current_suffix = "." + env_current
        # Keep the current suffix and everything after it.
        suffixes = available_suffixes[available_suffixes.index(current_suffix):]
        feature_config["suffixes"] = suffixes
    includes = feature_config.get('includes')
    if includes is None:
        includes = FileWalker.build_default_includes_from_suffixes(suffixes)
        feature_config["includes"] = includes
def get_old_value(self, config: "Dotty"):
    """
    Resolve the legacy value for this migration from the given configuration.

    Prefers deriving it from the new key (via rollback_transformer); falls
    back to reading the old key directly.

    :param config: configuration to read from.
    :raise KeyError: when neither the new nor the old key is present.
    """
    new_key = self.new_config_key
    if new_key in config:
        # Derive the legacy value from the new one.
        return self.rollback_transformer(config.get(new_key), config)
    old_key = self.old_config_key
    if old_key in config:
        return config.get(old_key)
    raise KeyError
def _handle_property(configuration, configuration_files):
    """
    Narrow the configuration (and per-file configurations) down to the property
    requested on the command line (config.args.property), then re-root the
    extracted values under that property key so the printed output keeps the
    full dotted path.

    :param configuration: merged configuration dict (may be replaced here).
    :param configuration_files: mapping of filename -> configuration dict.
    :return: (configuration, configuration_files) narrowed to the property.
    """
    configuration, configuration_files = ConfigAction._get_configurations_for_prop(
        config.args.property, configuration, configuration_files
    )
    if configuration is None and not config.args.full:
        # Property not found in the effective configuration: retry against
        # the full set of configuration files.
        configuration, configuration_files = ConfigAction._get_configuration_files(
            True, config.args.files
        )
        configuration, configuration_files = ConfigAction._get_configurations_for_prop(
            config.args.property, configuration, configuration_files
        )
    if configuration is not None:
        # Re-root the extracted value under the requested property path.
        root_config = Dotty({})
        root_config[config.args.property] = configuration
        configuration = dict(root_config)
    if configuration_files:
        # Same re-rooting, applied per configuration file.
        root_configuration_files = {}
        for k, configuration_file in configuration_files.items():
            root_configuration_file = Dotty({})
            root_configuration_file[config.args.property] = configuration_file
            root_configuration_files[k] = dict(root_configuration_file)
        configuration_files = root_configuration_files
    return configuration, configuration_files
def _configure_defaults(self, feature_config: Dotty):
    """
    Fill VCS-related defaults (branch, version, tag, hash, short_hash) from
    the git repository, for any key not already configured.

    Does nothing when the project is not a git repository.

    :param feature_config: mutable feature configuration to fill in.
    """
    if not is_git_repository():
        return
    # Each entry maps a configuration key to the function retrieving its
    # value from the VCS; the getter only runs when the key is unset.
    vcs_getters = (
        ("branch", get_branch_from_vcs),
        ("version", get_version_from_vcs),
        ("tag", get_tag_from_vcs),
        ("hash", get_hash_from_vcs),
        ("short_hash", get_short_hash_from_vcs),
    )
    for key, getter in vcs_getters:
        if feature_config.get(key) is None:
            feature_config[key] = getter()
def test_retrieve_binaries_data(self):
    """_retrieve_binaries_data extracts binary names from ddb.emit docker:binary labels."""
    features.register(DockerFeature())
    load_registered_features()
    action = actions.get('docker:display-info')  # type:DockerDisplayInfoAction
    # No labels at all, or labels without the ddb.emit marker: no binaries.
    assert [] == action._retrieve_binaries_data(Dotty({}))
    assert [] == action._retrieve_binaries_data(Dotty({'toto': 'toto'}))
    assert [] == action._retrieve_binaries_data(
        Dotty({'labels': {
            'toto': '123'
        }}))
    # A single binary declared through its (name)/(workdir) label pair.
    assert ['npm-simple'] == action._retrieve_binaries_data(
        Dotty({
            'labels': {
                'ddb.emit.docker:binary[npm-simple](name)': 'npm-simple',
                'ddb.emit.docker:binary[npm-simple](workdir)': '/app'
            }
        }))
    # Multiple binaries: each distinct [id] yields one entry.
    assert ['npm', 'npm-simple'] == sorted(
        action._retrieve_binaries_data(
            Dotty({
                'labels': {
                    'ddb.emit.docker:binary[npm](name)': 'npm',
                    'ddb.emit.docker:binary[npm-simple](name)': 'npm-simple',
                    'ddb.emit.docker:binary[npm-simple](workdir)': '/app'
                }
            })))
def __init__(self, header_dicom: dict, acquisition):
    """
    Args:
        header_dicom (dict): This is just the dicom header info similar to
            file.info['header']['dicom'].
        acquisition (flywheel.Acquisition): A flywheel acquisition container object
    """
    # Dotty allows dotted-path lookups into the nested DICOM header dict.
    self.header_dicom = Dotty(header_dicom)
    self.acquisition = acquisition
    # Convenience shortcut to the acquisition's label.
    self.label = acquisition.label
def _print_config_variables(configuration, configuration_files):
    """
    Print configuration as flat "key: value" lines.

    With --files, each configuration file is printed as its own section
    headed by "# <filename>"; otherwise the merged configuration is printed.
    """
    def dump_flat(data):
        # Flatten to dotted keys and print them in deterministic (sorted) order.
        flat = flatten(Dotty(data), keep_primitive_list=True)
        for key in sorted(flat.keys()):
            print(f"{key}: {flat[key]}")

    if config.args.files and configuration_files:
        first = True
        for file, configuration_file in configuration_files.items():
            if not first:
                # Blank line between file sections.
                print()
            first = False
            print(f"# {file}")
            dump_flat(configuration_file)
    else:
        dump_flat(configuration)
def _get_configurations_for_prop(prop, configuration, configuration_files):
    """
    Extract the value at `prop` from the configuration and from each
    configuration file.

    :param prop: dotted property path.
    :param configuration: merged configuration dict.
    :param configuration_files: mapping of filename -> configuration dict.
    :return: (property value, mapping of filename -> property value); files
        where the property is falsy/absent are dropped from the mapping.
    """
    prop_configuration = Dotty(configuration).get(prop)
    if configuration_files:
        # Keep only files where the property resolves to a truthy value.
        prop_configuration_files = {
            file: value
            for file, value in (
                (file, Dotty(configuration_file).get(prop))
                for file, configuration_file in configuration_files.items()
            )
            if value
        }
    else:
        prop_configuration_files = configuration_files
    return prop_configuration, prop_configuration_files
def execute(self):
    """
    Parse the effective docker-compose configuration and emit docker events,
    collecting certificate domains made available during parsing so stale
    certificates can be removed afterwards.
    """
    if not os.path.exists("docker-compose.yml"):
        return
    # Let docker-compose resolve the effective configuration (merged files,
    # interpolated variables, ...).
    yaml_output = run("docker-compose", "config")
    parsed_config = yaml.load(yaml_output, yaml.SafeLoader)
    docker_compose_config = Dotty(parsed_config)
    # Skip everything when the configuration hasn't changed since last run.
    if self.current_yaml_output == yaml_output:
        return
    self.current_yaml_output = yaml_output
    events.docker.docker_compose_config(
        docker_compose_config=docker_compose_config)
    services = docker_compose_config.get('services')
    if not services:
        return
    events.docker.docker_compose_before_events(
        docker_compose_config=docker_compose_config)
    cert_domains = []

    def on_available(domain: str, wildcard: bool,
                     private_key: Union[bytes, str],
                     certificate: Union[bytes, str]):
        """
        When a certificate is available, record its domain.

        :param domain: certificate domain
        :param wildcard: whether the certificate is a wildcard one
        :param private_key: private key material (bytes or str)
        :param certificate: certificate material (bytes or str)
        :return:
        """
        cert_domains.append(domain)

    # Subscribe only for the duration of the parse, so certificates emitted
    # outside _parse_docker_compose are not captured.
    off = bus.on("certs:available", on_available)
    try:
        self._parse_docker_compose(docker_compose_config, services)
    finally:
        off()
    self._update_cache_and_emit_certs_remove(cert_domains)
    events.docker.docker_compose_after_events(
        docker_compose_config=docker_compose_config)
def test_retrieve_environment_data(self):
    """_retrieve_environment_data returns the service 'environment' mapping (or {})."""
    features.register(DockerFeature())
    load_registered_features()
    action = actions.get('docker:display-info')  # type:DockerDisplayInfoAction
    # Missing or irrelevant keys yield an empty mapping.
    assert {} == action._retrieve_environment_data(Dotty({}))
    assert {} == action._retrieve_environment_data(Dotty({'toto': 'toto'}))
    # Declared environment variables are returned as-is.
    assert {
        'AZERTY': '123'
    } == action._retrieve_environment_data(
        Dotty({'environment': {
            'AZERTY': '123'
        }}))
def _retrieve_vhosts_data(service_config: Dotty) -> List[str]: # pylint: disable=no-self-use """ Retrieve vhosts data :param service_config: the service configuration :return: a list containing vhosts data """ labels = service_config.get('labels') if not labels: return [] vhosts_regex_re = re.compile(r"^Host\(`(.+?)`\)$") vhosts_labels = [] for key in labels.keys(): value = labels.get(key) match = vhosts_regex_re.match(value) if not match: continue http_url = 'http://{}/'.format(match.group(1)) https_url = 'https://{}/'.format(match.group(1)) if '-tls.' in key: try: vhosts_labels.remove(http_url) except ValueError: pass vhosts_labels.append(https_url) continue if https_url not in vhosts_labels: vhosts_labels.append(http_url) return vhosts_labels
def _retrieve_service_ports(service_config: Dotty) -> List[ServicePort]: # pylint: disable=no-self-use """ Retrieve services ports data :param service_config: the service configuration :return: a list of service port """ ports = service_config.get('ports') if not ports: return [] def _to_service_ports(port): if isinstance(port, str): return ServicePort.parse(port) parameters = { 'target': None, 'published': None, 'protocol': None, 'mode': None, 'external_ip': None } parameters.update(port) return [ServicePort(**parameters)] service_ports = [] for port in ports: for service_port in _to_service_ports(port): service_ports.append(service_port) return service_ports
def test_config_output_extra_filenames_some_files_option(
        self, project_loader, capsys: CaptureFixture):
    """`ddb config some --files` prints one YAML document per matching file."""
    project_loader("extra-filenames")
    main(["config", "some", "--files"])
    reset()
    output = capsys.readouterr().out
    # Files are emitted as separate YAML documents separated by '---'.
    parts = [part.lstrip() for part in output.split('---') if part.strip()]
    assert len(parts) == 1
    configurations = {}
    for part in parts:
        # First line is a '# <filename>' header, the rest is the YAML payload.
        filename, config = part.split('\n', 1)
        assert filename.startswith('# ')
        filename = filename[2:]
        filename = os.path.relpath(filename, os.getcwd())
        configurations[filename] = Dotty(yaml.safe_load(config))
    # Only the file defining the 'some' property is listed.
    assert ('some.custom.yml',) == \
        tuple(configurations.keys())
    assert configurations['some.custom.yml']['some'] is True
    assert 'app.value' not in configurations['some.custom.yml']
def _print_config_value(configuration, prop):
    """
    Print the value of `prop` from the configuration (or the whole
    configuration when no property is given).

    :param configuration: configuration dict to read from.
    :param prop: dotted property path, possibly empty.
    :raise ValueError: when the configuration is None or the property is
        missing from it.
    """
    if configuration is None:
        raise ValueError(f"{prop} not found in configuration.")
    dotted = Dotty(configuration)
    has_prop = bool(prop) and prop in dotted
    if prop and not has_prop:
        raise ValueError(f"{prop} not found in configuration.")
    if has_prop:
        print(dotted[prop])
    else:
        # No property requested: print the configuration itself.
        print(configuration)
def inc(self, keys, skip_missing=True):
    """
    Return a plain dict containing the given keys copied from self.item.

    :param keys: iterable of (dotted) keys to include.
    :param skip_missing: when True, keys absent from self.item are omitted;
        when False they are included with the '_missing' placeholder value.
    :return: plain dict with the selected keys.
    """
    # Unique sentinel: unlike the previous string sentinel '_missing', it
    # cannot collide with a legitimately stored value of '_missing'.
    missing = object()
    output = Dotty({}, self.separator)
    for k in keys:
        value = self.item.get(k, default=missing)
        if value is missing:
            if skip_missing:
                continue
            # Keep the historical placeholder for absent keys.
            value = '_missing'
        output[k] = value
    return dict(output)
def __init__(self): self.phase = None # type: Optional['Phase'] self.command = None # type: Optional['Command'] self.stack = [] # type: List[ContextStackItem] self.watching = False # type: bool self.exceptions = [] # type: List[Exception] self.processed_sources = dict() # type: Dict[str, str] self.processed_targets = dict() # type: Dict[str, str] self.data = Dotty(dict())
def custom_escape_char():
    """Demonstrate a custom escape char: '#' protects a literal separator in a key."""
    from dotty_dict import Dotty

    # 'deep.deeper' is a single literal key here, so '#' must escape the '.'
    # for the lookup to address it instead of nesting.
    data = {'deep.deeper': {
        'harder': 'faster'
    }}
    dot = Dotty(data, separator='.', esc_char='#')
    assert dot['deep#.deeper.harder'] == 'faster'
def test_retrieve_ports_data(self):
    """_retrieve_service_ports parses both long (dict) and short (string) port syntax."""
    features.register(DockerFeature())
    load_registered_features()
    action = actions.get('docker:display-info')  # type:DockerDisplayInfoAction
    # No ports declared: empty list.
    assert [] == action._retrieve_service_ports(Dotty({}))
    assert [] == action._retrieve_service_ports(Dotty({'toto': 'toto'}))
    # Long syntax: published/target dict entries.
    assert [ServicePort(45, 123, None, None, None)
            ] == action._retrieve_service_ports(
                Dotty({'ports': [{
                    'published': '123',
                    'target': '45'
                }]}))
    # Short syntax: "published:target" with optional "/protocol".
    assert [ServicePort(45, 123, None, None, None)] == action._retrieve_service_ports(
        Dotty({'ports': ['123:45']}))
    assert [ServicePort(45, 123, 'tcp', None, None)] == action._retrieve_service_ports(
        Dotty({'ports': ['123:45/tcp']}))
def _retrieve_environment_data(service_config: Dotty) -> Dict[str, str]: """ Retrieve environment data :param service_config: the service configuration :return: a dict containing environment variables """ environments = service_config.get('environment') if not environments: return {} return environments
def _build_deprecation_dict(self, item):
    """
    Build a dict exposing, for the given item prefix, the values of deprecated
    (old) configuration keys derived from their new counterparts.

    :param item: configuration key prefix migrations were registered under.
    :return: plain dict of old sub-key (relative to item) -> migrated value.
    """
    migrations = get_migrations_from_old_config_key_startswith(item + ".")
    deprecation_dict = Dotty({})
    for migration in migrations:
        try:
            # Strip the "item." prefix so keys are relative to the item root.
            deprecation_dict[migration.old_config_key[
                len(item + "."):]] = migration.get_new_value(self)
        except KeyError:
            # New value not available in the current configuration: skip.
            pass
    return dict(deprecation_dict)
def custom_separator():
    """Demonstrate a custom separator: '$' splits dotted-style keys instead of '.'."""
    from dotty_dict import Dotty

    nested = {'deep': {'deeper': {'harder': 'faster'}}}
    dot = Dotty(nested, separator='$', esc_char='\\')
    # Each '$' descends one level into the nested mapping.
    assert dot['deep$deeper$harder'] == 'faster'
def execute(self):
    """
    Print an information summary (environment, ports, binaries, vhosts) for
    each service of the effective docker-compose configuration, plus traefik
    extra services when the traefik feature is enabled.
    """
    if not os.path.exists("docker-compose.yml"):
        return
    # Let docker-compose resolve the effective configuration.
    yaml_output = run("docker-compose", "config")
    parsed_config = yaml.load(yaml_output, yaml.SafeLoader)
    docker_compose_config = Dotty(parsed_config)
    # Skip everything when the configuration hasn't changed since last run.
    if self.current_yaml_output == yaml_output:
        return
    self.current_yaml_output = yaml_output
    services = docker_compose_config.get('services')
    if not services:
        return
    # Deterministic output order: services sorted by name.
    for service_name in sorted(services.keys()):
        service_config = services.get(service_name)
        environments = self._retrieve_environment_data(service_config)
        ports = self._retrieve_service_ports(service_config)
        docker_binaries = self._retrieve_binaries_data(service_config)
        vhosts = self._retrieve_vhosts_data(service_config)
        output = self._output_data(service_name, environments, ports,
                                   docker_binaries, vhosts)
        if output:
            print(output)
            print()
    if features.has('traefik'):
        # Traefik extra services are declared outside docker-compose.
        for id_, extra_service_data, _ in TraefikExtraServicesAction.get_extra_services(
        ):
            output = self._output_traefik_data(id_, extra_service_data)
            print(output)
            print()
class PTSubClassifier(abc.ABC):
    """
    An abstract base class that's the sub-component in the composite design pattern.

    Currently, this sub-component is used to define only leaves. The composite of
    its leaves is defined as a concrete implementation of the parent (abstract)
    component.

    All leaves will define the method 'classify', which returns classifications
    and info_object parameters.
    """

    def __init__(self, header_dicom: dict, acquisition):
        """
        Args:
            header_dicom (dict): This is just the dicom header info similar to
                file.info['header']['dicom'].
            acquisition (flywheel.Acquisition): A flywheel acquisition container object
        """
        # Dotty allows dotted-path lookups into the nested DICOM header dict.
        self.header_dicom = Dotty(header_dicom)
        self.acquisition = acquisition
        self.label = acquisition.label

    @abc.abstractmethod
    def classify(self, classifications, info_object):
        """Returns updated classifications and info_object

        Args:
            classifications (dict): A dictionary matching flywheel modality
                specific classification. Note the classification for a modality
                can be fetched with `fw.get_modality('PT')['classification']`
                for a PT modality for instance.
            info_object (dict): Info dictionary attribute of a file object.
        """
        # Fix: the original `raise NotImplemented` raised a TypeError at call
        # time, because NotImplemented is a constant, not an exception type.
        raise NotImplementedError

    def get_dicom_tag(self, dotty_key: str):
        """Returns the value of single_header_object at dotty_key location.

        Args:
            dotty_key (str): A string to reference the location of the targeted
                value (e.g. 'RadiopharmaceuticalInformationSequence.0.
                RadionuclideCodeSequence.0.CodeValue')
        """
        return self.header_dicom.get(dotty_key)

    @staticmethod
    def warn_if_isotope_different_from_previously_found(isotope, classification):
        # Log a warning when a non-None isotope disagrees with what was
        # already recorded in the classification.
        if classification['Isotope']:
            if isotope not in classification['Isotope'] and (isotope is not None):
                log.warning(
                    f'Isotope from CodeMeaning ({isotope}) is different from the one previously found '
                    f'({classification["Isotope"]})')
def _configure_defaults(self, feature_config: Dotty):
    """
    Auto-detect the shell when not configured, and make every configured path
    directory absolute (relative entries are resolved against project home).

    :param feature_config: mutable feature configuration.
    :raise FeatureConfigurationAutoConfigureError: when no known shell can be
        detected from the COMSPEC/SHELL environment variables.
    """
    if not feature_config.get('shell'):
        comspec = os.environ.get('COMSPEC')
        shell = os.environ.get('SHELL')
        # Windows cmd is detected through COMSPEC, unix shells through SHELL.
        if comspec and comspec.endswith('cmd.exe'):
            detected = 'cmd'
        elif shell and shell.endswith('bash'):
            detected = 'bash'
        elif shell and shell.endswith('zsh'):
            detected = 'zsh'
        else:
            raise FeatureConfigurationAutoConfigureError(self, 'shell')
        feature_config['shell'] = detected
    # Resolve relative path entries against the project home.
    feature_config['path.directories'] = [
        directory if os.path.isabs(directory)
        else os.path.join(config.paths.project_home, directory)
        for directory in feature_config.get('path.directories')
    ]
def _configure_defaults(self, feature_config: Dotty):
    """
    Compute default values for the traefik feature configuration.

    certs_directory and config_directory default to well-known locations under
    the ddb home directory when they exist on disk. Extra services get their
    traefik rule derived from their domain. The feature is disabled entirely
    when no configuration directory could be found.

    :param feature_config: mutable feature configuration to fill in.
    :raise FeatureConfigurationAutoConfigureError: when an extra service lacks
        a domain while one is required.
    """
    certs_directory = feature_config.get('certs_directory')
    if not certs_directory and config.paths.home:
        certs_directory = os.path.join(config.paths.home, 'certs')
        if os.path.exists(certs_directory):
            feature_config['certs_directory'] = certs_directory
    config_directory = feature_config.get('config_directory')
    if not config_directory and config.paths.home:
        config_directory = os.path.join(config.paths.home, 'traefik', 'config')
        if os.path.exists(config_directory):
            feature_config['config_directory'] = config_directory
        # docker-toolbox layout: NOTE(review) — when both directories exist,
        # this overwrites the value set just above; confirm that precedence
        # is intended.
        config_directory = os.path.join(config.paths.home, 'docker-toolbox',
                                        '.docker', 'traefik', 'hosts')
        if os.path.exists(config_directory):
            feature_config['config_directory'] = config_directory
    extra_services = feature_config.get('extra_services')
    if extra_services:
        for extra_service in extra_services.values():
            domain = extra_service.get('domain')
            if not extra_service.get('rule'):
                if not domain:
                    raise FeatureConfigurationAutoConfigureError(
                        self, 'extra_services',
                        "domain must be defined when rule is not defined.")
                # Default traefik routing rule built from the domain.
                extra_service['rule'] = "Host(`%s`)" % domain
            if extra_service.get('https') is not False and not domain:
                raise FeatureConfigurationAutoConfigureError(
                    self, 'extra_services',
                    "domain must be defined when https is not False.")
    if not feature_config.get('config_directory'):
        # Without a traefik configuration directory the feature cannot operate.
        feature_config['disabled'] = True
def test_config_output_extra_filenames(self, project_loader,
                                       capsys: CaptureFixture):
    """`ddb config` output merges values coming from extra configuration filenames."""
    project_loader("extra-filenames")
    main(["config"])
    configuration = Dotty(yaml.safe_load(capsys.readouterr().out))
    # Values from the main file and both extra files are all present.
    assert configuration['app.value'] == 'local'
    assert configuration['some'] is True
    assert configuration['another'] is True
    # The extra filenames themselves are reported in core.configuration.extra.
    assert configuration['core.configuration.extra'] == [
        'some.custom.yml', 'another.config.file'
    ]
    reset()
def test_config_more_properties_jsonnet_docker_compose(
        self, project_loader, capsys: CaptureFixture):
    """`ddb config <prop>` prints only the requested subtree, re-rooted at the full path."""
    project_loader("more-properties")
    main(["config", "jsonnet.docker.compose"])
    configuration = Dotty(yaml.safe_load(capsys.readouterr().out))
    # The requested property is present with its overridden value...
    assert configuration[
        'jsonnet.docker.compose.project_name'] == 'yo-custom'
    # ...and sibling/unrelated configuration subtrees are filtered out.
    assert 'jsonnet.docker.registry.name' not in configuration
    assert 'jsonnet.docker.registry.repository' not in configuration
    assert 'jsonnet.docker.virtualhost.redirect_to_https' not in configuration
    assert 'docker' not in configuration
    assert 'core' not in configuration
    reset()
def test_named_user_group(self, project_loader):
    """configure resolves named user/group (root/nobody) to a numeric uid:gid."""
    project_loader("named-user-group")
    main(["configure"])
    assert os.path.exists('docker-compose.yml')
    with open('docker-compose.yml', 'r') as f:
        docker_compose = yaml.load(f, yaml.SafeLoader)
    with open('docker-compose.expected.yml', 'r') as f:
        docker_compose_expected = yaml.load(f, yaml.SafeLoader)
    # Patch the expected file with this machine's uid/gid, since the numeric
    # ids for root/nobody can differ across systems.
    uid, _ = get_user_uid_gid('root')
    gid = get_group_gid('nobody')
    Dotty(docker_compose_expected)['services.maven.user'] = f"{uid}:{gid}"
    assert docker_compose == docker_compose_expected
def test_use_custom_separator_and_custom_escape_char(self):
    """Custom separator (',') and escape char ('$') drive key splitting."""
    sep = ','
    esc = '$'
    dot = Dotty({}, separator=sep, esc_char=esc)
    # Plain separators nest three levels deep.
    dot['abcd,efg,hij'] = 'test'
    # '$' escapes the following separator: 'efg,hij' is one literal key.
    dot['abcd,efg$,hij'] = 'test2'
    # A backslash escapes the escape char itself: the key is literally 'efg$'.
    dot[r'abcd,efg\$,hij'] = 'test3'
    self.assertDictEqual(
        dot._data, {
            'abcd': {
                'efg': {
                    'hij': 'test',
                },
                'efg,hij': 'test2',
                'efg$': {
                    'hij': 'test3',
                },
            },
        })