def _configure_defaults(self, feature_config: Dotty):
    """
    Configure default suffixes and includes for the symlinks feature.

    Suffixes default to the environment names from ``core.env.available``
    (prefixed with a dot), truncated to start at the current environment.

    :param feature_config: the feature configuration to update in place
    :raise FeatureConfigurationError: when neither symlinks.suffixes nor
        core.env.available is defined
    """
    suffixes = feature_config.get('suffixes')
    if not suffixes:
        try:
            environments = config.data["core.env.available"]
        except KeyError as err:
            raise FeatureConfigurationError(self, "core.env.available or symlinks.suffixes should be defined.") \
                from err
        candidates = ["." + environment for environment in environments]
        active_env = config.data.get("core.env.current")
        # When no current environment is set, fall back to the last available one.
        active_suffix = "." + active_env if active_env else candidates[-1]
        suffixes = candidates[candidates.index(active_suffix):]
        feature_config["suffixes"] = suffixes
    if feature_config.get('includes') is None:
        feature_config["includes"] = FileWalker.build_default_includes_from_suffixes(suffixes)
def get_old_value(self, config: Dotty):
    """
    Return the value expressed under the old (legacy) configuration key.

    When the new key is present, its value is mapped back to the legacy
    representation through ``rollback_transformer``; otherwise the old key
    is read directly.

    :param config: the configuration to read from
    :return: the value under the old configuration scheme
    :raise KeyError: when neither the new nor the old key is present
    """
    if self.new_config_key in config:
        return self.rollback_transformer(config.get(self.new_config_key), config)
    if self.old_config_key in config:
        return config.get(self.old_config_key)
    # Name both keys in the error so callers see what was looked up,
    # instead of an uninformative bare KeyError.
    raise KeyError(f"{self.new_config_key}/{self.old_config_key}")
def _configure_defaults(self, feature_config: Dotty):
    """
    Fill missing VCS metadata (branch, version, tag, hash, short_hash) from
    git when the project is a git repository.

    :param feature_config: the feature configuration to update in place
    """
    if not is_git_repository():
        return
    # Map each configuration key to the VCS accessor used when the key is
    # not already set; replaces five copy-pasted stanzas.
    defaults = (
        ("branch", get_branch_from_vcs),
        ("version", get_version_from_vcs),
        ("tag", get_tag_from_vcs),
        ("hash", get_hash_from_vcs),
        ("short_hash", get_short_hash_from_vcs),
    )
    for key, from_vcs in defaults:
        if feature_config.get(key) is None:
            feature_config[key] = from_vcs()
def _retrieve_service_ports(service_config: Dotty) -> List[ServicePort]:  # pylint: disable=no-self-use
    """
    Retrieve services ports data
    :param service_config: the service configuration
    :return: a list of service port
    """
    ports = service_config.get('ports')
    if not ports:
        return []

    def _expand(entry):
        # A string entry uses the docker-compose short port syntax.
        if isinstance(entry, str):
            return ServicePort.parse(entry)
        # A mapping entry uses the long syntax; unset fields default to None.
        fields = dict.fromkeys(('target', 'published', 'protocol', 'mode', 'external_ip'))
        fields.update(entry)
        return [ServicePort(**fields)]

    return [service_port for entry in ports for service_port in _expand(entry)]
def _retrieve_vhosts_data(service_config: Dotty) -> List[str]: # pylint: disable=no-self-use """ Retrieve vhosts data :param service_config: the service configuration :return: a list containing vhosts data """ labels = service_config.get('labels') if not labels: return [] vhosts_regex_re = re.compile(r"^Host\(`(.+?)`\)$") vhosts_labels = [] for key in labels.keys(): value = labels.get(key) match = vhosts_regex_re.match(value) if not match: continue http_url = 'http://{}/'.format(match.group(1)) https_url = 'https://{}/'.format(match.group(1)) if '-tls.' in key: try: vhosts_labels.remove(http_url) except ValueError: pass vhosts_labels.append(https_url) continue if https_url not in vhosts_labels: vhosts_labels.append(http_url) return vhosts_labels
def _retrieve_environment_data(service_config: Dotty) -> Dict[str, str]: """ Retrieve environment data :param service_config: the service configuration :return: a dict containing environment variables """ environments = service_config.get('environment') if not environments: return {} return environments
def execute(self):
    """
    Execute action: parse the rendered docker-compose configuration and emit
    the related events, collecting certificate domains made available while
    parsing.
    """
    if not os.path.exists("docker-compose.yml"):
        return
    yaml_output = run("docker-compose", "config")
    # Check the cache BEFORE parsing: the original parsed the YAML and built
    # a Dotty even when the output had not changed since the previous run.
    if self.current_yaml_output == yaml_output:
        return
    self.current_yaml_output = yaml_output
    parsed_config = yaml.load(yaml_output, yaml.SafeLoader)
    docker_compose_config = Dotty(parsed_config)
    events.docker.docker_compose_config(docker_compose_config=docker_compose_config)
    services = docker_compose_config.get('services')
    if not services:
        return
    events.docker.docker_compose_before_events(docker_compose_config=docker_compose_config)
    cert_domains = []

    def on_available(domain: str, wildcard: bool, private_key: Union[bytes, str], certificate: Union[bytes, str]):
        """
        When a certificate is available.
        :param domain:
        :param wildcard:
        :param private_key:
        :param certificate:
        :return:
        """
        cert_domains.append(domain)

    off = bus.on("certs:available", on_available)
    try:
        self._parse_docker_compose(docker_compose_config, services)
    finally:
        # Always unsubscribe, even if parsing raises.
        off()
    self._update_cache_and_emit_certs_remove(cert_domains)
    events.docker.docker_compose_after_events(docker_compose_config=docker_compose_config)
def _configure_defaults(self, feature_config: Dotty):
    """
    Configure shell feature defaults: detect the current shell from the
    environment and make every configured path directory absolute.

    :param feature_config: the feature configuration to update in place
    :raise FeatureConfigurationAutoConfigureError: when no supported shell
        (cmd, bash, zsh) can be detected
    """
    if not feature_config.get('shell'):
        comspec = os.environ.get('COMSPEC')
        shell = os.environ.get('SHELL')
        if comspec and comspec.endswith('cmd.exe'):
            feature_config['shell'] = 'cmd'
        elif shell and shell.endswith('bash'):
            feature_config['shell'] = 'bash'
        elif shell and shell.endswith('zsh'):
            feature_config['shell'] = 'zsh'
        else:
            raise FeatureConfigurationAutoConfigureError(self, 'shell')
    # Guard against a missing 'path.directories' key: the previous code
    # crashed with TypeError when iterating None.
    directories = feature_config.get('path.directories') or []
    absolute_directories = []
    for directory in directories:
        if os.path.isabs(directory):
            absolute_directories.append(directory)
        else:
            # Relative entries are resolved against the project home.
            absolute_directories.append(os.path.join(config.paths.project_home, directory))
    feature_config['path.directories'] = absolute_directories
class PTSubClassifier(abc.ABC):
    """
    An abstract base class that's the sub-component in the composite design pattern.

    Currently, this sub-component is used to define only leaves. The composite of its
    leaves is defined as a concrete implementation of the parent (abstract) component.
    All leaves will define the method 'classify', which returns classifications and
    info_object parameters.
    """

    def __init__(self, header_dicom: dict, acquisition):
        """
        Args:
            header_dicom (dict): This is just the dicom header info similar to
                file.info['header']['dicom'].
            acquisition (flywheel.Acquisition): A flywheel acquisition container object
        """
        self.header_dicom = Dotty(header_dicom)
        self.acquisition = acquisition
        self.label = acquisition.label

    @abc.abstractmethod
    def classify(self, classifications, info_object):
        """Returns updated classifications and info_object

        Args:
            classifications (dict): A dictionary matching flywheel modality specific
                classification. Note the classification for a modality can be fetched
                with `fw.get_modality('PT')['classification']` for a PT modality for
                instance.
            info_object (dict): Info dictionary attribute of a file object.
        """
        # Fix: `raise NotImplemented` raises a TypeError because NotImplemented
        # is a value, not an exception class. NotImplementedError is the correct
        # exception for an unimplemented abstract method.
        raise NotImplementedError

    def get_dicom_tag(self, dotty_key: str):
        """Returns the value of single_header_object at dotty_key location.

        Args:
            dotty_key (str): A string to reference the location of the targeted value
                (e.g. 'RadiopharmaceuticalInformationSequence.0.RadionuclideCodeSequence.0.CodeValue')
        """
        return self.header_dicom.get(dotty_key)

    @staticmethod
    def warn_if_isotope_different_from_previously_found(isotope, classification):
        """Log a warning when *isotope* differs from classification['Isotope']."""
        if classification['Isotope']:
            if isotope not in classification['Isotope'] and (isotope is not None):
                log.warning(
                    f'Isotope from CodeMeaning ({isotope}) is different from the one previously found '
                    f'({classification["Isotope"]})')
def _configure_defaults(self, feature_config: Dotty):
    """
    Configure certs feature defaults: locate the certificates and traefik
    configuration directories, default extra-service rules from their domain,
    and disable the feature when no configuration directory exists.

    :param feature_config: the feature configuration to update in place
    :raise FeatureConfigurationAutoConfigureError: when an extra service has
        neither a domain nor a rule, or requires https without a domain
    """
    certs_directory = feature_config.get('certs_directory')
    if not certs_directory and config.paths.home:
        certs_directory = os.path.join(config.paths.home, 'certs')
        # Only keep the candidate directory if it actually exists on disk.
        if os.path.exists(certs_directory):
            feature_config['certs_directory'] = certs_directory
    config_directory = feature_config.get('config_directory')
    if not config_directory and config.paths.home:
        config_directory = os.path.join(config.paths.home, 'traefik', 'config')
        if os.path.exists(config_directory):
            feature_config['config_directory'] = config_directory
        # NOTE(review): the docker-toolbox candidate is checked second and so
        # overrides the traefik/config path when both exist — confirm this
        # precedence is intentional.
        config_directory = os.path.join(config.paths.home, 'docker-toolbox', '.docker', 'traefik', 'hosts')
        if os.path.exists(config_directory):
            feature_config['config_directory'] = config_directory
    extra_services = feature_config.get('extra_services')
    if extra_services:
        for extra_service in extra_services.values():
            domain = extra_service.get('domain')
            if not extra_service.get('rule'):
                # Without an explicit rule, one is generated from the domain.
                if not domain:
                    raise FeatureConfigurationAutoConfigureError(
                        self, 'extra_services',
                        "domain must be defined when rule is not defined.")
                extra_service['rule'] = "Host(`%s`)" % domain
            # https defaults to enabled unless explicitly set to False, and
            # then requires a domain for certificate generation.
            if extra_service.get('https') is not False and not domain:
                raise FeatureConfigurationAutoConfigureError(
                    self, 'extra_services',
                    "domain must be defined when https is not False.")
    # No usable configuration directory was found: disable the whole feature.
    if not feature_config.get('config_directory'):
        feature_config['disabled'] = True
class FormatData:
    """Filter a nested mapping with dotted keys: include (``inc``) or exclude
    (``exc``) a set of keys from the wrapped item."""

    # Unique sentinel for "key not present". Unlike the previous string
    # sentinel '_missing', it cannot collide with a legitimate stored value.
    _MISSING = object()

    def __init__(self, item, separator='.'):
        """
        :param item: the mapping to filter
        :param separator: the separator used for nested (dotted) key access
        """
        self.output = {}
        self.separator = separator
        self.item = Dotty(item, self.separator)

    def inc(self, keys, skip_missing=True):
        """Return a dict containing only *keys* from the item.

        :param keys: dotted keys to include
        :param skip_missing: when True, absent keys are omitted; when False,
            they are included with the placeholder string '_missing'
        :return: a plain dict with the selected keys
        """
        output = Dotty({}, self.separator)
        for k in keys:
            value = self.item.get(k, default=self._MISSING)
            if value is self._MISSING:
                if skip_missing:
                    continue
                # Preserve the historical placeholder for missing keys.
                value = '_missing'
            output[k] = value
        return dict(output)

    def exc(self, keys):
        """Return the item as a dict with *keys* removed (missing keys ignored).

        :param keys: dotted keys to delete from the item
        :return: a plain dict without the excluded keys
        """
        for k in keys:
            try:
                del self.item[k]
            except KeyError:
                continue
        return dict(self.item)
def execute(self):
    """
    Execute action: print environment, ports, binaries and vhosts information
    for each docker-compose service, plus traefik extra services when the
    traefik feature is enabled.
    """
    if not os.path.exists("docker-compose.yml"):
        return
    yaml_output = run("docker-compose", "config")
    # Check the cache BEFORE parsing: the original parsed the YAML and built
    # a Dotty even when the output had not changed since the previous run.
    if self.current_yaml_output == yaml_output:
        return
    self.current_yaml_output = yaml_output
    parsed_config = yaml.load(yaml_output, yaml.SafeLoader)
    docker_compose_config = Dotty(parsed_config)
    services = docker_compose_config.get('services')
    if not services:
        return
    # Sort service names for a deterministic, readable output order.
    for service_name in sorted(services.keys()):
        service_config = services.get(service_name)
        environments = self._retrieve_environment_data(service_config)
        ports = self._retrieve_service_ports(service_config)
        docker_binaries = self._retrieve_binaries_data(service_config)
        vhosts = self._retrieve_vhosts_data(service_config)
        output = self._output_data(service_name, environments, ports, docker_binaries, vhosts)
        if output:
            print(output)
            print()
    if features.has('traefik'):
        for id_, extra_service_data, _ in TraefikExtraServicesAction.get_extra_services():
            output = self._output_traefik_data(id_, extra_service_data)
            print(output)
            print()
class PTSubClassifier(abc.ABC):
    """
    An abstract base class that's the component in the composite design pattern.

    All children will define the method 'classify', which returns classifications
    and info_object parameters.
    """

    def __init__(self, header_dicom: dict, acquisition):
        """
        Args:
            header_dicom (dict): This is just the dicom header info similar to
                file.info['header']['dicom'].
            acquisition (flywheel.Acquisition): A flywheel acquisition container object
        """
        self.header_dicom = Dotty(header_dicom)
        self.acquisition = acquisition
        self.label = acquisition.label

    @abc.abstractmethod
    def classify(self, classifications, info_object):
        """Returns updated classifications and info_object

        Args:
            classifications (dict): A dictionary matching flywheel modality specific
                classification. Note the classification for a modality can be fetched
                with `fw.get_modality('PT')['classification']` for a PT modality for
                instance.
            info_object (dict): Info dictionary attribute of a file object.
        """
        # Fix: `raise NotImplemented` raises a TypeError because NotImplemented
        # is a value, not an exception class. NotImplementedError is the correct
        # exception for an unimplemented abstract method.
        raise NotImplementedError

    def get_dicom_tag(self, dotty_key: str):
        """Returns the value of single_header_object at dotty_key location.

        Args:
            dotty_key (str): A string to reference the location of the targeted value
                (e.g. 'RadiopharmaceuticalInformationSequence.0.RadionuclideCodeSequence.0.CodeValue')
        """
        return self.header_dicom.get(dotty_key)
def _retrieve_binaries_data(service_config: Dotty) -> List[str]: # pylint: disable=no-self-use """ Retrieve binaries data :param service_config: the service configuration :return: a list containing binaries """ labels = service_config.get('labels') if not labels: return [] binary_regex_re = re.compile( r"^\s*ddb\.emit\.(.+?)(?:\[(.+?)\])?(?:\((.+?)\))?\s*$") binaries_labels = [] for key in labels.keys(): match = binary_regex_re.match(key) if not match: continue event_name = match.group(1) binary_name = match.group(2) if event_name == 'docker:binary' and binary_name not in binaries_labels: binaries_labels.append(match.group(2)) return binaries_labels
def value_for_field(self, obj, field):
    """
    Compute the value for an extra *field* of project *obj*.

    Only 'columns' is supported: returns a dict mapping column keys
    (``data.<col> [<unit>]`` plus ``structures.<label>`` entries) to their
    [min, max] range (or None). Any other field raises UnknownFieldError.
    """
    # add columns key to response if requested
    if field == "columns":
        # Discover column names from the 'data' sub-documents of the first
        # 20 contributions of the project.
        objects = list(
            Contributions.objects.aggregate(*[
                {
                    "$match": {
                        "project": obj.id
                    }
                },
                # NOTE contributors need to make sure that all columns are
                # included in first 20 contributions
                {
                    "$limit": 20
                },
                # Turn the 'data' document into an array of {k, v} pairs.
                {
                    "$project": {
                        "_id": 0,
                        "akv": {
                            "$objectToArray": "$data"
                        }
                    }
                },
                {
                    "$unwind": "$akv"
                },
                # Expand one nesting level: object values become their own
                # {k, v} arrays, scalars map to a [{}] placeholder.
                {
                    "$project": {
                        "root": "$akv.k",
                        "level2": {
                            "$switch": {
                                "branches": [{
                                    "case": {
                                        "$eq": [
                                            {
                                                "$type": "$akv.v"
                                            },
                                            "object",
                                        ]
                                    },
                                    "then": {
                                        "$objectToArray": "$akv.v"
                                    },
                                }],
                                "default": [{}],
                            }
                        },
                    }
                },
                {
                    "$unwind": "$level2"
                },
                # Collapse to a flat column name: scalar entries and the
                # display/value/unit triplet map to the root key, everything
                # else becomes "root.subkey".
                {
                    "$project": {
                        "column": {
                            "$switch": {
                                "branches": [
                                    {
                                        "case": {
                                            "$eq": ["$level2", {}]
                                        },
                                        "then": "$root",
                                    },
                                    {
                                        "case": {
                                            "$eq": ["$level2.k", "display"]
                                        },
                                        "then": "$root",
                                    },
                                    {
                                        "case": {
                                            "$eq": ["$level2.k", "value"]
                                        },
                                        "then": "$root",
                                    },
                                    {
                                        "case": {
                                            "$eq": ["$level2.k", "unit"]
                                        },
                                        "then": "$root",
                                    },
                                ],
                                "default": {
                                    "$concat": ["$root", ".", "$level2.k"]
                                },
                            }
                        }
                    }
                },
            ]))
        # neither $group nor set maintain order! Dicts are ordered in
        # python 3.7+, so dict.fromkeys dedupes while preserving order.
        # NOTE(review): the generator below shadows the `obj` parameter;
        # harmless (generator scope) but confusing.
        columns = {}
        for col in list(dict.fromkeys(obj["column"] for obj in objects)):
            value_field, unit_field = f"data.{col}.value", f"data.{col}.unit"
            unit_query = {
                "project": obj.id,
                f'data__{col.replace(".", "__")}__exists': True,
            }
            # Sample a single contribution to read the column's unit.
            unit_contribs = Contributions.objects.only(unit_field).filter(
                **unit_query)
            unit_sample = Dotty(unit_contribs.limit(-1).first().to_mongo())
            # Min/max over all contributions that define this value field.
            min_max = list(
                Contributions.objects.aggregate(*[
                    {
                        "$match": {
                            "project": obj.id,
                            value_field: {
                                "$exists": True
                            },
                        }
                    },
                    {
                        "$group": {
                            "_id": None,
                            "max": {
                                "$max": f"${value_field}"
                            },
                            "min": {
                                "$min": f"${value_field}"
                            },
                        }
                    },
                ]))
            rng = [min_max[0]["min"], min_max[0]["max"]
                   ] if min_max else None
            unit = unit_sample.get(unit_field)
            if min_max and unit is None:
                unit = ""  # catch missing unit field in data
            # Numeric columns get their unit appended in brackets.
            key = f"data.{col} [{unit}]" if min_max else f"data.{col}"
            columns[key] = rng
        # Add structure labels from the contribution with the most structures.
        contributions = Contributions.objects.only("pk").filter(
            project=obj.id)
        agg = list(
            Structures.objects.aggregate(*[
                {
                    "$match": {
                        "contribution": {
                            "$in": [c.pk for c in contributions]
                        }
                    }
                },
                {
                    "$group": {
                        "_id": "$contribution",
                        "count": {
                            "$sum": 1
                        },
                        "labels": {
                            "$addToSet": "$label"
                        },
                    }
                },
                {
                    "$sort": {
                        "count": -1
                    }
                },
                {
                    "$limit": 1
                },
            ]))
        if agg:
            for label in agg[0]["labels"]:
                columns[f"structures.{label}"] = None
        return columns
    else:
        raise UnknownFieldError
def _configure_defaults(self, feature_config: Dotty):
    """
    Configure core feature defaults: project name, domain, current
    environment, filesystem paths and release asset name, then refresh the
    global configuration paths.

    :param feature_config: the feature configuration to update in place
    :raise FeatureConfigurationReadOnlyError: when domain.value is set by the user
    :raise FeatureConfigurationAutoConfigureError: when env.current cannot be
        determined or is not among the available environments
    """
    if not feature_config.get('project.name'):
        feature_config['project.name'] = os.path.basename(config.paths.project_home)
    if not feature_config.get('domain.sub'):
        # Sub-domain derives from the project name, sanitized for DNS usage.
        feature_config['domain.sub'] = feature_config[
            'project.name'].replace("_", "-").replace(" ", "-")
    # domain.value is computed from domain.sub + domain.ext and must not be
    # set directly.
    if feature_config.get('domain.value'):
        raise FeatureConfigurationReadOnlyError(self, 'domain.value')
    feature_config['domain.value'] = '.'.join(
        (feature_config['domain.sub'], feature_config['domain.ext']))
    if not feature_config.get('env.current') and feature_config.get('env.available'):
        # Default to the last available environment.
        feature_config['env.current'] = feature_config['env.available'][-1]
    # Use .get with a default so a missing env.available raises the proper
    # configuration error instead of a bare KeyError.
    if not feature_config.get('env.current') or \
            feature_config.get('env.current') not in feature_config.get('env.available', []):
        raise FeatureConfigurationAutoConfigureError(self, 'env.current')
    if not feature_config.get('path.project_home') and config.paths.project_home:
        feature_config['path.project_home'] = config.paths.project_home
    if not feature_config.get('path.home') and config.paths.home:
        feature_config['path.home'] = config.paths.home
    if not feature_config.get('path.ddb_home') and config.paths.ddb_home:
        feature_config['path.ddb_home'] = config.paths.ddb_home
    self._configure_release_asset_name_defaults(feature_config)
    # Fix: assign to config.paths — every other reader uses config.paths.*;
    # the previous code wrote to config.path, which nothing reads.
    config.paths = ConfigPaths(
        ddb_home=feature_config.get('path.ddb_home'),
        home=feature_config.get('path.home'),
        project_home=feature_config.get('path.project_home'))
def _configure_defaults(self, feature_config: Dotty):
    """
    Default the 'includes' patterns from the configured suffixes and
    extensions when they are not explicitly set.

    :param feature_config: the feature configuration to update in place
    """
    if feature_config.get("includes") is None:
        feature_config["includes"] = TemplateFinder.build_default_includes_from_suffixes(
            feature_config["suffixes"], feature_config["extensions"])
def _configure_release_asset_name_defaults(self, feature_config: Dotty): if not feature_config.get('release_asset_name'): feature_config[ 'release_asset_name'] = self._get_default_binary_remote_name()