Example #1
 def main(self, domain: cli.Set('habits',
                                'dailies',
                                'todos',
                                case_sensitive=False),
          op: cli.Set('up', 'down',
                      case_sensitive=False), *task_ids: TaskId):
     task_ids_list = []
     for tasks in task_ids:
         task_ids_list.extend(tasks)
     try:
         with open(STALE_FILE) as stale_file:
             stale = json.load(stale_file)
     except (OSError, json.JSONDecodeError):
         stale = {}
     if domain not in stale:
         stale[domain] = {}
     if op not in stale[domain]:
         stale[domain][op] = {}
     for task_id in task_ids_list:
         if task_id not in stale[domain][op]:
             stale[domain][op][task_id] = 0
         stale[domain][op][task_id] += 1
         print(
             _("Added '{task_id}' to {domain} with {op}").
             format(  # NOQA: Q000
                 task_id=task_id, domain=domain, op=op))
     with open(STALE_FILE, 'w') as stale_file:
         json.dump(stale, stale_file)
Example #2
class SimpleApp(cli.Application):
    @cli.switch(["a"])
    def spam(self):
        print("!!a")

    @cli.switch(["b", "bacon"],
                argtype=int,
                mandatory=True,
                envname="PLUMBUM_TEST_BACON")
    def bacon(self, param):
        """give me some bacon"""
        print("!!b", param)

    eggs = cli.SwitchAttr(["e"],
                          str,
                          help="sets the eggs attribute",
                          envname="PLUMBUM_TEST_EGGS")
    cheese = cli.Flag(["--cheese"], help="cheese, please")
    chives = cli.Flag(["--chives"], help="chives, instead")
    verbose = cli.CountOf(["v"], help="increases the verbosity level")
    benedict = cli.CountOf(["--benedict"],
                           help="""a very long help message with lots of
        useless information that nobody would ever want to read, but heck, we need to test
        text wrapping in help messages as well""")

    csv = cli.SwitchAttr(["--csv"], cli.Set("MIN", "MAX", int, csv=True))
    num = cli.SwitchAttr(["--num"], cli.Set("MIN", "MAX", int))

    def main(self, *args):
        old = self.eggs
        self.eggs = "lalala"
        self.eggs = old
        self.tailargs = args
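
A minimal sketch (not part of the original example) of driving SimpleApp programmatically, the way plumbum's own test suite exercises applications; the argv values here are made up for illustration:

if __name__ == "__main__":
    # argv[0] is the program name; "-b" is mandatory, and "--csv" accepts set members or ints.
    inst, retcode = SimpleApp.run(
        ["prog", "-a", "-b", "5", "-e", "7", "--csv", "MIN,MAX,3"],
        exit=False,
    )
    print(retcode, inst.eggs, inst.tailargs)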
Example #3
class ServidorTest(cli.Application):
    verbose = cli.Flag("-v", help="Start the server in verbose mode")
    port = 8080  # Default port
    mode = "TCP"  # Default network protocol
    type = "HTTP"

    @cli.switch(["-p", "--port"], cli.Range(1024,
                                            65535))  # Port between 1024 and 65535
    def server_port(self, port):
        self.port = port

    @cli.switch(["-m", "--mode"],
                cli.Set("TCP", "UDP",
                        case_sensitive=False))  # Network protocol (TCP or UDP)
    def server_mode(self, mode):
        self.mode = mode

    @cli.switch(["-t", "--type"], cli.Set("HTTP",
                                          "HTTPS",
                                          case_sensitive=False))
    def server_type(self, type):
        self.type = type

    def main(self):
        print(self.port, self.mode, self.type, self.verbose)
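
A tiny launcher sketch (not in the original snippet) showing how the application above would typically be started; the module name used in the comment is hypothetical:

if __name__ == "__main__":
    # e.g. python servidor_test.py -v -p 9000 -m udp -t https
    ServidorTest.run()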
Example #4
class HabitsAdd(ApplicationWithApi):
    DESCRIPTION = _("Add a habit <habit>")  # noqa: Q000
    priority = cli.SwitchAttr(
        ['-p', '--priority'],
        cli.Set('0.1', '1', '1.5', '2'),
        default='1',
        help=_("Priority (complexity) of a habit"))  # noqa: Q000
    direction = cli.SwitchAttr(['-d', '--direction'],
                               cli.Set('positive', 'negative', 'both'),
                               default='both',
                               help=_("positive/negative/both"))  # noqa: Q000

    def main(self, *habit: str):
        habit_str = ' '.join(habit)
        if not habit_str:
            self.log.error(_("Empty habit text!"))  # noqa: Q000
            return 1
        super().main()
        self.api.tasks.user.post(type='habit',
                                 text=habit_str,
                                 priority=self.priority,
                                 up=(self.direction != 'negative'),
                                 down=(self.direction != 'positive'))
        res = _("Added habit '{}' with priority {} and direction {}").format(
            habit_str, self.priority, self.direction)  # noqa: Q000
        print(prettify(res))
        Habits.invoke(config_filename=self.config_filename)
Example #5
class ServidorTest(cli.Application):
    verbose = cli.Flag("-v", help="Arrancar el servidor en Verbose mode")
    port = 8080  # Default port
    mode = "TCP"  # Default network protocol
    type = "HTTP"

    @cli.switch(["-p", "--port"],
                cli.Range(1024, 65535))  # Port between 1024 and 65535
    def server_port(self, port):
        self.port = port

    @cli.switch(["-m", "--mode"],
                cli.Set("TCP", "UDP",
                        case_sensitive=False))  # Network protocol (TCP or UDP)
    def server_mode(self, mode):
        self.mode = mode

    @cli.switch(["-t", "--type"], cli.Set("HTTP",
                                          "HTTPS",
                                          case_sensitive=False))
    def server_type(self, type):
        self.type = type

    def main(self):
        print(self.port, self.mode, self.type, self.verbose)
Example #6
class Geet(cli.Application):
    SUBCOMMAND_HELPMSG = False
    DESCRIPTION = colors.yellow | """The l33t version control"""
    PROGNAME = colors.green
    VERSION = colors.blue | "1.7.2"
    COLOR_USAGE = colors.magenta
    COLOR_GROUPS = {
        "Meta-switches": colors.bold,
        "Switches": colors.skyblue1,
        "Subcommands": colors.yellow,
    }

    verbosity = cli.SwitchAttr(
        "--verbosity",
        cli.Set("low", "high", "some-very-long-name", "to-test-wrap-around"),
        help=colors.cyan
        |
        "sets the verbosity level of the geet tool. doesn't really do anything except for testing line-wrapping "
        "in help " * 3,
    )
    verbositie = cli.SwitchAttr(
        "--verbositie",
        cli.Set("low", "high", "some-very-long-name", "to-test-wrap-around"),
        help=colors.hotpink
        |
        "sets the verbosity level of the geet tool. doesn't really do anything except for testing line-wrapping "
        "in help " * 3,
    )
Example #7
class AtlasCsv(cli.Application):
    """Specify training images and labelmaps via a csv file.
    Put the images with any header in the first column, 
    and labelmaps with proper headers in the consecutive columns. 
    The headers in the labelmap columns will be used to name the generated atlas labelmaps.
    """

    target = cli.SwitchAttr(
        ['-t', '--target'],
        cli.ExistingFile,
        help='target image',
        mandatory=True)
    fusions = cli.SwitchAttr(
        ['--fusion'],
        cli.Set("avg", "wavg", "antsJointFusion", case_sensitive=False),
        help='Also create predicted labelmap(s) by combining the atlas labelmaps: '
             'avg is naive mathematical average, wavg is weighted average where weights are computed from MI '
             'between the warped atlases and target image, antsJointFusion is local weighted averaging', default='wavg')
    out = cli.SwitchAttr(
        ['-o', '--outPrefix'], help='output prefix, output labelmaps are saved as outPrefix-mask.nrrd, outPrefix-cingr.nrrd, ...',
        mandatory=True)
    threads = cli.SwitchAttr(['-n', '--nproc'],
                             help='number of processes/threads to use (-1 for all available)',
                             default=8)
    debug = cli.Flag('-d', help='Debug mode, saves intermediate labelmaps to atlas-debug-<pid> in output directory')

    @cli.positional(cli.ExistingFile)
    def main(self, csvFile):
        trainingTable = pd.read_csv(csvFile._path)
        makeAtlases(self.target, trainingTable, self.out, self.fusions, int(self.threads), self.debug)
        logging.info('Made ' + self.out + '-*.nrrd')
Example #8
class RegistryServer(cli.Application):
    mode = cli.SwitchAttr(["-m", "--mode"], cli.Set("UDP", "TCP"), default="UDP",
                          help="Serving mode")

    ipv6 = cli.Flag(["-6", "--ipv6"], help="use ipv6 instead of ipv4")

    port = cli.SwitchAttr(["-p", "--port"], cli.Range(0, 65535), default=REGISTRY_PORT,
                          help="The UDP/TCP listener port")

    logfile = cli.SwitchAttr(["--logfile"], str, default=None,
                             help="The log file to use; the default is stderr")

    quiet = cli.Flag(["-q", "--quiet"], help="Quiet mode (only errors are logged)")

    pruning_timeout = cli.SwitchAttr(["-t", "--timeout"], int,
                                     default=DEFAULT_PRUNING_TIMEOUT, help="Set a custom pruning timeout (in seconds)")

    def main(self):
        if self.mode == "UDP":
            server = UDPRegistryServer(host='::' if self.ipv6 else '0.0.0.0', port=self.port,
                                       pruning_timeout=self.pruning_timeout)
        elif self.mode == "TCP":
            server = TCPRegistryServer(port=self.port, pruning_timeout=self.pruning_timeout)
        setup_logger(self.quiet, self.logfile)
        server.start()
Example #9
class AtlasCsv(cli.Application):
    """Specify training images and labelmaps via a csv file.
    Put the images with any header in the first column, 
    and labelmaps with proper headers in the consecutive columns. 
    The headers in the labelmap columns will be used to name the generated atlas labelmaps.
    """

    target = cli.SwitchAttr(['-t', '--target'],
                            cli.ExistingFile,
                            help='target image',
                            mandatory=True)
    fusions = cli.SwitchAttr(
        ['--fusion'],
        cli.Set("avg", "antsJointFusion", case_sensitive=False),
        help=
        'Also create predicted labelmap(s) by combining the atlas labelmaps')
    out = cli.SwitchAttr(['-o', '--out'],
                         help='output directory',
                         mandatory=True)

    @cli.positional(cli.ExistingFile)
    def main(self, csvFile):
        trainingTable = pd.read_csv(csvFile)
        makeAtlases(self.target, trainingTable, self.out, self.fusions)
        logging.info('Made ' + self.out)
Example #10
class UpdateSystem(cli.Application):
    PROGNAME = "update-system"
    VERSION = "0.1"
    DESCRIPTION = "Update linux system"

    _distro = False

    @cli.switch(["-d", "--distro"],
                cli.Set("arch", "debian", case_sensitive=True),
                mandatory=True)
    def distro(self, distro):
        """Distribution to update"""
        self._distro = distro

    def update_arch(self):
        print("Update arch system")
        pacman = local['pacman']
        pacman_update_cmd = pacman['-Syu']
        sudo[pacman_update_cmd] & FG

    def update_debian(self):
        print("Update debian system")
        apt_cmd = local['apt']
        apt_full_upgrade_cmd = apt_cmd['full-upgrade']
        sudo[apt_full_upgrade_cmd] & FG

    def main(self):
        method_name = 'update_' + self._distro
        method = getattr(self, method_name, lambda: "Invalid distro")
        method()
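
The getattr-based dispatch in main() can be illustrated on its own; a standalone sketch with hypothetical names, not part of the original application:

class _DispatchDemo:
    def update_arch(self):
        return "updating arch"

    def update_debian(self):
        return "updating debian"

    def run(self, distro):
        # Unknown distros fall back to a zero-argument callable.
        return getattr(self, "update_" + distro, lambda: "Invalid distro")()

print(_DispatchDemo().run("arch"))    # updating arch
print(_DispatchDemo().run("gentoo"))  # Invalid distro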
Example #11
class Geet(cli.Application):
    """The l33t version control"""
    PROGNAME = "geet"
    VERSION = "1.7.2"
    
    verbosity = cli.SwitchAttr("--verbosity", cli.Set("low", "high", "some-very-long-name", "to-test-wrap-around"),
        help = "sets the verbosity level of the geet tool. doesn't really do anything except for testing line-wrapping "
        "in help " * 3)
Example #12
class DecayLanguageDecay(cli.Application):
    generator = cli.SwitchAttr(['-G', '--generator'],
                               cli.Set('goofit'),
                               mandatory=True)

    def main(self, filename):
        if self.generator == 'goofit':
            ampgen2goofit(filename)
Example #13
class CLI(cli.Application):
	kernel_ver = cli.SwitchAttr(["-v", "--kernel-version"], mandatory=True, help="set kernel version to 'version'")
	kernel_image = cli.SwitchAttr(["-k", "--kernel"], cli.ExistingFile, mandatory=True, help="include kernel image 'kernel'")
	arch = cli.SwitchAttr(["-A", "--architecture"], cli.Set(*SUPPORTED_ARCHITECTURES), mandatory=True, help="set architecture to 'arch'")
	config = cli.SwitchAttr(["-c", "--config"], mandatory=True, help="set config name 'config'")
	load_addr = cli.SwitchAttr(["-a", "--base"], str, mandatory=True, help="set load address to 'addr' (hex)")
	entry_addr = cli.SwitchAttr(["-e", "--entry-point"], str, mandatory=True, help="set entry point to 'entry' (hex)")

	fdtnum = cli.SwitchAttr(["-n", "--unit-address"], int, help="fdt unit-address 'address'", default=1)

	component = cli.SwitchAttr(["--component"], cli.Set(*SUPPORTED_COMPONENTS), default="kernel")
	compression = cli.SwitchAttr(["-C", "--compression"], cli.Set(*SUPPORTED_COMPRESSIONS), default="lzma", help="set compression type 'comp'")
	checksumAlgos = cli.SwitchAttr(["--checksum-algos"], cli.Set(*SUPPORTED_CHECKSUMS, csv=True), help="Algorithms to use in checksum", default=DEFAULT_CHECKSUM_ALGOS)
	os_name = cli.SwitchAttr(["-O", "--osname"], cli.Set(*SUPPORTED_OSES), help="set operating system to 'os'", default="linux")

	its_file = cli.SwitchAttr(["-o", "--output"], help="create output file 'its_file'", default="-")

	device_tree_blob = cli.SwitchAttr(["-d", "--dtb"], cli.ExistingFile, help="include Device Tree Blob 'dtb'", default=None)
	human_name = cli.SwitchAttr(["-D", "--human-name"], help="human friendly Device Tree Blob 'name'")

	def main(self):
		self.kernel_image = Path(self.kernel_image).absolute()
		self.config = Path(self.config).absolute()

		if self.device_tree_blob:
			self.device_tree_blob = Path(self.device_tree_blob).absolute()

		self.load_addr = literal_eval(self.load_addr)
		self.entry_addr = literal_eval(self.entry_addr)

		resNode = genITS(self.arch, self.os_name, self.kernel_image, self.kernel_ver, self.load_addr, self.entry_addr, self.config, self.fdtnum, self.compression, self.checksumAlgos, human_name=self.human_name, device_tree_blob=self.device_tree_blob, component=self.component)

		res = FDT()
		res.root = resNode
		res = res.to_dts()

		if self.its_file == "-":
			print(res)
		else:
			Path(self.its_file).write_text(res)
Example #14
class AtlasArgs(cli.Application):
    """Specify training images and labelmaps via commandline arguments."""

    target = cli.SwitchAttr(['-t', '--target'],
                            cli.ExistingFile,
                            help='target image',
                            mandatory=True)
    fusion = cli.SwitchAttr(
        ['--fusion'],
        cli.Set("avg", "antsJointFusion", case_sensitive=False),
        list=True,
        help='Also create predicted labelmap(s) by fusing the atlas labelmaps')
    out = cli.SwitchAttr(['-o', '--out'],
                         help='output directory',
                         mandatory=True)

    images = cli.SwitchAttr(
        ['-i', '--images'],
        help='list of images in quotations, e.g. "img1.nrrd img2.nrrd"',
        mandatory=True)
    labels = cli.SwitchAttr(
        ['-l', '--labels'],
        help=
        'list of labelmap images in quotations, e.g. "mask1.nrrd mask2.nrrd cingr1.nrrd cingr2.nrrd"',
        mandatory=True)
    names = cli.SwitchAttr(
        ['-n', '--names'],
        help=
        'list of names for generated labelmaps, e.g. "atlasmask atlascingr"',
        mandatory=True)

    def main(self):
        images = self.images.split()
        labels = self.labels.split()
        labelnames = self.names.split()
        quotient, remainder = divmod(len(labels), len(images))
        if remainder != 0:
            logging.error(
                'Wrong number of labelmaps, must be a multiple of number of images ('
                + str(len(images)) + '). Instead there is a remainder of ' +
                str(remainder))
            sys.exit(1)
        if quotient != len(labelnames):
            logging.error(
                'Wrong number of names, must match number of labelmap training sets: '
                + str(quotient))
            sys.exit(1)
        labelcols = grouper(labels, quotient)
        trainingTable = pd.DataFrame(
            dict(list(zip(labelnames, labelcols)) + [('image', images)]))
        makeAtlases(self.target, trainingTable, self.out, self.fusion)
        logging.info('Made ' + self.out)
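
For reference, a small standalone sketch (with made-up filenames) of the divmod bookkeeping used above: two images and four labelmaps yield two labelmap training sets, so two names are expected:

images = ["img1.nrrd", "img2.nrrd"]
labels = ["mask1.nrrd", "mask2.nrrd", "cingr1.nrrd", "cingr2.nrrd"]
names = ["atlasmask", "atlascingr"]
quotient, remainder = divmod(len(labels), len(images))
# remainder must be 0 and quotient must equal len(names) for the input to be accepted
print(quotient, remainder, quotient == len(names))  # 2 0 True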
Example #15
class ModernizeGooFit(cli.Application):
    set_version = cli.SwitchAttr(['-v', '--version'], cli.Set(*conversion), default='2.0', help='The version to convert')
    source = cli.Flag(['--source'], help="Run on the GooFit Source")

    @cli.positional(cli.ExistingFile)
    def main(self, *src):
        if not src:
            assert self.source, "You must use the --source flag to run over GooFit's source"
            git = local['git']
            with local.cwd(DIR / '../'):
                src = [local.path(n) for n in
                        git('ls-files', '--', '*.cpp', '*.h', '*.cu').splitlines()]
        fix_files(src, self.set_version)
Example #16
class ContainerCreate(cli.Application):
    """
    Create a new container with a predefined strategy.

    We offer a variety of creation policies for a new container. By default a
    basic 'spawn a bash' policy is used. This just leaves you inside a bash
    that is started in the extracted container. After customization you can
    exit the bash and pack up the result.
    """

    _strategy = BashStrategy()

    @cli.switch(["-S", "--strategy"],
                cli.Set("bash", "polyjit", case_sensitive=False),
                help="Defines the strategy used to create a new container.",
                mandatory=False)
    def strategy(self, strategy):
        """Select strategy based on key.

        Args:
            strategy (str): The strategy to select.

        Returns:
            A strategy object.
        """
        self._strategy = {
            "bash": BashStrategy(),
            "polyjit": SetupPolyJITGentooStrategy()
        }[strategy]

    def main(self, *args):
        builddir = str(CFG["build_dir"])
        in_container = str(CFG["container"]["input"])
        out_container = str(CFG["container"]["output"])
        mounts = CFG["container"]["mounts"].value
        shell = str(CFG["container"]["shell"])

        if (in_container is None) or not os.path.exists(in_container):
            in_container = container.Gentoo().local

        in_is_file = os.path.isfile(in_container)
        if in_is_file:
            in_container = setup_container(builddir, in_container)

        self._strategy.run(
            MockObj(builddir=builddir,
                    in_container=in_container,
                    out_container=out_container,
                    mounts=mounts,
                    shell=shell))
        clean_directories(builddir, in_is_file, True)
Example #17
class WalTVirtualSetup(cli.Application):
    _type = None
    _init_system = None
    _start = False

    def main(self):
        """install walt-virtual software"""
        setup(self)

    @cli.switch("--type", cli.Set('SERVER', 'VPN_CLIENT', case_sensitive=False), mandatory=True)
    def set_type(self, install_type):
        """indicate which kind of setup is requested on this device"""
        self._type = install_type.upper()

    @cli.switch("--init-system", cli.Set('SYSTEMD', 'BUSYBOX', case_sensitive=False), mandatory=True)
    def set_init_system(self, init_system):
        """indicate the init system available on this device"""
        self._init_system = init_system.upper()

    @cli.switch("--start")
    def set_start(self):
        """start services once installed"""
        self._start = True
Example #18
class BackupApp(cli.Application):
    VERSION = colors.blue | "1.0.0"

    def init_backup_vscode(self):
        if not os.path.exists(vs_code_backup_path):
            os.mkdir(vs_code_backup_path)
            print(colors.green | ("created %s" % vs_code_backup_path))

    def backup_vscode(self):
        for file_name in vs_code_files:
            target_path = os.path.join(vs_code_config_path, file_name)
            if os.path.exists(target_path):
                cp("-r", target_path, vs_code_backup_path)
                print(colors.green | ('cp %s' % target_path))
            else:
                print(colors.red | ('file not found: %s' % target_path))
        print("vscode config backup completed")

    def recover_vscode(self):
        for file_name in vs_code_files:
            target_path = os.path.join(vs_code_config_path, file_name)
            backup_file_path = os.path.join(vs_code_backup_path, file_name)
            if os.path.exists(backup_file_path):
                if os.path.isdir(target_path):
                    cp("-rf", backup_file_path, vs_code_config_path)
                else:
                    cp("-f", backup_file_path, target_path)
                print(colors.green | ('cp %s covers %s' %
                                      (backup_file_path, target_path)))
            else:
                print(colors.red | ('file not found: %s' % backup_file_path))
        print("vscode config recover completed")

    @cli.switch(["-b", "--backup"], help="backup config")
    def backup(self):
        print("start backup")
        # backup vscode config
        self.init_backup_vscode()
        self.backup_vscode()
        print("backup completed")

    @cli.switch(["-r", "--recover"],
                cli.Set("all", "vscode"),
                help="recover config")
    def recover(self, mode):
        if mode == 'vscode':
            self.recover_vscode()

    def main(self):
        pass
Example #19
class App(cli.Application):
    """Align a given labelmap (usually a mask) to make another labelmap"""

    infile = cli.SwitchAttr(['-i', '--input'],
                            cli.ExistingFile,
                            help='structural (nrrd/nii)',
                            mandatory=True)

    labelmap = cli.SwitchAttr(
        ['-l', '--labelmap'],
        cli.ExistingFile,
        help='structural labelmap, usually a mask (nrrd/nii)',
        mandatory=True)

    target = cli.SwitchAttr(['-t', '--target'],
                            cli.ExistingFile,
                            help='target image (nrrd/nii)',
                            mandatory=True)

    out = cli.SwitchAttr(['-o', '--output'],
                         help='output labelmap (nrrd/nii)',
                         mandatory=True)

    reg_method = cli.SwitchAttr(['--reg'],
                                cli.Set('rigid', 'SyN', case_sensitive=False),
                                help='ANTs registration method: rigid or SyN',
                                default='rigid')

    def main(self):
        with TemporaryDirectory() as tmpdir:
            tmpdir = local.path(tmpdir)
            pre = tmpdir / 'ants'

            warp = pre + '1Warp.nii.gz'
            affine = pre + '0GenericAffine.mat'

            check_call((' ').join([
                pjoin(FILEDIR,
                      'antsRegistrationSyNMI.sh'), '-f', self.target, '-m',
                self.infile, '-t r' if self.reg_method == 'rigid' else '',
                '-o', pre, '-n', ANTSREG_THREADS
            ]),
                       shell=True)

            xfrms = f'-t {warp} -t {affine}' if self.reg_method == 'SyN' else f'-t {affine}'

            antsApplyTransforms['-d', '3', '-i', self.labelmap,
                                xfrms.split(), '-r', self.target, '-o',
                                self.out, '--interpolation',
                                'NearestNeighbor'] & FG
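
Note that the transform arguments are assembled as one string and then split back into argv tokens before being passed to antsApplyTransforms; a quick sketch with placeholder paths:

warp, affine = "ants1Warp.nii.gz", "ants0GenericAffine.mat"  # placeholder names
xfrms = f"-t {warp} -t {affine}"
print(xfrms.split())  # ['-t', 'ants1Warp.nii.gz', '-t', 'ants0GenericAffine.mat']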
Example #20
class Assignments(cli.Application):
    DESCRIPTION = '''Makes assignments for pandoc. Needs pandoc in the path. \
Processes all *.{0} if no file given.'''.format(INPUTEXT)
    answer = cli.Flag(['-a', '--answer'], help="Produce an answer version")

    _prepend = None

    @cli.switch(['-p', '--prepend'], cli.ExistingFile)
    def prepend(self, filename):
        'File to prepend to each assignment'
        self._prepend = filename

    output = cli.SwitchAttr(['-o', '--output'],
                            cli.Set('pdf', 'docx', 'html', 'odt'),
                            list=True,
                            help='Sets the output format...')

    def main(self, *files):
        if not files:
            print("Searching", local.cwd, "for files.")
        if self._prepend is None:
            try:
                self._prepend = cli.ExistingFile('prepend.rst')
            except ValueError:
                pass

        items = list(map(cli.ExistingFile,
                         files)) if files else local.cwd // ('*.' + INPUTEXT)
        try:
            items.remove(self._prepend)
        except ValueError:
            pass

        for item in items:
            if self.output:
                for output in self.output:
                    print(item.basename + '...', end=' ')
                    process(item, output, self.answer, self._prepend)
                    print('\b\b\b\b -> {1}{0} done.'.format(
                        output, 'answers ' if self.answer else ''))

            else:
                for output in IMG_FORMATS:
                    print(item.basename + '...', end=' ')
                    process(item, output, False, self._prepend)
                    print('\b\b\b\b -> {0} done.'.format(output))
                print(item.basename + '...', end=' ')
                process(item, 'pdf', True, self._prepend)
                print('\b\b\b\b -> answers pdf done.')
Example #21
 def main(self, cmd=cli.Set("info", "trim")):
     (_code, _stdout, _stderr) = self.info()
     silences = []
     for l in _stderr.split('\n'):
         if 'silence_end' not in l:
             continue
         # sample line: [silencedetect @ 0x7fffe351b460] silence_end: 51.2464 | silence_duration: 2.06966
         l2 = l.strip().split('silence_end: ')[1]
         silence_end, rem = l2.split(' ', maxsplit=1)
         silence_duration = rem.split('silence_duration: ')[1]
         end = float(silence_end)
         duration = float(silence_duration)
         start = end - duration
         silences.append((start, duration, end))
     self.trim_silences(silences)
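
The string slicing above can be checked against the sample line quoted in the comment; a self-contained sketch:

line = "[silencedetect @ 0x7fffe351b460] silence_end: 51.2464 | silence_duration: 2.06966"
l2 = line.strip().split('silence_end: ')[1]
silence_end, rem = l2.split(' ', maxsplit=1)
silence_duration = rem.split('silence_duration: ')[1]
end, duration = float(silence_end), float(silence_duration)
print(end - duration, duration, end)  # start, duration, end (start is roughly 49.1767)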
Example #22
class AtlasCsv(cli.Application):
    """Makes atlas image/labelmap pairs for a target image.
    Option to merge labelmaps via averaging or AntsJointFusion.
    Specify training images and labelmaps via a csv file.
    Put the images with any header in the first column, 
    and labelmaps with proper headers in the consecutive columns. 
    The headers in the labelmap columns will be used to name the generated atlas labelmaps.
    """

    target = cli.SwitchAttr(
        ['-t', '--target'],
        cli.ExistingFile,
        help='target image',
        mandatory=True)
    fusions = cli.SwitchAttr(
        ['--fusion'],
        cli.Set("avg", "wavg", "antsJointFusion", case_sensitive=False),
        help='Also create predicted labelmap(s) by combining the atlas labelmaps: '
             'avg is naive mathematical average, wavg is weighted average where weights are computed from MI '
             'between the warped atlases and target image, antsJointFusion is local weighted averaging', default='wavg')
    out = cli.SwitchAttr(
        ['-o', '--outPrefix'],
        help='output prefix, output labelmaps are saved as outPrefix_mask.nii.gz, outPrefix_cingr.nii.gz, ...',
        mandatory=True)
    threads = cli.SwitchAttr(['-n', '--nproc'],
                             help='number of processes/threads to use (-1 for all available)',
                             default=N_PROC)
    debug = cli.Flag('-d', help='Debug mode, saves intermediate labelmaps to atlas-debug-<pid> in output directory')
    csvFile = cli.SwitchAttr(['--train'],
        help='--train t1; --train t2; --train trainingImages.csv; '
        'see pnlNipype/docs/TUTORIAL.md to know what each value means')

    # @cli.positional(cli.ExistingFile)
    def main(self):

        if self.csvFile == 't1' or self.csvFile == 't2':
            PNLPIPE_SOFT = os.getenv('PNLPIPE_SOFT')
            if not PNLPIPE_SOFT:
                raise EnvironmentError('Define the environment variable PNLPIPE_SOFT from where training data could be obtained')

        if self.csvFile == 't1':
            self.csvFile = glob(PNLPIPE_SOFT + '/trainingDataT1AHCC-*/trainingDataT1Masks-hdr.csv')[0]
        elif self.csvFile == 't2':
            self.csvFile = glob(PNLPIPE_SOFT + '/trainingDataT2Masks-*/trainingDataT2Masks-hdr.csv')[0]
        
        trainingTable = pd.read_csv(self.csvFile)
        makeAtlases(self.target, trainingTable, self.out, self.fusions, int(self.threads), self.debug)
        logging.info('Made ' + self.out + '_*.nii.gz')
Example #23
class WalTLogWait(WalTLogShowOrWait):
    """Wait for a given log line"""
    mode = cli.SwitchAttr(
        "--mode",
        cli.Set("ALL", "ANY", case_sensitive=False),
        argname='MODE',
        default='ANY',
        help="""specify mode (see walt help show log-wait)""")
    time_margin = cli.SwitchAttr(
        "--time-margin",
        int,
        argname='SECONDS',
        default=0,
        help="""also look in recent past logs if they matched""")

    def main(self, logline_regexp):
        if not WalTLogShowOrWait.verify_regexps(self.streams, logline_regexp):
            return
        with ClientToServerLink() as server:
            senders = server.parse_set_of_nodes(self.set_of_nodes)
            if senders is None:
                return
            if self.time_margin != 0:
                history_range = '-%ds:' % self.time_margin
                range_analysis = WalTLogShowOrWait.analyse_history_range(
                    server, history_range)
                history_range = range_analysis[1]
            else:
                history_range = None
        if self.mode == 'ANY':
            # as soon as a logline matches, we stop
            def stop_test(**record):
                return True
        else:
            # we stop when all nodes have emitted a matching logline
            missing_senders = set(senders)

            def stop_test(**record):
                missing_senders.discard(record['sender'])
                if len(missing_senders) == 0:
                    return True  # yes, we should stop
                else:
                    return False  # no, we are not done yet

        WalTLogShowOrWait.start_streaming(self.format_string, history_range,
                                          True, senders, self.streams,
                                          logline_regexp, stop_test)
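
The ALL-mode stop condition above is just a shrinking set of senders; a standalone sketch with made-up sender names:

missing_senders = {"node1", "node2"}

def stop_test(**record):
    missing_senders.discard(record['sender'])
    return len(missing_senders) == 0

print(stop_test(sender="node1"))  # False, node2 has not matched yet
print(stop_test(sender="node2"))  # True, all senders have matched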
Example #24
class TodosAdd(ApplicationWithApi):
    DESCRIPTION = _("Add a todo <todo>")  # noqa: Q000
    priority = cli.SwitchAttr(
        ['-p', '--priority'],
        cli.Set('0.1', '1', '1.5', '2'), default='1',
        help=_("Priority (complexity) of a todo"))  # noqa: Q000

    def main(self, *todo: str):
        todo_str = ' '.join(todo)
        if not todo_str:
            self.log.error(_("Empty todo text!"))  # noqa: Q000
            return 1
        super().main()
        self.api.tasks.user.post(type='todo', text=todo_str, priority=self.priority)
        res = _("Added todo '{}' with priority {}").format(todo_str, self.priority)  # noqa: Q000
        print(prettify(res))
        ToDos.invoke(config_filename=self.config_filename)
        return 0
Example #25
class AllocationCLI(cli.Application):
    #method = cli.SwitchAttr(["-m", "--method"], argtype=cli.Set(*methods), argname="method", help="Method of solution.", default="sort")
    method = cli.SwitchAttr(["-m", "--method"],
                            argtype=cli.Set(*methods),
                            argname="method",
                            help="Method of solution.",
                            default="z3")

    #def main(self, input=testsDir / "47,31->51,22,5.tsv"):
    #def main(self, input=testsDir / "wood1.tsv"):
    #def main(self, input=testsDir / "wood.tsv"):
    def main(self, input=testsDir / "toy.tsv"):
        task, parts = readFromTSV(Path(input))
        partsColors = generateColors(parts)

        for solution in methods[self.method](task, parts):
            print("\n")
            visualizeAll(solution, parts, partsColors)
Example #26
class GetNewsCLI(p_cli.ProsperApplication):
    PROGNAME = PROGNAME
    VERSION = _version.__version__

    config_path = path.join(HERE, 'app.cfg')

    ticker_list = cli.SwitchAttr(
        ['--tickers'],
        str,
        help='Stock ticker list to fetch news on',
    )

    @cli.switch(
        ['--ticker-csv'],
        cli.ExistingFile,
        help='Stock ticker list from .csv file',
    )
    def ticker_csv(self, ticker_csv_path):
        """load and parse csv file into self.ticker_list"""
        csv_df = pd.read_csv(ticker_csv_path)
        self.ticker_list = ','.join(csv_df['ticker'].tolist())

    source = cli.SwitchAttr(
        ['s', '--source'],
        cli.Set('intrinio', 'robinhood', 'yahoo'),
        help='Where to fetch news from',
    )

    def main(self):
        """launcher logic"""
        self.logger.info('hello world')
        intrinio_auth = IntrinioAuth(
            self.config.get_option(self.PROGNAME, 'intrinio_username'),
            self.config.get_option(self.PROGNAME, 'intrinio_password'),
        )

        news_df = fetch_news(
            self.ticker_list.split(','),
            self.source,
            intrinio_auth,
            logger=self.logger,
        )
Example #27
class AtlasCsv(cli.Application):
    """Specify training images and labelmaps via a csv file.  The names in the header row will be used to name the generated atlas labelmaps."""

    target = cli.SwitchAttr(
        ['-t', '--target'],
        cli.ExistingFile,
        help='target image',
        mandatory=True)
    fusions = cli.SwitchAttr(
        '--fusion',
        cli.Set("avg", "antsJointFusion", case_sensitive=False),
        list=True,
        help='Also create predicted labelmap(s) by averaging the atlas labelmaps')
    out = cli.SwitchAttr(
        ['-o', '--out'], help='output directory', mandatory=True)

    @cli.positional(cli.ExistingFile)
    def main(self, csv):
        trainingTable = pd.read_csv(csv)
        makeAtlases(self.target, trainingTable, self.out, self.fusions)
        logging.info('Made ' + self.out)
Example #28
class ModernizeGooFit(cli.Application):
    set_version = cli.SwitchAttr(
        ["-v", "--version"],
        cli.Set(*conversion),
        default="2.0",
        help="The version to convert",
    )
    source = cli.Flag(["--source"], help="Run on the GooFit Source")

    @cli.positional(cli.ExistingFile)
    def main(self, *src):
        if not src:
            assert (
                self.source
            ), "You must use the --source flag to run over GooFit's source"
            git = local["git"]
            with local.cwd(DIR / "../"):
                src = [
                    local.path(n)
                    for n in git("ls-files", "--", "*.cpp", "*.h", "*.cu").splitlines()
                ]
        fix_files(src, self.set_version)
Example #29
class ClassicServer(cli.Application):
    mode = cli.SwitchAttr(["-m", "--mode"], cli.Set("threaded", "forking", "stdio", "oneshot"),
                          default="threaded", help="The serving mode (threaded, forking, or 'stdio' for "
                          "inetd, etc.)")

    port = cli.SwitchAttr(["-p", "--port"], cli.Range(0, 65535), default=None,
                          help="The TCP listener port ("
                               f"default = {DEFAULT_SERVER_PORT!r}, "
                               f"default for SSL = {DEFAULT_SERVER_SSL_PORT!r})",
                          group="Socket Options")
    host = cli.SwitchAttr(["--host"], str, default="", help="The host to bind to. "
                          "The default is localhost", group="Socket Options")
    ipv6 = cli.Flag(["--ipv6"], help="Enable IPv6", group="Socket Options")

    logfile = cli.SwitchAttr("--logfile", str, default=None, help="Specify the log file to use; "
                             "the default is stderr", group="Logging")
    quiet = cli.Flag(["-q", "--quiet"], help="Quiet mode (only errors will be logged)",
                     group="Logging")

    ssl_keyfile = cli.SwitchAttr("--ssl-keyfile", cli.ExistingFile,
                                 help="The keyfile to use for SSL. Required for SSL", group="SSL",
                                 requires=["--ssl-certfile"])
    ssl_certfile = cli.SwitchAttr("--ssl-certfile", cli.ExistingFile,
                                  help="The certificate file to use for SSL. Required for SSL", group="SSL",
                                  requires=["--ssl-keyfile"])
    ssl_cafile = cli.SwitchAttr("--ssl-cafile", cli.ExistingFile,
                                help="The certificate authority chain file to use for SSL. "
                                "Optional; enables client-side authentication",
                                group="SSL", requires=["--ssl-keyfile"])

    auto_register = cli.Flag("--register", help="Asks the server to attempt registering with "
                             "a registry server. By default, the server will not attempt to register",
                             group="Registry")
    registry_type = cli.SwitchAttr("--registry-type", cli.Set("UDP", "TCP"),
                                   default="UDP", help="Specify a UDP or TCP registry", group="Registry")
    registry_port = cli.SwitchAttr("--registry-port", cli.Range(0, 65535), default=REGISTRY_PORT,
                                   help="The registry's UDP/TCP port", group="Registry")
    registry_host = cli.SwitchAttr("--registry-host", str, default=None,
                                   help="The registry host machine. For UDP, the default is 255.255.255.255; "
                                   "for TCP, a value is required", group="Registry")

    def main(self):
        if not self.host:
            self.host = "::1" if self.ipv6 else "127.0.0.1"

        if self.registry_type == "UDP":
            if self.registry_host is None:
                self.registry_host = "255.255.255.255"
            self.registrar = UDPRegistryClient(ip=self.registry_host, port=self.registry_port)
        else:
            if self.registry_host is None:
                raise ValueError("With TCP registry, you must specify --registry-host")
            self.registrar = TCPRegistryClient(ip=self.registry_host, port=self.registry_port)

        if self.ssl_keyfile:
            self.authenticator = SSLAuthenticator(self.ssl_keyfile, self.ssl_certfile,
                                                  self.ssl_cafile)
            default_port = DEFAULT_SERVER_SSL_PORT
        else:
            self.authenticator = None
            default_port = DEFAULT_SERVER_PORT
        if self.port is None:
            self.port = default_port

        setup_logger(self.quiet, self.logfile)

        if self.mode == "threaded":
            self._serve_mode(ThreadedServer)
        elif self.mode == "forking":
            self._serve_mode(ForkingServer)
        elif self.mode == "oneshot":
            self._serve_oneshot()
        elif self.mode == "stdio":
            self._serve_stdio()

    def _serve_mode(self, factory):
        t = factory(SlaveService, hostname=self.host, port=self.port,
                    reuse_addr=True, ipv6=self.ipv6, authenticator=self.authenticator,
                    registrar=self.registrar, auto_register=self.auto_register)
        t.start()

    def _serve_oneshot(self):
        t = OneShotServer(SlaveService, hostname=self.host, port=self.port,
                          reuse_addr=True, ipv6=self.ipv6, authenticator=self.authenticator,
                          registrar=self.registrar, auto_register=self.auto_register)
        t._listen()
        sys.stdout.write("rpyc-oneshot\n")
        sys.stdout.write(f"{t.host}\t{t.port}\n")
        sys.stdout.flush()
        t.start()

    def _serve_stdio(self):
        origstdin = sys.stdin
        origstdout = sys.stdout
        sys.stdin = open(os.devnull, "r")
        sys.stdout = open(os.devnull, "w")
        sys.stderr = open(os.devnull, "w")
        conn = rpyc.classic.connect_pipes(origstdin, origstdout)
        try:
            try:
                conn.serve_all()
            except KeyboardInterrupt:
                print("User interrupt!")
        finally:
            conn.close()
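
The port-defaulting logic in main() reduces to a small helper; a sketch where the two constants stand in for rpyc's DEFAULT_SERVER_PORT and DEFAULT_SERVER_SSL_PORT (the numeric values are assumptions here, not taken from the snippet):

DEFAULT_SERVER_PORT, DEFAULT_SERVER_SSL_PORT = 18812, 18821  # assumed values

def effective_port(port, ssl_enabled):
    # An explicit --port always wins; otherwise pick the plain or SSL default.
    if port is not None:
        return port
    return DEFAULT_SERVER_SSL_PORT if ssl_enabled else DEFAULT_SERVER_PORT

print(effective_port(None, False))  # 18812
print(effective_port(None, True))   # 18821
print(effective_port(9999, True))   # 9999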
Example #30
class ManagerGenerate(Manager):
    DESCRIPTION = "Generate Dockerfiles from templates."

    cuda = {}
    output_path = {}
    dist_base_path = ""
    key = ""

    template_env = Environment(extensions=["jinja2.ext.do"],
                               trim_blocks=True,
                               lstrip_blocks=True)

    generate_all = cli.Flag(
        ["--all"],
        excludes=["--os", "--os-version", "--cuda-version"],
        help="Generate all of the templates.",
    )

    generate_ci = cli.Flag(
        ["--ci"],
        help="Generate the gitlab pipelines only.",
    )

    distro = cli.SwitchAttr(
        "--os",
        str,
        group="Targeted",
        requires=["--os-version", "--cuda-version"],
        excludes=["--all"],
        help="The distro to use.",
        default=None,
    )

    distro_version = cli.SwitchAttr(
        "--os-version",
        str,
        group="Targeted",
        requires=["--os", "--cuda-version"],
        excludes=["--all"],
        help="The distro version",
        default=None,
    )

    cuda_version = cli.SwitchAttr(
        "--cuda-version",
        str,
        excludes=["--all"],
        group="Targeted",
        requires=["--os", "--os-version"],
        help="The cuda version to use. Example: '10.1'",
        default=None,
    )

    arch = cli.SwitchAttr(
        "--arch",
        cli.Set("x86_64", "ppc64le", "arm64", case_sensitive=False),
        excludes=["--all"],
        group="Targeted",
        requires=["--os", "--os-version", "--cuda-version"],
        help="Generate container scripts for a particular architecture.",
    )

    pipeline_name = cli.SwitchAttr(
        "--pipeline-name",
        str,
        excludes=["--all"],
        group="Targeted",
        requires=["--os", "--os-version", "--cuda-version"],
        help="Use a pipeline name for manifest matching.",
        default="default",
    )

    def supported_distro_list_by_cuda_version(self, version):
        if not version:
            return
        distros = ["ubuntu", "ubi", "centos"]
        keys = self.parent.manifest[self.key].keys()

        # The cuda field contains keys other than distros; we need to strip those out
        def get_distro_name(name):
            r = re.compile("[a-zA-Z]+")
            return r.findall(name)[0]

        return [f for f in keys if get_distro_name(f) in distros]

    def supported_arch_list(self):
        ls = []
        for k in glom.glom(
                self.parent.manifest,
                glom.Path(self.key, f"{self.distro}{self.distro_version}"),
        ):
            if k in ["x86_64", "ppc64le", "arm64"]:
                ls.append(k)
        return ls

    def cudnn_versions(self):
        obj = []
        for k, v in self.cuda["components"].items():
            if k.startswith("cudnn") and v:
                obj.append(k)
        return obj

    # extracts arbitrary keys and inserts them into the templating context
    def extract_keys(self, val):
        rgx = re.compile(r"^v\d+\.\d")
        for k, v in val.items():
            if rgx.match(k):
                # Do not copy cuda version keys
                continue
            # These top level keys should be ignored since they are processed elsewhere
            if k in [
                    "exclude_repos",
                    "build_version",
                    "components",
                    *self.supported_arch_list(),
                    *self.supported_distro_list_by_cuda_version(
                        self.cuda_version),
            ]:
                continue
            self.cuda[k] = v

    # For cudnn templates, we need a custom template context
    def output_cudnn_template(self, cudnn_version_name, template_path,
                              output_path):
        cudnn_manifest = self.cuda["components"][cudnn_version_name]
        if "source" in cudnn_manifest:
            cudnn_manifest["basename"] = os.path.basename(
                cudnn_manifest["source"])
            cudnn_manifest["dev"]["basename"] = os.path.basename(
                cudnn_manifest["dev"]["source"])

        new_ctx = {
            "cudnn": self.cuda["components"][cudnn_version_name],
            "arch": self.arch,
            "version": self.cuda["version"],
            "image_tag_suffix": self.cuda["image_tag_suffix"],
            "os": self.cuda["os"],
        }
        log.debug("cudnn template context: %s", new_ctx)
        self.output_template(template_path=template_path,
                             output_path=output_path,
                             ctx=new_ctx)

    def output_template(self, template_path, output_path, ctx=None):
        ctx = ctx if ctx is not None else self.cuda
        with open(template_path) as f:
            log.debug("Processing template %s", template_path)
            new_output_path = pathlib.Path(output_path)
            new_filename = template_path.name[:-6]
            template = self.template_env.from_string(f.read())
            if not new_output_path.exists():
                log.debug(f"Creating {new_output_path}")
                new_output_path.mkdir(parents=True)
            log.info(f"Writing {new_output_path}/{new_filename}")
            with open(f"{new_output_path}/{new_filename}", "w") as f2:
                f2.write(template.render(cuda=ctx))

    def prepare_context(self):
        # checks the cudnn components and ensures at least one is installed from the public "machine-learning" repo
        def use_ml_repo():
            use_ml_repo = False
            # First check the manifest to see if a ml repo url is specified
            if not self.get_data(
                    self.parent.manifest,
                    self.key,
                    f"{self.distro}{self.distro_version}",
                    self.arch,
                    "ml_repo_url",
                    can_skip=True,
            ):
                return use_ml_repo
            # if a cudnn component contains "source", then it is installed from a different source than the public machine
            # learning repo
            # If any of the cudnn components lack the source key, then the ML repo should be used
            for comp, val in self.cuda["components"].items():
                if next(
                    (True for mlcomp in ["cudnn", "nccl"] if mlcomp in comp),
                        False):
                    if val and "source" not in val:
                        use_ml_repo = True
            return use_ml_repo

        conf = self.parent.manifest
        major = self.cuda_version.split(".")[0]
        minor = self.cuda_version.split(".")[1]

        build_version = self.get_data(
            conf,
            self.key,
            f"{self.distro}{self.distro_version}",
            self.arch,
            "components",
            "build_version",
        )

        self.image_tag_suffix = self.get_data(
            conf,
            self.key,
            f"{self.distro}{self.distro_version}",
            "image_tag_suffix",
            can_skip=True,
        )
        if not self.image_tag_suffix:
            self.image_tag_suffix = ""

        # The templating context. This data structure is used to fill the templates.
        self.cuda = {
            "use_ml_repo":
            False,
            "version": {
                "full": f"{self.cuda_version}.{build_version}",
                "major": major,
                "minor": minor,
                "major_minor": self.cuda_version,
                "build": build_version,
            },
            "os": {
                "distro": self.distro,
                "version": self.distro_version
            },
            "arch":
            self.arch,
            "image_tag_suffix":
            self.image_tag_suffix,
            "components":
            self.get_data(
                conf,
                self.key,
                f"{self.distro}{self.distro_version}",
                self.arch,
                "components",
            ),
        }
        self.cuda["use_ml_repo"] = use_ml_repo()

        # Users of manifest.yaml are allowed to set arbitrary keys for inclusion in the templates
        # and the discovered keys are injected into the template context.
        # We only check three levels of the manifest
        self.extract_keys(
            self.get_data(
                conf,
                self.key,
                f"{self.distro}{self.distro_version}",
            ))
        self.extract_keys(
            self.get_data(
                conf,
                self.key,
                f"{self.distro}{self.distro_version}",
                self.arch,
            ))
        log.info("cuda version %s" % (self.cuda_version))
        log.debug("template context %s" % (self.cuda))
        #  sys.exit(1)

    def generate_cudnn_scripts(self, base_image, template_path):
        for pkg in self.cudnn_versions():
            self.cuda["components"][pkg]["target"] = base_image
            self.output_cudnn_template(
                cudnn_version_name=pkg,
                template_path=pathlib.Path(
                    f"{template_path}/cudnn/Dockerfile.jinja"),
                output_path=pathlib.Path(
                    f"{self.output_path}/{base_image}/{pkg}"),
            )

    # CUDA 8 uses a deprecated image layout
    def generate_containerscripts_cuda_8(self):
        for img in ["devel", "runtime"]:
            base = img
            if img == "runtime":
                # for CUDA 8, runtime == base
                base = "base"
            # cuda image
            temp_path = self.cuda["template_path"]
            log.debug("temp_path: %s, output_path: %s", temp_path,
                      self.output_path)
            self.output_template(
                template_path=pathlib.Path(
                    f"{temp_path}/{base}/Dockerfile.jinja"),
                output_path=pathlib.Path(f"{self.output_path}/{img}"),
            )
            # We need files in the base directory
            for filename in pathlib.Path(f"{temp_path}/{base}").glob("*"):
                if "Dockerfile" in filename.name:
                    continue
                log.debug("Checking %s", filename)
                if ".jinja" in filename.name:
                    self.output_template(filename, f"{self.output_path}/{img}")
                else:
                    log.info(f"Copying {filename} to {self.output_path}/{img}")
                    shutil.copy(filename, f"{self.output_path}/{img}")
            # cudnn image
            self.generate_cudnn_scripts(img, temp_path)

    def generate_containerscripts(self):
        for img in ["base", "devel", "runtime"]:
            # cuda image
            temp_path = self.cuda["template_path"]
            self.cuda["target"] = img
            log.debug("temp_path: %s, output_path: %s", temp_path,
                      self.output_path)
            self.output_template(
                template_path=pathlib.Path(
                    f"{temp_path}/{img}/Dockerfile.jinja"),
                output_path=pathlib.Path(f"{self.output_path}/{img}"),
            )
            # copy files
            for filename in pathlib.Path(f"{temp_path}/{img}").glob("*"):
                if "Dockerfile" in filename.name:
                    continue
                log.debug("Checking %s", filename)
                if not self.cuda["use_ml_repo"] and "nvidia-ml" in str(
                        filename):
                    continue
                if ".jinja" in filename.name:
                    self.output_template(filename, f"{self.output_path}/{img}")
                else:
                    log.info(f"Copying {filename} to {self.output_path}/{img}")
                    shutil.copy(filename, f"{self.output_path}/{img}")
            # cudnn image
            if "base" not in img:
                self.generate_cudnn_scripts(img, temp_path)

    # FIXME: Probably a much nicer way to do this with GLOM...
    # FIXME: Turn off black auto format for this function...
    # fmt: off
    def generate_gitlab_pipelines(self):

        manifest = self.parent.manifest
        ctx = {"manifest_path": self.parent.manifest_path}

        def get_cudnn_components(key, distro, arch):
            comps = {}
            for comp, val in manifest[key][distro][arch]["components"].items():
                if "cudnn" in comp and val:
                    #  print(comp, val)
                    comps[comp] = {}
                    comps[comp]["version"] = val["version"]
            return comps

        def matched(key):
            match = rgx.match(key)
            if match:
                return match

        for k, _ in manifest.items():
            rgx = re.compile(r"cuda_v([\d\.]+)(?:_(\w+))?$")
            if (match := matched(k)) is None:
                log.debug("No match for %s" % k)
                continue

            log.info("Adding pipeline '%s'" % k)
            cuda_version = match.group(1)
            if (pipeline_name := match.group(2)) is None:
                pipeline_name = "default"
            log.debug("matched cuda_version: %s" % cuda_version)
            log.debug("matched pipeline_name: %s" % pipeline_name)

            if cuda_version not in ctx:
                ctx[cuda_version] = {}
            ctx[cuda_version][pipeline_name] = {}
            ctx[cuda_version][pipeline_name][
                "yaml_safe"] = cuda_version.replace(".", "_")

            key = f"cuda_v{cuda_version}"
            if pipeline_name and pipeline_name != "default":
                key = f"cuda_v{cuda_version}_{pipeline_name}"

            #  log.debug("key: '%s'" % key)
            #  log.debug("cuda_version: '%s'" % cuda_version)
            ctx[cuda_version][pipeline_name]["dist_base_path"] = self.get_data(
                manifest, key, "dist_base_path")
            ctx[cuda_version][pipeline_name][
                "pipeline_name"] = self.pipeline_name

            for distro, _ in manifest[key].items():
                dmrgx = re.compile(r"(?P<name>[a-zA-Z]+)(?P<version>[\d\.]+)$")
                if (dm := dmrgx.match(distro)) is None:
                    continue

                #  log.debug("distro: '%s'" % distro)
                #  log.debug("pipeline_name: '%s'" % pipeline_name)
                ctx[cuda_version][pipeline_name][distro] = {}
                ctx[cuda_version][pipeline_name][distro]["name"] = dm.group(
                    'name')
                ctx[cuda_version][pipeline_name][distro]["version"] = dm.group(
                    'version')
                ctx[cuda_version][pipeline_name][distro][
                    "yaml_safe"] = distro.replace(".", "_")
                image_tag_suffix = self.get_data(manifest,
                                                 key,
                                                 distro,
                                                 "image_tag_suffix",
                                                 can_skip=True)
                ctx[cuda_version][pipeline_name][distro][
                    "image_tag_suffix"] = ""
                ctx[cuda_version][pipeline_name][distro]["image_name"] = {}

                if image_tag_suffix:
                    ctx[cuda_version][pipeline_name][distro][
                        "image_tag_suffix"] = image_tag_suffix

                ctx[cuda_version][pipeline_name][distro]["arches"] = []

                for arch, _ in manifest[key][distro].items():
                    if arch not in ["arm64", "ppc64le", "x86_64"]:
                        continue

                    #  log.debug("arch: '%s'" % arch)
                    no_os_suffix = self.get_data(manifest,
                                                 key,
                                                 distro,
                                                 arch,
                                                 "no_os_suffix",
                                                 can_skip=True)
                    latest = self.get_data(manifest,
                                           key,
                                           distro,
                                           arch,
                                           "latest",
                                           can_skip=True)
                    ctx[cuda_version][pipeline_name][distro]["image_name"][
                        arch] = self.get_data(manifest, key, distro, arch,
                                              "image_name")

                    if "latest" not in ctx[cuda_version][pipeline_name][
                            distro]:
                        ctx[cuda_version][pipeline_name][distro]["latest"] = {}

                    ctx[cuda_version][pipeline_name][distro]["latest"][
                        arch] = (True if latest else False)

                    if "no_os_suffix" not in ctx[cuda_version][pipeline_name][
                            distro]:
                        ctx[cuda_version][pipeline_name][distro][
                            "no_os_suffix"] = {}

                    ctx[cuda_version][pipeline_name][distro]["no_os_suffix"][
                        arch] = (True if no_os_suffix else False)
                    ctx[cuda_version][pipeline_name][distro]["arches"].append(
                        arch)

                    if "cudnn" not in ctx[cuda_version][pipeline_name][distro]:
                        ctx[cuda_version][pipeline_name][distro]["cudnn"] = {}

                    ctx[cuda_version][pipeline_name][distro]["cudnn"][
                        arch] = get_cudnn_components(key, distro, arch)