Example No. 1
def fixture_provide_flow(tmp_path_factory):
    """Provide a cylc workflow based on the contents of a folder which can
    be either validated or installed.
    """
    src_flow_name = '11_reinstall_clean'
    workflow_src = Path(__file__).parent / src_flow_name
    test_flow_name = f'cylc-rose-test-{str(uuid4())[:8]}'
    srcpath = (tmp_path_factory.getbasetemp() / test_flow_name)
    flowpath = Path(get_workflow_run_dir(test_flow_name))
    shutil.copytree(workflow_src, srcpath)
    (srcpath / 'opt').mkdir(exist_ok=True)
    for opt in ['foo', 'bar', 'baz']:
        (srcpath / f'opt/rose-suite-{opt}.conf').touch()
    yield {
        'test_flow_name': test_flow_name,
        'flowpath': flowpath,
        'srcpath': srcpath
    }
    shutil.rmtree(srcpath)
    shutil.rmtree(flowpath)
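
A minimal sketch of how a test might consume this fixture, assuming it is registered with pytest; the test body is illustrative and mirrors the cylc install invocation used elsewhere on this page:

import subprocess


def test_install_provided_flow(fixture_provide_flow):
    # Illustrative only: install the provided source flow and check that
    # the run directory appears where the fixture expects it.
    flow = fixture_provide_flow
    result = subprocess.run([
        'cylc', 'install', '-C', str(flow['srcpath']),
        '--flow-name', flow['test_flow_name'], '--no-run-name'
    ], capture_output=True)
    assert result.returncode == 0
    assert flow['flowpath'].exists()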
Example No. 2
    def file_install(self, platform):
        """Install required files on the remote install target.

        Included by default in the file installation:
            Files:
                .service/server.key  (required for ZMQ authentication)
            Directories:
                app/
                bin/
                etc/
                lib/
        """
        install_target = platform['install target']
        self.remote_init_map[install_target] = REMOTE_FILE_INSTALL_IN_PROGRESS
        src_path = get_workflow_run_dir(self.workflow)
        dst_path = get_remote_workflow_run_dir(self.workflow)
        try:
            cmd, host = construct_rsync_over_ssh_cmd(src_path,
                                                     dst_path,
                                                     platform,
                                                     self.rsync_includes,
                                                     bad_hosts=self.bad_hosts)
            ctx = SubProcContext('file-install', cmd, host)
        except NoHostsError:
            LOG.error(
                TaskRemoteMgmtError(TaskRemoteMgmtError.MSG_INIT,
                                    install_target, '', '', '', ''))
            self.remote_init_map[install_target] = REMOTE_FILE_INSTALL_FAILED
            self.bad_hosts -= set(platform['hosts'])
            self.ready = True
        else:
            LOG.debug(f"Begin file installation on {install_target}")
            self.proc_pool.put_command(
                ctx,
                bad_hosts=self.bad_hosts,
                callback=self._file_install_callback,
                callback_args=[install_target, platform],
                callback_255=self._file_install_callback_255,
            )
Example No. 3
def test_cylc_reinstall_fail_on_clashing_template_vars(tmp_path):
    """If you re-install with a different templating engine in suite.rc
    reinstall should fail.
    """
    (tmp_path / 'rose-suite.conf').write_text(
        '[jinja2:suite.rc]\n'
        'Primrose=\'Primula Vulgaris\'\n')
    (tmp_path / 'flow.cylc').touch()
    test_flow_name = f'cylc-rose-test-{str(uuid4())[:8]}'
    install = subprocess.run([
        'cylc', 'install', '-C',
        str(tmp_path), '--flow-name', test_flow_name, '--no-run-name'
    ])
    assert install.returncode == 0
    (tmp_path / 'rose-suite.conf').write_text(
        '[empy:suite.rc]\n'
        'Primrose=\'Primula Vulgaris\'\n')
    reinstall = subprocess.run(['cylc', 'reinstall', test_flow_name],
                               capture_output=True)
    assert reinstall.returncode != 0
    assert ('You should not define more than one templating section'
            in reinstall.stderr.decode())
    # Clean up run dir:
    shutil.rmtree(get_workflow_run_dir(test_flow_name))
Example No. 4
    def file_install(self, platform):
        """Install required files on the remote install target.

        Included by default in the file installation:
            Files:
                .service/server.key  (required for ZMQ authentication)
            Directories:
                app/
                bin/
                etc/
                lib/
        """
        install_target = platform['install target']
        self.remote_init_map[install_target] = REMOTE_FILE_INSTALL_IN_PROGRESS
        src_path = get_workflow_run_dir(self.workflow)
        dst_path = get_remote_workflow_run_dir(self.workflow)
        ctx = SubProcContext(
            'file-install',
            construct_rsync_over_ssh_cmd(src_path, dst_path, platform,
                                         self.rsync_includes))
        LOG.debug(f"Begin file installation on {install_target}")
        self.proc_pool.put_command(ctx, self._file_install_callback,
                                   [install_target])
Example No. 5
def validate_source_dir(source, flow_name):
    """Ensure the source directory is valid.

    Args:
        source (path): Path to the source directory.
        flow_name (str): Name of the workflow (used in error messages).
    Raises:
        WorkflowFilesError:
            If log, share, work or _cylc-install directories exist in the
            source directory, or if the source directory is inside the
            cylc-run directory.
    """
    # Ensure source dir does not contain log, share, work, _cylc-install
    for dir_ in SuiteFiles.RESERVED_DIRNAMES:
        if Path(source, dir_).exists():
            raise WorkflowFilesError(
                f'{flow_name} installation failed. - {dir_} exists in source '
                'directory.')
    cylc_run_dir = Path(get_workflow_run_dir(''))
    if (os.path.abspath(os.path.realpath(cylc_run_dir))
            in os.path.abspath(os.path.realpath(source))):
        raise WorkflowFilesError(
            f'{flow_name} installation failed. Source directory should not be '
            f'in {cylc_run_dir}')
    check_flow_file(source)
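
For illustration, a hedged sketch of exercising this validator directly; the reserved directory name (log) and the flow name used below are placeholders chosen for the example:

from pathlib import Path


def demo_validate_source_dir(tmp_path: Path) -> None:
    # Illustrative only: a 'log' directory inside the source tree should
    # be rejected before the flow file check is reached.
    (tmp_path / 'flow.cylc').touch()
    (tmp_path / 'log').mkdir()
    try:
        validate_source_dir(tmp_path, 'my-flow')
    except WorkflowFilesError as exc:
        print(f'Rejected as expected: {exc}')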
Example No. 6
def workflow_state(workflow,
                   task,
                   point,
                   offset=None,
                   status='succeeded',
                   message=None,
                   cylc_run_dir=None):
    """Connect to a workflow DB and query the requested task state.

    * Reports satisfied only if the remote workflow state has been achieved.
    * Returns all workflow state args to pass on to triggering tasks.

    Arguments:
        workflow (str):
            The workflow to interrogate.
        task (str):
            The name of the task to query.
        point (str):
            The cycle point.
        offset (str):
            The offset between the cycle this xtrigger is used in and the one
            it is querying for as an ISO8601 time duration.
            e.g. PT1H (one hour).
        status (str):
            The task status required for this xtrigger to be satisfied.
        message (str):
            The custom task output required for this xtrigger to be satisfied.
            .. note::

               This cannot be specified in conjunction with ``status``.

        cylc_run_dir (str):
            The directory in which the workflow to interrogate is located.

            .. note::

               This only needs to be supplied if the workflow is running in a
               different location to what is specified in the global
               configuration (usually ``~/cylc-run``).

    Returns:
        tuple: (satisfied, results)

        satisfied (bool):
            True if the requested state has been achieved, else False.
        results (dict):
            Dictionary containing the args / kwargs which were provided
            to this xtrigger.

    """
    if cylc_run_dir:
        cylc_run_dir = expand_path(cylc_run_dir)
    else:
        cylc_run_dir = get_workflow_run_dir('')
    if offset is not None:
        point = str(add_offset(point, offset))
    try:
        checker = CylcWorkflowDBChecker(cylc_run_dir, workflow)
    except (OSError, sqlite3.Error):
        # Failed to connect to DB; target workflow may not be started.
        return (False, None)
    fmt = checker.get_remote_point_format()
    if fmt:
        my_parser = TimePointParser()
        point = str(my_parser.parse(point, dump_format=fmt))
    if message is not None:
        satisfied = checker.task_state_met(task, point, message=message)
    else:
        satisfied = checker.task_state_met(task, point, status=status)
    results = {
        'workflow': workflow,
        'task': task,
        'point': point,
        'offset': offset,
        'status': status,
        'message': message,
        'cylc_run_dir': cylc_run_dir
    }
    return satisfied, results
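
A brief, hedged sketch of calling this xtrigger function directly; the workflow name, task name and cycle point below are placeholders:

# Illustrative call: query an upstream workflow's task state once,
# offsetting the cycle point by one hour.
satisfied, results = workflow_state(
    workflow='upstream-flow',    # placeholder workflow name
    task='post_process',         # placeholder task name
    point='20210101T0000Z',
    offset='PT1H',
    status='succeeded',
)
if satisfied:
    print(results)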
Example No. 7
def setup_stem_repo(tmp_path_factory, monkeymodule):
    """Setup a Rose Stem Repository for the tests.

    Creates the following repo structure:

    .. code::

       |-- baseinstall
       |   `-- trunk
       |       `-- rose-stem
       |-- conf
       |   `-- keyword.cfg
       |-- cylc-rose-stem-test-1df3e028
       |   `-- rose-stem
       |       |-- flow.cylc
       |       `-- rose-suite.conf
       `-- rose-test-battery-stemtest-repo
           `-- foo
               <truncated>

    Yields:
        dictionary:
            basetemp:
                The location of the base temporary directory, which allows
                tests to modify any part of the rose-stem suite.
            workingcopy:
                Path to the location of the working copy.
            suitename:
                The name of the suite, which will be the name of the
                suite/workflow installed in ``~/cylc-run``.
            suite_install_directory:
                The path to the installed suite/workflow. Handy for cleaning
                up after tests.

    """
    # Set up required folders:
    basetemp = tmp_path_factory.getbasetemp()
    baseinstall = basetemp / 'baseinstall'
    rose_stem_dir = baseinstall / 'trunk/rose-stem'
    repo = basetemp / 'rose-test-battery-stemtest-repo'
    confdir = basetemp / 'conf'
    workingcopy = basetemp / f'cylc-rose-stem-test-{str(uuid4())[:8]}'
    for dir_ in [baseinstall, repo, rose_stem_dir, confdir, workingcopy]:
        dir_.mkdir(parents=True)

    # Turn repo into an svn repo:
    subprocess.run(['svnadmin', 'create', f'{repo}/foo'])
    url = f'file://{repo}/foo'

    old = Path().cwd()
    os.chdir(baseinstall)
    subprocess.run(['svn', 'import', '-q', '-m', '""', f'{url}'])
    os.chdir(old)

    # Set Keywords for repository.
    (basetemp /
     'conf/keyword.cfg').write_text(f"location{{primary}}[foo.x]={url}")
    monkeymodule.setenv('FCM_CONF_PATH', str(confdir))
    # Check out a working copy of the repo:
    suitename = workingcopy.parts[-1]
    subprocess.run(split(f'fcm checkout -q fcm:foo.x_tr {workingcopy}'))
    # Copy suite into working copy.
    test_src_dir = Path(__file__).parent / '12_rose_stem'
    for file in ['rose-suite.conf', 'flow.cylc']:
        src = str(test_src_dir / file)
        dest = str(workingcopy / 'rose-stem')
        shutil.copy2(src, dest)
    suite_install_dir = get_workflow_run_dir(suitename)
    yield {
        'basetemp': basetemp,
        'workingcopy': workingcopy,
        'suitename': suitename,
        'suite_install_dir': suite_install_dir
    }
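
A minimal sketch of a test that consumes this fixture; the assertion and the clean-up step are illustrative assumptions:

import shutil
from pathlib import Path


def test_stem_working_copy_exists(setup_stem_repo):
    # Illustrative only: check the suite files were copied into the
    # working copy, then tidy up anything installed into ~/cylc-run.
    repo = setup_stem_repo
    assert (repo['workingcopy'] / 'rose-stem' / 'rose-suite.conf').exists()
    if Path(repo['suite_install_dir']).exists():
        shutil.rmtree(repo['suite_install_dir'])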
Example No. 8
async def scan(run_dir=None, scan_dir=None, max_depth=MAX_SCAN_DEPTH):
    """List flows installed on the filesystem.

    Args:
        run_dir (pathlib.Path):
            The run dir to look for workflows in, defaults to ~/cylc-run.

            All workflow registrations will be given relative to this path.
        scan_dir (pathlib.Path):
            The directory to scan for workflows in.

            Use in combination with run_dir if you want to scan a subdir
            within the run_dir.
        max_depth (int):
            The maximum number of levels to descend before bailing.

            * ``max_depth=1`` will pick up top-level suites (e.g. ``foo``).
            * ``max_depth=2`` will pick up nested suites (e.g. ``foo/bar``).

    Yields:
        dict - Dictionary containing information about the flow.

    """
    if not run_dir:
        run_dir = Path(get_workflow_run_dir(''))
    if not scan_dir:
        scan_dir = run_dir

    running = []

    # wrapper for scandir to preserve context
    async def _scandir(path, depth):
        contents = await scandir(path)
        return path, depth, contents

    # perform the first directory listing
    for subdir in await scandir(scan_dir):
        if subdir.is_dir():
            running.append(
                asyncio.create_task(
                    _scandir(subdir, 1)
                )
            )

    # perform all further directory listings
    while running:
        # wait here until there's something to do
        done, _ = await asyncio.wait(
            running,
            return_when=asyncio.FIRST_COMPLETED
        )
        for task in done:
            path, depth, contents = task.result()
            running.remove(task)
            if dir_is_flow(contents):
                # this is a flow directory
                yield {
                    'name': str(path.relative_to(run_dir)),
                    'path': path,
                }
            elif depth < max_depth:
                # we may have a nested flow, let's see...
                for subdir in contents:
                    if (subdir.is_dir()
                            and subdir.stem not in EXCLUDE_FILES):
                        running.append(
                            asyncio.create_task(
                                _scandir(subdir, depth + 1)
                            )
                        )
        # don't allow this to become blocking
        await asyncio.sleep(0)
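
A short sketch of driving this asynchronous generator; the printout is purely illustrative:

import asyncio


async def list_flows():
    # Illustrative only: print the name and path of every flow found
    # under the default run directory.
    async for flow in scan():
        print(flow['name'], flow['path'])

asyncio.run(list_flows())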
Example No. 9
def install_workflow(
    flow_name: Optional[str] = None,
    source: Optional[Union[Path, str]] = None,
    run_name: Optional[str] = None,
    no_run_name: bool = False,
    no_symlinks: bool = False
) -> Tuple[Path, Path, str]:
    """Install a workflow, or renew its installation.

    Install the workflow into a new run directory.
    Create a symlink to the workflow source location and, unless disabled,
    localhost symlinks for the run, work, log, share and share/cycle
    directories.

    Args:
        flow_name: workflow name, default basename($PWD).
        source: directory location of flow.cylc file, default $PWD.
        run_name: name of the run, overrides run1, run2, run3 etc.
            If specified, cylc install will not create the runN symlink.
        no_run_name: Flag as True to install workflow into
            ~/cylc-run/<flow_name>
        no_symlinks: Flag as True to skip making localhost symlink dirs

    Return:
        source: The source directory.
        rundir: The directory the workflow has been installed into.
        flow_name: The installed suite name (which may be computed here).

    Raise:
        WorkflowFilesError:
            No flow.cylc file found in source location.
            Illegal name (can look like a relative path, but not absolute).
            Another suite already has this name (unless --redirect).
            Trying to install a workflow that is nested inside of another.
    """

    if not source:
        source = Path.cwd()
    elif Path(source).name == SuiteFiles.FLOW_FILE:
        source = Path(source).parent
    source = Path(expand_path(source))
    if not flow_name:
        flow_name = Path.cwd().stem
    validate_flow_name(flow_name)
    if run_name in SuiteFiles.RESERVED_NAMES:
        raise WorkflowFilesError(
            f'Run name cannot be "{run_name}".')
    validate_source_dir(source, flow_name)
    run_path_base = Path(get_workflow_run_dir(flow_name))
    relink, run_num, rundir = get_run_dir(run_path_base, run_name, no_run_name)
    if Path(rundir).exists():
        raise WorkflowFilesError(
            f"\"{rundir}\" exists."
            " Try using cylc reinstall. Alternatively, install with another"
            " name, using the --run-name option.")
    check_nested_run_dirs(rundir, flow_name)
    symlinks_created = {}
    if not no_symlinks:
        sub_dir = flow_name
        if run_num:
            sub_dir += '/' + f'run{run_num}'
        symlinks_created = make_localhost_symlinks(rundir, sub_dir)
    INSTALL_LOG = _get_logger(rundir, 'cylc-install')
    if not no_symlinks and symlinks_created:
        for src, dst in symlinks_created.items():
            INSTALL_LOG.info(f"Symlink created from {src} to {dst}")
    try:
        rundir.mkdir(exist_ok=True)
    except OSError as e:
        if e.strerror == "File exists":
            raise WorkflowFilesError(f"Run directory already exists : {e}")
    if relink:
        link_runN(rundir)
    create_workflow_srv_dir(rundir)
    rsync_cmd = get_rsync_rund_cmd(source, rundir)
    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)
    stdout, stderr = proc.communicate()
    INSTALL_LOG.info(f"Copying files from {source} to {rundir}")
    INSTALL_LOG.info(f"{stdout}")
    if proc.returncode != 0:
        INSTALL_LOG.warning(
            f"An error occurred when copying files from {source} to {rundir}")
        INSTALL_LOG.warning(f" Error: {stderr}")
    cylc_install = Path(rundir.parent, SuiteFiles.Install.DIRNAME)
    check_flow_file(rundir, symlink_suiterc=True, logger=INSTALL_LOG)
    if no_run_name:
        cylc_install = Path(rundir, SuiteFiles.Install.DIRNAME)
    source_link = cylc_install.joinpath(SuiteFiles.Install.SOURCE)
    cylc_install.mkdir(parents=True, exist_ok=True)
    if not source_link.exists():
        INSTALL_LOG.info(f"Creating symlink from {source_link}")
        source_link.symlink_to(source)
    elif os.readlink(source_link) == str(source):
        INSTALL_LOG.info(
            f"Symlink from \"{source_link}\" to \"{source}\" in place.")
    else:
        raise WorkflowFilesError(
            "Source directory between runs are not consistent.")
    # check source link matches the source symlink from workflow dir.
    INSTALL_LOG.info(f'INSTALLED {flow_name} from {source} -> {rundir}')
    print(f'INSTALLED {flow_name} from {source} -> {rundir}')
    _close_install_log(INSTALL_LOG)
    return source, rundir, flow_name
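
A hedged sketch of calling the installer programmatically; the flow name is a placeholder and the current directory is assumed to contain a flow.cylc file:

from pathlib import Path

# Illustrative call: install the flow in the current directory under an
# explicit name, skipping the localhost symlink directories.
source, rundir, flow_name = install_workflow(
    flow_name='my-flow',       # placeholder name
    source=Path.cwd(),
    no_symlinks=True,
)
print(f'{flow_name} installed in {rundir}')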
Example No. 10
def main(parser: COP, options: 'Values', workflow: str) -> None:
    if options.use_task_point and options.cycle:
        raise UserInputError(
            "cannot specify a cycle point and use environment variable")

    if options.use_task_point:
        if "CYLC_TASK_CYCLE_POINT" in os.environ:
            options.cycle = os.environ["CYLC_TASK_CYCLE_POINT"]
        else:
            raise UserInputError("CYLC_TASK_CYCLE_POINT is not defined")

    if options.offset and not options.cycle:
        raise UserInputError("You must target a cycle point to use an offset")

    # Attempt to apply specified offset to the targeted cycle
    if options.offset:
        options.cycle = str(add_offset(options.cycle, options.offset))

    # Exit if both task state and message are being polled
    if options.status and options.msg:
        raise UserInputError("cannot poll both status and custom output")

    if options.msg and not options.task and not options.cycle:
        raise UserInputError("need a taskname and cyclepoint")

    # Exit if an invalid status is requested
    if (options.status and options.status not in TASK_STATUSES_ORDERED
            and options.status not in CylcWorkflowDBChecker.STATE_ALIASES):
        raise UserInputError(f"invalid status '{options.status}'")

    # this only runs locally
    if options.run_dir:
        run_dir = expand_path(options.run_dir)
    else:
        run_dir = get_workflow_run_dir('')

    pollargs = {
        'workflow': workflow,
        'run_dir': run_dir,
        'task': options.task,
        'cycle': options.cycle,
        'status': options.status,
        'message': options.msg,
    }

    spoller = WorkflowPoller("requested state",
                             options.interval,
                             options.max_polls,
                             args=pollargs)

    connected, formatted_pt = spoller.connect()

    if not connected:
        raise CylcError("cannot connect to the workflow DB")

    if options.status and options.task and options.cycle:
        # check a task status
        spoller.condition = options.status
        if not spoller.poll():
            sys.exit(1)
    elif options.msg:
        # Check for a custom task output
        spoller.condition = "output: %s" % options.msg
        if not spoller.poll():
            sys.exit(1)
    else:
        # just display query results
        spoller.checker.display_maps(
            spoller.checker.workflow_state_query(task=options.task,
                                                 cycle=formatted_pt,
                                                 status=options.status))
Example No. 11
def run_dir():
    """The cylc run directory for this host."""
    path = Path(get_workflow_run_dir(''))
    path.mkdir(exist_ok=True)
    yield path
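
A one-line illustration of a test depending on this fixture:

def test_run_dir_exists(run_dir):
    # Illustrative only: the fixture yields the cylc-run path, created on
    # first use if it did not already exist.
    assert run_dir.is_dir()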