Example #1
0
def dict_to_yaml(data, width=None, sort=False):
    """Render a dictionary as a YAML string.

    :param data: mapping to serialize
    :param width: maximum emitter line width (None keeps ruamel's default)
    :param sort: when True, emit the top-level keys in sorted order
    :return: the YAML document as a string
    """
    buffer = StringIO()

    # Configure the emitter
    serializer = YAML()
    serializer.indent(mapping=4, sequence=4, offset=2)
    serializer.default_flow_style = False
    serializer.allow_unicode = True
    serializer.encoding = 'utf-8'
    serializer.width = width

    # Work on a deep copy so that converting multiline strings to
    # literal block scalars never mutates the caller's data
    data = copy.deepcopy(data)
    scalarstring.walk_tree(data)

    # Optionally sort top-level keys https://stackoverflow.com/a/40227545
    if sort:
        ordered = CommentedMap()
        for key in sorted(data):
            ordered[key] = data[key]
        data = ordered

    serializer.dump(data, buffer)
    return buffer.getvalue()
Example #2
0
 def publicize(self, resource_id, public_data):
     """Write the public part of a resource next to it as '.public.yaml'.

     NOTE: walk_tree mutates public_data in place (multiline strings
     become literal block scalars).
     """
     walk_tree(public_data)
     target = str(Path(resource_id.path).with_suffix('.public.yaml'))
     ensure_dir(target)
     if not target.endswith('.yaml'):
         # Non-YAML target: write the payload out as plain text.
         with open(target, 'w') as handle:
             handle.write(public_data)
     else:
         # Binary mode: ruamel encodes the stream itself.
         with open(target, 'wb') as handle:
             yaml.dump(public_data, handle)
Example #3
0
 def to_disk(self, resource_id, resource):
     """Persist *resource* at resource_id.path, backing up any local copy."""
     payload = resource.to_disk(resource_id)
     # Preserve formatting of multiline strings (mutates payload in place)
     walk_tree(payload)
     # Keep a backup of the current on-disk version before overwriting
     if self.backup_resource(resource_id, payload):
         log("Backed up file: ", resource_id.path)
     destination = resource_id.path
     ensure_dir(destination)
     if destination.endswith('.yaml'):
         # Binary mode: ruamel encodes the stream itself.
         with open(destination, 'wb') as handle:
             yaml.dump(payload, handle)
     else:
         with open(destination, 'w') as handle:
             handle.write(payload)
Example #4
0
    def test_basic(self):
        """Multiline strings become literal block scalars after walk_tree."""
        from ruamel.yaml.comments import CommentedMap
        from ruamel.yaml.scalarstring import walk_tree

        doc = CommentedMap()
        doc[1] = 'a'
        doc[2] = 'with\nnewline\n'
        walk_tree(doc)
        expected = """\
        1: a
        2: |
          with
          newline
        """
        assert round_trip_dump(doc) == dedent(expected)
Example #5
0
def write_yaml_file(file_path, dict_like):
    """write a dict_like object to a file using the given file path.

    Parameters
    ----------
    file_path: str
        path with file name and extension
    dict_like: dict or OrderedDict
        the data to be written to file

    Notes
    -----
    The mapping is deep-copied before serialization so that
    ``scalarstring.walk_tree`` (which rewrites multiline strings in
    place) never mutates the caller's data.
    """
    from copy import deepcopy

    with open(file_path, 'w', encoding='utf8') as fpt:
        if USE_RUAMEL:
            yaml_obj = YAML()
            yaml_obj.indent(mapping=4, sequence=4, offset=2)
            # FIX: walk_tree mutates its argument; operate on a copy so the
            # caller's dict_like is left untouched (matches dict_to_yaml).
            data = deepcopy(dict_like)
            scalarstring.walk_tree(data)
            yaml_obj.dump(data, fpt)
        else:
            yaml.dump(dict_like, fpt)
Example #6
0
 def update_bank(cls, course, question_name, bank_source, new_question):
     """Replace *question_name* in the question bank file at *bank_source*.

     Backs up the bank, updates the cached question, then rewrites the
     bank file with up-to-date versions of every question it held.
     """
     course.backup_bank(bank_source)
     course_cache = cls.CACHE[course.course_name]
     course_cache[question_name] = new_question
     # Grab the names of the old questions
     kept_question_names = []
     with open(bank_source) as bank_file:
         questions = yaml.load(bank_file)
         for question in questions:
             # BUG FIX: previously this appended the literal list
             # ['question_name'] and also clobbered the question_name
             # parameter with the loop variable.
             kept_question_names.append(question['question_name'])
     # Get the actual up-to-date questions
     questions = [
         course_cache[name].to_disk(force=True)
         for name in kept_question_names
     ]
     # Dump them back into the file
     walk_tree(questions)
     with open(bank_source, 'w') as bank_file:
         # BUG FIX: 'out' was undefined here (NameError); write to the
         # file handle actually opened above.
         yaml.dump(questions, bank_file)
def safe_dump_yaml(tool_path: str, tool_json: dict):
    """
    Dump dict to a yaml file without overriding existing files (add _#_ prefix)
    :param tool_path: expected path
    :param tool_json: tool dict
    :return: path where file is dumped
    """
    parent, name = os.path.split(tool_path)
    suffix = 1
    # Keep prefixing '_N_' (incrementing N) until the candidate path is free
    while os.path.exists(tool_path):
        tool_path = os.path.join(parent, '_{}_{}'.format(suffix, name))
        suffix += 1
    # Create any missing parent directories
    os.makedirs(os.path.dirname(os.path.abspath(tool_path)), exist_ok=True)
    # Serialize, keeping multiline strings as literal block scalars
    # (walk_tree mutates tool_json in place)
    writer = YAML()
    walk_tree(tool_json)
    with open(tool_path, 'w') as handle:
        writer.dump(tool_json, handle)
    return tool_path
 def from_json(self):
     """Convert the JSON input file(s) to YAML.

     Uses the roundtrip dumper to preserve key order.  With --write each
     input gets a sibling '.yaml' file; otherwise all documents are
     dumped together to stdout.  Returns a shell-style exit code.
     """
     # use roundtrip to preserve order
     errors = 0
     docs = []
     dumper = ruamel.yaml.RoundTripDumper
     for file_name in self._args.file:
         if not self._args.write and file_name == '-':
             inp = sys.stdin.read()
         else:
             # FIX: close the input file deterministically instead of
             # leaking the handle from a bare open().read().
             with open(file_name) as in_fp:
                 inp = in_fp.read()
         loader = ruamel.yaml.Loader  # RoundTripLoader
         data = ruamel.yaml.load(inp, loader)
         data = commentify(data)
         if self._args.write:
             yaml_file_name = os.path.splitext(file_name)[0] + '.yaml'
             with open(yaml_file_name, 'w') as fp:
                 ruamel.yaml.dump(
                     data,
                     fp,
                     Dumper=dumper,
                     default_flow_style=self._args.flow,
                     allow_unicode=True,
                 )
         else:
             docs.append(data)
     if self._args.write:
         return 1 if errors else 0
     if self._args.literal:
         # Convert multiline strings to literal block scalars on request
         from ruamel.yaml.scalarstring import walk_tree
         for doc in docs:
             walk_tree(doc)
     print(
         ruamel.yaml.dump_all(
             docs,
             Dumper=dumper,
             default_flow_style=self._args.flow,
             allow_unicode=True,
         ))
     return 1 if errors else 0
Example #9
0
    def test_map(self):
        """walk_tree with a custom map applies per-pattern scalar styles."""
        from ruamel.yaml.compat import ordereddict
        from ruamel.yaml.comments import CommentedMap
        from ruamel.yaml.scalarstring import walk_tree, preserve_literal
        from ruamel.yaml.scalarstring import DoubleQuotedScalarString as dq
        from ruamel.yaml.scalarstring import SingleQuotedScalarString as sq

        doc = CommentedMap()
        doc[1] = 'a'
        doc[2] = 'with\nnew : line\n'
        doc[3] = '${abc}'
        doc[4] = 'almost:mapping'
        # First matching substring wins: newline -> literal, '${' -> single
        # quotes, ':' -> double quotes.
        style_map = ordereddict([('\n', preserve_literal), ('${', sq), (':', dq)])
        walk_tree(doc, map=style_map)
        expected = """\
        1: a
        2: |
          with
          new : line
        3: '${abc}'
        4: "almost:mapping"
        """
        assert round_trip_dump(doc) == dedent(expected)
def breakdown_wf_local(
        wf_path: str,
        installed_apps: dict = None,
        nested_wf_json: dict = None,  # use if is_main is false
        steps_dir: str = None,  # use if is_main is false
        is_main: bool = True):
    """
    Recursively walk through all the steps (tools and nested wfs)
     and install them in steps folder.
    Reference them in the main workflow.
    :param wf_path: Path where to dump the tool/workflow
    :param installed_apps: Dict containing already installed apps.
    :param nested_wf_json: None in main call, dict in recursive calls
    :param steps_dir: None in main call, path in recursive calls
    :param is_main: True in main call, False in recursive calls
    :return: (Workflow path, Installed apps dictionary)
    """

    msg = ("Decomposing workflow '{}' and"
           " installing parts in 'steps' folder. "
           "This may take a minute or two.\n"
           "Set log level to INFO"
           " to track decomposing progress.").format(os.path.abspath(wf_path))
    logger.info(msg)
    print(colored(msg, 'green'))

    wf_path = os.path.abspath(wf_path)
    installed_apps = installed_apps or dict()
    base_dir = os.path.dirname(wf_path)
    # Strip the original extension; the main workflow gets a '_decomposed'
    # marker while nested workflows keep their name with a '.cwl' suffix.
    updated_wf_path = '.'.join(wf_path.split('.')[:-1])
    if is_main:
        updated_wf_path += '_decomposed.cwl'
    else:
        updated_wf_path += '.cwl'

    # Resolve main workflow or use provided json for nested wf
    if is_main and not nested_wf_json:
        wf_json = resolve_cwl(wf_path)
    else:
        wf_json = nested_wf_json

    # Make steps dir
    steps_dir = steps_dir or os.path.join(base_dir, 'steps')
    if not os.path.exists(steps_dir):
        os.mkdir(steps_dir)

    # Normalize 'steps' to a dict keyed by step id, then replace each
    # inline 'run' object with a relative path to an installed file.
    wf_json['steps'] = cwl_ensure_dict(wf_json['steps'], 'id')
    for step_id, step in wf_json['steps'].items():
        # Content hash deduplicates identical apps across the whole tree.
        app_hash = calc_json_hash(step['run'])
        if app_hash in installed_apps:
            wf_json['steps'][step_id]['run'] = os.path.relpath(
                installed_apps[app_hash], base_dir)
        else:
            tool_path = os.path.join(steps_dir, step_id + '.cwl')
            if step['run']['class'] in ['CommandLineTool', 'ExpressionTool']:
                # Dump run contents to file
                tool_path = safe_dump_yaml(tool_path, step['run'])
                # Add to installed apps to avoid duplicates
                installed_apps[app_hash] = os.path.abspath(tool_path)
                # Add a relative path to wf_json
                wf_json['steps'][step_id]['run'] = os.path.relpath(
                    tool_path, base_dir)
            elif step['run']['class'] == 'Workflow':
                # Nested workflow: recurse; the recursive call installs it
                # (and its sub-steps) and returns the installed path.
                nested_wf, installed_apps = breakdown_wf_local(
                    tool_path,
                    installed_apps=installed_apps,
                    nested_wf_json=step['run'],
                    is_main=False,
                    steps_dir=steps_dir)
                wf_json['steps'][step_id]['run'] = os.path.relpath(
                    nested_wf, base_dir)
                # NOTE(review): 'run' classes other than CommandLineTool,
                # ExpressionTool and Workflow are left inline — confirm
                # that is intended.

    if is_main:
        y = YAML()

        with open(updated_wf_path, 'w') as f:
            # Keep literal block formatting (walk_tree mutates wf_json)
            walk_tree(wf_json)
            y.dump(wf_json, f)
        msg = ("Rewiring done. "
               "New tools are now connected"
               " in the workflow {}.").format(os.path.abspath(updated_wf_path))
        logger.info(msg)
        print(colored(msg, 'green'))
    else:
        # Nested workflows are themselves deduplicated by content hash.
        wf_hash = calc_json_hash(wf_json)
        if wf_hash in installed_apps:
            return installed_apps[wf_hash], installed_apps
        else:
            safe_dump_yaml(updated_wf_path, wf_json)
            installed_apps[wf_hash] = os.path.abspath(updated_wf_path)

    return os.path.abspath(updated_wf_path), installed_apps
            vault_pw = VaultSecret(pw)
            vault_pw.load()
    except FileNotFoundError:
        print("Password file not found")
        sys.exit(1)
else:
    vault_pw = PromptVaultSecret(prompt_formats=["password: "])
    vault_pw.load()

vl = VaultLib(secrets=[
    (None, vault_pw)
])

def to_yaml(representer, node):
    """Represent a vault-encrypted string as a '!vault' literal block scalar."""
    vault_tag = '!vault'
    # style='|' forces the literal block style so the ciphertext stays multiline
    return representer.represent_scalar(vault_tag, node, style='|')

# Configure the YAML round-trip loader/dumper and register the custom
# '!vault' representer for literal scalar strings.
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.representer.add_representer(LiteralScalarString, to_yaml)

# Load the plaintext YAML mapping given as the first CLI argument.
with open(sys.argv[1], 'r') as orig:
    y = yaml.load(orig)

# Encrypt every top-level value with the vault secret.
# NOTE(review): 'value' actually iterates the mapping's keys here.
for value in y:
    y[value] = vl.encrypt(y[value], vault_pw).decode('utf-8')

# Convert the (multiline) ciphertext strings to literal block scalars.
scalarstring.walk_tree(y)

# Write the encrypted document to the path given as the second CLI argument.
with open(sys.argv[2], 'w') as dest:
    yaml.dump(y, dest)
Example #12
0
def dict_to_yaml(data: dict, out_path: str):
    """Serialize *data* to a YAML file at *out_path*.

    Multiline strings are emitted as literal block scalars; the input is
    deep-copied first so the caller's dict is never mutated.
    """
    emitter = YAML()
    snapshot = deepcopy(data)
    walk_tree(snapshot)
    with open(out_path, 'w') as target:
        emitter.dump(snapshot, target)