Example #1
    def testToYAMLs(self, tests, rootFolder='./testsNewBuild/'):
        """Writes a batch of tests to file in the YAML format, grouping them by team and name.

        :param tests: list of tests to write to file
        :type tests: list
        :param rootFolder: destination folder, defaults to './testsNewBuild/'
        :type rootFolder: str, optional
        """

        # extract unique test names
        uniqueTestNames = set(c.name for c in tests)
        # group tests by name so that they land in the same file
        for name in uniqueTestNames:
            yaml = YAML()
            yaml.default_flow_style = False
            testDict = None
            path = None
            for t in tests:
                if t.name == name:
                    path = os.path.join(rootFolder, t.team, name + '.yaml')
                    if testDict is None:
                        testDict = t.toDict()
                    else:
                        key = 'metric' + str(len(testDict['metrics']) + 1)
                        testDict['metrics'][key] = t.toDict()['metrics']['metric1']
            with open(path, "w+") as f:
                yaml.dump(testDict, f)
Example #2
def mocked_config_file_path(
    fake_temp_data_pocketsphinx_dic, fake_temp_data_pocketsphinx_lm, tmpdir_factory
):
    path_to_pocketsphinx_dic = os.path.join(
        str(fake_temp_data_pocketsphinx_dic), "fake.dic"
    )
    path_to_pocketsphinx_lm = os.path.join(
        str(fake_temp_data_pocketsphinx_lm), "fake.lm"
    )
    # config part
    base = tempfile.mkdtemp()
    config_file = os.path.join(base, "config.yaml")

    yaml = YAML()

    m_cfg = yaml.load(COMMON_MOCKED_CONFIG)
    m_cfg["pocketsphinx"]["dic"] = path_to_pocketsphix_dic
    m_cfg["pocketsphinx"]["lm"] = path_to_pocketsphix_lm

    with open(config_file, "w", encoding="utf-8") as fp:
        yaml.dump(m_cfg, fp)

    yield config_file

    shutil.rmtree(base)
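
Note: a hedged usage sketch of how a pytest test might consume the fixture above via its yielded path (the test name and assertion are hypothetical):

from ruamel.yaml import YAML

def test_mocked_config_is_loadable(mocked_config_file_path):
    yaml = YAML()
    with open(mocked_config_file_path, encoding="utf-8") as fp:
        cfg = yaml.load(fp)
    # the fixture rewrites both pocketsphinx paths to the fake temp files
    assert cfg["pocketsphinx"]["dic"].endswith("fake.dic")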
Example #3
 def dump(self, data, stream=None, **kw):
     inefficient = False
     if stream is None:
         inefficient = True
         stream = StringIO()
     YAML.dump(self, data, stream, **kw)
     if inefficient:
         return stream.getvalue()
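
Note: this is the dump-to-string idiom from the ruamel.yaml documentation; a minimal self-contained sketch, assuming the method lives on a YAML subclass (called MyYAML here purely for illustration):

from io import StringIO
from ruamel.yaml import YAML

class MyYAML(YAML):
    def dump(self, data, stream=None, **kw):
        inefficient = False
        if stream is None:
            inefficient = True
            stream = StringIO()
        YAML.dump(self, data, stream, **kw)
        if inefficient:
            return stream.getvalue()

print(MyYAML().dump({'a': [1, 2]}))  # returns the YAML text when no stream is given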
Example #4
    def to_yaml(self, filename):
        if self.stats is None:
            logger.error("Run .skeleton_analysis() before .to_yaml()")
            return

        from ruamel.yaml import YAML
        yaml = YAML(typ="unsafe")
        with open(filename, 'wt', encoding="utf-8") as f:
            yaml.dump(self.stats, f)
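
Note: typ="unsafe" is what lets arbitrary Python objects inside self.stats be serialized (with !!python/... tags) where the default round-trip mode would refuse; a minimal sketch assuming nothing about the real stats structure:

import sys
from ruamel.yaml import YAML

yaml = YAML(typ="unsafe")
yaml.dump({"value": complex(1, 2)}, sys.stdout)  # emits a !!python/complex tag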
Example #5
 def list_products_yaml(self, hostname, system):
     from ruamel.yaml import YAML
     yml = YAML(typ='safe', pure=False)
     yml.default_flow_style = False
     yml.explicit_end = True
     yml.explicit_start = True
     yml.indent(mapping=4, sequence=4, offset=2)
     data = system.to_refhost_dict()
     data["name"] = str(hostname)
     yml.dump(data, self.output)
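
Note: for reference, a self-contained sketch of what those dump settings do; explicit_start/explicit_end add the '---' and '...' document markers, and indent() controls block indentation (approximate output shown in the trailing comments):

import sys
from ruamel.yaml import YAML

yml = YAML(typ='safe', pure=False)
yml.default_flow_style = False
yml.explicit_start = True
yml.explicit_end = True
yml.indent(mapping=4, sequence=4, offset=2)
yml.dump({"products": ["sles", "leap"]}, sys.stdout)
# ---
# products:
#   - sles
#   - leap
# ...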
Example #6
    def to_file(self, filename):
        """
        Saves object to a file in YAML format.

        Args:
            filename (str): File name.

        """
        d = {"mass_info": self.mass_info, "pair_coeffs": self.pair_coeffs,
             "mol_coeffs": self.mol_coeffs}
        yaml = YAML(typ="safe")
        with open(filename, "w") as f:
            yaml.dump(d, f)
Example #7
def edit_tmpvault(filename):
    '''Update a YAML config by replacing any key whose value is CHANGE_AND_REKEY.

    Requests a master password and uses pbkdf2 to derive a master key to base
    all of the new keys off of.
    '''
    yaml = YAML()
    with open(filename) as fobj:
        vault_dict = yaml.load(fobj)
    master_pass = getpass.getpass("Enter master key to generate values: ").encode('utf-8')
    master_key = hashlib.pbkdf2_hmac('sha256', master_pass, os.urandom(16), 100000)
    change_values(vault_dict, 'CHANGE_AND_REKEY', master_key)
    with open(filename, 'w') as fobj:
        yaml.dump(vault_dict, fobj)
Example #8
def dumpyaml(
    yamlp: YAML, data: Any, stream: Any = None, **kw: Any
) -> Optional[str]:
    """Dump YAML to string."""
    inefficient = False
    if stream is None:
        inefficient = True
        stream = StringIO()
    # overriding here to get dumping to
    # not sort keys.
    yamlp = YAML()
    yamlp.indent(mapping=4, sequence=6, offset=3)
    # yamlp.compact(seq_seq=False, seq_map=False)
    yamlp.dump(data, stream, **kw)
    if inefficient:
        return cast(str, stream.getvalue())
    return None
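
Note: a usage sketch for dumpyaml; when no stream is passed it buffers into a StringIO and returns the text (as the comment notes, the passed-in parser is deliberately replaced):

from ruamel.yaml import YAML

text = dumpyaml(YAML(), {"steps": ["build", "test"]})
print(text)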
Example #9
    def _to_text(self, filename=None, is_json=False):
        """Serialize to a json/yaml file"""
        extra_data = {} if self.extra_data is None else self.extra_data

        def cell_value(a_cell):
            if a_cell.formula and a_cell.formula.python_code:
                return '=' + a_cell.formula.python_code
            else:
                return a_cell.value

        extra_data.update(dict(
            excel_hash=self._excel_file_md5_digest,
            cell_map=dict(sorted(
                ((addr, cell_value(cell))
                 for addr, cell in self.cell_map.items() if cell.serialize),
                key=lambda x: AddressRange(x[0]).sort_key
            )),
        ))
        if not filename:
            filename = self.filename + ('.json' if is_json else '.yml')

        # hash the current file to see if this function makes any changes
        existing_hash = (self._compute_file_md5_digest(filename)
                         if os.path.exists(filename) else None)

        if not is_json:
            with open(filename, 'w') as f:
                ymlo = YAML()
                ymlo.width = 120
                ymlo.dump(extra_data, f)
        else:
            with open(filename, 'w') as f:
                json.dump(extra_data, f, indent=4)

        del extra_data['cell_map']

        # hash the new file, return True if it changed; this is only reliable
        # on Pythons with ordered dicts (CPython 3.6 and Python 3.7+)
        return (existing_hash is None or
                existing_hash != self._compute_file_md5_digest(filename))
Example #10
def main():
    yaml = YAML()
    file_name = sys.argv[1]
    with open(file_name) as f:
        file_in = f.read()
    docs = yaml.load_all(file_in)
    i = 0
    for doc in docs:

        if i == 0:
            code_old = doc
        else:
            code_new = doc
        i = i + 1
    delta_map = dict()
    follow_keys = list()

    add = list()
    update = list()
    traversal(code_old, code_new, follow_keys, delta_map, update, add)
    yaml.dump(code_old, sys.stdout)

    split = '------love----you------choerodon----'

    print(split)
    yaml.dump(delta_map, sys.stdout)

    print(split)
    change_key_map = dict()

    change_key_map["add"] = add
    change_key_map["update"] = update
    yaml.dump(change_key_map, sys.stdout)
Example #11
    def serialize(self, out):
        def nested_set(target, path, value):
            value = self.serialize_value(value)
            if len(path) > 1:
                if len(path) == 2 and path[1] and path[1][0] == '[' and path[1][-1] == ']' and path[1][1:-1].isdigit():
                    if path[0] not in target:
                        target[path[0]] = []
                    target[path[0]].append(value)
                else:
                    # Add empty dict in case there is value and we
                    # expect dict
                    if path[0] not in target or not isinstance(target[path[0]], dict):
                        target[path[0]] = CommentedMap()
                    nested_set(target[path[0]], path[1:], value)
            else:
                target[path[0]] = value

        units = CommentedMap()
        for unit in self.unit_iter():
            nested_set(units, unit.getid().split('->'), unit.target)
        yaml = YAML()
        yaml.default_flow_style = False
        yaml.dump(self.get_root_node(units), out)
Example #12
    obj1 = {}
    DefaultValidatingDraft4Validator(schema).validate(obj1)
    assert obj1 == {'outer-object': {'inner-object': 'INNER-DEFAULT', "inner-list": [1,2,3]}}
    assert obj1['outer-object'].from_default

    print("obj1:")
    print(json.dumps(obj1, indent=4) + '\n')
        
    schema_nodefault = copy.deepcopy(schema)
    del schema_nodefault["properties"]["outer-object"]["default"]
    obj2 = {}
    DefaultValidatingDraft4Validator(schema_nodefault).validate(obj2)
    assert obj2 == {} # whoops

    print("obj2:")
    print(json.dumps(obj2, indent=4) + '\n')
    
    obj3 = {}
    obj3_with_defaults = inject_defaults(obj3, schema, include_yaml_comments=True)
    print(obj3)

    print("obj3_with_defaults:")
    #print(json.dumps(obj3, indent=4) + '\n')
    
    import sys
    from ruamel.yaml import YAML
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.dump(obj3_with_defaults, sys.stdout)
Example #13
 def callback(data):
     # Ref: https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string
     stream = StringIO()
     yaml = YAML()
     yaml.dump(data, stream)
     return stream.getvalue()
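
Note: usage is then a one-liner; callback serializes any dumpable structure to its YAML text:

print(callback({'key': 'value'}))  # -> "key: value\n"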
Example #14
class Release:

    """The Release command."""

    CWD = 'src'

    def __init__(self, args):
        self.branch = args.branch
        self.release_branch = args.release_branch
        self.finish_release = args.finish
        self.rebase_branch = args.rebase_branch
        self.increment_part = args.increment_part
        self.dry_run = args.dry_run
        self.user = args.user

        self.base_url = 'git+ssh://{}@git.launchpad.net'.format(self.user)
        self.full_url = os.path.join(self.base_url, args.repository)
        self.snap = os.path.basename(self.full_url)

        self._parts = {}
        self._previous_parts = {}

        self.clone_dir = os.path.join(self.CWD, self.snap)
        self._snapcraft_file = os.path.join(
            self.clone_dir, 'snap', 'snapcraft.yaml')

        self._yaml = YAML()
        self._yaml.preserve_quotes = True
        self._yaml.indent(mapping=2, sequence=2, offset=2)
        self.new_tag = '{:snap-%Y-%m-%dT%H%M}'.format(
            datetime.datetime.utcnow())
        self._cleanup_release_tags_commands = []

    def dance(self):
        self._cleanup()
        if self.rebase_branch:
            self._clone(self.full_url, self.branch)
            self._rebase()
            self._push_release_branch(self.branch, force=True)
        elif self.finish_release:
            self._clone(self.full_url, self.release_branch)
            self._get_parts()
            self._finish_release()
            self._push_release_branch(self.release_branch)
        else:
            self._clone(self.full_url, self.branch)
            self._get_previous_parts()
            self._get_parts()
            with open('changelog', 'w') as f:
                f.write("Changelog\n")
            for part in sorted(self._parts,
                               key=lambda x: x in parts_do_not_tag):
                if part in parts_ignore:
                    continue
                self._clone(self._parts[part]['source'])
                tag = self._tag(part)
                try:
                    old_tag = self._previous_parts[part]['source-tag']
                    changelog = self._get_change_log(part, tag, old_tag)
                    if changelog:
                        with open('changelog', 'a') as f:
                            f.write('\n{}:\n'.format(part))
                            f.write(changelog)
                            f.write("\n")
                except KeyError:
                    continue
            self._update_yaml()
            if self._cleanup_release_tags_commands:
                logger.debug("".center(80, '#'))
                logger.debug("# Release tag(s) cleanup command(s):")
                logger.debug("".center(80, '#'))
                for c in self._cleanup_release_tags_commands:
                    logger.debug(c)
            self._push_release_branch(self.branch)

    def _cleanup(self):
        shutil.rmtree(self.CWD, ignore_errors=True)
        os.mkdir(self.CWD)

    def _clone(self, repo, branch=None, target_dir=None, cwd=CWD):
        """Clone project repository."""
        repo_basename = os.path.basename(repo)
        repo_basename = repo_basename.replace('.git', '')
        logger.info("".center(80, '#'))
        cmd = ['git', 'clone', repo]
        if target_dir:
            cmd += [target_dir]
            repo_basename = target_dir
        if branch:
            logger.info("# Cloning {} ({})".format(repo_basename, branch))
            cmd += ['-b', branch]
        else:
            logger.info("# Cloning {}".format(repo_basename))
        logger.info("".center(80, '#'))
        run(cmd, cwd=cwd)
        if not os.path.exists(os.path.join(cwd, repo_basename)):
            logger.error('Unable to clone {}'.format(repo))
            raise SystemExit(1)

    def _get_previous_parts(self):
        cmd = ['git', 'describe', '--abbrev=0', '--tags', '--match', 'v*']
        last_release_tag = run(
            cmd, cwd=self.clone_dir, check=True).stdout.decode().rstrip()
        logger.info("Last release tag: {}".format(last_release_tag))
        cmd = ['git', '--no-pager', 'show', '{}:snap/snapcraft.yaml'.format(
               last_release_tag)]
        previous_snapcraft_file = run(
            cmd, cwd=self.clone_dir, check=True).stdout.decode()
        data = self._yaml.load(previous_snapcraft_file)
        for k, v in data["parts"].items():
            if 'source-tag' in v:
                if 'source' in v:
                    self._previous_parts[k] = v

    def _get_parts(self):
        with open(self._snapcraft_file) as fp:
            self._data = self._yaml.load(fp)
        for k, v in self._data["parts"].items():
            if 'source-tag' in v:
                if 'source' in v:
                    self._parts[k] = v

    def _get_change_log(self, part, new_tag, old_tag):
        repo_basename = os.path.basename(self._parts[part]['source'])
        repo_basename = repo_basename.replace('.git', '')
        cmd = ['git', 'log', '--no-merges', "--pretty=format:+ %s",
               '{}...{}'.format(old_tag, new_tag)]
        return run(cmd, cwd=os.path.join(self.CWD, repo_basename),
                   check=True).stdout.decode()

    def _tag(self, part):
        repo_basename = os.path.basename(self._parts[part]['source'])
        repo_basename = repo_basename.replace('.git', '')
        cmd = ['git', 'describe', '--abbrev=40', '--tags']
        if part not in parts_do_not_tag:
            cmd += ['--match', 'snap-*T*']
        data = run(
            cmd,
            cwd=os.path.join(self.CWD, repo_basename),
            check=True).stdout.decode()
        m = re.search(
            r'(?P<tag>.+?)(?P<additional_commits>\-[0-9]+\-g.{40})?$', data)
        if m:
            last_tag = m.group('tag')
            is_tag_required = (m.group('additional_commits') and
                               part not in parts_do_not_tag)
        else:
            raise SystemExit('Error: no tag found for {}'.format(part))
        if is_tag_required:
            last_tag = self.new_tag
            logger.info("Tag required on {}".format(part))
            self._tag_version(part, self.new_tag)
            self._push_changes(part)
            self._cleanup_release_tags_commands.append(
                'git push --delete {} {}'.format(
                    self._parts[part]['source'], self.new_tag)
            )
        else:
            if part not in parts_do_not_tag:
                logger.info("No new changes on {}".format(part))
            logger.info("{} will be used".format(last_tag))
        self._data["parts"][part]['source-tag'] = last_tag
        return last_tag

    def _tag_version(self, part, new_tag):
        """Tag the code version."""
        repo_basename = os.path.basename(self._parts[part]['source'])
        repo_basename = repo_basename.replace('.git', '')
        run(['git', 'tag', new_tag, '-m', new_tag],
            cwd=os.path.join(self.CWD, repo_basename), check=True)
        logger.info("{} applied on {}".format(new_tag, repo_basename))

    def _push_changes(self, part):
        part_uri = self._parts[part]['source']
        repo_basename = os.path.basename(part_uri).replace('.git', '')
        parse_result = urlparse(part_uri)._replace(
            scheme='git+ssh', netloc=urlparse(self.base_url).netloc)
        part_uri = urlunparse(parse_result)
        logger.info('URL target is {}'.format(part_uri))
        if self.dry_run:
            run(['git', 'push', '--dry-run', part_uri, '--tags'],
                cwd=os.path.join(self.CWD, repo_basename), check=True)
        else:
            logger.info("Pushing changes to origin")
            run(['git', 'push', part_uri, '--tags'],
                cwd=os.path.join(self.CWD, repo_basename), check=True)

    def _update_yaml(self):
        """Update yaml and commit."""
        logger.info("".center(80, '#'))
        logger.info("# Updating parts in {}".format(
            self._snapcraft_file))
        logger.info("".center(80, '#'))
        with open(self._snapcraft_file, 'w') as fp:
            self._yaml.dump(self._data, fp)
        logger.info("".center(80, '#'))
        logger.info("# Updating {} version in {}".format(
            self.snap, self._snapcraft_file))
        logger.info("".center(80, '#'))
        bumpversion_output = run(
            ['bumpversion', self.increment_part, '--allow-dirty', '--list'],
            check=True, cwd=self.clone_dir).stdout.decode()
        new_version = bumpversion_output.splitlines()[-1].replace(
            'new_version=', '')
        logger.info("Bump {} to version {}".format(
            self.snap, new_version))
        run(['git', 'add', '--all'], cwd=self.clone_dir, check=True)
        run(['git', 'commit', '-m', 'Bump version number and tag parts'],
            cwd=self.clone_dir, check=True)

    def _push_release_branch(self, local_branch, force=False):
        cmd = ['git', 'push']
        if self.dry_run:
            cmd.append('--dry-run')
        if force:
            cmd.append('-f')
        cmd += [self.full_url,
                '{}:{}'.format(local_branch, self.release_branch)]
        # push the branch itself, then its tags
        run(cmd, cwd=self.clone_dir, check=True)
        run(cmd + ['--tags'], cwd=self.clone_dir, check=True)

    def _rebase(self):
        run(['git', 'rebase', 'origin/{}'.format(self.rebase_branch)],
            cwd=self.clone_dir, check=True)

    def _finish_release(self):
        """Tag, reset parts tags, open next dev version and commit."""
        release_tag = "v{}".format(self._data["version"])
        run(['git', 'tag', release_tag, '-m', release_tag],
            cwd=self.clone_dir, check=True)
        logger.info("{} applied on {}".format(release_tag, self.snap))
        logger.info("".center(80, '#'))
        logger.info("# Updating parts in {}".format(
            self._snapcraft_file))
        logger.info("".center(80, '#'))
        for part in self._parts:
            if part in parts_ignore:
                continue
            self._data["parts"][part]['source-tag'] = ""
        with open(self._snapcraft_file, 'w') as fp:
            self._yaml.dump(self._data, fp)
        logger.info("".center(80, '#'))
        logger.info("# Updating {} version in {}".format(
            self.snap, self._snapcraft_file))
        logger.info("".center(80, '#'))
        bumpversion_output = run(
            ['bumpversion', 'minor', '--allow-dirty', '--list'],
            check=True, cwd=self.clone_dir).stdout.decode()
        new_version = bumpversion_output.splitlines()[-1].replace(
            'new_version=', '')
        logger.info("Bump {} to version {}".format(
            self.snap, new_version))
        run(['git', 'add', '--all'], cwd=self.clone_dir, check=True)
        run(['git', 'commit', '-m', 'Bump version to next dev release'],
            cwd=self.clone_dir, check=True)
Example #15
def override_values_in_packs(new_cpu_number: str,
                             new_memory_amount: str,
                             current_cpu_number: str,
                             current_mem_amount: str,
                             cpu_system_required_min: str,
                             cpu_system_required_percent: str,
                             mem_system_required_min: str,
                             mem_system_required_percent: str,
                             pack_name: str = None):

    yaml_parser = YAML(typ="jinja2",
                       plug_ins=["ruamel.yaml.jinja2.__plug_in__"])
    values_yaml_paths = get_values_file_location(pack_name)

    for values_yaml_path in glob.glob(values_yaml_paths):
        logger.info(f"Changing resources for pack: {values_yaml_path}")

        with open(values_yaml_path, mode="r") as values_yaml_file:
            pack_values = yaml_parser.load(values_yaml_file)

            if not pack_values:
                message = f"{values_yaml_path} file empty!"
                logger.error(message)
                raise ValueError(message)

            try:
                cpu_fraction = pack_values.get(CPU_FRACTION)
                if cpu_fraction:
                    cpu_fraction = float(cpu_fraction)

                for resource_name in RESOURCE_NAMES:
                    if pack_values.get(resource_name):
                        pack_values[resource_name] = \
                            replace_cpu_configuration(data=pack_values.get(resource_name),
                                                      new_cpu_number=new_cpu_number,
                                                      current_cpu_number=current_cpu_number,
                                                      fraction=cpu_fraction,
                                                      system_required_min=cpu_system_required_min,
                                                      system_required_percent=cpu_system_required_percent)

                for cpu_single_value in CPU_SINGLE_VALUES:
                    replace_single_value(
                        data=pack_values,
                        new_value=new_cpu_number,
                        current_value=current_cpu_number,
                        key=cpu_single_value,
                        fraction=cpu_fraction,
                        system_required_min=cpu_system_required_min,
                        system_required_percent=cpu_system_required_percent,
                        round_to_int=(cpu_single_value in CPU_INT_VALUES))

            except Exception:
                logger.exception(
                    "Exception during calculation of new cpu values.")
                raise ValueError

            try:
                memory_fraction = pack_values.get(MEMORY_FRACTION)
                if memory_fraction:
                    memory_fraction = float(memory_fraction)

                for resource_name in RESOURCE_NAMES:
                    if pack_values.get(resource_name):
                        pack_values[resource_name] = \
                            replace_memory_configuration(data=pack_values.get(resource_name),
                                                         new_memory_amount=new_memory_amount,
                                                         current_mem_amount=current_mem_amount,
                                                         fraction=memory_fraction,
                                                         system_required_min=mem_system_required_min,
                                                         system_required_percent=mem_system_required_percent)

                for memory_single_value in MEMORY_SINGLE_VALUES:
                    replace_single_value(
                        data=pack_values,
                        new_value=new_memory_amount,
                        current_value=current_mem_amount,
                        key=memory_single_value,
                        fraction=memory_fraction,
                        system_required_min=mem_system_required_min,
                        system_required_percent=mem_system_required_percent,
                        cpu=False)

            except Exception:
                logger.exception(
                    "Exception during calculation of new memory values.")
                raise ValueError

        with open(values_yaml_path, mode='w') as values_yaml_file:
            yaml_parser.dump(pack_values, values_yaml_file)
            logger.info(
                f"Resources for pack: {values_yaml_path} were changed.\n")
Example #16
from argparse import ArgumentParser
from ruamel.yaml import YAML
from collections.abc import Iterable

parser = ArgumentParser(
    description=
    'Converts tag separator from dot to colon in sidecar files, easing migration after GH-164.'
)
parser.add_argument('file',
                    type=str,
                    nargs='+',
                    help='YAML sidecar file(s) to migrate.')
args = parser.parse_args()

yaml = YAML(typ='rt')  # preserve order, style and comments
yaml.indent(mapping=2, sequence=2, offset=2)

for file_path in args.file:
    with open(file_path, 'r+') as file:
        sidecar = yaml.load(file)
        if not sidecar: continue

        if 'tags' in sidecar and isinstance(sidecar['tags'], Iterable):
            sidecar['tags'] = [
                tag.replace('.', ':') for tag in sidecar['tags']
            ]

        file.seek(0)
        yaml.dump(sidecar, file)
        file.truncate()
Example #17
File: misc.py Project: mjirik/io3d
def obj_to_file(obj, filename, filetype='auto', ndarray_to_list=False, squeeze=True):
    '''Writes annotation to a file.

    :param filetype:
        auto
        yaml
        pkl, pickle
        pklz, picklezip
    :param ndarray_to_list: convert ndarrays in obj to lists
    :param squeeze: squeeze ndarray

    '''
    # import json
    # with open(filename, mode='w') as f:
    #    json.dump(annotation,f)
    if ndarray_to_list:
        obj = ndarray_to_list_in_structure(obj, squeeze=squeeze)

    # write to yaml
    d = os.path.dirname(os.path.abspath(filename))
    if not os.path.exists(d):
        os.makedirs(d)

    if filetype == 'auto':
        _, ext = os.path.splitext(filename)
        filetype = ext[1:]

    if filetype in ('yaml', 'yml'):
        # import yaml
        from ruamel.yaml import YAML
        yaml = YAML(typ="unsafe")
        with open(filename, 'wt', encoding="utf-8") as f:
            yaml.dump(obj, f)
        # if sys.version_info.major == 2:
        #     with open(filename, 'wb') as f:
        #         yaml.dump(obj, f, encoding="utf-8")
        # else:
        #     with open(filename, "w", encoding="utf-8") as f:
        #         yaml.dump(obj, f)
    elif filetype in ('pickle', 'pkl'):
        f = open(filename, 'wb')
        logger.info("filename " + filename)
        # if sys.version_info[0] < 3: import cPickle as pickle
        # else: import _pickle as pickle
        import pickle
        pickle.dump(obj, f, -1)
        f.close()
    elif filetype in ('streamingpicklezip', 'spklz'):
        # this is not working :-(
        import gzip
        import sPickle as pickle
        f = gzip.open(filename, 'wb', compresslevel=1)
        # f = open(filename, 'wb')
        pickle.s_dump(obj, f)
        f.close()
    elif filetype in ('picklezip', 'pklz'):
        import gzip
        if sys.version_info[0] < 3: import cPickle as pickle
        else: import _pickle as pickle
        f = gzip.open(filename, 'wb', compresslevel=1)
        # f = open(filename, 'wb')
        pickle.dump(obj, f)
        f.close()
    elif filetype in ('mat',):
        import scipy.io as sio
        sio.savemat(filename, obj)
    else:
        logger.error('Unknown filetype ' + filetype)
Example #18
        rule_groups = in_rules['groups']

    for in_rule in rule_groups:
        item_name = in_rule['name']
        print(' - %s' % item_name)
        alerts = []
        rules = []

        for item in in_rule['rules']:
            if 'alert' in item:
                alerts.append(item)
            else:
                rules.append(item)

        if alerts:
            alert_name = re.sub('[-.](alerts|rules)', '',
                                item_name) + ".alerts"
            print(' -----> %s with %d alerts' % (alert_name, len(alerts)))
            alerts_path = '%s/%s.yaml' % (out_dir, alert_name)
            out_alerts = {'name': alert_name, 'rules': alerts}
            with open(alerts_path, 'w') as out_file:
                yaml.dump([out_alerts], out_file)

        if rules:
            rule_name = re.sub('[-.](alerts|rules)', '', item_name) + ".rules"
            print(' -----> %s with %d rules' % (rule_name, len(rules)))
            rules_path = '%s/%s.yaml' % (out_dir, rule_name)
            out_rules = {'name': rule_name, 'rules': rules}
            with open(rules_path, 'w') as out_file:
                yaml.dump([out_rules], out_file)
Example #19
def to_yaml_string(data):
    stream = StringIO()
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.dump(data, stream)
    return stream.getvalue()
Example #20
    def save_yaml(path, data):
        ryaml = YAML()
        ryaml.allow_duplicate_keys = True

        with open(path, 'w') as f:
            ryaml.dump(data, f)
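
Note: allow_duplicate_keys mainly matters on load; with it set, ruamel warns on duplicate keys instead of raising DuplicateKeyError. A minimal sketch with hypothetical input:

from ruamel.yaml import YAML

ryaml = YAML()
ryaml.allow_duplicate_keys = True
data = ryaml.load("a: 1\na: 2\n")  # warns about the duplicate 'a' instead of raising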
Example #21
def _fix_token(config_file=None, force=False, verify=True):
    from ruamel.yaml import YAML
    yaml = YAML()
    config_file = config_file or TRAVIS_CONFIG_FILE
    with open(config_file, "r") as _file:
        try:
            travis_config = yaml.load(_file)
        except Exception:
            raise ValueError(
                "Failed to parse the travis configuration. "
                "Make sure the config only contains valid YAML and keys as specified by travis."
            )

        # Get the generated token from the top level deploy config added by the travis cli
        try:
            real_token = travis_config["deploy"]["api_key"]["secure"]
        except (TypeError, KeyError):
            raise AssertionError("Can't find any top level deployment tokens")

        try:
            # Find the build stage that deploys to releases
            releases_stages = [
                stage for stage in travis_config["jobs"]["include"]
                if stage.get("deploy", dict()).get("provider") == "releases"
            ]
            assert (
                len(releases_stages) > 0
            ), "Can't set the new token because there are no stages deploying to releases"
            assert (
                len(releases_stages) < 2
            ), "Can't set the new token because there are multiple stages deploying to releases"
        except (TypeError, KeyError):
            raise AssertionError(
                "Can't set the new token because there are no deployment stages"
            )

        try:
            is_mock_token = releases_stages[0]["deploy"]["token"][
                "secure"] == "REPLACE_ME"
            is_same_token = releases_stages[0]["deploy"]["token"][
                "secure"] == real_token

            unmodified = is_mock_token or is_same_token
        except (TypeError, KeyError):
            unmodified = False

        # Set the new generated token as the stages deploy token
        _create(releases_stages[0], "deploy", "token", "secure")
        releases_stages[0]["deploy"]["token"]["secure"] = real_token

        # Make sure it is fine to overwrite the config file
        assert unmodified or force, (
            'The secure token in the "{}" stage has already been changed. '
            "Retry with --force if you are sure about replacing it.".format(
                releases_stages[0].get("stage", "releases deployment")))

        # Remove the top level deploy config added by the travis cli
        travis_config.pop("deploy")

        if not unmodified and verify:
            pprint.pprint(travis_config)
            if (not input("Do you want to save this configuration? (y/n) ").
                    strip().lower() == "y"):
                return

    # Save the new travis config
    assert travis_config
    with open(config_file, "w") as _file:
        yaml.dump(travis_config, _file)
    print("Fixed!")
Example #22
    def _run_core_command(
        self,
        patterns_json: List[Any],
        patterns: List[Pattern],
        targets: List[Path],
        language: Language,
        rule: Rule,
        rules_file_flag: str,
        cache_dir: str,
        report_time: bool,
    ) -> dict:
        with tempfile.NamedTemporaryFile(
                "w") as pattern_file, tempfile.NamedTemporaryFile(
                    "w") as target_file, tempfile.NamedTemporaryFile(
                        "w") as equiv_file:
            yaml = YAML()
            yaml.dump({"rules": patterns_json}, pattern_file)
            pattern_file.flush()
            target_file.write("\n".join(str(t) for t in targets))
            target_file.flush()

            cmd = [SEMGREP_PATH] + [
                "-lang",
                language,
                "-json",
                rules_file_flag,
                pattern_file.name,
                "-j",
                str(self._jobs),
                "-target_file",
                target_file.name,
                "-use_parsing_cache",
                cache_dir,
                "-timeout",
                str(self._timeout),
                "-max_memory",
                str(self._max_memory),
            ]

            equivalences = rule.equivalences
            if equivalences:
                self._write_equivalences_file(equiv_file, equivalences)
                cmd += ["-equivalences", equiv_file.name]

            if report_time:
                cmd += ["-json_time"]

            core_run = sub_run(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
            logger.debug(core_run.stderr.decode("utf-8", errors="replace"))

            out_bytes = core_run.stdout
            err_bytes = core_run.stderr
            returncode = core_run.returncode
            if returncode != 0:
                output_json = self._parse_core_output(out_bytes, err_bytes,
                                                      returncode)

                if "error" in output_json:
                    self._raise_semgrep_error_from_json(
                        output_json, patterns, rule)
                else:
                    raise SemgrepError(
                        f"unexpected json output while invoking semgrep-core with rule '{rule.id}':\n{PLEASE_FILE_ISSUE_TEXT}"
                    )

            output_json = self._parse_core_output(out_bytes, err_bytes,
                                                  returncode)

            return output_json
Example #23
def run(*, src: str) -> None:
    yaml = YAML(typ="rt")
    with open(src) as rf:
        data = yaml.load(rf)
    yaml.dump(data, sys.stdout)
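
Note: since typ="rt" is round-trip mode, this load/dump pair preserves key order, quoting style and comments; a small sketch using an inline string instead of a file:

import sys
from ruamel.yaml import YAML

yaml = YAML(typ="rt")
data = yaml.load("b: 1  # keep me\na: 2\n")
yaml.dump(data, sys.stdout)
# b: 1  # keep me
# a: 2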
Example #24
class SavedBundleConfig(object):
    def __init__(self, bento_service=None, kind="BentoService"):
        self.kind = kind
        self._yaml = YAML()
        self._yaml.default_flow_style = False
        self.config = self._yaml.load(
            BENTOML_CONFIG_YAML_TEPMLATE.format(
                kind=self.kind,
                bentoml_version=get_bentoml_deploy_version(),
                created_at=str(datetime.utcnow()),
            ))

        if bento_service is not None:
            self.config["metadata"].update({
                "service_name":
                bento_service.name,
                "service_version":
                bento_service.version,
            })
            self.config["env"] = bento_service.env.to_dict()
            self.config['apis'] = _get_apis_list(bento_service)
            self.config['artifacts'] = _get_artifacts_list(bento_service)

    def write_to_path(self, path, filename="bentoml.yml"):
        return self._yaml.dump(self.config, Path(os.path.join(path, filename)))

    @classmethod
    def load(cls, filepath):
        conf = cls()
        with open(filepath, "rb") as config_file:
            yml_content = config_file.read()
        conf.config = conf._yaml.load(yml_content)
        ver = str(conf["version"])

        if ver != BENTOML_VERSION:
            msg = (
                "Saved BentoService bundle version mismatch: loading a BentoService "
                "bundle created with BentoML version {}, but loading from BentoML "
                "version {}".format(conf["version"], BENTOML_VERSION))

            # If major version is different, then there could be incompatible API
            # changes. Raise error in this case.
            if ver.split(".")[0] != BENTOML_VERSION.split(".")[0]:
                if not BENTOML_VERSION.startswith('0+untagged'):
                    raise BentoMLConfigException(msg)
                else:
                    logger.warning(msg)
            else:  # Otherwise just show a warning.
                logger.warning(msg)

        return conf

    def get_bento_service_metadata_pb(self):
        bento_service_metadata = BentoServiceMetadata()
        bento_service_metadata.name = self.config["metadata"]["service_name"]
        bento_service_metadata.version = self.config["metadata"][
            "service_version"]
        bento_service_metadata.created_at.FromDatetime(
            self.config["metadata"]["created_at"])

        if "env" in self.config:
            if "setup_sh" in self.config["env"]:
                bento_service_metadata.env.setup_sh = self.config["env"][
                    "setup_sh"]

            if "conda_env" in self.config["env"]:
                bento_service_metadata.env.conda_env = dump_to_yaml_str(
                    self.config["env"]["conda_env"])

            if "pip_dependencies" in self.config["env"]:
                bento_service_metadata.env.pip_dependencies = "\n".join(
                    self.config["env"]["pip_dependencies"])
            if "python_version" in self.config["env"]:
                bento_service_metadata.env.python_version = self.config["env"][
                    "python_version"]
            if "docker_base_image" in self.config["env"]:
                bento_service_metadata.env.docker_base_image = self.config[
                    "env"]["docker_base_image"]

        if "apis" in self.config:
            for api_config in self.config["apis"]:
                api_metadata = BentoServiceMetadata.BentoServiceApi(
                    name=api_config["name"],
                    docs=api_config["docs"],
                    handler_type=api_config.get("handler_type", "unknown"),
                )
                if "handler_config" in api_config:
                    for k, v in api_config["handler_config"].items():
                        api_metadata.handler_config[k] = v
                bento_service_metadata.apis.extend([api_metadata])

        if "artifacts" in self.config:
            for artifact_config in self.config["artifacts"]:
                artifact_metadata = BentoServiceMetadata.BentoArtifact()
                if "name" in artifact_config:
                    artifact_metadata.name = artifact_config["name"]
                if "artifact_type" in artifact_config:
                    artifact_metadata.artifact_type = artifact_config[
                        "artifact_type"]
                bento_service_metadata.artifacts.extend([artifact_metadata])

        return bento_service_metadata

    def __getitem__(self, item):
        return self.config[item]

    def __setitem__(self, key, value):
        self.config[key] = value

    def __contains__(self, item):
        return item in self.config
Example #25
class Cfg(object):
    """Configuration class for the Bandit NHM extraction program."""
    def __init__(self, filename, cmdline=None):
        """Init method for Cfg class.

        Args:
            filename (str): The configuration filename.
            cmdline (str, optional): Currently unused. Defaults to None.

        """

        # yaml.add_representer(OrderedDict, dict_representer)
        # yaml.add_constructor(_mapping_tag, dict_constructor)
        self.yaml = YAML()

        self.__cfgdict = None
        self.__cmdline = cmdline
        self.load(filename)

    def __str__(self):
        """Pretty-print the configuration items"""
        outstr = ''

        for (kk, vv) in iteritems(self.__cfgdict):
            outstr += '{0:s}: '.format(kk)

            if isinstance(vv, list):
                for ll in vv:
                    outstr += '{}, '.format(ll)
                outstr = outstr.strip(', ') + '\n'
            else:
                outstr += '{}\n'.format(vv)
        return outstr

    def __getattr__(self, item):
        # Undefined attributes will look up the given configuration item
        return self.get_value(item)

    def get_value(self, varname):
        """Return the value for a given config variable.

        Args:
            varname (str): Configuration variable.

        Returns:
             The value of the configuration variable or raise KeyError if variable does not exist.

        """

        try:
            return self.__cfgdict[varname]
        except KeyError:
            print('Configuration variable, {}, does not exist'.format(varname))
            raise
            # return None

    def load(self, filename):
        """Load the YAML-format configuration file.

        Args:
            filename (str): Name of the configuration file.

        """
        # tmp = yaml.load(open(filename, 'r'), Loader=NoDatesSafeLoader)
        with open(filename, 'r') as fp:
            tmp = self.yaml.load(fp)
        self.__cfgdict = tmp

    def update_value(self, variable, newval):
        """Update an existing configuration variable with a new value

        Args:
            variable (str): The configuration variable to update.
            newval (str): The value to assign to the variable.

        Raises:
            KeyError: If configuration variable does not exist.

        """

        if variable in self.__cfgdict:
            self.__cfgdict[variable] = newval
        else:
            raise KeyError(
                "Configuration variable, {}, does not exist".format(variable))

    def write(self, filename):
        """Write the configuration out to a file.

        Args:
            filename (str): Name of file to write configuration to.

        """

        with open(filename, 'w') as outfile:
            self.yaml.dump(self.__cfgdict, outfile)
Example #26
import os
from pathlib import Path
import sys

from ruamel.yaml import YAML

config_file = Path(
    os.path.join(
        os.path.dirname(__file__),
        "..",
        "_config.yml",
    ))

if len(sys.argv) >= 2:
    title = sys.argv[1]
else:
    title = "DEVELOPMENT DOCS"
yaml = YAML(typ="rt")  # default, if not specfied, is 'rt' (round-trip)
content = yaml.load(config_file)
content["title"] = title
yaml.dump(content, config_file)
Example #27
        args.column,
        'ionMode':
        args.mode,
        'instrument':
        args.instrument,
        'minimumPeakIntensity':
        5000,
        'targets': [t for t in targets if not t['isInternalStandard']]
    }]

    library = {
        'carrot': {
            'lcms': {
                'correction': {
                    'config': corrConfig
                },
                'annotation': {
                    'config': annotConfig
                }
            }
        }
    }

    yaml = YAML()
    yaml.default_flow_style = False
    yaml.indent(sequence=4, offset=2)

    # open output file: 'application-carrot.lcms.yml'
    with open(args.output, 'w') as file:
        yaml.dump(library, file)
Example #28
                        'interval': 1}
              },
              'loggers': {
                  'etmmv': {
                    'handlers': ['file'],
                    'level': loglevel,
                    'propagate': False}
              },
              'root': {
                  'handlers': ['file'],
                  'level': loglevel},
              'version': 1}
    logging.config.dictConfig(config)
    logger.info("\n######## Initializing logging #########")
    if file:
        logger.info(f'logging for file: {file}\n    logging at level: {loglevel}\n    logging to file: {logfile}')
    else:
        logger.info(f'logging at level: {loglevel}\n    logging to file: {logfile}')

if __name__ == "__main__":
    if len(sys.argv) > 1 and os.path.isdir(sys.argv[1]):
        etmdir = sys.argv.pop(1)
    else:
        sys.exit("The etm path must be provided.")

    settings = Settings(etmdir)
    print(settings.settings, "\n")
    print(settings.changes, "\n")
    yaml.dump(settings.settings, sys.stdout)

Example #29
                key = newkey
                newkey = ''

        lineNum += 1

    return ret_val

with open('index.md', 'r') as f:
    endReached = False

    data = f.read().split('\n')
    for d in data:
        print(d)
        if "<!-- AUTO-GENERATED-START -->" in d:
            print('| Key | Default Value | Description |')
            print('| --- | --- | --- |')
            break

    with open('values.yaml', 'r') as f_v:
        d_v = f_v.read()
        yaml = YAML()
        code = yaml.load(d_v)
        yaml.explicit_start = True
        yaml.dump(code, sys.stdout, transform=decode_helm_yaml)

    for d in data:
        if "<!-- AUTO-GENERATED-END -->" in d:
            endReached = True
        if endReached:
            print(d)
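
Note: the transform argument to dump() post-processes the fully serialized text before it is written to the stream. decode_helm_yaml itself is not shown in this snippet, but any str -> str callable works, e.g. this hypothetical one:

def indent_two(text):
    # indent every emitted line by two spaces
    return ''.join('  ' + line for line in text.splitlines(True))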
Example #30
    def _run_rules_direct_to_semgrep_core(
        self,
        rules: List[Rule],
        target_manager: TargetManager,
        profiler: ProfileManager,
    ) -> Tuple[Dict[Rule, List[RuleMatch]], Dict[Rule, List[Any]],
               List[SemgrepError], Set[Path], Dict[Any, Any], ]:
        from itertools import chain
        from collections import defaultdict

        logger.debug(f"Passing whole rules directly to semgrep_core")

        outputs: Dict[Rule, List[RuleMatch]] = defaultdict(list)
        errors: List[SemgrepError] = []
        # cf. for bar_format: https://tqdm.github.io/docs/tqdm/
        with tempfile.TemporaryDirectory() as semgrep_core_ast_cache_dir:
            for rule, language in tuple(
                    chain(*([(rule, language) for language in rule.languages]
                            for rule in rules))):
                debug_tqdm_write(f"Running rule {rule._raw.get('id')}...")
                with tempfile.NamedTemporaryFile(
                        "w", suffix=".yaml"
                ) as rule_file, tempfile.NamedTemporaryFile(
                        "w") as target_file:
                    targets = self.get_files_for_language(
                        language, rule, target_manager)
                    # opti: no need to call semgrep-core if no target files
                    if not targets:
                        continue
                    target_file.write("\n".join(map(lambda p: str(p),
                                                    targets)))
                    target_file.flush()
                    yaml = YAML()
                    yaml.dump({"rules": [rule._raw]}, rule_file)
                    rule_file.flush()

                    cmd = [SEMGREP_PATH] + [
                        "-lang",
                        language,
                        "-fast",
                        "-json",
                        "-config",
                        rule_file.name,
                        "-j",
                        str(self._jobs),
                        "-target_file",
                        target_file.name,
                        "-use_parsing_cache",
                        semgrep_core_ast_cache_dir,
                        "-timeout",
                        str(self._timeout),
                        "-max_memory",
                        str(self._max_memory),
                    ]

                    r = sub_run(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
                    out_bytes, err_bytes, returncode = r.stdout, r.stderr, r.returncode
                    output_json = self._parse_core_output(
                        out_bytes, err_bytes, returncode)

                    if returncode != 0:
                        if "error" in output_json:
                            self._raise_semgrep_error_from_json(
                                output_json, [], rule)
                        else:
                            raise SemgrepError(
                                f"unexpected json output while invoking semgrep-core with rule '{rule.id}':\n{PLEASE_FILE_ISSUE_TEXT}"
                            )

                # end with tempfile.NamedTemporaryFile(...) ...
                findings = [
                    RuleMatch.from_pattern_match(
                        rule.id,
                        PatternMatch(pattern_match),
                        message=rule.message,
                        metadata=rule.metadata,
                        severity=rule.severity,
                        fix=rule.fix,
                        fix_regex=rule.fix_regex,
                    ) for pattern_match in output_json["matches"]
                ]
                # TODO: we should do that in Semgrep_generic.ml instead
                findings = dedup_output(findings)
                outputs[rule].extend(findings)
                errors.extend(
                    CoreException.from_json(e, language,
                                            rule.id).into_semgrep_error()
                    for e in output_json["errors"])
        # end for rule, language ...

        return outputs, {}, errors, set(
            Path(p) for p in target_manager.targets), {}
Example #31
#!/usr/bin/python3
from pathlib import Path
from importlib_resources import as_file, files
from ruamel.yaml import YAML

yaml = YAML(typ="safe")
yaml.default_flow_style = False

build_context = Path(__file__).resolve().parent.parent

with as_file(
    files("dandi") / "tests" / "data" / "dandiarchive-docker" / "docker-compose.yml"
) as path:
    with path.open() as fp:
        compose = yaml.load(fp)
    del compose["services"]["redirector"]["image"]
    compose["services"]["redirector"]["build"] = str(build_context)
    with path.open("w") as fp:
        yaml.dump(compose, fp)
Example #32
def _create_aws_lambda_cloudformation_template_file(
    project_dir,
    namespace,
    deployment_name,
    deployment_path_prefix,
    api_names,
    bento_service_name,
    s3_bucket_name,
    py_runtime,
    memory_size,
    timeout,
):
    template_file_path = os.path.join(project_dir, 'template.yaml')
    yaml = YAML()
    sam_config = {
        'AWSTemplateFormatVersion': '2010-09-09',
        'Transform': 'AWS::Serverless-2016-10-31',
        'Globals': {
            'Function': {
                'Timeout': timeout,
                'Runtime': py_runtime
            },
            'Api': {
                'BinaryMediaTypes': ['image~1*'],
                'Cors': {
                    'AllowOrigin': "'*'"
                },
                'Auth': {
                    'ApiKeyRequired': False,
                    'DefaultAuthorizer': 'NONE',
                    'AddDefaultAuthorizerToCorsPreflight': False,
                },
            },
        },
        'Resources': {},
        'Outputs': {
            'S3Bucket': {
                'Value': s3_bucket_name,
                'Description':
                'S3 Bucket for saving artifacts and lambda bundle',
            }
        },
    }
    for api_name in api_names:
        sam_config['Resources'][api_name] = {
            'Type': 'AWS::Serverless::Function',
            'Properties': {
                'Runtime': py_runtime,
                'CodeUri': deployment_name + '/',
                'Handler': 'app.{}'.format(api_name),
                'FunctionName': f'{namespace}-{deployment_name}-{api_name}',
                'Timeout': timeout,
                'MemorySize': memory_size,
                'Events': {
                    'Api': {
                        'Type': 'Api',
                        'Properties': {
                            'Path': '/{}'.format(api_name),
                            'Method': 'post',
                        },
                    }
                },
                'Policies': [{
                    'S3ReadPolicy': {
                        'BucketName': s3_bucket_name
                    }
                }],
                'Environment': {
                    'Variables': {
                        'BENTOML_BENTO_SERVICE_NAME': bento_service_name,
                        'BENTOML_API_NAME': api_name,
                        'BENTOML_S3_BUCKET': s3_bucket_name,
                        'BENTOML_DEPLOYMENT_PATH_PREFIX':
                        deployment_path_prefix,
                    }
                },
            },
        }

    yaml.dump(sam_config, Path(template_file_path))

    # We add Outputs section separately, because the value should not
    # have "'" around !Sub
    with open(template_file_path, 'a') as f:
        f.write("""\
  EndpointUrl:
    Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.\
amazonaws.com/Prod"
    Description: URL for endpoint
""")
    return template_file_path
Example #33
yaml.preserve_quotes = True
pwd = os.getcwd()
work_path = f'{pwd}/host_vars/'
while True:
    try:
        num_rtr = int(input('How many routers are you deploying: '))
        if num_rtr == 0:
            print(
                'Zero is not a valid input, please enter a number from 1 - 12')
            continue
        elif num_rtr > 12:
            print(
                "Script will only generate up to 12 UUIDs, please enter a number from 1 - 12"
            )
            continue
        for n in range(1, num_rtr + 1):
            rtr = str(n)
            uuid_rt = uuid.uuid4()
            with open(f'{work_path}/R{rtr}.yml') as f:
                yml_doc = yaml.load(f)
                # for k,v in yml_doc.items():
                # if yml_doc['uuid']:
                yml_doc['uuid'] = str(uuid_rt)
            with open(f'{work_path}/R{rtr}.yml', "w") as f:
                yaml.dump(yml_doc, f)

            print(f'R{n} has a UUID of: {uuid_rt}')
        break
    except ValueError:
        print("Please only enter a number from 1 - 12.")
Example #34
def train(cfg):
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    log.info(f'{device} detected.')
    log.debug(OmegaConf.to_yaml(cfg))
    os.chdir(hydra.utils.get_original_cwd())
    writer = SummaryWriter(log_dir=f'{cfg.logging.checkpoint_dir}',
                           comment=f'LR_{cfg.training.optimizer.lr}_reg_{cfg.training.reg}_con_{cfg.training.con}')

    checkpoint_dir = Path(f'{cfg.logging.checkpoint_dir}')
    config_path = checkpoint_dir / 'config.yaml'
    res_dir = checkpoint_dir / 'x_hat'
    res_dir.mkdir(exist_ok=True)

    # make sure strings containing '\n' are dumped as '|' block scalars with real newlines
    def repr_str(dumper: RoundTripRepresenter, data: str):
        if '\n' in data:
            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
        return dumper.represent_scalar('tag:yaml.org,2002:str', data)

    yaml = YAML()
    yaml.representer.add_representer(str, repr_str)

    with open(config_path, 'w') as f:
        yaml.dump(OmegaConf.to_yaml(cfg), f)

    # set seed
    if cfg.exp.seed:
        torch.manual_seed(cfg.exp.seed)
        np.random.seed(cfg.exp.seed)

    mat, channels = load_data(cfg.dataset.panel)
    log.info('x dim: {}, y dim: {}'.format(mat.shape[1], mat.shape[0]))
    log.debug(channels)

    X = xr.load_dataarray(cfg.dataset.src).sel(fovs=cfg.dataset.fovs, channels=channels).values.astype(np.float32)

    if X.ndim != 4:
        X = X.reshape((1, *X.shape))

    A = torch.tensor(mat, requires_grad=False).unsqueeze(-1).unsqueeze(-1).float().to(device)
    x = torch.tensor(X.astype(np.float32), requires_grad=False).to(device)
    x_binary = (x > 0).float()
    y = F.conv2d(x, A).to(device)

    # ref_cov = torch.load(cfg.dataset.ref_cov, map_location=device).to(device)
    loss_fn = Loss(A, y)

    if cfg.training.resume:
        # TODO load optimizer state
        w = torch.load(cfg.training.resume)
        x_hat = nn.Parameter(w).to(device)
    else:
        w = torch.empty(*x.shape, requires_grad=True, device=device)
        nn.init.orthogonal_(w)
        x_hat = nn.Parameter(w).to(device)

    optimizer = optim.Adam([x_hat], lr=cfg.training.optimizer.lr)

    for i in range(1, cfg.training.epochs + 1):
        optimizer.zero_grad()
        # mse, reg, con, cov = loss_fn(F.relu(x_hat) if cfg.training.relu else x_hat)
        mse, reg, con = loss_fn(x_hat)
        loss = mse + cfg.training.reg * reg + cfg.training.con * con  # + cfg.training.cov * cov
        loss.backward()

        if cfg.training.grad_clip > 0:
            torch.nn.utils.clip_grad_value_(x_hat, cfg.training.grad_clip)

        optimizer.step()

        if cfg.training.relu:
            x = torch.relu(x)

        e = F.mse_loss(x, x_hat).item()
        e_nonneg = F.mse_loss(x, F.relu(x_hat)).item()
        x_hat_binary = (x_hat > 0).float()
        f1 = 2 * (x_binary * x_hat_binary).sum() / (x_binary.sum() + x_hat_binary.sum())

        if i % cfg.logging.checkpoint_interval == 0:
            # TODO save optimizer state
            torch.save(x_hat, os.path.join(res_dir, 'iter{}_{:.2f}.pt'.format(i, e)))
            log.info('checkpoint saved!')

        log.info(
            'Iter {} total: {:.3f} least_squares: {:.3f} | reg: {:.3f} con: {:.3f} | recon_mse: {:.3f} | nonneg_mse: {:.3f} | f1: {:.3f}'.format(
                i, loss.item(), mse.item(), reg.item(), con.item(), e, e_nonneg, f1.item())
        )

        writer.add_scalar('Loss/total', loss.item(), i)
        writer.add_scalar('Loss/least_squares', mse.item(), i)
        writer.add_scalar('Loss/L1', reg.item(), i)
        writer.add_scalar('Loss/smoothness', con.item(), i)
        # writer.add_scalar('Loss/covariance', con.item(), i)
        writer.add_scalar('Evaluation/recovery_mse', e, i)
        writer.add_scalar('Evaluation/recovery_mse_nonneg', e_nonneg, i)
        writer.add_scalar('Evaluation/f1', f1.item(), i)
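
As an aside, the repr_str representer registered above is what renders embedded newlines as readable block scalars; a hedged standalone sketch (values illustrative):

import sys
from ruamel.yaml import YAML
from ruamel.yaml.representer import RoundTripRepresenter

def repr_str(dumper: RoundTripRepresenter, data: str):
    # emit multi-line strings in literal block style ('|')
    if '\n' in data:
        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
    return dumper.represent_scalar('tag:yaml.org,2002:str', data)

yaml = YAML()
yaml.representer.add_representer(str, repr_str)
yaml.dump({'cfg': 'line one\nline two\n'}, sys.stdout)
# cfg: |
#   line one
#   line two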
Ejemplo n.º 35
0
def update_lastmod(verbose):
    count = 0
    yaml = YAML()

    for post in glob.glob(os.path.join(POSTS_PATH, "*.md")):
        git_log_count = subprocess.getoutput(
            "git log --pretty=%ad \"{}\" | wc -l".format(post))

        if git_log_count == "1":
            continue

        git_lastmod = subprocess.getoutput(
            "git log -1 --pretty=%ad --date=iso \"{}\"".format(post))

        if not git_lastmod:
            continue

        latest_commit = subprocess.check_output(
            ['git', 'log', '-1', '--pretty=%B', post]).decode('utf-8')

        if "[Automation]" in latest_commit and "Lastmod" in latest_commit:
            continue

        frontmatter, line_num = get_yaml(post)
        meta = yaml.load(frontmatter)

        if 'seo' in meta:
            if ('date_modified' in meta['seo']
                    and meta['seo']['date_modified'] == git_lastmod):
                continue
            else:
                meta['seo']['date_modified'] = git_lastmod
        else:
            meta.insert(line_num, 'seo', dict(date_modified=git_lastmod))

        output = 'new.md'
        if os.path.isfile(output):
            os.remove(output)

        with open(output, 'w', encoding='utf-8') as new, \
                open(post, 'r', encoding='utf-8') as old:
            new.write("---\n")
            yaml.dump(meta, new)
            new.write("---\n")
            line_num += 2

            lines = old.readlines()

            for line in lines:
                if line_num > 0:
                    line_num -= 1
                    continue
                else:
                    new.write(line)

        shutil.move(output, post)
        count += 1

        if verbose:
            print("[INFO] update 'lastmod' for:" + post)

    if count > 0:
        print("[INFO] Success to update lastmod for {} post(s).".format(count))
Ejemplo n.º 36
0
            vault_pw = VaultSecret(pw)
            vault_pw.load()
    except FileNotFoundError:
        print("Password file not found")
        sys.exit(1)
else:
    vault_pw = PromptVaultSecret(prompt_formats=["password: "])
    vault_pw.load()

vl = VaultLib(secrets=[
    (None, vault_pw)
])

def to_yaml(representer, node):
    return representer.represent_scalar('!vault', node, style='|')

yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.representer.add_representer(LiteralScalarString, to_yaml)

with open(sys.argv[1], 'r') as orig:
    y = yaml.load(orig)

for value in y:
    y[value] = vl.encrypt(y[value], vault_pw).decode('utf-8')

scalarstring.walk_tree(y)

with open(sys.argv[2], 'w') as dest:
    yaml.dump(y, dest)
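
For reference, a hedged sketch of what the !vault representer above emits; the ciphertext shown is a placeholder, not real ansible-vault output:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString

def to_yaml(representer, node):
    return representer.represent_scalar('!vault', node, style='|')

yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.representer.add_representer(LiteralScalarString, to_yaml)

secret = LiteralScalarString('$ANSIBLE_VAULT;1.1;AES256\n61626364...\n')
yaml.dump({'db_password': secret}, sys.stdout)
# db_password: !vault |
#   $ANSIBLE_VAULT;1.1;AES256
#   61626364...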
Ejemplo n.º 37
0
    ''' Update FfDL manifest with the corresponding object storage credentials '''

    with open('manifest.yml', 'r') as f:
        manifest = yaml.load(f.read())

    manifest['data_stores'][0]['connection']['auth_url'] = s3_url
    manifest['data_stores'][0]['connection']['user_name'] = s3_access_key_id
    manifest['data_stores'][0]['connection']['password'] = s3_secret_access_key
    manifest['data_stores'][0]['training_data']['container'] = data_bucket_name
    manifest['data_stores'][0]['training_results']['container'] = result_bucket_name

    yaml.default_flow_style = False
    with open('manifest.yml', 'w') as f:
        yaml.dump(manifest, f)
    ''' Submit Training job to FfDL and monitor its status '''

    files = {
        'manifest': open('manifest.yml', 'rb'),
        'model_definition': open('model.zip', 'rb')
    }

    headers = {
        'accept': 'application/json',
        'Authorization': 'test',
        'X-Watson-Userinfo': 'bluemix-instance-id=test-user'
    }

    response = requests.post(ffdl_rest + '/v1/models?version=2017-02-13',
Ejemplo n.º 38
0
def setup_sparseblockstats(setup_dvid_repo):
    dvid_address, repo_uuid = setup_dvid_repo

    _ = 0
    volume_layout = [[_, _, _, _, _, _, _, _], [_, _, _, _, _, 4, _, _],
                     [_, 1, 1, 1, _, _, 1, 1], [_, 1, _, _, _, _, _, _],
                     [_, 1, _, _, _, _, _, _], [_, 1, _, 2, 2, 2, 2, _],
                     [_, _, _, _, _, _, _, _], [_, 3, _, _, _, 3, _, 1]]

    lowres_volume = np.zeros((4, 8, 8), np.uint64)
    lowres_volume[:] = volume_layout

    volume = upsample(lowres_volume, 16)
    assert volume.shape == (64, 128, 128)

    input_segmentation_name = 'sparseblockstats-input'
    create_labelmap_instance(dvid_address, repo_uuid, input_segmentation_name)
    post_labelmap_voxels(dvid_address, repo_uuid, input_segmentation_name,
                         (0, 0, 0), volume)

    # Mask is same as input, but times 10
    mask_volume = volume * 10
    mask_segmentation_name = 'sparseblockstats-mask'
    create_labelmap_instance(dvid_address, repo_uuid, mask_segmentation_name)
    post_labelmap_voxels(dvid_address, repo_uuid, mask_segmentation_name,
                         (0, 0, 0), mask_volume)

    config_text = textwrap.dedent(f"""\
        workflow-name: sparseblockstats
        cluster-type: {CLUSTER_TYPE}
         
        input:
          dvid:
            server: {dvid_address}
            uuid: {repo_uuid}
            segmentation-name: {input_segmentation_name}
            supervoxels: true
           
          geometry:
            message-block-shape: [64,64,64]
            bounding-box: [[0,0,0], [128,128,64]]
 
        mask-input:
          dvid:
            server: {dvid_address}
            uuid: {repo_uuid}
            segmentation-name: {mask_segmentation_name}
            supervoxels: true
           
          geometry:
            message-block-shape: [64,64,64]
            bounding-box: [[0,0,0], [128,128,64]]
 
        sparseblockstats:
          mask-labels: [20,40] # Avoids the top-left block
    """)

    template_dir = tempfile.mkdtemp(suffix="sparseblockstats-template")

    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        f.write(config_text)

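    # Round-trip the hand-written text through ruamel.yaml so the final file
    # on disk is normalized block-style YAML rather than the raw template string.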
    yaml = YAML()
    with StringIO(config_text) as f:
        config = yaml.load(f)

    yaml = YAML()
    yaml.default_flow_style = False
    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        yaml.dump(config, f)

    return template_dir, config, volume, mask_volume, dvid_address, repo_uuid
Ejemplo n.º 39
0
def main():
    show_help()
    try:
        while True:
            user = input(bold(red('\nAMATERASU ') + '> '))
            if user.startswith('use'):
                # dispatch table: module name -> its configuration routine
                modules = {
                    'network_mapper': network_mapper_CONFIG,
                    'whois_extractor': whois_extractor_CONFIG,
                    'email_extractor': email_extractor_CONFIG,
                    'metadata_extractor': metadata_extractor_CONFIG,
                    'reverse_ip': reverse_ip_CONFIG,
                    'iplocator': ip_locator_CONFIG,
                    'spider': spider_CONFIG,
                    'dns_extractor': dns_extractor_CONFIG,
                    'ftp_bruteforce': ftp_bruteforce_CONFIG,
                    'panelfinder': login_panel_CONFIG,
                    'ssh_bruteforce': ssh_bruteforce_CONFIG,
                    'honeypot_detector': honeypot_detector_CONFIG,
                    'gmail_bruteforce': gmail_bruteforce_CONFIG,
                    'num_verify': num_verify_CONFIG,
                }
                try:
                    selected = user.split(' ')[1]
                    if selected in modules:
                        try:
                            modules[selected]()
                        except KeyboardInterrupt:
                            print()
                            main()
                except IndexError:
                    show_module()
                    print(bold(info('Select a module.')))

            elif user.startswith('set'):
                try:
                    with open('core/config.yaml') as cf:
                        config_file = cf.read()
                    yaml = YAML()
                    config = yaml.load(config_file)
                    api = config['API']

                    target = user.split(' ')[1]
                    value = user.split(' ')[2]
                    # position of each key in the API list, plus display label
                    api_keys = {
                        'shodan_api': (0, 'Shodan', 'Shodan API'),
                        'censys_uid': (1, 'Censys UID', 'Censys UID'),
                        'censys_secret': (2, 'Censys SECRET', 'Censys SECRET'),
                        'numverify_api': (3, 'Numverify', 'Numverify API'),
                    }
                    if target in api_keys:
                        idx, conf_key, label = api_keys[target]
                        api[idx][conf_key] = value
                        print(bold(info(label + '\t' + value)))
                        with open('core/config.yaml', 'w') as cf:
                            yaml.dump(config, cf)

                except IndexError:
                    print(bold(info('Select what to set\n')))
                    print(
                        bold(
                            info(
                                'API KEY\t\tset (shodan_api|censys_uid|censys_secret|numverify_api) API_KEY'
                            )))
                except Exception as e:
                    print(bold(bad('Error: {}'.format(str(e)))))
                    main()
                except KeyboardInterrupt:
                    print()
                    main()

            elif user.startswith('show'):
                try:
                    if user.split(' ')[1] == 'modules':
                        show_module()
                    elif user.split(' ')[1] == 'author':
                        author()
                    elif user.split(' ')[1] == 'banners' or user.split(
                            ' ')[1] == 'banner':
                        show_banners()
                    elif user.split(' ')[1] == 'help':
                        show_help()
                    elif user.split(' ')[1] == 'apis':
                        show_API()
                except IndexError:
                    print(bold(info('Select what to show.\n')))
                    print(bold(info('Modules\t\tshow modules')))
                    print(bold(info('Author\t\tshow author')))
                    print(bold(info('Banners\t\tshow banners')))
                    print(bold(info('Help\t\tshow help')))
                    print(bold(info('API\t\t\tshow apis')))

            elif user == 'exit':
                print(bold(good('Thanks for using Amaterasu.')))
                sys.exit()
            elif user == 'cls' or user == 'clear':
                clear()
            elif user == 'update':
                update()
            elif user == 'author':
                author()
            elif user == '?':
                show_help()
            else:
                print(bad('Amaterasu could not understand.'))
    except KeyboardInterrupt:
        print()
        print(bad('"exit" to get out.'))
        print()
Ejemplo n.º 40
0
def main():
    config = parse_arguments()
    setup_logging(config['log_level'])

    statistics = dict(
        scanned_count=0,
        hashed_count=0,
        duplicates_count=0,
        duplicates_size=0,
        too_small_count=0,
        too_small_size=0,
    )

    size_counter = Counter()
    size_to_files = defaultdict(list)

    hash_counter = Counter()
    hash_to_files = defaultdict(list)
    hash_to_sizes = {}

    def hash_file(f: Path, size: int):
        log.debug(f"Calculating hash for {f}")
        digest = get_digest(f)

        statistics['hashed_count'] += 1
        hash_counter[digest] += 1
        hash_to_files[digest].append(f)
        hash_to_sizes[digest] = size

    threads = config['hash_threads'] or os.cpu_count()
    log.debug(f"Running {threads} hashing threads")

    futures = []
    with ThreadPoolExecutor(max_workers=threads) as tpe:
        for root, _, files in os.walk(config['workdir']):
            for f in files:
                full_path = Path(root, f)
                size = full_path.stat().st_size

                if size < config['min_size']:
                    statistics['too_small_count'] += 1
                    statistics['too_small_size'] += size
                    continue

                statistics['scanned_count'] += 1
                size_counter[size] += 1
                size_to_files[size].append(full_path)

                if size_counter[size] > 1:
                    # If there's more than one file of this size - calculate hash for it
                    futures.append(tpe.submit(hash_file, full_path, size))

                    if size_counter[size] == 2:
                        # If this is the second file of the same size - calculate digest for the first one too
                        futures.append(tpe.submit(hash_file, size_to_files[size][0], size))

        # Make sure to catch all exceptions
        for fut in futures:
            fut.result()

    # keep only hashes seen more than once, largest files first
    sorted_hashes = sorted(
        (digest for digest, count in takewhile(lambda c: c[1] > 1, hash_counter.most_common())),
        key=lambda digest: hash_to_sizes[digest],
        reverse=True,
    )

    duplicates = []
    for digest in sorted_hashes:
        files = hash_to_files[digest]
        duplicates_count = len(files) - 1  # Not counting original
        statistics['duplicates_count'] += duplicates_count

        single_size = hash_to_sizes[digest]
        duplicates_size = single_size * duplicates_count
        statistics['duplicates_size'] += duplicates_size

        duplicates.append({
            "hash": digest,
            "single_size": format_size(single_size),
            "single_size_raw": single_size,
            "duplicates_size": format_size(duplicates_size),
            "duplicates_size_raw": duplicates_size,
            "files": [str(f) for f in files]
        })

    result = {
        'statistics': {
            'files_scanned': statistics['scanned_count'],
            'files_hashed': statistics['hashed_count'],
            'too_small_count': statistics['too_small_count'],
            'too_small_size': format_size(statistics['too_small_size']),
            'too_small_size_raw': statistics['too_small_size'],
            'duplicates_found': statistics['duplicates_count'],
            'duplicates_size': format_size(statistics['duplicates_size']),
            'duplicates_size_raw': statistics['duplicates_size']
        },
        'duplicates': duplicates
    }

    yaml = YAML()
    yaml.dump(result, sys.stdout)
    log.info("Finished")
Ejemplo n.º 41
0
    def config_pat() -> None:
        """
        Set the personal access token (PAT) for automatic Github repo creation.
        """
        ConfigCommand.check_ct_config_dir_exists()
        try:
            path = Path(ConfigCommand.CONF_FILE_PATH)
            yaml = YAML()
            settings = yaml.load(path)
            if not all(attr in settings
                       for attr in ['full_name', 'github_username', 'email']):
                print(
                    '[bold red]The cookietemple config file misses some required attributes!'
                )
                print(
                    '[bold blue]Lets set them before setting your Github personal access token!'
                )
                ConfigCommand.config_general_settings()

        except FileNotFoundError:
            print(
                '[bold red]Cannot find a cookietemple config file. Is this your first time using cookietemple?'
            )
            print(
                '[bold blue]Lets create one before setting your Github personal access token!'
            )
            ConfigCommand.config_general_settings()

        if cookietemple_questionary_or_dot_cookietemple(
                function='confirm',
                question=
                'Do you want to configure your GitHub personal access token right now?\n'
                'You can still configure it later by calling    cookietemple config pat',
                default='Yes'):
            print(
                '[bold blue]cookietemple requires your Github Access token to have full repository and workflow permissions!'
            )
            access_token = cookietemple_questionary_or_dot_cookietemple(
                'password', 'Please enter your Github Access token')
            access_token_b = access_token.encode('utf-8')  # type: ignore

            # ask for confirmation since this action will delete the PAT irrevocably if the user has not saved it anywhere else
            if not cookietemple_questionary_or_dot_cookietemple(
                    function='confirm',
                    question=
                    "You're about to update your personal access token. This action cannot be undone!\n"
                    'Do you really want to continue?',
                    default='Yes'):
                sys.exit(1)

            # encrypt the given PAT and save the encryption key and encrypted PAT in separate files
            print('[bold blue]Generating key for encryption.')
            log.debug('Generating personal access key.')
            key = Fernet.generate_key()
            fer = Fernet(key)
            log.debug('Encrypting personal access token. ')
            print('[bold blue]Encrypting personal access token.')
            encrypted_pat = fer.encrypt(access_token_b)

            # write key
            log.debug(f'Writing key to {ConfigCommand.KEY_PAT_FILE}')
            with open(ConfigCommand.KEY_PAT_FILE, 'wb') as f:
                f.write(key)

            path = Path(ConfigCommand.CONF_FILE_PATH)
            yaml = YAML()
            settings = yaml.load(path)
            settings['pat'] = encrypted_pat
            log.debug(
                f'Dumping configuration to {ConfigCommand.CONF_FILE_PATH}')
            yaml.dump(settings, Path(ConfigCommand.CONF_FILE_PATH))
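
A hedged sketch of the matching read path; the helper name load_pat is illustrative, and the file arguments mirror the ConfigCommand constants used above:

from pathlib import Path
from cryptography.fernet import Fernet
from ruamel.yaml import YAML

def load_pat(key_file: str, conf_file: str) -> str:
    # load the symmetric key written during config_pat()
    with open(key_file, 'rb') as f:
        fer = Fernet(f.read())
    settings = YAML().load(Path(conf_file))
    token = settings['pat']
    if isinstance(token, str):
        token = token.encode('utf-8')
    return fer.decrypt(token).decode('utf-8')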
Ejemplo n.º 42
0
class DataModel:
    def __init__(self, profile):
        self.profile = profile

        self.yaml = YAML()
        self.config = None
        self.modules: Dict[str, Module] = {}

    @staticmethod
    def default_config():
        # TRANSLATORS: This part of text must be formatted in a monospaced font, and no line shall exceed the width of a 70-cell-wide terminal.
        config = _(
            "# ===================================\n"
            "# EH Forwarder Bot Configuration File\n"
            "# ===================================\n"
            "# \n"
            "# This file determines what modules, including master channel, slave channels,\n"
            "# and middlewares, are enabled in this profile.\n"
            "# \n"
            "# \n"
            "# Master Channel\n"
            "# --------------\n"
            "# Exactly one instance of a master channel is required for a profile.\n"
            "# Fill in the module ID and instance ID (if needed) below.\n"
        )
        config += "\nmaster_channel:\n\n"
        # TRANSLATORS: This part of text must be formatted in a monospaced font, and no line shall exceed the width of a 70-cell-wide terminal.
        config += _(
            "# Slave Channels\n"
            "# --------------\n"
            "# \n"
            "# At least one slave channel is required for a profile.\n"
            "# Fill in the module ID and instance ID (if needed) of each slave channel\n"
            "# to be enabled below.\n"
        )
        config += "\nslave_channels: []\n\n"
        # TRANSLATORS: This part of text must be formatted in a monospaced font, and no line shall exceed the width of a 70-cell-wide terminal.
        config += _(
            "# Middlewares\n"
            "# -----------\n"
            "# Middlewares are not required to run an EFB profile. If you are not\n"
            "# going to use any middleware in this profile, you can safely remove\n"
            "# this section. Otherwise, please list down the module ID and instance\n"
            "# ID of each middleware to be enabled below.\n"
        )
        config += "middlewares: []\n"

        str_io = StringIO(config)
        str_io.seek(0)
        return str_io

    def load_config(self):
        coordinator.profile = self.profile
        conf_path = utils.get_config_path()
        if not os.path.exists(conf_path):
            self.config = self.yaml.load(self.default_config())
        else:
            with open(conf_path) as f:
                self.config = self.yaml.load(f)
        self.load_modules_list()

    def save_config(self):
        coordinator.profile = self.profile
        conf_path = utils.get_config_path()
        if not os.path.exists(conf_path):
            os.makedirs(os.path.dirname(conf_path), exist_ok=True)
        with open(conf_path, 'w') as f:
            self.yaml.dump(self.config, f)

    def load_modules_list(self):
        for i in pkg_resources.iter_entry_points("ehforwarderbot.master"):
            cls = i.load()
            self.modules[cls.channel_id] = Module(type="master",
                                                  id=cls.channel_id,
                                                  name=cls.channel_name,
                                                  emoji=cls.channel_emoji,
                                                  wizard=None)
        for i in pkg_resources.iter_entry_points("ehforwarderbot.slave"):
            cls = i.load()
            self.modules[cls.channel_id] = Module(type="slave",
                                                  id=cls.channel_id,
                                                  name=cls.channel_name,
                                                  emoji=cls.channel_emoji,
                                                  wizard=None)
        for i in pkg_resources.iter_entry_points("ehforwarderbot.middleware"):
            cls = i.load()
            self.modules[cls.middleware_id] = Module(type="middleware",
                                                     id=cls.middleware_id,
                                                     name=cls.middleware_name,
                                                     emoji=None,
                                                     wizard=None)
        for i in pkg_resources.iter_entry_points("ehforwarderbot.wizard"):
            if i.name in self.modules:
                fn = i.load()
                self.modules[i.name] = self.modules[i.name].replace(wizard=fn)

    def get_master_lists(self):
        names = []
        ids = []
        for i in self.modules.values():
            if i.type == "master":
                names.append(i.name)
                ids.append(i.id)
        return names, ids

    def get_slave_lists(self):
        names = []
        ids = []
        for i in self.modules.values():
            if i.type == "slave":
                names.append(i.name)
                ids.append(i.id)
        return names, ids

    @staticmethod
    def split_cid(cid):
        if "#" in cid:
            mid, iid = cid.split("#")
        else:
            mid = cid
            iid = None
        return mid, iid

    def get_instance_display_name(self, cid):
        if not cid:
            return cid
        mid, iid = self.split_cid(cid)
        if mid not in self.modules:
            if iid:
                return _("Unknown/custom module (instance: {instance})").format(
                    iid
                )
            else:
                return _("Unknown/custom module")
        else:
            if iid:
                name = _("{channel} (instance: {instance})").format(
                    channel=self.modules[mid].name,
                    instance=iid
                )
            else:
                name = self.modules[mid].name
        return name

    def has_wizard(self, cid):
        mid, __ = self.split_cid(cid)  # use __ so gettext's _() is not shadowed
        if mid not in self.modules:
            return False
        return callable(self.modules[mid].wizard)

    def get_selected_slave_lists(self):
        if 'slave_channels' not in self.config:
            self.config['slave_channels'] = []
            return [], []
        names = []
        ids = []

        for cid in self.config['slave_channels']:
            mid, __ = self.split_cid(cid)

            if mid not in self.modules or self.modules[mid].type != "slave":
                names.append(_("Unknown/custom channel ({channel_id})").format(channel_id=cid))
            else:
                names.append(self.get_instance_display_name(cid))
            ids.append(cid)
        return names, ids

    def get_middleware_lists(self):
        names = []
        ids = []
        for i in self.modules.values():
            if i.type == "middleware":
                names.append(i.name)
                ids.append(i.id)
        return names, ids

    def get_selected_middleware_lists(self):
        if 'middlewares' not in self.config:
            self.config['middlewares'] = []
            return [], []
        names = []
        ids = []
        for cid in self.config['middlewares']:
            mid, __ = self.split_cid(cid)

            if mid not in self.modules or self.modules[mid].type != "middleware":
                names.append(_("Unknown/custom middleware ({middleware_id})").format(middleware_id=cid))
            else:
                names.append(self.get_instance_display_name(cid))
            ids.append(cid)
        return names, ids
Ejemplo n.º 43
0
# Import necessary functions from the Jinja2 module
from jinja2 import Environment, FileSystemLoader

# Import YAML modules (PyYAML for plain loading, ruamel for round-tripping)
import yaml
from ruamel.yaml import YAML

# Load data from YAML into a Python dictionary
with open('./ent_cbs_ad_summary_config.yml') as f:
    config_data = yaml.full_load(f)

# Render the Jinja2 template with the config data and parse the result
jinja_yaml = YAML()
jinja_yaml.allow_duplicate_keys = True
jinja_yaml.preserve_quotes = True
yaml_content = jinja_yaml.load(
    Environment(loader=FileSystemLoader('.')).get_template(
        'bq_summary_factory_template.yml').render(config_data))
print(yaml_content)

# Write the rendered document back out as YAML
with open('final_dag_factory.yml', 'w') as f:
    jinja_yaml.dump(yaml_content, f)
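
allow_duplicate_keys matters here because a rendered template can easily emit the same mapping key twice, which ruamel.yaml rejects by default; a minimal repro sketch:

from ruamel.yaml import YAML

doc = "task: one\ntask: two\n"
try:
    YAML().load(doc)
except Exception as e:
    print(type(e).__name__)  # DuplicateKeyError

lenient = YAML()
lenient.allow_duplicate_keys = True
print(lenient.load(doc)['task'])  # the last occurrence wins: 'two'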
Ejemplo n.º 44
0
    def log(self, key, data, dtype, options: LogOptions = None):
        """
        handler function for writing data to the server. Can be called directly.

        :param key:
        :param data:
        :param dtype:
        :param options:
        :return:
        """
        # todo: overwrite mode is not tested and not in-use.
        write_mode = "w" if options and options.overwrite else "a"
        if dtype == "log":
            abs_path = os.path.join(self.data_dir, key)
            try:
                with open(abs_path, write_mode + 'b') as f:
                    dill.dump(data, f)
            except FileNotFoundError:
                os.makedirs(os.path.dirname(abs_path))
                with open(abs_path, write_mode + 'b') as f:
                    dill.dump(data, f)
        if dtype == "byte":
            abs_path = os.path.join(self.data_dir, key)
            try:
                with open(abs_path, write_mode + 'b') as f:
                    f.write(data)
            except FileNotFoundError:
                os.makedirs(os.path.dirname(abs_path))
                with open(abs_path, write_mode + 'b') as f:
                    f.write(data)
        elif dtype.startswith("text"):
            abs_path = os.path.join(self.data_dir, key)
            try:
                with open(abs_path, write_mode + "+") as f:
                    f.write(data)
            except FileNotFoundError:
                os.makedirs(os.path.dirname(abs_path))
                with open(abs_path, write_mode + "+") as f:
                    f.write(data)
        elif dtype.startswith("yaml"):
            yaml = YAML()
            yaml.explicit_start = True
            stream = StringIO()
            yaml.dump(data, stream)
            output = stream.getvalue()
            abs_path = os.path.join(self.data_dir, key)
            try:
                with open(abs_path, write_mode + "+") as f:
                    if options and options.write_mode == 'key':
                        # 'key' mode merges the new entry into the existing
                        # mapping (assumes data is a dict) and rewrites the
                        # file, rather than appending a second document
                        f.seek(0)
                        d = yaml.load(f)
                        if d is not None:
                            d.update(data)
                            stream = StringIO()
                            yaml.dump(d, stream)
                            output = stream.getvalue()
                            f.seek(0)
                            f.truncate()
                    f.write(output)
            except FileNotFoundError:
                os.makedirs(os.path.dirname(abs_path))
                with open(abs_path, write_mode + "+") as f:
                    if options and options.write_mode == 'key':
                        f.seek(0)
                        d = yaml.load(f)
                        if d is not None:
                            d.update(data)
                            stream = StringIO()
                            yaml.dump(d, stream)
                            output = stream.getvalue()
                            f.seek(0)
                            f.truncate()
                    f.write(output)
        elif dtype.startswith("image"):
            abs_path = os.path.join(self.data_dir, key)
            if "." not in key:
                abs_path = abs_path + ".png"
            from PIL import Image
            assert data.dtype in ALLOWED_TYPES, "image datatype must be one of {}".format(ALLOWED_TYPES)
            if len(data.shape) == 3 and data.shape[-1] == 1:
                data.resize(data.shape[:-1])
            im = Image.fromarray(data)
            try:
                im.save(abs_path)
            except FileNotFoundError:
                os.makedirs(os.path.dirname(abs_path))
                im.save(abs_path)
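
A short aside on explicit_start: it makes every dumped document begin with '---', which is what lets appended YAML documents coexist in a single log file. A minimal sketch:

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.explicit_start = True
yaml.dump({'step': 1}, sys.stdout)
yaml.dump({'step': 2}, sys.stdout)
# ---
# step: 1
# ---
# step: 2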
Ejemplo n.º 45
0
def update_lastmod(path, verbose, date):
    count = 0
    yaml = YAML()

    for post in glob.glob(path):

        lastmod = ''

        if date == Date.GIT:
            git_log_count = subprocess.getoutput(
                "git log --pretty=%ad \"{}\" | wc -l".format(post))

            if git_log_count == "1":
                continue

            git_lastmod = subprocess.getoutput(
                "git log -1 --pretty=%ad --date=iso \"{}\"".format(post))

            if not git_lastmod:
                continue

            latest_commit = subprocess.check_output(
                ['git', 'log', '-1', '--pretty=%B', post]).decode('utf-8')

            if "[Automation]" in latest_commit and "Lastmod" in latest_commit:
                continue

            lastmod = git_lastmod

        elif date == Date.FS:
            t = os.path.getmtime(post)
            dt = datetime.datetime.fromtimestamp(t)
            lastmod = dt.strftime('%F %T') + time.strftime(' %z')

        frontmatter, line_num = get_yaml(post)
        meta = yaml.load(frontmatter)
        # write the computed date back into the front matter
        # (key name assumed from the theme's convention)
        meta['last_modified_at'] = lastmod

        output = 'new.md'
        if os.path.isfile(output):
            os.remove(output)

        with open(output, 'w', encoding='utf-8') as new, \
                open(post, 'r', encoding='utf-8') as old:
            new.write("---\n")
            yaml.dump(meta, new)
            new.write("---\n")
            line_num += 2

            lines = old.readlines()

            for line in lines:
                if line_num > 0:
                    line_num -= 1
                    continue
                else:
                    new.write(line)

        shutil.move(output, post)
        count += 1

        if verbose:
            print("[INFO] update 'lastmod' for:" + post)

    if count > 0:
        print("[INFO] Success to update lastmod for {} post(s).".format(count))
Ejemplo n.º 46
0
           "# - divider: true  # Will insert a divider in the sidebar\n"
           "# - header: My Header  # Will insert a header with no link in the sidebar\n")

    warn = ("#\n"
            "# ==============================\n"
            "# AUTOMATICALLY GENERATED TOC FILE.\n"
            "# You should review the contents of this file, re-order items as you wish,\n"
            "# and nest chapters in sections if you wish. The ======= symbols represent \n"
            "# folder breaks.\n"
            "# \n"
            "# See the demo `toc.yml` for the right structure to follow. You can \n"
            "# generate a demo book by running `jupyter-book create mybook --demo`\n"
            "# ==============================\n\n\n")
    if args.out_path is None:
        print(top + warn)
        yaml.dump(toc, sys.stdout)
    else:
        if os.path.exists(args.out_path) and bool(args.overwrite) is False:
            raise ValueError('toc.yaml file exists, delete the file or set `overwrite=True`')

        # Dump the YAML
        with open(args.out_path, 'w') as ff:
            yaml.dump(toc, ff)

        # Read back in so we can add a comment
        with open(args.out_path, 'r') as ff:
            contents = ff.read()
        with open(args.out_path, 'w') as ff:
            ff.write(top + warn + contents)
        print('TOC written to: {}'.format(args.out_path))
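
ruamel.yaml can also attach the header comment directly instead of the write/read/rewrite sequence above; a hedged sketch using the round-trip comment API (content illustrative):

import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

yaml = YAML()
toc = CommentedMap({'title': 'Home', 'url': '/intro'})
toc.yaml_set_start_comment('AUTOMATICALLY GENERATED TOC FILE.')
yaml.dump(toc, sys.stdout)
# # AUTOMATICALLY GENERATED TOC FILE.
# title: Home
# url: /intro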
Ejemplo n.º 47
0
        input_requirements = yaml.load(input_file)

        if not input_requirements:
            print(YAML_MISSING)
            exit(1)

        new_requirements = copy.deepcopy(input_requirements)
        deep_update(new_requirements, requirements)

        if args.update:
            requirements = new_requirements

        if args.check:
            changed = deep_diff(input_requirements, new_requirements)
            if changed:
                yaml.dump(changed, sys.stdout)
                exit(1)

    sort_requirements(requirements)

    if args.output == '-':
        output = sys.stdout
    else:
        output = open(args.output, 'w')

    yaml.dump(requirements, output)
    output.close()

    language_requirements = get_languages(all_bears)
    bear_languages = language_requirements
    language_requirements = _create_sorted_commented_map(language_requirements)
Ejemplo n.º 48
0
class Config(object):
    def __init__(self, configFile: str):
        self.configFile = configFile
        self._configData = {}
        self.yaml = YAML()
        self._inBaseConfig = []

    def loadConfig(self) -> None:
        configData = self._readConfig(self.configFile)
        self._validate(configData)
        self._configData = configData

    def _readConfig(self, fileName: str) -> Dict:
        try:
            with open(fileName, mode='r') as config:
                configData = self.yaml.load(config)
                if not configData:
                    configData = {}
                # if this is the base server config, store what keys we loaded
                if fileName == self.configFile:
                    self._inBaseConfig = list(configData.keys())
        except Exception as e:
            raise ConfigError(fileName, e)

        if 'import' not in configData:
            return configData

        for fname in configData['import']:
            includeConfig = self._readConfig('{}/{}.yaml'
                                             .format(os.path.dirname(os.path.abspath(fileName)),
                                                     fname))
            for key, val in includeConfig.items():
                # not present in base config, just assign it
                if key not in configData:
                    configData[key] = val
                    continue
                # skip non-collection types that are already set
                if isinstance(configData[key], (str, int)):
                    continue
                if isinstance(val, str):
                    raise ConfigError(fname, 'The included config file tried '
                                             'to merge a non-string with a '
                                             'string')
                try:
                    iter(configData[key])
                    iter(val)
                except TypeError:
                    # not a collection, so just don't merge them
                    pass
                else:
                    try:
                        # merge with + operator
                        configData[key] += val
                    except TypeError:
                        # dicts can't merge with +
                        try:
                            for subKey, subVal in val.items():
                                if subKey not in configData[key]:
                                    configData[key][subKey] = subVal
                        except (AttributeError, TypeError):
                            # if either of these, they weren't both dicts.
                            raise ConfigError(fname, 'The variable {!r} could '
                                                     'not be successfully '
                                                     'merged'.format(key))

        return configData

    def writeConfig(self) -> None:
        # filter the configData to only those keys
        # that were present in the base server config,
        # or have been modified at runtime
        configData = copy.deepcopy(self._configData)
        to_delete = set(configData.keys()).difference(self._inBaseConfig)
        for key in to_delete:
            del configData[key]

        # write the filtered configData
        try:
            with open(self.configFile, mode='w') as config:
                self.yaml.dump(configData, config)
        except Exception as e:
            raise ConfigError(self.configFile, e)

    def getWithDefault(self, key: str, default=None) -> Any:
        if key in self._configData:
            return self._configData[key]
        return default

    def _validate(self, configData) -> None:
        for key in _required:
            if key not in configData:
                raise ConfigError(self.configFile,
                                  'Required item {!r} was not found in the config.'.format(key))

    def __len__(self):
        return len(self._configData)

    def __iter__(self):
        return iter(self._configData)

    def __getitem__(self, key):
        return self._configData[key]

    def __setitem__(self, key, value):
        # mark this key to be saved in the server config
        if key not in self._inBaseConfig:
            self._inBaseConfig.append(key)

        self._configData[key] = value

    def __contains__(self, key):
        return key in self._configData
Ejemplo n.º 49
0
            print('bear-requirements.yaml not found. '
                  'Run without flags to generate it.')
            exit(1)

        input_requirements = yaml.load(input_file)

        new_requirements = copy.deepcopy(input_requirements)
        deep_update(new_requirements, requirements)

        if args.update:
            requirements = new_requirements

        if args.check:
            changed = deep_diff(input_requirements, new_requirements)
            if changed:
                yaml.dump(changed, sys.stdout)
                exit(1)

    if args.output == '-':
        output = sys.stdout
    else:
        output = open(args.output, 'w')

    language_requirements = get_languages(all_bears)
    language_requirements = _create_sorted_commented_map(language_requirements)
    file_path = os.path.join(PROJECT_DIR, BEAR_LANGUAGES_YAML)
    with open(file_path, 'w') as outfile:
        yaml.indent(mapping=2, sequence=4, offset=2)
        yaml.dump(language_requirements, outfile)

    sort_requirements(requirements)
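
For reference, a small sketch of what these indent() settings do to block sequences (data illustrative):

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.dump({'languages': ['python', 'rust']}, sys.stdout)
# languages:
#   - python
#   - rust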