Example #1
    def from_yaml(cls,
                  yml: str,
                  defaults: Optional[bool] = False,
                  path: Optional[str] = None,
                  keys: Optional[str] = None) -> 'Parser':
        """Load parser from YAML

        Arguments:
           yml: File name or YAML string
           defaults: If `True`, load from ``ctwrap.defaults`` database
           path: Relative/absolute path
           keys: List of keys
        """
        fname = Path(yml)
        if defaults:
            # load from 'ctwrap/defaults' database
            fname = Path(__file__).parents[0] / 'defaults' / fname
        elif path is not None:
            fname = Path(path) / fname

        try:
            _ = fname.is_file()  # will raise error
            with open(fname) as stream:
                out = yaml.load(stream, Loader=yaml.SafeLoader)
        except OSError:
            out = yaml.load(yml, Loader=yaml.SafeLoader)

        if keys is None:
            return cls(out)

        return cls({k: out[k] for k in keys})
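A hypothetical usage sketch of the constructor above; the file name, path, and key are made up for illustration:

# Hypothetical usage of Parser.from_yaml as shown above.
full = Parser.from_yaml('config.yaml', path='examples')
subset = Parser.from_yaml('config.yaml', path='examples',
                          keys=['defaults'])  # keep selected top-level keys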
Example #2
def yaml_load(filehandle):
    yaml = get_yaml()
    try:
        return yaml.load(filehandle,
                         Loader=yaml.RoundTripLoader,
                         version="1.2")
    except AttributeError:
        return yaml.load(filehandle)
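The try/except above falls back when the object returned by get_yaml() has no RoundTripLoader attribute. As an alternative sketch (not necessarily what get_yaml() returns), ruamel.yaml's newer object-oriented API expresses the same round-trip load like this:

from ruamel.yaml import YAML

def yaml_load_rt(filehandle):
    # round-trip mode preserves comments and key order
    rt_yaml = YAML(typ='rt')
    rt_yaml.version = (1, 2)
    return rt_yaml.load(filehandle)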
Example #3
def _remote_or_local(fn, remote=False):
    """
    Downloads a temp file directly from the specified github branch or
    the current one on disk.
    """
    if remote:
        url = ('https://raw.githubusercontent.com/bioconda/bioconda-recipes/'
               '{branch}/{path}'.format(branch='master', path=fn))
        print('Using config file {}'.format(url))
        with conda.fetch.TmpDownload(url) as f:
            cfg = yaml.load(open(f))
    else:
        cfg = yaml.load(open(os.path.join(HERE, fn)))
    return cfg
Example #4
def test_pypi_section_order_preserved(testing_workdir):
    """
    Test whether sections have been written in the correct order.
    """
    from conda_build.render import FIELDS
    from conda_build.skeletons.pypi import (ABOUT_ORDER,
                                            REQUIREMENTS_ORDER,
                                            PYPI_META_STATIC)

    api.skeletonize(packages='sympy', repo='pypi')
    # Since we want to check the order of items in the recipe (not whether
    # the metadata values themselves are sensible), read the file as (ordered)
    # yaml, and check the order.
    with open('sympy/meta.yaml', 'r') as file:
        lines = [l for l in file.readlines() if not l.startswith("{%")]

    # The loader below preserves the order of entries...
    recipe = ruamel_yaml.load('\n'.join(lines),
                              Loader=ruamel_yaml.RoundTripLoader)

    major_sections = list(recipe.keys())
    # Blank fields are omitted when skeletonizing, so prune any missing ones
    # before comparing.
    pruned_fields = [f for f in FIELDS if f in major_sections]
    assert major_sections == pruned_fields
    assert list(recipe['about']) == ABOUT_ORDER
    assert list(recipe['requirements']) == REQUIREMENTS_ORDER
    for k, v in PYPI_META_STATIC.items():
        assert list(v.keys()) == list(recipe[k])
Example #5
def prepare_input(argv=None):
    """
    Get, parse and prepare input file.
    """
    p = ArgumentParser(description='insiliChem.bio OpenMM launcher: '
                       'easy to deploy MD protocols for OpenMM')
    p.add_argument('input', metavar='INPUT FILE', type=str,
                   help='YAML input file')
    p.add_argument('--version', action='version', version='%(prog)s v{}'.format(__version__))
    args = p.parse_args(argv if argv else sys.argv[1:])

    # Load config file
    with open(args.input) as f:
        cfg = yaml.load(f, Loader=YamlLoader)
    # Paths and dirs
    cfg['_path'] = os.path.abspath(args.input)
    cfg['system_options'] = prepare_system_options(cfg)
    cfg['outputpath'] = sanitize_path_for_file(cfg.get('outputpath', '.'), args.input)
    try: 
        os.makedirs(cfg['outputpath'])
    except OSError:
        pass

    handler = prepare_handler(cfg)

    return handler, cfg
Example #6
    def to_json(self):
        """Get JSON for an anaconda-project.yml env spec section."""
        # Note that we use _conda_packages (only the packages we
        # introduce ourselves) rather than conda_packages
        # (includes inherited packages).
        packages = list(self._conda_packages)
        pip_packages = list(self._pip_packages)
        if pip_packages:
            packages.append(dict(pip=pip_packages))
        channels = list(self._channels)

        # this is a gross, roundabout hack to get ryaml dicts that
        # have ordering... OrderedDict doesn't work because the
        # yaml saver saves them as some "!!omap" nonsense. Other
        # than ordering, the formatting isn't even preserved here.
        template_json = ryaml.load("something:\n    description: null\n" +
                                   "    packages: []\n" + "    channels: []\n",
                                   Loader=ryaml.RoundTripLoader)

        if self._description is not None:
            template_json['something']['description'] = self._description
        else:
            del template_json['something']['description']
        template_json['something']['packages'] = packages
        template_json['something']['channels'] = channels

        if len(self.inherit_from_names) > 0:
            if len(self.inherit_from_names) == 1:
                names = self.inherit_from_names[0]
            else:
                names = list(self.inherit_from_names)
            template_json['something']['inherit_from'] = names

        return template_json['something']
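The string-template hack above works, but ruamel.yaml's CommentedMap is an ordered mapping that the round-trip dumper writes as a plain map (no "!!omap"); a minimal alternative sketch, with keys mirroring the template string:

from ruamel.yaml.comments import CommentedMap

spec = CommentedMap()          # preserves insertion order when dumped
spec['description'] = None
spec['packages'] = []
spec['channels'] = []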
Example #7
    def test_ignition(self):

        with open(EXAMPLES / 'ignition.yaml') as stream:
            yml = yaml.load(stream, Loader=yaml.SafeLoader)
        initial = cw.Parser(yml['defaults']['initial'])
        self.assertIsInstance(initial.T, pq.Quantity)
        self.assertIsInstance(initial['fuel'], str)
Example #8
def getYamlinfo(inputd):
	data = {}
	for root, dirs, files in os.walk(inputd):

		for file in files:
			if file.endswith(".yaml"):
				inputfile = os.path.join(root, file)			   
				with open(inputfile) as f:
					params = yaml.load(f, Loader=yaml.Loader)
					foldername = root.split("\1m")[-1][1:]
					try:
						testID = int(foldername.split('_')[1])
					except IndexError:
						testID = foldername[-7:]
					data[testID]={
						'Indice':params['global']['data_path'].split('/')[-1],
						'Number of Classes':params['global']['num_classes'],
						'Number of GPUs': params['global']['num_gpus'],
						'Number of Bands': params['global']['number_of_bands'],
						'Overlap': params['sample']['overlap'],
						'Batch Size' :params['training']['batch_size'],
						'Class Weights' :params['training']['class_weights'], 
						'Gamma' :params['training']['gamma'], 
						'Learning Rate' :params['training']['learning_rate'],
						'Number of Epochs' :params['training']['num_epochs'],
						'Step Size' :params['training']['step_size'],
						'Weight Decay':params['training']['weight_decay'],
						'Dropout':params['training']['dropout'],
						'Dropout Probability':params['training']['dropout_prob']
						}
	return (data)
Example #9
def test_pypi_section_order_preserved(testing_workdir):
    """
    Test whether sections have been written in the correct order.
    """
    from conda_build.render import FIELDS
    from conda_build.skeletons.pypi import (ABOUT_ORDER,
                                            REQUIREMENTS_ORDER,
                                            PYPI_META_STATIC)

    api.skeletonize(packages='sympy', repo='pypi')
    # Since we want to check the order of items in the recipe (not whether
    # the metadata values themselves are sensible), read the file as (ordered)
    # yaml, and check the order.
    with open('sympy/meta.yaml', 'r') as file:
        lines = [l for l in file.readlines() if not l.startswith("{%")]

    # The loader below preserves the order of entries...
    recipe = ruamel_yaml.load('\n'.join(lines), Loader=ruamel_yaml.RoundTripLoader)

    major_sections = list(recipe.keys())
    # Blank fields are omitted when skeletonizing, so prune any missing ones
    # before comparing.
    pruned_fields = [f for f in FIELDS if f in major_sections]
    assert major_sections == pruned_fields
    assert list(recipe['about']) == ABOUT_ORDER
    assert list(recipe['requirements']) == REQUIREMENTS_ORDER
    for k, v in PYPI_META_STATIC.items():
        assert list(v.keys()) == list(recipe[k])
Example #10
    def to_json(self):
        """Get JSON for a kapsel.yml env spec section."""
        # Note that we use _conda_packages (only the packages we
        # introduce ourselves) rather than conda_packages
        # (includes inherited packages).
        packages = list(self._conda_packages)
        pip_packages = list(self._pip_packages)
        if pip_packages:
            packages.append(dict(pip=pip_packages))
        channels = list(self._channels)

        # this is a gross, roundabout hack to get ryaml dicts that
        # have ordering... OrderedDict doesn't work because the
        # yaml saver saves them as some "!!omap" nonsense. Other
        # than ordering, the formatting isn't even preserved here.
        template_json = ryaml.load("something:\n    packages: []\n" + "    channels: []\n",
                                   Loader=ryaml.RoundTripLoader)

        template_json['something']['packages'] = packages
        template_json['something']['channels'] = channels

        if len(self.inherit_from_names) > 0:
            if len(self.inherit_from_names) == 1:
                names = self.inherit_from_names[0]
            else:
                names = list(self.inherit_from_names)
            template_json['something']['inherit_from'] = names

        return template_json['something']
Example #11
    def test_create_advanced_pip(self):
        with make_temp_envs_dir() as envs_dir:
            with env_vars(
                {
                    'CONDA_ENVS_DIRS': envs_dir,
                    'CONDA_PIP_INTEROP_ENABLED': 'true',
                }, reset_context):
                env_name = str(uuid4())[:8]
                prefix = join(envs_dir, env_name)
                python_path = join(prefix, PYTHON_BINARY)
                run_command(Commands.CREATE, env_name,
                            support_file('pip_argh.yml'))
                out_file = join(envs_dir, 'test_env.yaml')

            # make sure that the export reconsiders the presence of pip interop being enabled
            PrefixData._cache_.clear()

            with env_vars({
                    'CONDA_ENVS_DIRS': envs_dir,
            }, reset_context):
                # note: out of scope of pip interop var.  Should be enabling conda pip interop itself.
                run_command(Commands.EXPORT, env_name, out_file)
                with open(out_file) as f:
                    d = ruamel_yaml.load(f)
                assert {'pip': ['argh==0.26.2']} in d['dependencies']
Example #12
def _remote_or_local(fn, branch='master', remote=False):
    """
    Downloads a temp file directly from the specified github branch or
    the current one on disk.
    """
    if remote:
        url = (
            'https://raw.githubusercontent.com/bioconda/bioconda-recipes/'
            '{branch}/{path}'.format(branch=branch, path=fn)
        )
        print('Using config file {}'.format(url))
        with conda.fetch.TmpDownload(url) as f:
            cfg = yaml.load(open(f))
    else:
        cfg = yaml.load(open(os.path.join(HERE, fn)))
    return cfg
Example #13
    def test_create_advanced_pip(self):
        with make_temp_envs_dir() as envs_dir:
            with env_vars({
                'CONDA_ENVS_DIRS': envs_dir,
                'CONDA_PIP_INTEROP_ENABLED': 'true',
            }, reset_context):
                env_name = str(uuid4())[:8]
                prefix = join(envs_dir, env_name)
                python_path = join(prefix, PYTHON_BINARY)

                run_command(Commands.CREATE, env_name,
                            support_file('pip_argh.yml'))
                out_file = join(envs_dir, 'test_env.yaml')

            # make sure that the export reconsiders the presence of pip interop being enabled
            PrefixData._cache_.clear()

            with env_vars({
                'CONDA_ENVS_DIRS': envs_dir,
            }, reset_context):
                # note: out of scope of pip interop var.  Should be enabling conda pip interop itself.
                run_command(Commands.EXPORT, env_name, out_file)
                with open(out_file) as f:
                    d = ruamel_yaml.load(f)
                assert {'pip': ['argh==0.26.2']} in d['dependencies']
Example #14
def main():
    logging.basicConfig(level=logging.INFO)

    # args = parse_args()

    image_file = "images/parking_lot_1.png"  #args.image_file
    data_file = "data/coordinates_1.yml"  #args.data_file
    start_frame = 0  #args.start_frame

    if image_file is not None:
        with open(data_file, "w+") as points:
            generator = CoordinatesGenerator(image_file, points, COLOR_RED)
            generator.generate()
        storeCor = open("data\\pastCordinate.pickle", 'wb')

        # source, destination
        pickle.dump(generator.saveCordinate, storeCor)
        storeCor.close()
        print(generator.saveCordinate)
        logging.info(generator.saveCordinate)

    with open(data_file, "r") as data:
        points = yaml.load(data)
        # points = load(data, Loader=yaml.Loader)
        # detector = MotionDetector(args.video_file, points, int(start_frame))
        detector = MotionDetector("videos/parking_lot_1.mp4", points,
                                  int(start_frame))
        #pass the spot no which you want to spot
        # in image the spotno are 1 index
        spotNo = 0
        detector.detect_motion(spotNo)
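Since the coordinates file written above holds only plain data, the Loader-less yaml.load(data) call (which PyYAML warns about since 5.1) can be replaced with an explicit safe load; a sketch, assuming the yaml module here is PyYAML:

# Safe-loading variant of the call above.
with open(data_file, "r") as data:
    points = yaml.safe_load(data)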
Example #15
def yaml_load_safe(string):
    """
    Examples:
        >>> yaml_load_safe("key: value")
        {'key': 'value'}

    """
    return yaml.load(string, Loader=yaml.SafeLoader, version="1.2")
Example #16
def yaml_load_standard(string):
    """Uses the default (unsafe) loader.

    Examples:
        >>> yaml_load_standard("prefix: !!python/unicode '/Users/darwin/test'")
        {'prefix': '/Users/darwin/test'}
    """
    return yaml.load(string, Loader=yaml.Loader, version="1.2")
Example #17
    def load_config(self, config_file=os.path.join(BASE_FOLDER, CONFIG_FILE)):
        """Loads the config file to memory"""
        config_path = os.path.abspath(os.path.expanduser(config_file))
        try:
            self._config = yaml.load(read_file(config_path))
        except:
            pass
        return self._config
Example #18
def yaml_load_safe(string):
    """
    Examples:
        >>> yaml_load_safe("key: value")
        {'key': 'value'}

    """
    return yaml.load(string, Loader=yaml.SafeLoader, version="1.2")
Example #19
def yaml_load_standard(string):
    """Uses the default (unsafe) loader.

    Examples:
        >>> yaml_load_standard("prefix: !!python/unicode '/Users/darwin/test'")
        {'prefix': '/Users/darwin/test'}
    """
    return yaml.load(string, Loader=yaml.Loader, version="1.2")
Example #20
def get_meta(recipe, config):
    """
    Given a package name, find the current meta.yaml file, parse it, and return
    the dict.
    """
    cfg = utils.load_config(config)
    env_matrix = cfg['env_matrix']

    # TODO: Currently just uses the first env. Should turn this into
    # a generator.

    env = dict(next(iter(utils.EnvMatrix(yaml.load(open(env_matrix))))))
    pth = os.path.join(recipe, 'meta.yaml')
    jinja_env = jinja2.Environment()
    content = jinja_env.from_string(open(pth, 'r',
                                         encoding='utf-8').read()).render(env)
    meta = yaml.load(content, yaml.RoundTripLoader)
    return meta
Example #21
    def from_yaml(cls,
                  yaml_file: str,
                  strategy: Optional[str] = None,
                  output_name: Optional[str] = None,
                  database: Optional[str] = None,
                  **kwargs: Optional[Dict]):
        """
        Alternate constructor using YAML file as input.

        The :meth:`from_yaml` method is intended as the main route for the creation of
        :class:`SimulationHandler` objects.

        Arguments:
           yaml_file: YAML file located in current folder, *database* (argument),
               or ctwrap's preconfigured YAML database (``yaml`` folder)
           strategy: Batch job strategy name (only needed if more than one are defined)
           output_name: Output name (overrides YAML configuration)
           database: File database (both YAML configuration and output)
           **kwargs: Dependent on implementation
        """

        if 'path' in kwargs:
            database = kwargs.pop('path')
            warnings.warn("Parameter 'path' is superseded by 'database'",
                          DeprecationWarning)

        # load configuration from yaml
        if database is not None:
            full_name = Path(database) / yaml_file

        elif not Path(yaml_file).is_file():
            # attempt to load standard configuration
            full_name = Path(__file__).parents[0] / 'yaml' / yaml_file

        else:
            full_name = Path(yaml_file)

        if not full_name.is_file():
            raise IOError("Unable to locate YAML configuration file '{}'"
                          "".format(yaml_file))

        with open(full_name) as stream:
            content = yaml.load(stream, Loader=yaml.SafeLoader)

        output = content.get('output', {})

        # naming priorities: keyword / yaml / automatic
        if output_name is None:
            output_name = '{}'.format(
                Path(yaml_file).parents[0] / full_name.stem)
            output_name = output.get('name', output_name)

        return cls.from_dict(content,
                             strategy=strategy,
                             output_name=output_name,
                             output_path=database,
                             **kwargs)
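A hypothetical call to the constructor above, following the lookup order described in the docstring (file and folder names are illustrative only):

# Hypothetical usage; file and folder names are placeholders.
handler = SimulationHandler.from_yaml('ignition.yaml',
                                      database='examples',
                                      output_name='ignition_run')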
Example #22
    def __init__(self, **kwargs):
        sn = kwargs.get("serial", "X104724")
        if sn == "X104724":
            fpath = "{}/{}.cof".format(
                os.path.dirname(os.path.abspath(__file__)), sn)
            with open(fpath, "r") as f:
                s = f.read()
            _config = yaml.load(s)
            self.fitdata = self._get_fitdata(_config)
Example #23
def _save_file(yaml, filename):
    contents = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)

    try:
        # This is to ensure we don't corrupt the file, even if ruamel.yaml is broken
        ryaml.load(contents, Loader=ryaml.RoundTripLoader)
    except YAMLError as e:  # pragma: no cover (should not happen)
        print("ruamel.yaml bug; it failed to parse a file that it generated.", file=sys.stderr)
        print("  the parse error was: " + str(e), file=sys.stderr)
        print("Generated file was:", file=sys.stderr)
        print(contents, file=sys.stderr)
        raise RuntimeError("Bug in ruamel.yaml library; failed to parse a file that it generated: " + str(e))

    if not os.path.isfile(filename):
        # might have to make the directory
        dirname = os.path.dirname(filename)
        makedirs_ok_if_exists(dirname)
    _atomic_replace(filename, contents)
Example #24
def _save_file(yaml, filename):
    contents = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)

    try:
        # This is to ensure we don't corrupt the file, even if ruamel.yaml is broken
        ryaml.load(contents, Loader=ryaml.RoundTripLoader)
    except YAMLError as e:  # pragma: no cover (should not happen)
        print("ruamel.yaml bug; it failed to parse a file that it generated.", file=sys.stderr)
        print("  the parse error was: " + str(e), file=sys.stderr)
        print("Generated file was:", file=sys.stderr)
        print(contents, file=sys.stderr)
        raise RuntimeError("Bug in ruamel.yaml library; failed to parse a file that it generated: " + str(e))

    if not os.path.isfile(filename):
        # might have to make the directory
        dirname = os.path.dirname(filename)
        makedirs_ok_if_exists(dirname)
    _atomic_replace(filename, contents)
Example #25
def configs_loader():
    settings_path = os.path.realpath(os.path.dirname(settings.__file__))
    configs_file = os.path.join(settings_path, 'configs.yml')
    try:
        with open(configs_file, 'r') as f:
            configs = yaml.load(f)
            return configs
    except IOError:
        logger.error('No configs.yml found')
Example #26
def _load_string(contents):
    if contents.strip() == '':
        # ryaml.load below returns None for an empty file, we want
        # to return an empty dict instead.
        return {}
    else:
        # using RoundTripLoader incorporates safe_load
        # (we don't load code)
        assert issubclass(ryaml.RoundTripLoader, ryaml.constructor.SafeConstructor)
        return ryaml.load(contents, Loader=ryaml.RoundTripLoader)
Example #27
def import_settings_file(filename='config.yaml'):

    with open(filename, 'r') as stream:
        try:
            settings_dict = ruamel_yaml.load(stream, Loader=ruamel_yaml.Loader)
            stream.close()
            return settings_dict
        except ruamel_yaml.YAMLError as exc:
            print(exc)
            return 1
Example #28
def load_gt(path):
    with open(path, 'r') as f:
        gts = yaml.load(f, Loader=yaml.CLoader)
        for im_id, gts_im in gts.items():
            for gt in gts_im:
                if 'cam_R_m2c' in gt.keys():
                    gt['cam_R_m2c'] = np.array(gt['cam_R_m2c']).reshape((3, 3))
                if 'cam_t_m2c' in gt.keys():
                    gt['cam_t_m2c'] = np.array(gt['cam_t_m2c']).reshape((3, 1))
    return gts
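yaml.CLoader is only available when PyYAML is compiled against libyaml; a guarded variant of the loader choice, as a sketch:

# Fall back to the pure-Python loader when the C extension is unavailable.
FastLoader = getattr(yaml, 'CLoader', yaml.Loader)

def load_gt_guarded(path):
    with open(path, 'r') as f:
        return yaml.load(f, Loader=FastLoader)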
Example #29
    def setUpClass(cls):
        with open(EXAMPLES / cls._yaml) as stream:
            cls._config = yaml.load(stream, Loader=yaml.SafeLoader)

        out = cls._config.get('output')
        if out:
            cls._output = cwo.Output.from_dict(
                out, file_name=cls._out, file_path=PWD
            )
        else:
            cls._output = None
Example #30
    def construct_include(self, node):
        """Include file referenced at node."""

        filename = os.path.join(self._root, self.construct_scalar(node))
        filename = os.path.abspath(filename)
        extension = os.path.splitext(filename)[1].lstrip('.')

        with open(filename, 'r') as f:
            if extension in ('yaml', 'yml'):
                return yaml.load(f, Loader=self)
            else:
                return ''.join(f.readlines())
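For context, a constructor like construct_include is typically defined on a custom loader class and registered for an !include tag; a self-contained sketch under that assumption (the IncludeLoader name and tag are hypothetical here):

import os
import yaml

class IncludeLoader(yaml.SafeLoader):
    """Hypothetical loader carrying a construct_include method like the one above."""

    def __init__(self, stream):
        self._root = os.path.dirname(getattr(stream, 'name', '.'))
        super().__init__(stream)

    def construct_include(self, node):
        # same logic as the construct_include example above
        filename = os.path.abspath(
            os.path.join(self._root, self.construct_scalar(node)))
        with open(filename, 'r') as f:
            if os.path.splitext(filename)[1].lstrip('.') in ('yaml', 'yml'):
                return yaml.load(f, Loader=IncludeLoader)
            return f.read()

yaml.add_constructor('!include', IncludeLoader.construct_include, Loader=IncludeLoader)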
Example #31
def get_default_params():
    """return default parameters for tools/metrics.

    Could be refactored to read defaults from a user specified file.
    The current implementation takes the one located within the
    repository.
    """

    with IOTools.open_file(
            os.path.join(os.path.dirname(__file__), "defaults.yml")) as inf:
        result = yaml.load(inf, Loader=RoundTripLoader)
    return result
Example #32
    def construct_include(self, node):
        """Include file referenced at node."""

        filename = os.path.join(self._root, self.construct_scalar(node))
        filename = os.path.abspath(filename)
        extension = os.path.splitext(filename)[1].lstrip('.')

        with open(filename, 'r') as f:
            if extension in ('yaml', 'yml'):
                return yaml.load(f, Loader=self)
            else:
                return ''.join(f.readlines())
Example #33
    def test_minimal(self):

        with open(EXAMPLES / 'minimal.yaml') as stream:
            defaults = yaml.load(stream, Loader=yaml.SafeLoader)
        p = cw.Parser.from_yaml('minimal.yaml', path=EXAMPLES)
        self.assertEqual(len(p), len(defaults))
        self.assertEqual(p.keys(), defaults.keys())
        self.assertIn('defaults', p)
        dd1 = {**p}
        self.assertIsInstance(dd1['defaults'], cw.Parser)
        dd2 = {key: val for key, val in p.items()}
        self.assertEqual(dd1.keys(), dd2.keys())
        self.assertEqual(dd1['defaults'], dd2['defaults'])
Example #34
    def from_txt(cls, file, use_dtypes=True):
        """
        Read the contents of a text file to create a DataPlusMeta object.

        file : str
            Name or path of a text file
        """

        # read the entire file so that it can be split easily
        with open(file, encoding=ENCODING) as f:
            buffer = f.read()

        sections = buffer.split('\n' + SECTION_SEPARATOR)

        # having 3 sections is a basic requirement
        if len(sections) != 3:
            raise RuntimeError('%s does not have three sections.' % file)

        # parse meta
        meta = yaml.load(sections[0], Loader=LOADER)

        # parse column definitions
        cdef = pd.read_csv(StringIO(sections[1]), index_col=0,
                           dtype=dict(dtype=str),
                           **READ_CSV_OPTIONS)

        if use_dtypes and ('dtype' in cdef.columns):
            dtypes = cdef['dtype'].dropna()

            # identify timestamps
            timestamps = dtypes.str.startswith('datetime')

            # pass others to read_csv
            dtype_dict = dtypes[~timestamps].to_dict()

            data = pd.read_csv(StringIO(sections[2]), index_col=None,
                               dtype=dtype_dict, **READ_CSV_OPTIONS)

            # post-parse date columns with standard format
            for col in dtypes[timestamps].index:
                data[col] = pd.to_datetime(data[col], format=DATE_FORMAT)

        else:
            # tolerate missing dtype in cdef for files created by other means
            data = pd.read_csv(StringIO(sections[2]), index_col=None,
                               **READ_CSV_OPTIONS)

        # set index column for normal use
        data = data.set_index(data.columns[0])

        return cls(data, cdef, meta, source=file)
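A rough illustration of the three-section text layout this parser expects; the actual SECTION_SEPARATOR, ENCODING, and READ_CSV_OPTIONS are defined elsewhere in the module, so the separator and column names below are placeholders only:

# Placeholder sketch of a DataPlusMeta text file: YAML metadata, then column
# definitions, then the data table, split on SECTION_SEPARATOR.
example_buffer = (
    "title: example data set\n"       # section 1: YAML metadata
    "\n---\n"                         # hypothetical separator
    "name,dtype\n"                    # section 2: column definitions
    "timestamp,datetime64[ns]\n"
    "value,float\n"
    "\n---\n"
    "timestamp,value\n"               # section 3: data; first column becomes the index
    "2020-01-01 00:00:00,1.0\n"
)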
Example #35
def load_info(path):
    with open(path, 'r') as f:
        info = yaml.load(f, Loader=yaml.CLoader)
        for eid in info.keys():
            if 'cam_K' in info[eid].keys():
                info[eid]['cam_K'] = np.array(info[eid]['cam_K']).reshape(
                    (3, 3))
            if 'cam_R_w2c' in info[eid].keys():
                info[eid]['cam_R_w2c'] = np.array(
                    info[eid]['cam_R_w2c']).reshape((3, 3))
            if 'cam_t_w2c' in info[eid].keys():
                info[eid]['cam_t_w2c'] = np.array(
                    info[eid]['cam_t_w2c']).reshape((3, 1))
    return info
Example #36
    def save(self):
        """Write the file to disk, only if any changes have been made.

        Raises ``IOError`` if it fails for some reason.

        Returns:
            None
        """
        self._throw_if_corrupted()

        if not self._dirty:
            return

        contents = ryaml.dump(self._yaml, Dumper=ryaml.RoundTripDumper)

        try:
            # This is to ensure we don't corrupt the file, even if ruamel.yaml is broken
            ryaml.load(contents, Loader=ryaml.RoundTripLoader)
        except YAMLError as e:  # pragma: no cover (should not happen)
            print(
                "ruamel.yaml bug; it failed to parse a file that it generated.",
                file=sys.stderr)
            print("  the parse error was: " + str(e), file=sys.stderr)
            print("Generated file was:", file=sys.stderr)
            print(contents, file=sys.stderr)
            raise RuntimeError(
                "Bug in ruamel.yaml library; failed to parse a file that it generated: "
                + str(e))

        if not os.path.isfile(self.filename):
            # might have to make the directory
            dirname = os.path.dirname(self.filename)
            makedirs_ok_if_exists(dirname)
        _atomic_replace(self.filename, contents)
        self._change_count = self._change_count + 1
        self._dirty = False
Example #37
def load_md(src):
    """Load metadata in YAML from a source.

    Arguments:
        src: dict-like (data), string (filename), or file-like
    """
    if isinstance(src, collections.abc.Mapping):  # dict or parsed YAML mapping is given
        rawconfig = src
    elif isinstance(src, str):  # filename is given
        with open(src, "r") as f:
            rawconfig = yaml.load(f)
    elif (isinstance(src, TextIOBase)):  # file-like is given
        #rawconfig = yaml.load(src)                    # deprecated/unsafe
        rawconfig = yaml.safe_load(src)

    return parse_md(rawconfig)
Example #38
    def load_config(self, **kwargs):
        """Load config from a source specified with a keyword parameter.

        Keyword arguments:
            parameters -- config dictionary.
            file -- config (YAML) file path.
        """
        # Sources of config parameters: argument or file
        if "parameters" in kwargs:
            rawconfig = kwargs["parameters"]
        elif "file" in kwargs:
            self.config_file = kwargs["file"]
            with open(self.config_file, "r") as f:
                rawconfig = yaml.load(f)

        self.content = parse_config(rawconfig)
        return self.content
Example #39
    def save_environment_yml(self, filename):
        """Save as an environment.yml file."""
        # here we want to flatten the env spec to include all inherited stuff
        packages = list(self.conda_packages)
        pip_packages = list(self.pip_packages)
        if pip_packages:
            packages.append(dict(pip=pip_packages))
        channels = list(self.channels)

        yaml = ryaml.load("name: " "\ndependencies: []\nchannels: []\n", Loader=ryaml.RoundTripLoader)

        assert self.name is not None  # the global anonymous spec can't be saved
        yaml['name'] = self.name
        yaml['dependencies'] = packages
        yaml['channels'] = channels

        _save_file(yaml, filename)
Example #40
    def save_environment_yml(self, filename):
        """Save as an environment.yml file."""
        # here we want to flatten the env spec to include all inherited stuff
        packages = list(self.conda_packages)
        pip_packages = list(self.pip_packages)
        if pip_packages:
            packages.append(dict(pip=pip_packages))
        channels = list(self.channels)

        yaml = ryaml.load("name: " "\ndependencies: []\nchannels: []\n", Loader=ryaml.RoundTripLoader)

        assert self.name is not None  # the global anonymous spec can't be saved
        yaml['name'] = self.name
        yaml['dependencies'] = packages
        yaml['channels'] = channels

        _save_file(yaml, filename)
Example #41
    def _default_content(self):
        header = (
            "This is an Anaconda project file.\n" + "\n" + "Here you can describe your project and how to run it.\n" +
            "Use `conda-kapsel run` to run the project.\n" +
            "The file is in YAML format, please see http://www.yaml.org/start.html for more.\n")
        sections = OrderedDict()

        sections['name'] = ("Set the 'name' key to name your project\n")

        sections['icon'] = ("Set the 'icon' key to give your project an icon\n")

        sections['commands'] = ("In the commands section, list your runnable scripts, notebooks, and other code.\n" +
                                "Use `conda-kapsel add-command` to add commands.\n")

        sections['variables'] = ("In the variables section, list any environment variables your code depends on.\n"
                                 "Use `conda-kapsel add-variable` to add variables.\n")

        sections['services'] = (
            "In the services section, list any services that should be\n" + "available before your code runs.\n" +
            "Use `conda-kapsel add-service` to add services.\n")

        sections['downloads'] = ("In the downloads section, list any URLs to download to local files\n" +
                                 "before your code runs.\n" + "Use `conda-kapsel add-download` to add downloads.\n")

        sections['packages'] = ("In the packages section, list any packages that must be installed\n" +
                                "before your code runs.\n" + "Use `conda-kapsel add-packages` to add packages.\n")

        sections['channels'] = (
            "In the channels section, list any Conda channel URLs to be searched\n" + "for packages.\n" + "\n" +
            "For example,\n" + "\n" + "channels:\n" + "   - https://conda.anaconda.org/asmeurer\n")

        sections['env_specs'] = (
            "You can define multiple, named environment specs.\n" + "Each inherits any global packages or channels,\n" +
            "but can have its own unique ones also.\n" + "Use `conda-kapsel add-env-spec` to add environment specs.\n")

        assert self._default_env_specs_func is not None
        default_env_specs = self._default_env_specs_func()
        assert default_env_specs is not None

        # we make a big string and then parse it because I can't figure out the
        # ruamel.yaml API to insert comments in front of map keys.
        def comment_out(comment):
            return ("# " + "\n# ".join(comment.split("\n")) + "\n").replace("# \n", "#\n")

        to_parse = comment_out(header)
        for section_name, comment in sections.items():
            # future: this is if/else is silly, we should be
            # assigning these bodies up above when we assign the
            # comments.
            if section_name in ('name', 'icon'):
                section_body = ""
            elif section_name in ('channels', 'packages'):
                section_body = "  []"
            else:
                section_body = "  {}"
            to_parse = to_parse + "\n#\n" + comment_out(comment) + section_name + ":\n" + section_body + "\n\n\n"

        as_json = ryaml.load(to_parse, Loader=ryaml.RoundTripLoader)

        for env_spec in default_env_specs:
            as_json['env_specs'][env_spec.name] = env_spec.to_json()

        return as_json
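For reference, ruamel.yaml does provide an API for attaching a comment ahead of a mapping key, which would avoid the build-a-string-then-parse approach in the comment above; a minimal sketch (assuming ryaml is ruamel.yaml or a compatible vendored copy):

from ruamel.yaml.comments import CommentedMap

doc = CommentedMap()
doc['name'] = None
# attach a comment line directly before the 'name' key
doc.yaml_set_comment_before_after_key(
    'name', before="Set the 'name' key to name your project", indent=0)
contents = ryaml.dump(doc, Dumper=ryaml.RoundTripDumper)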
Example #42
def yaml_load(filehandle):
    yaml = get_yaml()
    try:
        return yaml.load(filehandle, Loader=yaml.RoundTripLoader, version="1.2")
    except AttributeError:
        return yaml.load(filehandle)
Example #43
"""
import boto3
from awacs.aws import Allow, Statement, Policy, Action
from troposphere import Ref, Template, GetAtt, Output, iam
from utils import create_or_update_stack, stack_info
import ruamel_yaml as yaml
from troposphere.firehose import (
    KinesisStreamSourceConfiguration,
    DeliveryStream,
    BufferingHints,
    S3DestinationConfiguration
)

with open('../config/firehose_config.yml') as f:
    cfg = yaml.load(f)

STACK_NAME = cfg['firehose']['stack_name']
ACCOUNT_ID = cfg['firehose']['account_id']
BUCKET_NAME = cfg['firehose']['bucket_name']
REGION = cfg['firehose']['region']
STREAM_NAME = stack_info(stack_name='ScraperStreamStack')['StreamName']
STREAM_ARN = stack_info(stack_name='ScraperStreamStack')['StreamARN']

t = Template()
description = 'Stack for kinesis firehose stream to deliver to s3 from kinesis'
t.add_description(description)
t.add_version('2010-09-09')

firehose_policy_doc = Policy(
    Statement=[
Example #44
args = parser.parse_args()

if pygments:
    if args.color:
        formatter = Terminal256Formatter
    else:
        formatter = NullFormatter

try: # ... to read json
    i = args.infile.read()
    d = json.loads( i )
    if args.alwaysjson:
        if pygments:
            i = highlight( i, JsonLexer(), formatter() )
        print( i )
    else:
        out = yaml.safe_dump(d, indent=args.indent, allow_unicode=True )
        if pygments:
            out = highlight( out, YamlLexer(), formatter() )
        print( out )
except:
    try: # ... to read yaml
        d = yaml.load( i )
        out = json.dumps(d, indent=args.indent)
        if pygments:
            out = highlight(out, JsonLexer(), formatter() )
        print(out)
    except:
        print("input error: invalid json or yaml format")

Example #45
def yaml_load(filehandle):
    yaml = get_yaml()
    return yaml.load(filehandle, Loader=yaml.RoundTripLoader, version="1.1")
Example #46
def _load_string(contents):
    # using RoundTripLoader incorporates safe_load
    # (we don't load code)
    assert issubclass(ryaml.RoundTripLoader, ryaml.constructor.SafeConstructor)
    return ryaml.load(contents, Loader=ryaml.RoundTripLoader)
Example #47
def yaml_load(string):
    return yaml.load(string, Loader=yaml.RoundTripLoader, version="1.2")
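Closing note: the version= keyword and RoundTripLoader used above come from ruamel.yaml's legacy functional API; plain PyYAML accepts no version argument, and the closest safe counterpart is a sketch like:

import yaml  # PyYAML

def yaml_load_pyyaml(string):
    # SafeLoader restricts construction to standard YAML tags;
    # equivalent shorthand: yaml.safe_load(string)
    return yaml.load(string, Loader=yaml.SafeLoader)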