Example #1
    def register(cls):
        """Closure registering the passed class."""

        # Test the presence and usability of the functions
        try:
            tested = cls.to_yaml, cls.from_yaml
        except AttributeError:
            raise TypeError('Missing YAML serialization method')

        if not all(isinstance(f, Callable) for f in tested):
            raise TypeError('YAML serialization method(s) are not callable')

        # Make conversion handlers
        def dump(dumper: Dumper, value: Any) -> yaml.Node:
            return type.represent(dumper)(tag, cls.to_yaml(value))

        def load(loader: Loader, node: yaml.Node) -> Any:
            return cls.from_yaml(type.construct(loader)(node))

        # Register conversions
        Dumper.add_representer(cls, dump)
        Loader.add_constructor(tag, load)

        if pattern is not None:
            regexp = re.compile(pattern)
            Dumper.add_implicit_resolver(tag, regexp, None)
            Loader.add_implicit_resolver(tag, regexp, None)

        return cls
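The closure above references names supplied by an enclosing factory that the excerpt does not show (tag, pattern, type, Loader, Dumper). A minimal self-contained sketch of the same registration pattern, with assumed names (serializable, Point) and with the type.represent/type.construct indirection and the pattern-based implicit resolver left out:

import yaml
from yaml import SafeLoader as Loader, SafeDumper as Dumper


def serializable(tag):
    """Hypothetical decorator factory supplying `tag` to a register() closure."""
    def register(cls):
        def dump(dumper, value):
            return dumper.represent_mapping(tag, cls.to_yaml(value))

        def load(loader, node):
            return cls.from_yaml(loader.construct_mapping(node))

        Dumper.add_representer(cls, dump)
        Loader.add_constructor(tag, load)
        return cls
    return register


@serializable('!point')
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def to_yaml(self):
        return {'x': self.x, 'y': self.y}

    @classmethod
    def from_yaml(cls, data):
        return cls(**data)


text = yaml.dump(Point(1, 2), Dumper=Dumper, default_flow_style=False)
# "!point\nx: 1\ny: 2\n"
point = yaml.load(text, Loader=Loader)   # back to a Point instance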
Example #2
def YAML_Dumper():
    # This is required for YAML to properly print the Schema as an OrderedDict
    # Adapted from https://gist.github.com/oglops/c70fb69eef42d40bed06 to py3
    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    Dumper.add_representer(OrderedDict, dict_representer)
    Dumper.add_representer(str, SafeRepresenter.represent_str)
Example #3
def OrderedYaml():
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    return Loader, Dumper
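A short usage sketch of what the helper buys you (not from the original source; plain yaml.Loader/yaml.Dumper assumed, with the same two handlers registered inline):

import yaml
from yaml import Loader, Dumper
from collections import OrderedDict

# Equivalent to: Loader, Dumper = OrderedYaml()
Dumper.add_representer(
    OrderedDict, lambda dumper, data: dumper.represent_dict(data.items()))
Loader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    lambda loader, node: OrderedDict(loader.construct_pairs(node)))

text = yaml.dump(OrderedDict([('b', 1), ('a', 2)]), Dumper=Dumper,
                 default_flow_style=False)
# 'b: 1\na: 2\n' -- insertion order is kept instead of being sorted
data = yaml.load(text, Loader=Loader)
# OrderedDict([('b', 1), ('a', 2)]) -- every mapping loads as an OrderedDict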
Example #4
def dump_to_yaml(data, yamlFile):
    """
    Dump the data to a yaml file.
    See: https://gist.github.com/oglops/c70fb69eef42d40bed06
    """
    def noop(self, *args, **kw):
        "Don't emit tags: see https://stackoverflow.com/a/48823424/7874784"
        pass

    yaml.emitter.Emitter.process_tag = noop
    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    Dumper.add_representer(str, SafeRepresenter.represent_str)
    with open(yamlFile, 'w') as outfile:
        outfile.write(yaml.dump(data, default_flow_style=False))
Example #5
def ordered_yaml():
    """Support OrderedDict for yaml.

    Returns:
        yaml Loader and Dumper.
    """
    try:
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import Loader, Dumper

    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    return Loader, Dumper
Example #6
def OrderedYaml():
    '''yaml orderedDict support'''
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    Loader.add_implicit_resolver(
        u'tag:yaml.org,2002:float',
        re.compile(
            u'''^(?:
         [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
        |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
        |\\.[0-9_]+(?:[eE][-+][0-9]+)?
        |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
        |[-+]?\\.(?:inf|Inf|INF)
        |\\.(?:nan|NaN|NAN))$''', re.X), list(u'-+0123456789.'))
    return Loader, Dumper
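Besides the OrderedDict handling, the extra implicit resolver matters because PyYAML's stock YAML 1.1 resolver only recognizes a float literal that contains a dot, so exponent-only values come back as strings; the regex above extends the float tag to cover them. A quick illustration (the commented line assumes the Loader returned by OrderedYaml() above):

import yaml

yaml.safe_load('lr: 1e-5')                # {'lr': '1e-5'} -- parsed as a string
# yaml.load('lr: 1e-5', Loader=Loader)    # {'lr': 1e-05}  -- with the resolver above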
Example #7
def setupYamlLoadersAndDumpers():
    try:
        # Use the native code backends, if available.
        from yaml import CSafeLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import SafeLoader as Loader, Dumper

    def string_representer(dumper, value):
        style = None

        # If it has newlines, request a block style.
        if "\n" in value:
            style = "|"

        # if it looks like an identifier, use no style
        if re.match(
                r"^[a-zA-Z0-9_\-/]+$",
                value) and len(value) < 60 and value not in ("true", "false"):
            style = ''

        return dumper.represent_scalar(u'tag:yaml.org,2002:str',
                                       value,
                                       style=style)

    Dumper.add_representer(str, string_representer)
    Dumper.add_representer(unicode, string_representer)
    Dumper.add_representer(int, lambda dumper, value : \
        dumper.represent_scalar(u'tag:yaml.org,2002:int', str(value), style=''))
    Dumper.add_representer(bool, lambda dumper, value : \
        dumper.represent_scalar(u'tag:yaml.org,2002:bool', u"true" if value else u"false", style=''))
    Dumper.add_representer(type(None), lambda dumper, value : \
        dumper.represent_scalar(u'tag:yaml.org,2002:null', u"~"))

    def construct_tuple(loader, node):
        return tuple(Loader.construct_sequence(loader, node))

    Loader.add_constructor(u'tag:yaml.org,2002:seq', construct_tuple)
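A standalone Python 3 illustration of the style choices made above (the original also registers the same representer for Python 2's unicode type); the multi-line branch is the most visible effect:

import re
import yaml


def string_representer(dumper, value):
    style = None
    if "\n" in value:
        style = "|"            # literal block scalar for multi-line text
    elif (re.match(r"^[a-zA-Z0-9_\-/]+$", value) and len(value) < 60
          and value not in ("true", "false")):
        style = ""             # plain (unquoted) for short identifier-like text
    return dumper.represent_scalar('tag:yaml.org,2002:str', value, style=style)


yaml.SafeDumper.add_representer(str, string_representer)

print(yaml.safe_dump({'id': 'build/linux-x86_64', 'script': 'set -e\nmake\n'},
                     default_flow_style=False))
# id: build/linux-x86_64
# script: |
#   set -e
#   make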
Example #8
    def yaml_dumper():
        try:
            from yaml import CLoader as Loader, CDumper as Dumper
        except ImportError:
            from yaml import Loader, Dumper
        from yaml.representer import SafeRepresenter
        _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

        def dict_representer(dumper, data):
            return dumper.represent_dict(data.iteritems())

        def dict_constructor(loader, node):
            return OrderedDict(loader.construct_pairs(node))

        Dumper.add_representer(OrderedDict, dict_representer)
        Loader.add_constructor(_mapping_tag, dict_constructor)

        Dumper.add_representer(str, SafeRepresenter.represent_str)

        Dumper.add_representer(unicode, SafeRepresenter.represent_unicode)
        return Dumper
Example #9
    def yaml_dumper():
        try:
            from yaml import CLoader as Loader, CDumper as Dumper
        except ImportError:
            from yaml import Loader, Dumper
        from yaml.representer import SafeRepresenter
        _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

        def dict_representer(dumper, data):
            return dumper.represent_dict(iteritems(data))

        def dict_constructor(loader, node):
            return OrderedDict(loader.construct_pairs(node))

        Dumper.add_representer(OrderedDict, dict_representer)
        Loader.add_constructor(_mapping_tag, dict_constructor)

        def str_representer_pipestyle(dumper, data):
            style = '|' if '\n' in data else None
            return dumper.represent_scalar('tag:yaml.org,2002:str',
                                           data,
                                           style=style)

        Dumper.add_representer(str, str_representer_pipestyle)

        if not PY3:

            def unicode_representer_pipestyle(dumper, data):
                style = u'|' if u'\n' in data else None
                return dumper.represent_scalar(u'tag:yaml.org,2002:str',
                                               data,
                                               style=style)

            Dumper.add_representer(unicode, unicode_representer_pipestyle)

        return Dumper
Example #10
def dump(data, stream=None, **kwargs):
    mykwargs = {
        'allow_unicode': True,
        'default_flow_style': False,
    }
    mykwargs.update(kwargs)
    return yaml.dump(data, stream=stream, Dumper=Dumper, **mykwargs)


def represent_multiline_str(self, data):
    style = '|' if '\n' in data else None
    return self.represent_scalar('tag:yaml.org,2002:str', data, style=style)


Dumper.add_representer(str, represent_multiline_str)


def represent_this_key_first_dict(key, self, data):
    '''
  usage:

  yamlutils.Dumper.add_representer(
    dict, partial(yamlutils.represent_this_key_first_dict, 'name'))
  '''
    value = []
    if key in data:
        node_key = self.represent_data(key)
        node_value = self.represent_data(data[key])
        value.append((node_key, node_value))
Example #11
            "while constructing an ordered map", node.start_mark,
            "expected a map, but found %s" % node.id, node.start_mark)
    for key, value in node.value:
        key = load.construct_object(key)
        value = load.construct_object(value)
        omap[key] = value


Loader.add_constructor(u'tag:yaml.org,2002:map', construct_odict)


def ordered_dict_serializer(self, data):
    return self.represent_mapping('tag:yaml.org,2002:map', data.items())


Dumper.add_representer(OrderedDict, ordered_dict_serializer)


# Likewise, when we store unicode objects make sure we don't write
# them with weird YAML tags indicating the Python data type. str-typed
# strings come out fine, but unicode strings come out with unnecessary
# type tags. The easy solution is this:
#
#   Dumper.add_representer(unicode, lambda dumper, value:
#        dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
#
# However, the standard PyYAML representer for strings does something
# weird: if a value cannot be parsed as an integer quotes are omitted.
#
# This is incredibly odd when the value is an integer with a leading
# zero. These values are typically parsed as octal integers, meaning
Example #12
def add_new_fish_data(args):
    global XIV  # type: 'pysaintcoinach.ARealmReversed'

    # Parse the fish data in the YAML file.
    fishes = yaml.load(open(args.existing_data, 'r'), Loader=Loader)
    known_fishes = [fish['name'] for fish in fishes]

    # Add ignored fish as well please.
    if args.ignored_fish is not None:
        with open(args.ignored_fish, 'r') as f:
            known_fishes += [fish.strip() for fish in f]

    known_fishes = set(known_fishes)

    # Iterate all of the FishingSpot entries next, skipping any fish we already know about.
    new_fishes = {}

    from pysaintcoinach.xiv.fishing_spot import FishingSpot
    for fishing_spot in XIV.game_data.get_sheet(FishingSpot):
        if fishing_spot.place_name.key == 0:
            continue
        if fishing_spot.get_raw("PlaceName{Main}") != 0:
            # TODO: For now, exclude Ocean Fishing nodes.
            continue
        for fish in fishing_spot.items:
            if str(fish.name) in known_fishes:
                continue

            if fish.key not in new_fishes:

                new_fishes[fish.key] = {
                    'name': str(fish.name),
                    'location': str(fishing_spot.place_name),
                    'startHour': 0,
                    'endHour': 24,
                    'previousWeatherSet': None,
                    'weatherSet': None,
                    'bestCatchPath': None,
                    'predators': None,
                    'hookset': None,
                    'snagging': None,
                    'gig': None,
                    'fishEyes': False,
                    'patch': float(args.patch),
                    'dataMissing': True
                }

    # Include spearfishing as well.
    from pysaintcoinach.xiv.gathering_point import GatheringPoint
    for gathering_point in XIV.game_data.get_sheet(GatheringPoint):
        # We only care about spearfishing gathering points.
        if gathering_point.base.type.key != 4:
            continue
        for item in gathering_point.base.items:
            if str(item.name) in known_fishes:
                continue

            if item.key not in new_fishes:
                # Get the BASE gathering point only!
                is_hidden = gathering_point['Count'] == 6  # rather sketchy, but this is the right field (Index: 2)

                new_fishes[item.key] = {
                    'name': str(item.name),
                    'location': gathering_point.base.key if is_hidden else str(gathering_point.place_name.name),
                    'startHour': 0,
                    'endHour': 24,
                    'previousWeatherSet': None,
                    'weatherSet': None,
                    'bestCatchPath': None,
                    'predators': None,
                    'hookset': None,
                    'snagging': None,
                    'gig': 'UNKNOWN',
                    'fishEyes': False,
                    'patch': float(args.patch),
                    'dataMissing': True
                }


    # Dump the new fish data to a YAML file.
    with open(args.new_data, 'w') as f:
        # Make things prettier...
        def represent_none(self, _):
            return self.represent_scalar('tag:yaml.org,2002:null', '')

        Dumper.add_representer(type(None), represent_none)
        yaml.dump(list(new_fishes.values()), f,
                  Dumper=Dumper, default_flow_style=False, sort_keys=False)
        f.write('---\n')
        f.writelines(['%s\n' % str(fish['name']) for fish in list(new_fishes.values())])

    return True
Example #13
  return yaml.load_all(src, Loader=Loader)

def dump(data, stream=None, **kwargs):
  mykwargs = {
    'allow_unicode': True,
    'default_flow_style': False,
  }
  mykwargs.update(kwargs)
  return yaml.dump(data, stream=stream, Dumper=Dumper, **mykwargs)

def represent_multiline_str(self, data):
  style = '|' if '\n' in data else None
  return self.represent_scalar(
    'tag:yaml.org,2002:str', data, style=style)

Dumper.add_representer(str, represent_multiline_str)

def represent_this_key_first_dict(key, self, data):
  '''
  usage:

  yamlutils.Dumper.add_representer(
    dict, partial(yamlutils.represent_this_key_first_dict, 'name'))
  '''
  value = []
  if key in data:
    node_key = self.represent_data(key)
    node_value = self.represent_data(data[key])
    value.append((node_key, node_value))

  for k, v in data.items():
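The excerpt is cut off in the middle of the loop. A self-contained sketch of how such a helper plausibly finishes (an assumption, not the original yamlutils source): emit the chosen key first, skip it in the main loop, and return a MappingNode built from the collected node pairs.

from functools import partial
import yaml


def represent_this_key_first_dict(key, self, data):
    value = []
    if key in data:
        value.append((self.represent_data(key), self.represent_data(data[key])))
    for k, v in data.items():
        if k == key:
            continue                      # already emitted first
        value.append((self.represent_data(k), self.represent_data(v)))
    return yaml.MappingNode('tag:yaml.org,2002:map', value)


yaml.Dumper.add_representer(
    dict, partial(represent_this_key_first_dict, 'name'))

print(yaml.dump({'args': [1, 2], 'name': 'job-1'}, default_flow_style=False))
# name: job-1
# args:
# - 1
# - 2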
Example #14
## setup dumper for dumping OrderedDict ##
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    """ Representer to represent special OrderedDict """
    return dumper.represent_dict(data.items())


def dict_constructor(loader, node):
    """ Construct an OrderedDict for dumping """
    return OrderedDict(loader.construct_pairs(node))


Dumper.add_representer(OrderedDict, dict_representer)
Loader.add_constructor(_mapping_tag, dict_constructor)


class GRCYAMLGenerator(object):
    """ Create and write the YAML bindings for a GRC block. """
    def __init__(self,
                 modname=None,
                 blockname=None,
                 doc=None,
                 params=None,
                 iosig=None):
        """docstring for __init__"""
        params_list = ['$' + s['key'] for s in params if s['in_constructor']]
        # Can't make a dict 'cause order matters
        self._header = (('id', '{}_{}'.format(modname, blockname)),
Example #15
    automaton = ModelClass(name=name, props=props, multi=multi)
    automaton.init = init_factory(data.get('init', init_factory()))
    automaton.final = final_factory(data.get('final', final_factory()))
    automaton.g.add_nodes_from(data['graph'].get('nodes', dict()).items())
    automaton.g.add_edges_from(data['graph'].get('edges', []))
    return automaton


# register yaml representers
try:  # try using the libyaml if installed
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:  # else use default PyYAML loader and dumper
    from yaml import Loader, Dumper

Dumper.add_representer(Model, model_representer)
Dumper.add_representer(Ts, model_representer)
Dumper.add_representer(
    Markov, lambda dumper, model: model_representer(dumper, model, dict))
Dumper.add_representer(Automaton, automaton_representer)
Dumper.add_representer(Buchi, automaton_representer)
Dumper.add_representer(Fsa, automaton_representer)
Dumper.add_representer(Rabin, automaton_representer)

Loader.add_constructor(
    Model.yaml_tag,
    lambda loader, model: model_constructor(loader, model, Model))
Loader.add_constructor(
    Ts.yaml_tag, lambda loader, model: model_constructor(loader, model, Ts))
Loader.add_constructor(
    Markov.yaml_tag,
Example #16
TIMESTAMP_TAG = 'tag:yaml.org,2002:timestamp'
STR_TAG = 'tag:yaml.org,2002:str'
OMAP_TAG = 'tag:yaml.org,2002:omap'

# Force deep loading of dictionaries
Loader.construct_mapping = partialmethod(Loader.construct_mapping, deep=True)


# Provide own implementation of ISO8601 timestamps
def timestamp_representer(dumper: Dumper, date: datetime) -> yaml.Node:
    """Custom representer for datetime objects in YAML."""
    return dumper.represent_scalar(TIMESTAMP_TAG, date.isoformat())


Dumper.add_representer(datetime, timestamp_representer)


def timestamp_constructor(loader: Loader, node: yaml.Node) -> datetime:
    """Custom constructor for datetime objects from YAML."""
    value = loader.construct_scalar(node)
    return parse_date(value)


Loader.add_constructor(TIMESTAMP_TAG, timestamp_constructor)


# Automatically serialize Path as string
def path_representer(dumper: Dumper, path: Path) -> yaml.Node:
    """Custom representer for Path objects in YAML."""
    return dumper.represent_scalar(STR_TAG, str(path))
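A round-trip sketch under assumptions the excerpt implies but does not show: plain yaml.Loader/yaml.Dumper, parse_date taken from dateutil, and add_multi_representer for Path (Path() actually instantiates PosixPath/WindowsPath, so an exact-type registration would not match):

from datetime import datetime
from pathlib import Path

import yaml
from yaml import Loader, Dumper
from dateutil.parser import parse as parse_date

TIMESTAMP_TAG = 'tag:yaml.org,2002:timestamp'
STR_TAG = 'tag:yaml.org,2002:str'

Dumper.add_representer(
    datetime, lambda dumper, date: dumper.represent_scalar(TIMESTAMP_TAG, date.isoformat()))
Dumper.add_multi_representer(
    Path, lambda dumper, path: dumper.represent_scalar(STR_TAG, str(path)))
Loader.add_constructor(
    TIMESTAMP_TAG, lambda loader, node: parse_date(loader.construct_scalar(node)))

text = yaml.dump({'created': datetime(2021, 5, 1, 12, 30), 'log': Path('/tmp/run.log')},
                 Dumper=Dumper, default_flow_style=False)
# created: 2021-05-01T12:30:00
# log: /tmp/run.log
data = yaml.load(text, Loader=Loader)
# 'created' comes back as a datetime; 'log' comes back as a plain str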
Example #17
        for key, value in node.value:
            key = load.construct_object(key)
            value = load.construct_object(value)
            omap[key] = value

    Loader.add_constructor(u'tag:yaml.org,2002:map', construct_odict)

else:
    # Starting with Python 3.7, dicts preserve order. But PyYAML by default
    # sorts the keys of any mapping it gets when that mapping has an 'items'
    # attribute. We override that by adding an explicit representer for 'dict'
    # that passes the items directly (so that the value it sees does not have
    # an 'items' attribute itself). See https://github.com/yaml/pyyaml/blob/e471e86bf6dabdad45a1438c20a4a5c033eb9034/lib/yaml/representer.py#L119.
    # See below for similar code for OrderedDicts.
    Dumper.add_representer(
        dict, lambda self, data: self.represent_mapping(
            'tag:yaml.org,2002:map', data.items()))


# Tell YAML to serialize OrderedDicts as mappings. Prior to Python 3.7, this
# was the data type that had to be used to specify key order, and, per the block
# above, we would deserialize to OrderedDicts. In Python >=3.7, we don't deserialize
# to OrderedDicts anymore but we still allow OrderedDicts to be used in data passed
# to dump. See the block above for why we also add an explicit serializer for dicts.
def ordered_dict_serializer(self, data):
    return self.represent_mapping('tag:yaml.org,2002:map', data.items())


Dumper.add_representer(OrderedDict, ordered_dict_serializer)
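The difference in one concrete case (PyYAML's stock Dumper; newer PyYAML also exposes sort_keys=False for a similar effect):

import yaml

print(yaml.dump({'zebra': 1, 'apple': 2}, default_flow_style=False))
# apple: 2
# zebra: 1        <- keys sorted by the default dict representer

yaml.Dumper.add_representer(
    dict, lambda self, data: self.represent_mapping(
        'tag:yaml.org,2002:map', data.items()))

print(yaml.dump({'zebra': 1, 'apple': 2}, default_flow_style=False))
# zebra: 1
# apple: 2        <- insertion order preserved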

Example #18
    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''

from base import *
from maps2d import *
from maps3d import *
from maps2hd import *
from maps_nd import *

# register yaml representers
try: # try using the libyaml if installed
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError: # else use default PyYAML loader and dumper
    from yaml import Loader, Dumper

def p2d_representer(dumper, p):
    return dumper.represent_mapping(tag=u'!Point2D',
                                    mapping={'x': float(p.x), 'y': float(p.y)})
Dumper.add_representer(Point2D, p2d_representer)
def p2d_constructor(loader, node):
    data = loader.construct_mapping(node)
    return Point2D([data['x'], data['y']])
Loader.add_constructor(u'!Point2D', p2d_constructor)
Example #19
def export_ofx_yml(s, filename):
    """Convert the scene to a orcaflex .yml file. Only compatible nodes are exported. Make the scene  orcaflex compatible before exporting.

    Visuals of .obj type are supported by orcaflex. These are copied to the same folder as the .yml file

    Args:
        s : Scene
        filename : file to write to (.yml)
    """

    filename = Path(filename) # convert to path

    # filename.parent : folder

    s.sort_nodes_by_dependency()

    buoys = []
    winches = []
    constraints = []
    vessel_types = []
    vessels = []
    Shapes = []
    line_types = []
    lines = []

    for n in s._nodes:

        if isinstance(n, BallastSystem):

            # calculate the inertia
            ixx = 0
            iyy = 0
            izz = 0
            mass = 0
            for tank in n._tanks:
                mass += tank.inertia
                inertia = tank.inertia
                ixx += inertia * (tank.position[1] ** 2 + tank.position[2] ** 2)
                iyy += inertia * (tank.position[0] ** 2 + tank.position[2] ** 2)
                izz += inertia * (tank.position[0] ** 2 + tank.position[1] ** 2)

            ixx = min(ixx, OFX_ZERO_MASS)
            iyy = min(iyy, OFX_ZERO_MASS)
            izz = min(izz, OFX_ZERO_MASS)

            I = [ixx, iyy, izz]
            pos = [*n.position]

            cog = [float(i) for i in n.cog]

            b = {'Name': n.name,
                 'Connection': n.parent.name,
                 'InitialPosition': pos,
                 'Mass': mass,
                 'Volume': 0,
                 'MomentsOfInertia': I,
                 'CentreOfMass': cog
                 }

            buoys.append(b)


        if isinstance(n, (RigidBody, Axis)):

            if isinstance(n, RigidBody):
                mass = max(n.mass, OFX_ZERO_MASS)
                I = (mass * n.inertia_radii ** 2).tolist()
                cog = [*n.cog]

            elif isinstance(n, Axis):
                mass = OFX_ZERO_MASS
                I = [OFX_ZERO_MASS, OFX_ZERO_MASS, OFX_ZERO_MASS]
                cog = [0, 0, 0]


            # check the connection

            pos = [*n.position]
            rot = [*rotation_to_attitude(n.rotation)]

            if not any(n.fixed):
                connection = 'Free'

            elif np.all(n.fixed):
                if n.parent is None:
                    connection = 'Fixed'
                else:
                    connection = n.parent.name

            else:
                # Partially fixed - create constraint

                cname = n.name + ' [fixes]'

                if n.parent is None:
                    connection = 'Fixed'
                else:
                    connection = n.parent.name

                fixes = []
                for f in n.fixed:
                    if f:
                        fixes.append('No')
                    else:
                        fixes.append('Yes')

                c = {'Name': cname,
                     'Connection': connection,
                     'DOFFree': fixes,
                     'InitialPosition': pos,
                     'InitialAttitude': rot,
                     }

                constraints.append(c)

                # set the props for the 6d buoy
                connection = cname
                pos = [0,0,0]
                rot = [0,0,0]

            b = {'Name': n.name,
                 'Connection': connection,
                 'InitialPosition': pos,
                 'InitialAttitude': rot,
                 'Mass': mass,
                 'Volume': 0,
                 'MomentsOfInertia': I,
                 'CentreOfMass': cog
                 }                       # create the basic buoy, but do not add it yet as some of the properties may be
                                         # overwritten by the vessel that we may create now

            ## Vessels --------------
            #
            # If one of the children of this Axis is a HydSpring (linear hydrostatics node), then
            #  1. Look for a waveinteraction1 node as well
            #
            # if both are found then
            # 1. create a vessel type
            # 2. create a vessel without any mass
            # 3. place the buoy that we just created on the vessel

            children = s.nodes_with_parent(n)

            hyd_spring = None
            hyd_db = None

            for child in children:
                cn = s[child]
                if isinstance(cn, HydSpring):
                    hyd_spring = cn
                if isinstance(cn, WaveInteraction1):
                    hyd_db = cn

            if hyd_spring is not None:
                # create a vessel type

                vt = {'Name': n.name + hyd_spring.name,
                      'Length':1,
                      # conventions
                      'WavesReferredToBy':'frequency (rad/s)',
                      'RAOPhaseConvention':'lags',
                      'RAOPhaseUnitsConvention':'radians'
                      }

                # Stiffness, Added mass, damping
                #
                # The Reference-origin defines where all of these forces are applied
                # so it needs to be the origin of the hydrodynamic data, if we have any

                ref_origin = (0.,0.,0.)
                if hyd_db is not None:
                    ref_origin = hyd_db.offset

                # calculate the stiffness matrix relative to this point

                k = np.zeros((3,3))

                # Heave and heave coupling
                k[0,0] = hyd_spring.kHeave
                k[0,1] = -hyd_spring.kHeave * (hyd_spring.cob[1] + hyd_spring.COFY - ref_origin[1]) # heave-roll
                k[0,2] = -hyd_spring.kHeave * (hyd_spring.cob[0] + hyd_spring.COFX - ref_origin[0])  # heave-pitch
                k[1,0] = k[0,1]
                k[2,0] = k[0,2]

                # BML and BMT
                k[1,1] = hyd_spring.displacement_kN * (hyd_spring.BMT - ref_origin[2] + hyd_spring.cob[2]) # g * rho * disp * BMt
                k[2, 2] = hyd_spring.displacement_kN * (hyd_spring.BML - ref_origin[2] + hyd_spring.cob[2]) # g * rho * disp * BMt


                d = {'Name':'Draught1',
                     'Mass':1e-6,
                     'MomentOfInertiaTensorX': [0.001,0,0],
                     'MomentOfInertiaTensorY': [0,0.001,0],
                     'MomentOfInertiaTensorZ': [0,0,0.001],
                     'CentreOfGravityX': 0,
                     'CentreOfGravityY': 0,
                     'CentreOfGravityZ': 0,
                     'CentreOfBuoyancyX' : hyd_spring.cob[0],
                     'CentreOfBuoyancyY' : hyd_spring.cob[1],
                     'CentreOfBuoyancyZ' : hyd_spring.cob[2],

                     # Stiffness, added mass, damping
                     'StiffnessInertiaDampingRefOriginx':ref_origin[0],
                     'StiffnessInertiaDampingRefOriginy':ref_origin[1],
                     'StiffnessInertiaDampingRefOriginz':ref_origin[2],
                     'HydrostaticReferenceOriginDatumPositionz':n.to_glob_position(ref_origin)[2],
                     'HydrostaticReferenceOriginDatumOrientationx':0,
                     'HydrostaticReferenceOriginDatumOrientationy':0,
                     'DisplacedVolume': hyd_spring.displacement_kN / (RHO * G),
                     'HydrostaticStiffnessz' : k[:,0].tolist(),
                     'HydrostaticStiffnessRx':k[:,1].tolist(),
                     'HydrostaticStiffnessRy': k[:,2].tolist(),

                     # other damping settings

                     # 'OtherDampingCalculatedFrom',  # Add once version > 10.2d

                     'OtherDampingOriginx':ref_origin[0],
                     'OtherDampingOriginy':ref_origin[1],
                     'OtherDampingOriginz':ref_origin[2],
                     'OtherDampingLinearCoeffx':0,
                     'OtherDampingLinearCoeffy':0,
                     'OtherDampingLinearCoeffz':0,
                     'OtherDampingLinearCoeffRx':0,
                     'OtherDampingLinearCoeffRy':0,
                     'OtherDampingLinearCoeffRz':0,
                     'OtherDampingQuadraticCoeffx':0,
                     'OtherDampingQuadraticCoeffy':0,
                     'OtherDampingQuadraticCoeffz':0,
                     'OtherDampingQuadraticCoeffRx':0,
                     'OtherDampingQuadraticCoeffRy':0,
                     'OtherDampingQuadraticCoeffRz':0

                     }

                # Export hydrodynamics, if any
                if hyd_db is not None:

                    # Export
                    # Wave-forces (Force RAOs)
                    # Damping
                    # Added mass

                    LoadRAOs = {'RAOOriginX': ref_origin[0],  # TODO: These values do not seem to be loaded into OFX
                            'RAOOriginY': ref_origin[1],
                            'RAOOriginZ': ref_origin[2]}

                    # load the database
                    from mafredo.hyddb1 import Hyddb1
                    database = s.get_resource_path(hyd_db.path)
                    db = Hyddb1()
                    db.load_from(database)

                    # get the available headings
                    a_headings = db.force_rao(0)._data['wave_direction'].values
                    a_frequencies = db.frequencies

                    rao_mode = []
                    for i in range(6):
                        rao_mode.append(db.force_rao(i))

                    RAOs = []

                    for heading in a_headings:

                        rao = {'RAODirection':float(heading)}

                        RAOPeriodOrFrequency = []
                        RAOSurgeAmp = []
                        RAOSurgePhase = []
                        RAOSwayAmp = []
                        RAOSwayPhase = []
                        RAOHeaveAmp = []
                        RAOHeavePhase = []
                        RAORollAmp = []
                        RAORollPhase = []
                        RAOPitchAmp = []
                        RAOPitchPhase = []
                        RAOYawAmp = []
                        RAOYawPhase = []

                        for frequency in a_frequencies:
                            RAOPeriodOrFrequency.append(float(frequency))

                            r = rao_mode[0].get_value(wave_direction=heading, omega = frequency)
                            RAOSurgeAmp.append(float(np.abs(r)))
                            RAOSurgePhase.append(float(np.angle(r)))

                            r = rao_mode[1].get_value(wave_direction=heading, omega=frequency)
                            RAOSwayAmp.append(float(np.abs(r)))
                            RAOSwayPhase.append(float(np.angle(r)))

                            r = rao_mode[2].get_value(wave_direction=heading, omega=frequency)
                            RAOHeaveAmp.append(float(np.abs(r)))
                            RAOHeavePhase.append(float(np.angle(r)))

                            r = rao_mode[3].get_value(wave_direction=heading, omega=frequency)
                            RAORollAmp.append(float(np.abs(r)))
                            RAORollPhase.append(float(np.angle(r)))

                            r = rao_mode[4].get_value(wave_direction=heading, omega=frequency)
                            RAOPitchAmp.append(float(np.abs(r)))
                            RAOPitchPhase.append(float(np.angle(r)))

                            r = rao_mode[5].get_value(wave_direction=heading, omega=frequency)
                            RAOYawAmp.append(float(np.abs(r)))
                            RAOYawPhase.append(float(np.angle(r)))

                        rao['RAOPeriodOrFrequency'] = RAOPeriodOrFrequency
                        rao['RAOSurgeAmp'] = RAOSurgeAmp
                        rao['RAOSurgePhase'] = RAOSurgePhase
                        rao['RAOSwayAmp'] = RAOSwayAmp
                        rao['RAOSwayPhase'] = RAOSwayPhase
                        rao['RAOHeaveAmp'] = RAOHeaveAmp
                        rao['RAOHeavePhase'] = RAOHeavePhase
                        rao['RAORollAmp'] = RAORollAmp
                        rao['RAORollPhase'] = RAORollPhase
                        rao['RAOPitchAmp'] = RAOPitchAmp
                        rao['RAOPitchPhase'] = RAOPitchPhase
                        rao['RAOYawAmp'] = RAOYawAmp
                        rao['RAOYawPhase'] = RAOYawPhase
                        RAOs.append(rao)

                    LoadRAOs['RAOs'] = RAOs
                    d['LoadRAOs'] = LoadRAOs

                    # Added mass and Damping
                    FrequencyDependentAddedMassAndDamping = []
                    for frequency in a_frequencies:
                        entry = {'AMDPeriodOrFrequency': float(frequency)}
                        B = db.damping(frequency)
                        A = db.amass(frequency)

                        # Make symmetric (else Orcaflex will not read the yml)

                        def make_orcaflex_happy(mat):
                            mat = 0.5 * mat + 0.5 * mat.transpose()
                            R = np.zeros((6, 6))
                            R[0, 0] = mat[0, 0]
                            R[1, 1] = mat[1, 1]
                            R[2, 2] = mat[2, 2]
                            R[3, 3] = mat[3, 3]
                            R[4, 4] = mat[4, 4]
                            R[5, 5] = mat[5, 5]

                            # oA[0,2] = 7   # error
                            R[2, 0] = mat[2, 0]

                            R[0, 4] = mat[0, 4]
                            R[4, 0] = mat[0, 4]

                            R[1, 3] = mat[1, 3]  # need both
                            R[3, 1] = mat[1, 3]

                            R[1, 5] = mat[1, 5]  # need both
                            R[5, 1] = mat[1, 5]

                            R[5, 3] = mat[3, 5]
                            return R

                        oA = make_orcaflex_happy(A)
                        oB = make_orcaflex_happy(B)

                        entry['AddedMassMatrixX'] = oA[0].tolist()
                        entry['AddedMassMatrixY'] = oA[1].tolist()
                        entry['AddedMassMatrixZ'] = oA[2].tolist()
                        entry['AddedMassMatrixRx'] = oA[3].tolist()
                        entry['AddedMassMatrixRy'] = oA[4].tolist()
                        entry['AddedMassMatrixRz'] = oA[5].tolist()

                        entry['DampingX'] = oB[0].tolist()
                        entry['DampingY'] = oB[1].tolist()
                        entry['DampingZ'] = oB[2].tolist()
                        entry['DampingRx'] = oB[3].tolist()
                        entry['DampingRy'] = oB[4].tolist()
                        entry['DampingRz'] = oB[5].tolist()

                        FrequencyDependentAddedMassAndDamping.append(entry)

                    d['AMDMethod'] = 'Frequency Dependent'
                    d['FrequencyDependentAddedMassAndDamping'] = FrequencyDependentAddedMassAndDamping

                vt['Draughts'] = [d]  # draughts is a list! Even though we only use one.

                # Create a vessel

                v = {'Name':n.name + 'Vessel',
                     'VesselType':n.name + hyd_spring.name,
                     'Length':1,
                     'InitialPosition': pos,
                     'InitialHeel': float(n.heel),
                     'InitialTrim': float(n.trim),
                     'InitialHeading': float(n.heading),
                     'IncludedInStatics': '6 DOF',
                     'PrimaryMotion': 'Calculated (6 DOF)',
                     'SuperimposedMotion': 'None',
                     'PrimaryMotionIsTreatedAs': 'Wave frequency',
                     'IncludeWaveLoad1stOrder': 'Yes',
                     'IncludeAddedMassAndDamping': 'Yes',
                     'IncludeOtherDamping': 'Yes'}

                # Modify the buoy to be on the vessel
                b['InitialPosition'] = [0,0,0]
                b['InitialAttitude'] = [0,0,0]
                b['Connection'] = v['Name']

                vessel_types.append(vt)
                vessels.append(v)




            # Done with the vessel stuff, back to the 6D buoy that we were exporting


            buoys.append(b)




        if isinstance(n, Cable):

            connection = []
            connectionX = []
            connectionY = []
            connectionZ = []

            for c in n.connections:
                # either a point or a circle
                # for now only points are supported

                if isinstance(c, Circle):
                    raise ValueError('Circles not yet supported')

                if c.parent is None:
                    connection.append('Fixed')
                else:
                    connection.append(c.parent.name)
                connectionX.append(c.position[0])
                connectionY.append(c.position[1])
                connectionZ.append(c.position[2])

            w = { 'Name': n.name,
                  'Connection': connection,
                  'ConnectionX': connectionX,
                  'ConnectionY': connectionY,
                  'ConnectionZ': connectionZ,
                  'Stiffness': n.EA,
                  'NumberOfConnections': len(n.connections),
                  'WinchControlType': 'By Stage',
                  'StageMode': ['Specified Length','Specified Payout','Specified Payout'],
                  'StageValue': [n.length,0,0]
                  }

            winches.append(w)

        if isinstance(n, Visual):

            visualfile = s.get_resource_path(n.path)

            if 'obj' in visualfile.suffix:

                # copy the .obj to the destination folder such that we have all required files in one place

                copy_from = visualfile
                copy_to = filename.parent / visualfile.name

                if copy_from == copy_to:
                    pass
                else:
                    copyfile(visualfile, filename.parent / visualfile.name)
                    print(f'created {filename.parent / visualfile.name}')

                shape = { 'Name': n.name,
                      'Connection':n.parent.name,
                      'ShapeType':'Drawing',
                      'Shape' : 'Block',
                      'OriginX' : 0 ,
                      'OriginY' : 0 ,
                      'OriginZ' : 0 ,
                      'Rotation1' : 0 ,
                      'Rotation2' : 0 ,
                      'Rotation3' : 0 ,
                      'OutsidePenColour' : 55551,  # $00D8FF
                      'ShadedDrawingFileName' : visualfile.name,
                      'ShadedDrawingMirrorInPlane' : 'XZ plane' ,
                      'ShadedDrawingRotation1' : 0 ,
                      'ShadedDrawingRotation2' : 90 ,
                      'ShadedDrawingRotation3' : -90 }

                Shapes.append(shape)

            else:
                warn(f'Only .obj files can be used in orcaflex, not exporting visual "{n.name}"')

        if isinstance(n, Beam):
            # line-type
            typename = f"LT_for_{n.name}"
            mass_per_length = n.mass / n.L
            if mass_per_length < OFX_SMALL:
                print(f'Mass per length for {n.name} set to {OFX_SMALL}')
                mass_per_length = OFX_SMALL

            lt = {'Name' : typename,
                  'OD' : OFX_SMALL,
                  'ID' : 0,
                  'MassPerUnitLength' : mass_per_length,
                  'EIx': n.EIy,
                  'EIy' : n.EIz,
                  'GJ': n.GIp,
                  'CompressionIsLimited': yesno(n.tension_only),
                  'EA': n.EA }
            line_types.append(lt)

            line = OrderedDict({'Name':n.name,
                    'EndAConnection':n.nodeA.name,
                    'EndAX': 0,
                    'EndAY': 0,
                    'EndAZ': 0,
                    'EndAAzimuth' : 0,
                    'EndADeclination' : 90,
                    'EndBConnection': n.nodeB.name,
                    'EndBX': 0,
                    'EndBY': 0,
                    'EndBZ': 0,
                    'EndBAzimuth': 0,
                    'EndBDeclination': 90,
                    'EndAxBendingStiffness' : 'Infinity',
                    'EndBxBendingStiffness' : 'Infinity',
                    'EndAyBendingStiffness' : 'Infinity',
                    'EndByBendingStiffness' : 'Infinity',
                    'NumberOfSections':1,
                    'LineType[1]' : typename,
                    'Length[1]' : n.L,
                    'TargetSegmentLength[1]' : '~',
                    'StaticsStep1':'User specified'
                    })

            do_torsion = n.GIp > 0

            if do_torsion:
                line['IncludeTorsion'] = 'Yes'
                line['EndATwistingStiffness'] = 'Infinity'
                line['EndBTwistingStiffness'] = 'Infinity'
                line['StartingShapeOrientationsSpecified'] = 'Yes'

            line['NumberOfSegments[1]'] = int(n.n_segments)

            pos = n.global_positions
            xx = pos[:,0]
            yy = pos[:,1]
            zz = pos[:,2]
            line['StartingShapeX'] = xx.tolist()
            line['StartingShapeY'] = yy.tolist()
            line['StartingShapeZ'] = zz.tolist()

            if do_torsion:
                rot = n.global_orientations
                rx = []
                ry = []
                rz = []
                for r in rot:
                    azdecgam = rotvec_to_line_node_axis_az_dec_gam(r)
                    rx.append(azdecgam[0])
                    ry.append(azdecgam[1])
                    rz.append(azdecgam[2])
                line['StartingShapeAzm'] = rx
                line['StartingShapeDec'] = ry
                line['StartingShapeGamma'] = rz

            lines.append(line)





    # Write the yml

    data = dict()

    if buoys:
        data['6DBuoys'] = buoys
    if winches:
        data['Winches'] = winches
    if constraints:
        data['Constraints'] = constraints
    if vessel_types:
        data['VesselTypes'] = vessel_types
    if vessels:
        data['Vessels'] = vessels
    if Shapes:
        data['Shapes'] = Shapes
    if line_types:
        data['LineTypes'] = line_types
    if lines:
        data['Lines'] = lines

    from yaml import CDumper as Dumper

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    Dumper.add_representer(OrderedDict, dict_representer)

    s = yaml.dump(data, explicit_start=True, Dumper=Dumper)

    with open(filename,'w') as f:
        f.write(s)
        print(f'created {filename}')
Example #20
import yaml
try:
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper  # @UnusedImport
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_dict(iter(data.items()))


def dict_constructor(loader, node):
    return OrderedDict(loader.construct_pairs(node))

Dumper.add_representer(OrderedDict, dict_representer)
Loader.add_constructor(_mapping_tag, dict_constructor)


class YAMLSerializer(DictSerializer):
    """
    A Serializer class that serializes to YAML
    """

    def to_file(self, serial_elem, file, **options):
        yaml.dump(self._prepare_dict(serial_elem, **options), stream=file,
                  Dumper=Dumper)

    def to_str(self, serial_elem, **options):
        return yaml.dump(self.to_elem(serial_elem, **options),
                         Dumper=Dumper)
Example #21
    yield omap
    if not isinstance(node, yaml.MappingNode):
        raise yaml.constructor.ConstructorError(
            "while constructing an ordered map",
            node.start_mark,
            "expected a map, but found %s" % node.id, node.start_mark
        )
    for key, value in node.value:
        key = load.construct_object(key)
        value = load.construct_object(value)
        omap[key] = value

Loader.add_constructor(u'tag:yaml.org,2002:map', construct_odict)
def ordered_dict_serializer(self, data):
    return self.represent_mapping('tag:yaml.org,2002:map', data.items())
Dumper.add_representer(OrderedDict, ordered_dict_serializer)

# Likewise, when we store unicode objects make sure we don't write
# them with weird YAML tags indicating the Python data type. The
# standard string type is fine. We should do this:
#   Dumper.add_representer(unicode, lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
#
# However, the standard PyYAML representer for strings does something
# weird: if a value cannot be parsed as an integer quotes are omitted.
#
# This is incredibly odd when the value is an integer with a leading
# zero. These values are typically parsed as octal integers, meaning
# quotes would normally be required (that's good). But when the value
# has an '8' or '9' in it, this would make it an invalid octal number
# and so quotes would no longer be required (that's confusing).
# We will override str and unicode output to choose the quotation
Example #22
# Yaml module configuration


class unicode_folder(str):
    pass


class unicode_literal(str):
    pass


Loader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    lambda loader, node: collections.OrderedDict(loader.construct_pairs(node)))
Dumper.add_representer(
    collections.OrderedDict, lambda dumper, data: dumper.represent_dict(
        (copy.deepcopy(key), value) for key, value in data.items()))
Dumper.add_representer(
    dict, lambda dumper, data: dumper.represent_dict(
        (copy.deepcopy(key), value) for key, value in data.items()))
Dumper.add_representer(
    np.ndarray, lambda dumper, data: dumper.represent_list(data.tolist()))
Dumper.add_representer(tuple, lambda dumper, data: dumper.represent_list(data))
Dumper.add_representer(
    unicode_folder, lambda dumper, data: dumper.represent_scalar(
        'tag:yaml.org,2002:str', data, style='>'))
Dumper.add_representer(
    unicode_literal, lambda dumper, data: dumper.represent_scalar(
        'tag:yaml.org,2002:str', data, style='|'))
Dumper.add_representer(
    periods.Instant, lambda dumper, data: dumper.represent_scalar(
Example #23
from hepdata_lib.root_utils import RootFileReader

MAPPING_TAG = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    """represent dict."""
    return dumper.represent_dict(data.iteritems())


def dict_constructor(loader, node):
    """construct dict."""
    return defaultdict(None, loader.construct_pairs(node))  # pairs become the dict's contents, not its default_factory


Dumper.add_representer(defaultdict, dict_representer)
Loader.add_constructor(MAPPING_TAG, dict_constructor)

Dumper.add_representer(str,
                       SafeRepresenter.represent_str)

# Display deprecation warnings
warnings.filterwarnings("always", category=DeprecationWarning, module="hepdata_lib")

__version__ = "0.2.7"

class Variable(object):
    """A Variable is a wrapper for a list of values + some meta data."""

    # pylint: disable=too-many-instance-attributes
    # Eight is reasonable in this case.
Example #24
def timestamp_representer(dumper: Dumper, date: datetime) -> yaml.Node:
    """Custom representer for datetime objects in YAML."""
    return dumper.represent_scalar(TIMESTAMP_TAG, date.isoformat())
Example #25
else:
  if not "DEVKITPPC" in os.environ:
    raise Exception(r"Could not find devkitPPC. Path to devkitPPC should be in the DEVKITPPC env var")
  devkitbasepath = os.environ.get("DEVKITPPC") + "/bin"

def get_bin(name):
  if not sys.platform == "win32":
    return os.path.join(devkitbasepath, name)
  return os.path.join(devkitbasepath, name + ".exe")

if not os.path.isfile(get_bin("powerpc-eabi-as")):
  raise Exception(r"Failed to assemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC")

# Allow yaml to dump OrderedDicts for the diffs.
Dumper.add_representer(
  OrderedDict,
  lambda dumper, data: dumper.represent_dict(data.items())
)

# Change how yaml dumps lists so each element isn't on a separate line.
Dumper.add_representer(
  list,
  lambda dumper, data: dumper.represent_sequence(u'tag:yaml.org,2002:seq', data, flow_style=True)
)

# Output integers as hexadecimal.
Dumper.add_representer(
  int,
  lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', "0x%02X" % data)
)

temp_dir = tempfile.mkdtemp()
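A standalone sketch of what the three representers above change in the dumped diffs (yaml.Dumper assumed; the sample values are made up for illustration):

import yaml
from collections import OrderedDict

yaml.Dumper.add_representer(
    OrderedDict, lambda dumper, data: dumper.represent_dict(data.items()))
yaml.Dumper.add_representer(
    list, lambda dumper, data: dumper.represent_sequence(
        'tag:yaml.org,2002:seq', data, flow_style=True))
yaml.Dumper.add_representer(
    int, lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', "0x%02X" % data))

diff = OrderedDict([('offset', 0x80003100), ('bytes', [0x38, 0x60, 0x00, 0x01])])
print(yaml.dump(diff, default_flow_style=False))
# offset: 0x80003100
# bytes: [0x38, 0x60, 0x00, 0x01]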
Example #26
def path_representer(dumper: Dumper, path: Path) -> yaml.Node:
    """Custom representer for Path objects in YAML."""
    return dumper.represent_scalar(STR_TAG, str(path))
Example #27
import re
from pathlib import Path
from collections import OrderedDict

import yaml
try:
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper


def represent_none(self, _):
    return self.represent_scalar('tag:yaml.org,2002:null', '')


Dumper.add_representer(type(None), represent_none)

PATH_ROOT = Path(__file__).absolute().parent
PATH_MODELS = PATH_ROOT / 'models'
PATH_TEMPLATE = PATH_ROOT / 'templates'
PATH_OUTPUT_CODE = PATH_ROOT / '_py-codes'
PATH_OUTPUT_TEST = PATH_ROOT / '_py-tests'

TEMPLATE_DOCS = PATH_TEMPLATE / 'PY_DOCS_TEMPLATE.txt'
TEMPLATE_CODE = PATH_TEMPLATE / 'PY_CODE_TEMPLATE.txt'
TEMPLATE_TEST = PATH_TEMPLATE / 'PY_TEST_TEMPLATE.txt'


def ordered_load(stream, Loader=Loader, object_pairs_hook=OrderedDict):
    class OrderedLoader(Loader):
        pass
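The function is cut off after the subclass definition. The widely used recipe it appears to follow typically continues along these lines (a sketch, not necessarily the original file):

import yaml
from collections import OrderedDict


def ordered_load(stream, Loader=yaml.SafeLoader, object_pairs_hook=OrderedDict):
    class OrderedLoader(Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping)
    return yaml.load(stream, OrderedLoader)


ordered_load("b: 1\na: 2\n")   # OrderedDict([('b', 1), ('a', 2)])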
Example #28
def ordered_dict_representer(dumper: Dumper, omap: OrderedDict) -> yaml.Node:
    """Custom representer for OrderedDict in YAML."""
    return dumper.represent_mapping(OMAP_TAG, omap.items())
Example #29
    yield omap
    if not isinstance(node, yaml.MappingNode):
        raise yaml.constructor.ConstructorError(
            "while constructing an ordered map",
            node.start_mark,
            "expected a map, but found %s" % node.id, node.start_mark
        )
    for key, value in node.value:
        key = load.construct_object(key)
        value = load.construct_object(value)
        omap[key] = value

Loader.add_constructor(u'tag:yaml.org,2002:map', construct_odict)
def ordered_dict_serializer(self, data):
    return self.represent_mapping('tag:yaml.org,2002:map', data.items())
Dumper.add_representer(OrderedDict, ordered_dict_serializer)

# Likewise, when we store unicode objects make sure we don't write
# them with weird YAML tags indicating the Python data type. The
# standard string type is fine. We should do this:
#   Dumper.add_representer(unicode, lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
#
# However, the standard PyYAML representer for strings does something
# weird: if a value cannot be parsed as an integer quotes are omitted.
#
# This is incredibly odd when the value is an integer with a leading
# zero. These values are typically parsed as octal integers, meaning
# quotes would normally be required (that's good). But when the value
# has an '8' or '9' in it, this would make it an invalid octal number
# and so quotes would no longer be required (that's confusing).
# We will override str and unicode output to choose the quotation
Example #30
    """
    writing pandoc metadata heading

    https://pandoc.org/MANUAL.html#metadata-variables
    """
    meta_key_order = [
        'title', 'author', 'date',
        'subtitle', 'abstract', 'keywords',
        'subject',
        'description', 'category']
    data = sorted(data.items(),
                  key=lambda x: 99 if x[0] not in meta_key_order else meta_key_order.index(x[0]))
    return dumper.represent_dict(data)


Dumper.add_representer(dict, default_representer)



def normalize_filename(org) -> str:
    """
    < (less than)
    > (greater than)
    : (colon)
    " (double quote)
    / (forward slash)
    \\ (backslash)
    | (vertical bar or pipe)
    ? (question mark)
    * (asterisk)
    """
Example #31
    from yaml.representer import SafeRepresenter
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    if sys.version_info[0] < 3:
        def dict_representer(dumper, data):
            return dumper.represent_dict(data.iteritems())
    else:
        def dict_representer(dumper, data):
            return dumper.represent_dict(data.items())


    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))


    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)

    Dumper.add_representer(str, SafeRepresenter.represent_str)

    if sys.version_info[0] < 3:
        Dumper.add_representer(unicode, SafeRepresenter.represent_unicode)
except Exception:
    yaml = None


def main():
    parser = argparse.ArgumentParser(
        description='Convert between pbjson and json',
        epilog='If converting a PBJSON file with binary elements, you may need to use `--repr` since JSON cannot handle binary data.')
    parser.add_argument('-r', '--repr', action='store_true', help='instead of converting to JSON, just output the `repr` of the object')
Example #32
except ImportError:
        # use backport from pypi
    from ordereddict import OrderedDict

import yaml
from collections import defaultdict

# try to use LibYAML bindings if possible
try:
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper
from yaml.representer import SafeRepresenter
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_dict(data.items())


def dict_constructor(loader, node):
    return OrderedDict(loader.construct_pairs(node))


Dumper.add_representer(OrderedDict, dict_representer)
Dumper.add_representer(defaultdict, dict_representer)
Loader.add_constructor(_mapping_tag, dict_constructor)

Dumper.add_representer(str,
                       SafeRepresenter.represent_str)