Ejemplo n.º 1
0
def test_double_error():
    """A Mapfile with two invalid values reports exactly two validation errors."""

    s = """MAP
    NAME "sample"
    STATUS ON
    SIZE 600 400
    SYMBOLSET "../etc/symbols.txt"
    EXTENT -180 -90 180
    UNITS DD
    SHAPEPATH "../data"
    IMAGECOLOR 255 255 256
    FONTSET "../etc/fonts.txt"
    WEB
        IMAGEPATH "/ms4w/tmp/ms_tmp/"
        IMAGEURL "/ms_tmp/"
    END
    LAYER
        NAME "global-raster"
        TYPE RASTER
        STATUS DEFAULT
        DATA "bluemarble.gif"
    END
END"""

    mapfile_dict = mappyfile.loads(s, include_position=True)
    validator = Validator()
    validation_errors = validator.validate(mapfile_dict, add_comments=True)
    for err in validation_errors:
        print(err)
    assert len(validation_errors) == 2
    print(mappyfile.dumps(mapfile_dict))
Ejemplo n.º 2
0
def test_keyword_versioning():
    """A property whose metadata requires minVersion 5.4 is stripped for 5.2."""

    properties = {
        "type": "object",
        "properties": {
            "__type__": {"enum": ["label"]},
            "align": {
                "oneOf": [
                    {
                        "type": "string",
                        "enum": ["left", "center", "right"],
                        "additionalProperties": False
                    },
                    {
                        "type": "string",
                        "pattern": "^\\[(.*?)\\]$",
                        "description": "attribute"
                    }
                ],
                "metadata": {"minVersion": 5.4}
            }
        }
    }

    validator = Validator()
    assert "align" in properties["properties"]
    properties = validator.get_versioned_properties(properties, 5.2)
    print(json.dumps(properties, indent=4))
    assert "align" not in properties["properties"]
Ejemplo n.º 3
0
def test_double_error():
    """Both invalid settings in the Mapfile surface as separate errors."""

    s = """MAP
    NAME "sample"
    STATUS ON
    SIZE 600 400
    SYMBOLSET "../etc/symbols.txt"
    EXTENT -180 -90 180
    UNITS DD
    SHAPEPATH "../data"
    IMAGECOLOR 255 255 256
    FONTSET "../etc/fonts.txt"
    WEB
        IMAGEPATH "/ms4w/tmp/ms_tmp/"
        IMAGEURL "/ms_tmp/"
    END
    LAYER
        NAME "global-raster"
        TYPE RASTER
        STATUS DEFAULT
        DATA "bluemarble.gif"
    END
END"""

    d = mappyfile.loads(s, include_position=True)
    errors = Validator().validate(d, add_comments=True)
    for error in errors:
        print(error)
    assert len(errors) == 2
    print(mappyfile.dumps(d))
Ejemplo n.º 4
0
def test_all_maps():
    """
    Parse, transform, and validate every Mapfile in the sample_maps folder,
    logging any files that fail validation or cannot be processed at all.
    """
    sample_dir = os.path.join(os.path.dirname(__file__), "sample_maps")

    p = Parser(expand_includes=False)
    m = MapfileToDict(include_position=True)
    v = Validator()

    failing_maps = []

    for fn in os.listdir(sample_dir):
        print(fn)
        try:
            ast = p.parse_file(os.path.join(sample_dir, fn))
            d = m.transform(ast)
            errors = v.validate(d)
            # report validation errors without aborting the loop
            # (replaces the previous try/assert/except-AssertionError dance)
            if errors:
                logging.warning("Validation errors in %s ", fn)
                logging.warning(errors)
        except Exception as ex:
            # was `except (BaseException, UnexpectedToken)`: BaseException also
            # swallowed KeyboardInterrupt/SystemExit and made the UnexpectedToken
            # entry redundant; Exception still covers parser errors
            logging.warning("Cannot process %s ", fn)
            logging.error(ex)
            failing_maps.append(fn)

    logging.warning(failing_maps)
Ejemplo n.º 5
0
def test_object_versioning():
    """
    Exclude whole objects if they were added in a
    later version of MapServer
    """

    s = """MAP
    NAME "sample"
    LAYER
        TYPE POLYGON
        COMPOSITE
            COMPOP "lighten"
            OPACITY 50
            COMPFILTER "blur(10)"
        END
    END
END"""

    validator = Validator()

    # flagged as an error when validating for MapServer 6.0...
    d = mappyfile.loads(s, include_position=False)
    assert len(validator.validate(d, add_comments=True, version=6.0)) == 1

    # ...but valid when validating for MapServer 7.0
    d = mappyfile.loads(s, include_position=False)
    assert len(validator.validate(d, add_comments=True, version=7.0)) == 0
Ejemplo n.º 6
0
def test_all_maps():
    """
    Parse, transform, and validate every Mapfile in the sample_maps folder,
    logging any files that fail validation or cannot be processed at all.
    """
    sample_dir = os.path.join(os.path.dirname(__file__), "sample_maps")

    p = Parser(expand_includes=False)
    m = MapfileToDict(include_position=True)
    v = Validator()

    failing_maps = []

    for fn in os.listdir(sample_dir):
        print(fn)
        try:
            ast = p.parse_file(os.path.join(sample_dir, fn))
            d = m.transform(ast)
            errors = v.validate(d)
            # report validation errors without aborting the loop
            # (replaces the previous try/assert/except-AssertionError dance)
            if errors:
                logging.warning("Validation errors in %s ", fn)
                logging.warning(errors)
        except Exception as ex:
            # was `except (BaseException, UnexpectedToken)`: BaseException also
            # swallowed KeyboardInterrupt/SystemExit and made the UnexpectedToken
            # entry redundant; Exception still covers parser errors
            logging.warning("Cannot process %s ", fn)
            logging.error(ex)
            failing_maps.append(fn)

    logging.warning(failing_maps)
Ejemplo n.º 7
0
def test_line_position_mutlilines():
    """A multi-line DATA value must not throw off error line/column reporting."""

    s = """MAP
    NAME "sample"
    LAYER
        NAME "test"
        STATUS DEFAULT
        DATA "SELECT GEOM
        FROM
        TABLE"
        TYPE LINEX
    END
END"""

    parser = Parser()
    print(parser.parse(s))

    d = mappyfile.loads(s, include_position=True)
    errors = Validator().validate(d, add_comments=True)
    print(json.dumps(d, indent=4))
    for error in errors:
        print(error)
    assert len(errors) == 1

    first = errors[0]
    # the invalid TYPE keyword sits on line 9, column 9 of the source string
    assert first["line"] == 9
    assert first["column"] == 9
    print(mappyfile.dumps(d))
Ejemplo n.º 8
0
def validate(d, trace_o_incl=None, version=None):
    """
    Validate a mappyfile dictionary using the Mapfile schema.
    An optional version number can be used to check that the
    Mapfile is valid for a specific MapServer version.

    Parameters
    ----------

    d: dict
        A Python dictionary based on the mappyfile schema
    trace_o_incl: list
        A trace of the origin of lines for include files, used to find the original location of an error
    version: float
        The MapServer version number used to validate the Mapfile

    Returns
    -------

    list
          A list containing validation errors

    """
    v = Validator()
    return v.validate(d, trace_o_incl, version=version)
Ejemplo n.º 9
0
def validate(d):
    """
    Validate a mappyfile dictionary using the Mapfile schema

    d: dict
        A Python dictionary based on the mappyfile schema
    """
    return Validator().validate(d)
Ejemplo n.º 10
0
def main(msautotest_fld, create_new_copy=True):
    """
    Round-trip all Mapfiles in the msautotest folder: parse and validate
    each file, write it back out to a copy of the folder, then re-parse
    and re-validate the written output.

    Parameters
    ----------

    msautotest_fld: str
        Path to the msautotest folder
    create_new_copy: bool
        Create a fresh copy of the msautotest folder before processing
    """
    msautotest_copy = os.path.join(os.path.dirname(msautotest_fld),
                                   "msautotest_mappyfile")

    if create_new_copy:
        create_copy(msautotest_fld, msautotest_copy)

    parser = Parser()
    transformer = MapfileToDict()
    pp = PrettyPrinter()

    # these two maps aren't in utf8
    # see https://github.com/mapserver/mapserver/pull/5460
    # ignore_list = ["wms_inspire_scenario1.map","wms_inspire_scenario2.map"]

    # transparent_layer.map has an extra END, see https://github.com/mapserver/mapserver/pull/5468
    # polyline_no_clip.map needs symbol names in quotes, and SYMBOL is ambiguous

    ignore_list = [
        "polyline_no_clip.map", "poly-label-multiline-pos-auto.map",
        "poly-label-pos-auto.map", "embed_sb_rgba.map",
        "embed_sb_rgba_offset.map"
    ]  # has attributes all on the same line

    mapfiles = glob.glob(msautotest_fld + '/**/*.map')
    mapfiles = [f for f in mapfiles if os.path.basename(f) not in ignore_list]

    # target_map = "polyline_no_clip.map"
    # mapfiles = [f for f in mapfiles if os.path.basename(f) in (target_map)]

    v = Validator()

    for fn in mapfiles:

        d = parse_mapfile(parser, transformer, pp, fn)
        # fix: the keyword here was "add_messages"; every other call to
        # Validator.validate in this project uses add_comments
        errors = v.validate(d, add_comments=True)
        if errors:
            logging.warning("{} failed validation".format(fn))

        output_file = fn.replace(msautotest_fld, msautotest_copy)
        try:
            mappyfile.utils.write(d, output_file)
        except Exception:
            logging.warning(json.dumps(d, indent=4))
            logging.warning("%s could not be successfully re-written", fn)
            raise

        # now try reading it again
        print(json.dumps(d, indent=4))
        d = parse_mapfile(parser, transformer, pp, output_file)

        errors = v.validate(d, add_comments=True)
        if errors:
            logging.warning("{} failed validation".format(fn))
Ejemplo n.º 11
0
def main(msautotest_fld, create_new_copy=True):
    """
    Round-trip all Mapfiles in the msautotest folder: parse and validate
    each file, write it back out to a folder copy, then re-parse and
    re-validate the written output.
    """
    msautotest_copy = os.path.join(os.path.dirname(msautotest_fld), "msautotest_mappyfile")

    if create_new_copy:
        create_copy(msautotest_fld, msautotest_copy)

    parser = Parser()
    transformer = MapfileToDict()
    pp = PrettyPrinter()

    # these two maps aren't in utf8
    # see https://github.com/mapserver/mapserver/pull/5460
    # ignore_list = ["wms_inspire_scenario1.map","wms_inspire_scenario2.map"]

    # transparent_layer.map has an extra END, see https://github.com/mapserver/mapserver/pull/5468
    # polyline_no_clip.map needs symbol names in quotes, and SYMBOL is ambiguous

    ignore_list = [
        "polyline_no_clip.map",
        "poly-label-multiline-pos-auto.map",
        "poly-label-pos-auto.map",
        "embed_sb_rgba.map",
        "embed_sb_rgba_offset.map",
    ]  # has attributes all on the same line

    skip = set(ignore_list)
    mapfiles = [f for f in glob.glob(msautotest_fld + '/**/*.map')
                if os.path.basename(f) not in skip]

    # target_map = "polyline_no_clip.map"
    # mapfiles = [f for f in mapfiles if os.path.basename(f) in (target_map)]

    v = Validator()

    for fn in mapfiles:
        d = parse_mapfile(parser, transformer, pp, fn)
        if v.validate(d, add_comments=True):
            logging.warning("{} failed validation".format(fn))

        output_file = fn.replace(msautotest_fld, msautotest_copy)
        try:
            mappyfile.utils.write(d, output_file)
        except Exception:
            logging.warning(json.dumps(d, indent=4))
            logging.warning("%s could not be successfully re-written", fn)
            raise

        # now try reading it again
        print(json.dumps(d, indent=4))
        d = parse_mapfile(parser, transformer, pp, output_file)

        if v.validate(d, add_comments=True):
            logging.warning("{} failed validation".format(fn))
Ejemplo n.º 12
0
def test_cached_expanded_schema():
    """
    Check that the full schema properties have been expanded
    """
    v = Validator()
    schema_name = "cluster"

    # the second iteration exercises the schema cache
    for _ in range(2):
        deref_schema = v.get_expanded_schema(schema_name)
        assert next(iter(deref_schema["properties"]["filter"])) == "anyOf"
Ejemplo n.º 13
0
def test_cached_expanded_schema():
    """
    Check that the full schema properties have been expanded
    """
    validator = Validator()

    expanded = validator.get_expanded_schema("cluster")
    assert list(expanded["properties"]["filter"])[0] == "anyOf"

    # fetch the schema again - this time it should come from the cache
    expanded = validator.get_expanded_schema("cluster")
    assert list(expanded["properties"]["filter"])[0] == "anyOf"
Ejemplo n.º 14
0
    def __init__(self, indent=4, spacer=" ", quote='"', newlinechar="\n"):
        """
        Configure output formatting.

        Tip: pass a tab ("\t") as the spacer together with an indent of 1.
        """
        # only single or double quotes are supported
        assert quote in ("'", '"')

        self.indent = indent
        self.spacer = spacer * indent
        self.newlinechar = newlinechar
        self.quoter = Quoter(quote)
        self.end = u"END"
        self.validator = Validator()
Ejemplo n.º 15
0
def test_cached_schema():
    """
    Check that the raw (unexpanded) schema keeps its $ref entries,
    including when fetched a second time from the cache.
    """
    validator = Validator()
    schema_name = "cluster"

    jsn_schema = validator.get_schema_validator(schema_name).schema
    assert next(iter(jsn_schema["properties"]["filter"])) == "$ref"

    # fetch the schema again - this time from the cache
    jsn_schema = validator.get_schema_validator(schema_name).schema
    assert next(iter(jsn_schema["properties"]["filter"])) == "$ref"
Ejemplo n.º 16
0
def test_cached_schema():
    """
    Check that the raw (unexpanded) schema keeps its $ref entries,
    including when fetched a second time from the cache.
    """
    v = Validator()
    schema_name = "cluster"

    # the second iteration exercises the validator cache
    for _ in range(2):
        validator = v.get_schema_validator(schema_name)
        jsn_schema = validator.schema
        assert list(jsn_schema["properties"]["filter"])[0] == "$ref"
Ejemplo n.º 17
0
def test_deref():
    """
    Check that the full schema properties have been expanded
    """
    validator = Validator()
    schema_name = "cluster"

    # the raw schema still contains $ref entries...
    raw = validator.get_schema_validator(schema_name).schema
    print(json.dumps(raw, indent=4))
    print(raw["properties"]["filter"])
    assert next(iter(raw["properties"]["filter"])) == "$ref"

    # ...whereas the expanded schema has them dereferenced
    expanded = validator.get_expanded_schema(schema_name)
    print(json.dumps(expanded, indent=4))
    print(expanded["properties"]["filter"])
    assert next(iter(expanded["properties"]["filter"])) == "anyOf"
Ejemplo n.º 18
0
def test_deref():
    """
    Check that the full schema properties have been expanded
    """
    v = Validator()
    schema_name = "cluster"

    jsn_schema = v.get_schema_validator(schema_name).schema
    print(json.dumps(jsn_schema, indent=4))
    print(jsn_schema["properties"]["filter"])
    # the raw schema keeps its $ref entry
    assert list(jsn_schema["properties"]["filter"])[0] == "$ref"

    deref_schema = v.get_expanded_schema(schema_name)
    print(json.dumps(deref_schema, indent=4))
    print(deref_schema["properties"]["filter"])
    # the expanded schema has dereferenced it to anyOf
    assert list(deref_schema["properties"]["filter"])[0] == "anyOf"
Ejemplo n.º 19
0
def test_extra_property_validation():
    """
    Check root errors are handled correctly
    """
    s = """
    MAP
        LAYER
            TYPE POLYGON
        END
    END
    """

    d = to_dict(s)
    # inject an unknown key at the root of the dict
    d["__unwanted__"] = "error"
    errors = Validator().validate(d, add_comments=True)
    print(errors)
    assert len(errors) == 1
Ejemplo n.º 20
0
def test_root_position():
    """
    Check the root objects position is found correctly
    """

    s = """
    MAP
        METADATA
            "wms_title"    "Toronto Landsat 5 TM"
        END
    END
    """

    d = mappyfile.loads(s, include_position=True)
    validator = Validator()
    # MAP sits on the second line of the source string
    assert d["__position__"]["line"] == 2
    assert len(validator.validate(d, add_comments=True)) == 1
Ejemplo n.º 21
0
def test_extra_property_validation():
    """
    Check root errors are handled correctly
    """
    s = """
    MAP
        LAYER
            TYPE POLYGON
        END
    END
    """

    mapfile_dict = to_dict(s)
    # add a key that is not part of the schema
    mapfile_dict["unwanted"] = "error"
    validator = Validator()
    errors = validator.validate(mapfile_dict, add_comments=True)
    print(errors)
    assert len(errors) == 1
Ejemplo n.º 22
0
def test_cluster_validation_fail():
    """An invalid CLUSTER REGION value produces a single validation error."""

    s = u"""
    MAP
        LAYER
            CLUSTER
                MAXDISTANCE 50
                REGION "ELLIPSEZ"
            END
        END
    END
    """

    d = mappyfile.loads(s, include_position=True)
    errors = Validator().validate(d, add_comments=True)
    print(mappyfile.dumps(d))
    assert len(errors) == 1
Ejemplo n.º 23
0
def output(s, include_position=True, schema_name="map"):
    """
    Parse, transform, validate, and pretty print
    the result
    """
    parser = Parser()
    transformer = MapfileToDict(include_position=include_position)
    tree = parser.parse(s)
    logging.debug(tree.pretty())
    d = transformer.transform(tree)
    logging.debug(json.dumps(d, indent=4))
    errors = Validator().validate(d, schema_name=schema_name)
    logging.error(errors)
    printer = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
    result = printer.pprint(d)
    logging.debug(result)
    assert not errors
    return result
Ejemplo n.º 24
0
def validate(d):
    """
    Validate a mappyfile dictionary by using the Mapfile schema

    Parameters
    ----------

    d: dict
        A Python dictionary based on the mappyfile schema

    Returns
    -------

    list
          A list containing validation errors

    """
    v = Validator()
    return v.validate(d)
Ejemplo n.º 25
0
def test_version_warnings():
    """Validating this Mapfile against MapServer 8.0 yields a single error."""

    s = """MAP
    NAME "sample"
    LAYER
        NAME "test"
        TYPE LINE
        CLASS
            #MADEUP True
            COLOR 0 0 0
        END
    END
END"""

    d = mappyfile.loads(s, include_position=False)
    errors = Validator().validate(d, add_comments=True, version=8.0)
    print(errors)
    assert len(errors) == 1
Ejemplo n.º 26
0
def output(s, include_position=True, schema_name="map"):
    """
    Parse, transform, validate, and pretty print
    the result
    """
    p = Parser()
    m = MapfileToDict(include_position=include_position)
    v = Validator()

    ast = p.parse(s)
    logging.debug(ast.pretty())
    d = m.transform(ast)
    logging.debug(json.dumps(d, indent=4))

    errors = v.validate(d, schema_name=schema_name)
    logging.error(errors)

    pretty = PrettyPrinter(indent=0, newlinechar=" ", quote="'").pprint(d)
    logging.debug(pretty)
    assert len(errors) == 0
    return pretty
Ejemplo n.º 27
0
def test_cluster_validation():
    """A valid CLUSTER block passes validation with no errors."""

    s = u"""
    MAP
        LAYER
            TYPE POINT
            CLUSTER
                MAXDISTANCE 50
                REGION "ELLIPSE"
            END
        END
    END
    """

    d = mappyfile.loads(s, include_position=True)
    validator = Validator()
    # MAP sits on the second line of the source string
    assert d["__position__"]["line"] == 2
    errors = validator.validate(d, add_comments=True)
    print(mappyfile.dumps(d))
    assert not errors
Ejemplo n.º 28
0
def save_full_schema(output_file):
    """
    Validate every individual schema file, then write the combined
    versioned schema to output_file and validate that as well.
    """
    validator = Validator()

    # check individual schema files
    schemas_folder = validator.get_schemas_folder()
    for fn in glob.glob(schemas_folder + '/*.json'):
        check_schema(fn)

    # now check the combined schema
    jsn = validator.get_versioned_schema()
    with open(output_file, "w") as f:
        f.write(json.dumps(jsn, indent=4, sort_keys=False))

    check_schema(output_file)
Ejemplo n.º 29
0
def schema(ctx, output_file, version=None):
    """
    Save the Mapfile schema to a file. Set the version parameter to output a specific version.
    Note output-file will be overwritten if it already exists.

    Examples:

        mappyfile schema C:/Temp/mapfile-schema.json

    Example of a specific version:

        mappyfile schema C:/Temp/mapfile-schema-7-6.json --version=7.6
    """
    jsn = Validator().get_versioned_schema(version)
    serialized = json.dumps(jsn, sort_keys=True, indent=4)

    with codecs.open(output_file, "w", encoding="utf-8") as f:
        f.write(serialized)

    sys.exit(0)
Ejemplo n.º 30
0
def test_property_versioning():
    """A oneOf alternative with minVersion 6.2 is removed when targeting 6.0."""

    properties = {
        "force": {
            "oneOf": [
                {"type": "boolean"},
                {
                    "enum": ["group"],
                    "metadata": {"minVersion": 6.2}
                }
            ]
        }
    }

    validator = Validator()
    alternatives = properties["force"]["oneOf"]
    assert "enum" in alternatives[1]
    assert len(alternatives) == 2

    properties = validator.get_versioned_properties(properties, 6.0)
    print(json.dumps(properties, indent=4))
    assert len(properties["force"]["oneOf"]) == 1
Ejemplo n.º 31
0
def output(fn):
    """
    Parse, transform, and pretty print
    the result
    """
    parser = Parser(expand_includes=False)
    transformer = MapfileToDict()
    validator = Validator()

    try:
        tree = parser.parse_file(fn)
        d = transformer.transform(tree)
        logging.debug("Number of layers: {}".format(len(d["layers"])))

        errors = validator.validate(d)
        assert len(errors) == 0
    except Exception as ex:
        logging.exception(ex)
        logging.warning("%s could not be successfully parsed", fn)
        d = None
        raise

    if d:
        try:
            s = mappyfile.utils.dumps(d)
        except Exception:
            logging.warning(json.dumps(d, indent=4))
            logging.warning("%s could not be successfully re-written", fn)
            raise

        # round-trip: parse the pretty-printed output again
        d = transformer.transform(parser.parse(s))
        errors = validator.validate(d)
        assert len(errors) == 0
Ejemplo n.º 32
0
def output(fn):
    """
    Parse, transform, and pretty print
    the result (comments and positions are preserved through the round-trip)
    """
    parser = Parser(expand_includes=False, include_comments=True)
    transformer = MapfileToDict(include_position=True, include_comments=True)
    validator = Validator()

    try:
        tree = parser.parse_file(fn)
        d = transformer.transform(tree)
        logging.debug("Number of layers: {}".format(len(d["layers"])))

        errors = validator.validate(d, add_comments=True)
        assert len(errors) == 0
    except Exception as ex:
        logging.exception(ex)
        logging.warning("%s could not be successfully parsed", fn)
        d = None
        raise

    if d:
        try:
            s = mappyfile.utils.dumps(d)
        except Exception:
            logging.warning(json.dumps(d, indent=4))
            logging.warning("%s could not be successfully re-written", fn)
            raise

        # round-trip: parse the pretty-printed output again
        d = transformer.transform(parser.parse(s))
        errors = validator.validate(d)
        assert len(errors) == 0
Ejemplo n.º 33
0
def validate(d, version=None):
    """
    Validate a mappyfile dictionary using the Mapfile schema.
    An optional version number can be used to check that the
    Mapfile is valid for a specific MapServer version.

    Parameters
    ----------

    d: dict
        A Python dictionary based on the mappyfile schema
    version: float
        The MapServer version number used to validate the Mapfile

    Returns
    -------

    list
          A list containing validation errors

    """
    v = Validator()
    return v.validate(d, version=version)
Ejemplo n.º 34
0
def output(s, include_position=True, schema_name="map"):
    """
    Parse, transform, validate, and pretty print
    the result
    """
    parser = Parser()
    transformer = MapfileToDict(include_position=include_position)

    # https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
    logging.info(inspect.stack()[1][3])

    tree = parser.parse(s)
    logging.debug(tree.pretty())
    d = transformer.transform(tree)
    logging.debug(json.dumps(d, indent=4))

    errors = Validator().validate(d, schema_name=schema_name)
    logging.error(errors)

    printer = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
    result = printer.pprint(d)
    logging.debug(result)
    assert not errors
    return result
Ejemplo n.º 35
0
def output(s, include_position=True, schema_name="map"):
    """
    Parse, transform, validate, and pretty print
    the result
    """
    p = Parser()
    m = MapfileToDict(include_position=include_position)
    v = Validator()

    # https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
    logging.info(inspect.stack()[1][3])

    ast = p.parse(s)
    logging.debug(ast.pretty())
    d = m.transform(ast)
    logging.debug(json.dumps(d, indent=4))

    errors = v.validate(d, schema_name=schema_name)
    logging.error(errors)

    pretty = PrettyPrinter(indent=0, newlinechar=" ", quote="'").pprint(d)
    logging.debug(pretty)
    assert len(errors) == 0
    return pretty
Ejemplo n.º 36
0
def test_multiple_layer_projection():
    """
    TODO add validation for this case
    """

    s = """
    MAP
    LAYER
        PROJECTION
            "init=epsg:4326"
            "init=epsg:4326"
        END
        PROJECTION
            "init=epsg:4326"
            "init=epsg:4326"
        END
    END
    END
    """
    parser = Parser()
    tree = parser.parse(s)
    d = MapfileToDict(include_position=True).transform(tree)
    print(json.dumps(d, indent=4))
    # the duplicate PROJECTION blocks collapse to a single entry
    assert len(d["projection"]) == 1

    parser = Parser()
    transformer = MapfileToDict()
    d = transformer.transform(parser.parse(s))
    print(json.dumps(d, indent=4))

    from mappyfile.validator import Validator
    return Validator().validate(d)
Ejemplo n.º 37
0
def test_multiple_layer_projection():
    """
    TODO add validation for this case
    """

    s = """
    MAP
    LAYER
        PROJECTION
            "init=epsg:4326"
            "init=epsg:4326"
        END
        PROJECTION
            "init=epsg:4326"
            "init=epsg:4326"
        END
    END
    END
    """
    p = Parser()
    ast = p.parse(s)
    t = MapfileToDict(include_position=True)
    d = t.transform(ast)
    print(json.dumps(d, indent=4))
    # duplicate PROJECTION blocks collapse to one entry
    assert len(d["projection"]) == 1

    p = Parser()
    m = MapfileToDict()
    d = m.transform(p.parse(s))
    print(json.dumps(d, indent=4))

    from mappyfile.validator import Validator
    v = Validator()
    return v.validate(d)
Ejemplo n.º 38
0
def create(type, version=None):
    """
    Create a new mappyfile object, using MapServer defaults (if any).

    Parameters
    ----------

    type: str
        The mappyfile type to be stored in the __type__ property
        (the parameter shadows the builtin ``type``; kept for
        backwards compatibility with existing callers)
    version: float
        Optional MapServer version used to select the schema

    Returns
    -------

    dict
        A Python dictionary representing the Mapfile object in the mappyfile format

    Raises
    ------

    SyntaxError
        If no schema exists for the requested type
    """

    # get the schema for this type
    validator = Validator()
    try:
        schema = validator.get_versioned_schema(version=version, schema_name=type)
    except IOError:
        raise SyntaxError(
            "The mappyfile type '{}' does not exist!".format(type))

    d = OrderedDict()
    d["__type__"] = type

    # fix: the loop previously reused the name "v", shadowing the Validator
    # instance bound earlier in the function
    for key, prop in sorted(schema["properties"].items()):
        if "default" in prop:
            d[key] = prop["default"]

    return d
Ejemplo n.º 39
0
def test_add_comments():
    """Validation errors can be added to the dict as comments and printed."""
    s = """
    MAP
        IMAGECOLOR 'FF00FF'
        LAYER
            EXTENT 0 0 0
            TYPE POLYGON
        END
    END
    """
    d = to_dict(s)
    errors = Validator().validate(d, add_comments=True)

    print(len(errors))
    print(json.dumps(d, indent=4))

    for error in errors:
        print(error)

    pp = PrettyPrinter(indent=4, quote='"')  # expected
    print(pp.pprint(d))
Ejemplo n.º 40
0
    def __init__(self,
                 indent=4,
                 spacer=" ",
                 quote='"',
                 newlinechar="\n",
                 end_comment=False,
                 align_values=False,
                 separate_complex_types=False,
                 **kwargs):
        """
        Configure output formatting.

        Tip: pass a tab ("\t") as the spacer together with an indent of 1.
        """
        # only single or double quotes are supported
        assert quote in ("'", '"')

        self.indent = indent
        self.spacer = spacer * indent
        self.quoter = Quoter(quote)
        self.newlinechar = newlinechar
        self.end_comment = end_comment
        self.end = u"END"
        self.validator = Validator()
        self.align_values = align_values
        self.separate_complex_types = separate_complex_types
Ejemplo n.º 41
0
def test_add_comments():
    """Errors are written back into the dict as comments and pretty printed."""
    s = """
    MAP
        IMAGECOLOR 'FF00FF'
        LAYER
            EXTENT 0 0 0
            TYPE POLYGON
        END
    END
    """
    d = to_dict(s)
    v = Validator()
    errors = v.validate(d, add_comments=True)

    print(len(errors))
    print(json.dumps(d, indent=4))

    for e in errors:
        print(e)

    printer = PrettyPrinter(indent=4, quote='"')  # expected
    res = printer.pprint(d)
    print(res)
Ejemplo n.º 42
0
def validate(s):
    """Parse the Mapfile string *s* to a dict and run schema validation on it."""
    return Validator().validate(to_dict(s))
Ejemplo n.º 43
0
def validate(d):
    """Run schema validation on the mappyfile dictionary *d*."""
    return Validator().validate(d)
Ejemplo n.º 44
0
class PrettyPrinter(object):
    def __init__(self,
                 indent=4,
                 spacer=" ",
                 quote='"',
                 newlinechar="\n",
                 end_comment=False,
                 align_values=False,
                 separate_complex_types=False,
                 **kwargs):
        """
        Create a pretty-printer.

        Tip: use a tab character as the spacer with an indent of 1 for
        tab-indented output.
        """

        # only single or double quotes are valid quote characters
        assert quote in ("'", '"')

        # formatting options
        self.indent = indent
        self.spacer = spacer * indent
        self.newlinechar = newlinechar
        self.align_values = align_values
        self.separate_complex_types = separate_complex_types
        self.end_comment = end_comment
        # fixed END keyword plus helper objects
        self.end = u"END"
        self.quoter = Quoter(quote)
        self.validator = Validator()

    def __is_metadata(self, key):
        """
        Check to see if the property is hidden metadata
        e.g. "__type__", "__comments__", "__position__"
        """
        if key.startswith("__") and key.endswith("__"):
            return True
        else:
            return False

    def compute_aligned_max_indent(self, max_key_length):
        """
        Computes the indentation as a multiple of self.indent for aligning
        values at the same column based on the maximum key length.
        Example:
        key         value1
        longkey     value2
        longestkey  value3 <-- column at 12, indent of 4, determined by "longestkey"
        """
        indent = max(1, self.indent)
        return int((int(max_key_length / indent) + 1) * indent)

    def compute_max_key_length(self, composite):
        """
        Computes the maximum length of all keys (non-recursive) in the passed
        composite.
        """
        length = 0
        for attr, value in composite.items():
            attr_length = len(attr)
            if (not self.__is_metadata(attr)
                    and attr not in ("metadata", "validation", "values",
                                     "connectionoptions")
                    and not self.is_hidden_container(attr, value)
                    and not attr == "pattern" and not attr == "projection"
                    and not attr == "points" and not attr == "config"
                    and not self.is_composite(value)):
                length = max(length, attr_length)

        return length

    def separate_complex(self, composite, level):
        if not self.separate_complex_types:
            return
        for key in list(composite.keys()):
            if self.is_complex_type(composite, key, level):
                utils.dict_move_to_end(composite, key)

    def whitespace(self, level, indent):
        return self.spacer * (level + indent)

    def add_start_line(self, key, level):
        return self.whitespace(level, 1) + key.upper()

    def add_end_line(self, level, indent, key):
        end_line = self.whitespace(level, indent) + self.end
        if self.end_comment:
            end_line = "{} # {}".format(end_line, key.upper())
        return end_line

    def __format_line(self, spacer, key, value, aligned_max_indent=0):
        if ((aligned_max_indent is None) or (aligned_max_indent == 0)):
            aligned_max_indent = len(key) + 1
        indent = " " * (aligned_max_indent - len(key))
        tmpl = u"{spacer}{key}{indent}{value}"
        d = {"spacer": spacer, "key": key, "value": value, "indent": indent}
        return tmpl.format(**d)

    def process_key_dict(self, key, d, level):
        """
        Process key value dicts e.g. METADATA "key" "value"
        """

        # add any composite level comments
        comments = d.get("__comments__", {})
        lines = []
        self._add_type_comment(level, comments, lines)

        lines += [self.add_start_line(key, level)]
        lines += self.process_dict(d, level, comments)
        lines.append(self.add_end_line(level, 1, key))

        return lines

    def process_dict(self, d, level, comments):
        """
        Process keys and values within a block
        """
        lines = []

        aligned_max_indent = 0
        if (self.align_values):
            max_key_length = self.compute_max_key_length(
                d) + 2  # add length of quotes
            aligned_max_indent = self.compute_aligned_max_indent(
                max_key_length)

        for k, v in d.items():
            if not self.__is_metadata(k):
                qk = self.quoter.add_quotes(k)
                qv = self.quoter.add_quotes(v)
                line = self.__format_line(self.whitespace(level, 2), qk, qv,
                                          aligned_max_indent)
                line += self.process_attribute_comment(comments, k)
                lines.append(line)

        return lines

    def process_config_dict(self, key, d, level):
        """
        Process the CONFIG block
        """
        lines = []
        for k, v in d.items():
            k = "CONFIG {}".format(self.quoter.add_quotes(k.upper()))
            v = self.quoter.add_quotes(v)
            lines.append(self.__format_line(self.whitespace(level, 1), k, v))
        return lines

    def process_repeated_list(self, key, lst, level, aligned_max_indent=1):
        """
        Process blocks of repeated keys e.g. FORMATOPTION
        """
        lines = []

        for v in lst:
            k = key.upper()
            v = self.quoter.add_quotes(v)
            lines.append(
                self.__format_line(self.whitespace(level, 1), k, v,
                                   aligned_max_indent))

        return lines

    def process_projection(self, key, lst, level):

        lines = [self.add_start_line(key, level)]

        if self.quoter.is_string(lst):
            val = self.quoter.add_quotes(lst)
            # the value has been manually set to a single string projection
            lines.append(u"{}{}".format(self.whitespace(level, 2), val))
        elif len(lst) == 1 and lst[0].upper() == "AUTO":
            lines.append(u"{}{}".format(self.whitespace(level, 2), "AUTO"))
        else:
            for v in lst:
                v = self.quoter.add_quotes(v)
                lines.append(u"{}{}".format(self.whitespace(level, 2), v))

        lines.append(self.add_end_line(level, 1, key))
        return lines

    def format_pair_list(self, key, pair_list, level):
        """
        Process lists of pairs (e.g. PATTERN block)
        """

        lines = [self.add_start_line(key, level)]

        list_spacer = self.spacer * (level + 2)
        pairs = ["{}{} {}".format(list_spacer, p[0], p[1]) for p in pair_list]
        lines += pairs

        lines.append(self.add_end_line(level, 1, key))

        return lines

    def format_repeated_pair_list(self, key, root_list, level):
        """
        Process (possibly) repeated lists of pairs e.g. POINTs blocks
        """

        lines = []

        def depth(L):
            return isinstance(L, (tuple, list)) and max(map(depth, L)) + 1

        if depth(root_list) == 2:
            # single set of points only
            root_list = [root_list]

        for pair_list in root_list:
            lines += self.format_pair_list(key, pair_list, level)

        return lines

    def is_composite(self, val):

        if isinstance(val, dict) and "__type__" in val:
            return True
        else:
            return False

    def is_complex_type(self, composite, key, level):
        """
        Return True if *key* in *composite* should be treated as a complex
        (block) type rather than a simple key/value attribute.
        """
        # symbol needs special treatment
        if key == "symbol" and level > 0:
            return False
        # NOTE(review): is_composite(key) is passed the *key* string, which is
        # never a dict, so that check always returns False - possibly
        # is_composite(composite[key]) was intended; confirm before changing
        return key in COMPLEX_TYPES or self.is_composite(
            key) or self.is_hidden_container(key, composite[key])

    def is_hidden_container(self, key, val):
        """
        Return True when *key* is not one of the Mapfile keywords and its
        value is a list (i.e. a container of child objects)
        """
        return key in OBJECT_LIST_KEYS and isinstance(val, list)

    def pprint(self, composites):
        """
        Print out a nicely indented Mapfile
        """

        # if only a single composite is used then cast to list
        # and allow for multiple root composites

        if composites and not isinstance(composites, list):
            composites = [composites]

        lines = []

        for composite in composites:
            type_ = composite["__type__"]
            if type_ in ("metadata", "validation", "connectionoptions"):
                # types are being parsed directly, and not as an attr of a parent
                lines += self.process_key_dict(type_, composite, level=0)
            else:
                lines += self._format(composite)

        result = str(self.newlinechar.join(lines))
        return result

    def get_attribute_properties(self, type_, attr):

        jsn_schema = self.validator.get_expanded_schema(type_)
        props = jsn_schema["properties"]

        # check if a value needs to be quoted or not, by referring to the JSON schema

        try:
            attr_props = props[attr]
        except KeyError as ex:
            log.error("The key '{}' was not found in the JSON schema for '{}'".
                      format(attr, type_))
            log.error(ex)
            return {}

        return attr_props

    def is_expression(self, option):
        return "description" in option and (option["description"]
                                            == "expression")

    def check_options_list(self, options_list, value):
        for option in options_list:
            if "enum" in option and value.lower() in option["enum"]:
                if value.lower() == "end":
                    # in GEOTRANSFORM "end" is an attribute value
                    return self.quoter.add_quotes(value)
                else:
                    return value.upper()
            elif self.is_expression(option):
                if value.endswith("'i") or value.endswith('"i'):
                    return value

        if self.quoter.in_slashes(value):
            return value
        else:
            return self.quoter.add_quotes(value)

    def format_value(self, attr, attr_props, value):
        """
        Format an attribute *value* for output, using its JSON schema
        properties *attr_props* to decide on quoting and casing.

        TODO - refactor and add more specific tests (particularly for expressions)

        Raises:
            ValueError: if *value* is an empty dict
        """
        # booleans become the TRUE / FALSE keywords
        if isinstance(value, bool):
            return str(value).upper()

        # enum-constrained values are keywords: uppercased and unquoted
        # (compop is the exception and keeps its quotes)
        if any(i in ["enum"] for i in attr_props):

            if isinstance(value, dict) and not value:
                raise ValueError(
                    "The property {} has an empty dictionary as a value".
                    format(attr))

            if not isinstance(value, numbers.Number):
                if attr == "compop":
                    return self.quoter.add_quotes(value)
                else:
                    return value.upper(
                    )  # value is from a set list, no need for quote
            else:
                return value

        if "type" in attr_props and attr_props[
                "type"] == "string":  # and "enum" not in attr_props
            # check schemas for expressions and handle accordingly
            if self.is_expression(attr_props) and self.quoter.in_slashes(
                    value):
                return value
            elif self.is_expression(attr_props) and (value.endswith("'i")
                                                     or value.endswith('"i')):
                # for case insensitive regex
                return value
            else:
                return self.quoter.add_quotes(value)

        # expressions can be one of a string or an expression in brackets
        if any(i in ["oneOf", "anyOf"]
               for i in attr_props):  # and check that type string is in list
            if "oneOf" in attr_props:
                options_list = attr_props["oneOf"]
            else:
                options_list = attr_props["anyOf"]
            if self.quoter.is_string(value):
                if self.quoter.in_parenthesis(value):
                    pass
                elif attr == "expression" and self.quoter.in_braces(value):
                    # don't add quotes to list expressions such as {val1, val2}
                    pass
                elif attr != "text" and self.quoter.in_brackets(value):
                    # TEXT expressions are often "[field1]-[field2]" so need to leave quotes for these
                    pass
                elif value.startswith("NOT ") and self.quoter.in_parenthesis(
                        value[4:]):
                    value = "NOT {}".format(value[4:])
                else:
                    value = self.check_options_list(options_list, value)

        # lists of values are joined into a single space-separated string
        if isinstance(value, list):
            new_values = []

            for v in value:
                if not isinstance(v, numbers.Number) and attr not in [
                        "offset", "polaroffset"
                ]:
                    # don't add quotes to list of attributes for offset / polaroffset
                    v = self.quoter.add_quotes(v)
                new_values.append(v)

            value = " ".join(list(map(str, new_values)))
        else:
            value = self.quoter.escape_quotes(value)

        return value

    def process_attribute(self,
                          type_,
                          attr,
                          value,
                          level,
                          aligned_max_indent=1):
        """
        Process one of the main composite types (see the type_ value)
        """

        attr_props = self.get_attribute_properties(type_, attr)
        value = self.format_value(attr, attr_props, value)
        line = self.__format_line(self.whitespace(level, 1), attr.upper(),
                                  value, aligned_max_indent)
        return line

    def format_comment(self, spacer, value):
        return "{}{}".format(spacer, value)

    def process_composite_comment(self, level, comments, key):
        """
        Process comments for composites such as MAP, LAYER etc.
        """
        if key not in comments:
            comment = ""
        else:
            value = comments[key]
            spacer = self.whitespace(level, 0)

            if isinstance(value, list):
                comments = [self.format_comment(spacer, v) for v in value]
                comment = self.newlinechar.join(comments)
            else:
                comment = self.format_comment(spacer, value)

        return comment

    def process_attribute_comment(self, comments, key):

        if key not in comments:
            comment = ""
        else:
            value = comments[key]
            spacer = " "

            # for multiple comments associated with an attribute
            # simply join them together as a single string
            if isinstance(value, list):
                value = " ".join(value)

            comment = self.format_comment(spacer, value)

        return comment

    def _add_type_comment(self, level, comments, lines):
        comment = self.process_composite_comment(level, comments, '__type__')

        if comment:
            lines.append(str(comment))

    def _format(self, composite, level=0):

        lines = []
        type_ = None

        # get any comments associated with the composite
        comments = composite.get("__comments__", {})

        if isinstance(composite, dict) and '__type__' in composite:
            type_ = composite['__type__']
            assert type_ in COMPOSITE_NAMES.union(SINGLETON_COMPOSITE_NAMES)
            is_hidden = False
            self._add_type_comment(level, comments, lines)
            s = self.whitespace(level, 0) + type_.upper()
            lines.append(s)

        aligned_max_indent = 0
        if self.align_values:
            max_key_length = self.compute_max_key_length(composite)
            aligned_max_indent = self.compute_aligned_max_indent(
                max_key_length)

        self.separate_complex(composite, level)

        for attr, value in composite.items():
            if self.__is_metadata(attr):
                # skip hidden attributes
                continue
            elif self.is_hidden_container(attr, value):
                # now recursively print all the items in the container
                for v in value:
                    lines += self._format(v, level + 1)
            elif attr == "pattern":
                lines += self.format_pair_list(attr, value, level)
            elif attr in ("metadata", "validation", "values",
                          "connectionoptions"):
                # metadata and values are also composites
                # but will be processed here
                lines += self.process_key_dict(attr, value, level)

            elif attr == "projection":
                lines += self.process_projection(attr, value, level)
            elif attr in REPEATED_KEYS:
                lines += self.process_repeated_list(attr, value, level,
                                                    aligned_max_indent)
            elif attr == "points":
                lines += self.format_repeated_pair_list(attr, value, level)
            elif attr == "config":
                lines += self.process_config_dict(attr, value, level)
            elif self.is_composite(value):
                lines += self._format(value, level +
                                      1)  # recursively add the child class
            else:
                # standard key value pair
                if not type_:
                    raise UnboundLocalError(
                        "The Mapfile object is missing a __type__ attribute")
                line = self.process_attribute(type_, attr, value, level,
                                              aligned_max_indent)
                line += self.process_attribute_comment(comments, attr)
                lines.append(line)

        if not is_hidden:
            # close the container block with an END
            lines.append(self.add_end_line(level, 0, type_))

        return lines