    def __init__(self, config, logger = None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(item = 8180, id = 3, level = 3, tags = ['merge', 'public equipment'],
            title = T_('{0} toilets not integrated', 'Lyon'))

        self.init(
            u"https://data.grandlyon.com/equipements/toilettes-publiques-sur-le-territoire-du-grand-lyon/",
            u"Toilettes publiques",
            GeoJSON(Source(attribution = u"Métropole de Lyon", millesime = "12/2017",
                    fileUrl = u"https://download.data.grandlyon.com/wfs/grandlyon?SERVICE=WFS&VERSION=2.0.0&outputformat=GEOJSON&request=GetFeature&typename=gin_nettoiement.gintoilettepublique&SRSNAME=urn:ogc:def:crs:EPSG::4326"),
                extractor = lambda geojson: geojson),
            Load("geom_x", "geom_y"),
            Mapping(
                select = Select(
                    types = ["nodes", "ways"],
                    tags = {"amenity": "toilets"}),
                conflationDistance = 100,
                generate = Generate(
                    static1 = {
                        "amenity": "toilets",
                        "access": "public"},
                    static2 = {"source": self.source},
                    mapping1 = {
                        "operator": lambda res: res['gestionnaire'] if res and res['gestionnaire'] else None,
                        "ref": lambda res: res['identifiant'] if res and res['identifiant'] else None } )))
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8150,
            id=31,
            level=3,
            tags=[
                'merge', 'public equipment', 'motorcycle', 'fix:survey',
                'fix:picture'
            ],
            title=T_('Paris motorcycle parking not integrated'))

        self.init(
            "https://opendata.paris.fr/explore/dataset/stationnement-voie-publique-emplacements/information/",
            "Stationnement sur voie publique - emplacements motos",
            CSV(
                SourceOpenDataSoft(
                    attribution="Ville de Paris",
                    url="https://opendata.paris.fr/explore/dataset/stationnement-voie-publique-emplacements")),
            Load("geo_point_2d",
                 "geo_point_2d",
                 select={
                     "Régime prioritaire": "2 ROUES",
                     "Régime particulier": "Motos",
                 },
                 xFunction=lambda x: Load.float_comma(x.split(',')[1]),
                 yFunction=lambda y: Load.float_comma(y.split(',')[0])),
            Conflate(select=Select(types=["nodes", "ways"],
                                   tags={"amenity": "motorcycle_parking"}),
                     conflationDistance=20,
                     mapping=Mapping(static1={"amenity": "motorcycle_parking"},
                                     static2={"source": self.source},
                                     mapping1=tag_mapping)))
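
A note on the Load() call above: an OpenDataSoft "geo_point_2d" column holds a single "lat, lon" string, which is why the same column is passed for both coordinates and then split by the two lambdas (index 1 for longitude/x, index 0 for latitude/y). Below is a minimal standalone sketch of that parsing, with a made-up sample value and a simplified stand-in for Load.float_comma:

def float_comma(value):
    # Simplified stand-in for illustration: accept decimal commas as well as points.
    return float(value.replace(",", "."))

geo_point_2d = "48.839023, 2.301914"          # hypothetical "lat, lon" record
x = float_comma(geo_point_2d.split(',')[1])   # longitude
y = float_comma(geo_point_2d.split(',')[0])   # latitude
print(x, y)                                   # 2.301914 48.839023
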
    def analyser_osmosis_common(self):
        self.run(
            sql10, lambda res: {
                "class": 1,
                "data": [self.relation, self.positionAsText],
                "text": T_("Large relation of type {0}", res[2])
            })
    def __init__(self, config, logger = None):
        Analyser_Merge_Point.__init__(self, config, logger)
        self.def_class_missing_official(item = 8130, id = 21, level = 3, tags = ['merge', 'parking'],
            title = T_('{0} parking for disabled not integrated', 'BM'))

        self.init(
            'https://opendata.bordeaux-metropole.fr/explore/dataset/grs_gigc_p',
            'Place de stationnement PMR',
            SHP(SourceOpenDataSoft(
                attribution="Bordeaux Métropole",
                url="https://opendata.bordeaux-metropole.fr/explore/dataset/grs_gigc_p",
                format="shp",
                zip="grs_gigc_p.shp")),
            LoadGeomCentroid(),
            Conflate(
                select = Select(
                    types = ["nodes", "ways"],
                    tags = {
                        "amenity": "parking",
                        "capacity:disabled": None}),
                conflationDistance = 100,
                mapping = Mapping(
                    static1 = {
                        "amenity": "parking",
                        "capacity:disabled": "yes"},
                    static2 = {"source": self.source} )))
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8370,
            id=100,
            level=3,
            tags=['merge', "emergency", "fix:picture", "fix:survey"],
            title=T_('Defibrillator not integrated'))

        self.init(
            "https://www.data.gouv.fr/fr/datasets/localisation-des-defibrillateurs-municipaux-dae-ville-de-lorient/",
            "Localisation des défibrillateurs municipaux (DAE) - Ville de Lorient",
            GeoJSON(
                SourceDataGouv(
                    attribution="Ville de Lorient",
                    dataset="5c70a7f206e3e755537bb849",
                    resource="01f2a133-4929-4001-906c-412f682d0d59")),
            Load("geom_x", "geom_y"),
            Conflate(
                select=Select(types=["nodes", "ways", "relations"],
                              tags={"emergency": "defibrillator"}),
                conflationDistance=50,
                mapping=Mapping(
                    static1={"emergency": "defibrillator"},
                    static2={"source": self.source},
                    text=lambda tags, fields: {
                        "en":
                        ', '.join(filter(lambda x: x, [fields["emplacement"]]))
                    })))
    def way(self, data, tags, nds):
        err = []
        key_set = set(tags.keys())
        if tags.get("area") == "yes":
            tagged_as_bad = key_set & self.area_yes_bad
            if tagged_as_bad:
                err.append({
                    "class": 32001,
                    "subclass": 1,
                    "text": T_('Tags, {0}, already make this an area.',
                               '/'.join(map(lambda x: '`{}`'.format(x), tagged_as_bad)))
                })
            elif not (key_set & self.area_yes_good
                      or tags.get("railway") == "platform"):
                err.append({"class": 32002, "subclass": 1})
        if tags.get("area") == "no" and not any(
                k in tags for k in ("aeroway", "building", "landuse", "leisure", "natural")):
            err.append({"class": 32003, "subclass": 1})

        return err
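
The method above consults two class-level sets defined elsewhere in the plugin; the sketch below uses invented members only to illustrate their role (the real lists are longer and may differ):

# Hypothetical contents, for illustration only.
area_yes_bad = {"building", "landuse", "natural", "leisure"}    # keys that already imply an area
area_yes_good = {"highway", "barrier", "railway"}               # keys that need area=yes to form an area

tags = {"area": "yes", "building": "yes"}
print(set(tags) & area_yes_bad)   # {'building'} -> area=yes is redundant (class 32001)
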
    def node(self, data, tags):
        err = []
        for k in set(tags).intersection(self.DeprecatedSet):
            if None in self.Deprecated[k]:
                err.append({
                    "class": 4010,
                    "subclass": stablehash(k),
                    "text": T_('The tag `{0}` is deprecated in favour of {1}', k, self.Deprecated[k][None])
                })
            elif tags[k] in self.Deprecated[k]:
                err.append({
                    "class": 40102,
                    "subclass": stablehash(k),
                    "text": T_('The tag `{0}` is deprecated in favour of {1}', "=".join([k, tags[k]]), self.Deprecated[k][tags[k]])
                })
        return err
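
The lookups above assume self.Deprecated maps a tag key to a dict whose keys are deprecated values (or None when the whole key is deprecated) and whose values are the suggested replacement, with self.DeprecatedSet being the set of those keys. A small sketch with invented entries:

# Invented entries, purely to show the expected structure.
Deprecated = {
    "class": {None: "`highway=*`"},                 # the whole key is deprecated
    "highway": {"incline_steep": "`incline=*`"},    # only this key=value pair is deprecated
}
DeprecatedSet = set(Deprecated)

tags = {"class": "primary", "highway": "incline_steep"}
for k in set(tags).intersection(DeprecatedSet):
    if None in Deprecated[k]:
        print("deprecated key:", k, "->", Deprecated[k][None])
    elif tags[k] in Deprecated[k]:
        print("deprecated tag:", k + "=" + tags[k], "->", Deprecated[k][tags[k]])
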
Example #8
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8150,
            id=11,
            level=3,
            tags=['merge', 'public equipment', 'cycle'],
            title=T_('CAPP bicycle parking not integrated'))

        self.init(
            u"http://opendata.agglo-pau.fr/index.php/fiche?idQ=20",
            u"Supports vélos sur la CAPP",
            CSV(
                Source(attribution=u"Communauté d'Agglomération Pau-Pyrénées",
                       millesime="01/2013",
                       fileUrl=
                       u"http://opendata.agglo-pau.fr/sc/call.php?f=1&idf=20",
                       zip="Sta_Velo_Agglo_WGS84.csv")),
            Load("X",
                 "Y",
                 xFunction=self.float_comma,
                 yFunction=self.float_comma),
            Mapping(
                select=Select(types=["nodes"],
                              tags={"amenity": "bicycle_parking"}),
                conflationDistance=50,
                generate=Generate(static1={"amenity": "bicycle_parking"},
                                  static2={"source": self.source},
                                  mapping1={
                                      "capacity": lambda res: str(int(res["NOMBRE"]) * 2)
                                  })))
Example #9
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8370,
            id=90,
            level=3,
            tags=['merge', "emergency", "fix:picture", "fix:survey"],
            title=T_('Defibrillator not integrated'))

        self.init(
            "https://www.data.gouv.fr/fr/datasets/inventaire-des-defibrillateurs-automatises-externes-dae-dans-le-gers-1/",
            "Inventaire des Défibrillateurs Automatisés Externes (DAE) dans le Gers",
            SHP(
                SourceDataGouv(
                    attribution="Département du Gers",
                    dataset="5d26ec979ce2e73529f50c5e",
                    resource="479b8047-f8e3-4536-9a07-12f96c9a3cd7",
                    zip="inventaire-des-defibrillateurs-automatises-externes-dans-le-gers.shp")),
            LoadGeomCentroid(srid=2154),
            Conflate(select=Select(types=["nodes", "ways", "relations"],
                                   tags={"emergency": "defibrillator"}),
                     conflationDistance=50,
                     mapping=Mapping(
                         static1={"emergency": "defibrillator"},
                         static2={"source": self.source},
                         text=lambda tags, fields: {
                             "en": ', '.join(filter(lambda x: x, [
                                 fields["etablissement"], fields["horaire"], fields["detail"]
                             ]))
                         })))
Example #10
    def node(self, data, tags):
        if 'name' not in tags:
            return

        # Question marks from several scripts: ASCII, inverted, Arabic, Armenian,
        # Greek (U+037E, which looks like ";"), fullwidth, Ethiopic and Vai.
        for q in [u"?", u"¿", u"؟", u"՞", u";", u"？", u"፧", u"꘏"]:
            if q in tags["name"]:
                return [{"class": 50705, "subclass": 0, "text": T_("Unexpected character: `{0}`", q)}]
    def __init__(self, config, logger=None):
        Analyser_Merge_Point.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8060,
            id=1,
            level=3,
            tags=['merge', 'railway', 'fix:survey', 'fix:imagery'],
            title=T_('Crossing level not integrated'))

        self.init(
            "https://data.sncf.com/explore/dataset/liste-des-passages-a-niveau",
            "Passages à niveau",
            GeoJSON(
                SourceOpenDataSoft(
                    attribution="SNCF Réseau",
                    url="https://data.sncf.com/explore/dataset/liste-des-passages-a-niveau",
                    format="geojson")),
            Load_XY("geom_x",
                    "geom_y",
                    where=lambda res: res["mnemo"] != "CLASSE 00"),
            Conflate(
                select=Select(types=["nodes"],
                              tags=[{
                                  "railway": ["level_crossing", "crossing"]
                              }, {
                                  "disused:railway":
                                  ["level_crossing", "crossing"]
                              }]),
                conflationDistance=150,
                mapping=Mapping(
                    static2={"source": self.source},
                    mapping1={"railway": lambda res: self.type[res["mnemo"]]},
                    mapping2={"ref": lambda res: self.ref(res["libelle"])})))
Example #12
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(item=8180,
                                        id=8,
                                        level=3,
                                        tags=['merge', 'public equipment'],
                                        title=T_('{0} toilets not integrated',
                                                 'Angers'))

        self.init(
            "https://data.angers.fr/explore/dataset/sanitaires-publics-angers/",
            "Toilettes publiques",
            CSV(
                SourceOpenDataSoft(
                    attribution="Angers Loire Métropole",
                    url="https://data.angers.fr/explore/dataset/sanitaires-publics-angers")),
            Load("Geo Point",
                 "Geo Point",
                 xFunction=lambda x: x and x.split(',')[1],
                 yFunction=lambda y: y and y.split(',')[0]),
            Conflate(select=Select(types=["nodes", "ways"],
                                   tags={"amenity": "toilets"}),
                     conflationDistance=100,
                     mapping=Mapping(
                         static1={"amenity": "toilets", "access": "yes"},
                         static2={"source": self.source})))
    def __init__(self, config, logger = None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(item = 8370, id = 130, level = 3, tags = ["merge"],
            title = T_("Defibrillator not integrated"))

        self.init(
            u"https://www.data.gouv.fr/fr/datasets/defibrillateurs-publics/",
            u"Défibrillateurs publics",
            CSV(Source(attribution = u"AEDMAP France",
                    fileUrl = u"https://files.pavie.info/depot/remote/aedmap_merge.csv")),
            Load("Longitude", "Latitude",
                 xFunction = Load.float_comma,
                 yFunction = Load.float_comma),
            Conflate(
                select = Select(
                    types = ["nodes"],
                    tags = {"emergency": "defibrillator"}),
                conflationDistance = 50,
                mapping = Mapping(
                    static1 = {"emergency": "defibrillator"},
                    static2 = {"source": self.source},
                    mapping1 = {
                        "defibrillator:location": lambda res: self.cleanName(res["Nom"]) if res["Nom"] else None
                    },
                    text = lambda tags, fields: {"en": " ".join(filter(lambda x: x, [
                        fields["Nom"],
                        fields["Adresse"],
                        fields["Adresse 2"],
                        fields["Code postal"],
                        fields["Ville"],
                    ]))} )))
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8370,
            id=50,
            level=3,
            tags=['merge'],
            title=T_('Defibrillator not integrated'))

        self.init(
            u"https://www.data.gouv.fr/fr/datasets/localisation-des-defibrillateurs-appartenant-au-departement-des-hautes-alpes/",
            u"Localisation des défibrillateurs appartenant au département des Hautes Alpes ",
            SHP(
                Source(
                    attribution=u"data.gouv.fr:Département des Hautes-Alpes",
                    fileUrl=u"https://www.data.gouv.fr/fr/datasets/r/a3a2d9c1-2157-41f5-b2e3-7984ce05e902",
                    zip="dept05_defibrillateurs.shp")),
            Load(("ST_X(ST_Centroid(geom))", ), ("ST_Y(ST_Centroid(geom))", ),
                 srid=2154),
            Mapping(select=Select(types=["nodes", "ways", "relations"],
                                  tags={"emergency": "defibrillator"}),
                    conflationDistance=50,
                    generate=Generate(static1={"emergency": "defibrillator"},
                                      static2={"source": self.source})))
Example #15
    def __init__(self, config, logger = None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(item = 8180, id = 2, level = 3, tags = ['merge', 'public equipment', 'fix:survey', 'fix:picture'],
            title = T_('{0} toilets not integrated', 'Rennes'))

        self.init(
            "https://data.rennesmetropole.fr/explore/dataset/toilettes_publiques_vdr/",
            "Toilettes publiques",
            CSV(SourceOpenDataSoft(
                attribution="Ville de Rennes",
                url="https://data.rennesmetropole.fr/explore/dataset/toilettes_publiques_vdr/")),
            Load("Geo Point", "Geo Point",
                xFunction = lambda x: x and x.split(',')[1],
                yFunction = lambda y: y and y.split(',')[0]),
            Conflate(
                select = Select(
                    types = ["nodes", "ways"],
                    tags = {"amenity": "toilets"}),
                conflationDistance = 100,
                mapping = Mapping(
                    static1 = {
                        "amenity": "toilets",
                        "access": "yes"},
                    static2 = {"source": self.source},
                    mapping1 = {
                        "wheelchair": lambda res: "yes" if res["pmr"] == "OUI" else "no" if res["pmr"] == "NON" else None} )))
Example #16
    def node(self, data, tags):
        if u"name" not in tags:
            return
        if (u"highway" not in tags) and (u"waterway" not in tags) and (u"place" not in tags):
            return
        words = []

        name = tags[u"name"]
        name_subst = self.apply_special_subst(name)
        split = self._split(name_subst)
        for i in range(0, len(split), 2):
            split[i] = self.remove_special_subst(split[i])
        splitfix = list(split)

        if split and split[0] and split[0][0] in self.minus:
            words.append(split[0])
            splitfix[0] = split[0].capitalize()
        for i in range(0, len(split), 2):
            word = split[i]
            if word in self.special:
                continue
            if word[0] in self.minus:
                words.append(word)
                splitfix[i] = split[i].capitalize()
        if words:
            return {"class": 906, "subclass": stablehash64(','.join(words)), "text": T_("Missing capital letter for: {0}", u", ".join(sorted(set(words)))),
                     "fix": {"name": "".join(splitfix)} }
        return
Example #17
    def __init__(self, config, logger=None):
        Analyser_Osmosis.__init__(self, config, logger)
        self.FR = config.options and (
            ("country" in config.options
             and config.options["country"].startswith("FR"))
            or "test" in config.options)
        self.classs[100] = self.def_class(
            item=6070,
            level=3,
            tags=['boundary', 'geom', 'fix:chair'],
            title=T_('Survey point out of boundary'),
            fix=T_(
                '''Check the position of the admin_centre role node and of the boundaries.'''),
            trap=T_(
                '''The geodesic markers should not be moved: they are reference points.
Some geodesic markers really are outside the boundary.'''))
        self.classs[2] = self.def_class(
            item=6060,
            level=1,
            tags=['boundary', 'geom', 'fix:chair'],
            title=T_('Boundary intersection'),
            detail=T_(
                '''An area is marked as belonging to several cities at once.'''),
            fix=T_('''Check how this area should be owned.'''))
        self.classs_change[3] = self.def_class(
            item=6060,
            level=2,
            tags=['boundary', 'geom', 'fix:chair'],
            title=T_('Lone boundary fragment'),
            detail=T_(
                '''Unconnected boundary fragment: a way with a boundary tag that is not part of any
boundary relation.'''),
            fix=T_(
                '''Delete the way, remove the boundary tag, or add the way to a boundary relation.'''))

        self.callback20 = lambda res: {
            "class": 100,
            "data": [self.relation_full, self.relation_full, self.positionAsText]
        }
        self.callback40 = lambda res: {
            "class": 2,
            "subclass": stablehash64(res[2]),
            "data": [self.relation_full, self.relation_full, self.positionAsText]
        }
        self.callback50 = lambda res: {
            "class": 3,
            "data": [self.way_full, self.positionAsText]
        }
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_official(
            item=8160,
            id=11,
            level=3,
            tags=['merge', 'public equipment', 'cycle'],
            title=T_('{0} bicycle rental not integrated', 'IDF'))
        self.def_class_possible_merge(
            item=8161,
            id=13,
            level=3,
            tags=['merge', 'public equipment', 'cycle'],
            title=T_('{0} bicycle rental integration suggestion', 'IDF'))
        self.def_class_update_official(
            item=8162,
            id=14,
            level=3,
            tags=['merge', 'public equipment', 'cycle'],
            title=T_('{0} bicycle update', 'IDF'))

        self.init(
            u"https://opendata.paris.fr/explore/dataset/velib-disponibilite-en-temps-reel/information/",
            u"Vélib' - Disponibilité temps réel",
            GeoJSON(
                Source(
                    attribution=u"Autolib Velib Métropole",
                    millesime="04/2020",
                    fileUrl=u"https://opendata.paris.fr/explore/dataset/velib-disponibilite-en-temps-reel/download/?format=geojson&timezone=Europe/Berlin")),
            Load("geom_x", "geom_y"),
            Mapping(select=Select(types=["nodes", "ways"],
                                  tags={"amenity": "bicycle_rental"}),
                    conflationDistance=100,
                    generate=Generate(
                        static1={
                            "amenity": "bicycle_rental",
                            "network": u"Vélib’",
                            "operator": "Smovengo"
                        },
                        static2={"source": self.source},
                        mapping1={
                            "name": "name",
                            "capacity": lambda res: res["capacity"] if res["capacity"] != "0" else None
                        })))
    def init(self, logger):
        if not module_PyKOpeningHours:
            return False
        Plugin.init(self, logger)
        self.errors[32501] = self.def_class(item=3250,
                                            level=3,
                                            tags=['value', 'fix:chair'],
                                            title=T_('Invalid Opening Hours'))
Example #20
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        self.def_class_missing_osm(
            item=7160,
            id=2,
            level=3,
            tags=['merge', 'post'],
            title=T_('admin_level 8 without tag "postal_code"'))
        self.def_class_possible_merge(
            item=8221,
            id=3,
            level=3,
            tags=['merge', 'post'],
            title=T_('Postal code, integration suggestion'))

        self.init(
            u"https://datanova.legroupe.laposte.fr/explore/dataset/laposte_hexasmal",
            u"Base officielle des codes postaux",
            CSV(Source(
                    attribution=u"La Poste",
                    millesime="12/2014",
                    fileUrl=u"https://datanova.legroupe.laposte.fr/explore/dataset/laposte_hexasmal/download/?format=csv&use_labels_for_header=true"),
                separator=u";"),
            Load(srid=None),
            Conflate(select=Select(types=["relations"],
                                   tags={
                                       "type": "boundary",
                                       "admin_level": "8",
                                       "ref:INSEE": None
                                   }),
                     osmRef="postal_code",
                     extraJoin="ref:INSEE",
                     mapping=Mapping(
                         static2={"source:postal_code": self.source},
                         mapping1={
                             "ref:INSEE": "Code_commune_INSEE",
                             "postal_code": "Code_postal"
                         },
                         text=lambda tags, fields: {
                             "en":
                             "Postal code {0} for {1} (INSEE:{2})".format(
                                 fields["Code_postal"],
                                 (fields["Nom_commune"] or "").strip(), fields[
                                     "Code_commune_INSEE"])
                         })))
    def init(self, logger):
        Plugin.init(self, logger)
        self.errors[31801] = self.def_class(
            item=3180,
            level=2,
            tags=['relation', 'restriction'],
            title=T_('Useless turn restriction'))
        self.Country = self.father.config.options.get("country")
Example #22
    def node(self, data, tags):
        for i in self.tag_number:
            if i in tags:
                m = self.Number.match(tags[i])
                if (not m and not (i == "width" and tags[i] == "narrow")
                        and not (i == "maxspeed" and
                                 (tags[i] in self.MaxspeedExtraValue
                                  or self.MaxspeedClassValue.match(tags[i])
                                  or (tags[i] == "implicit"
                                      and "traffic_sign" in tags
                                      and "maxspeed" in tags["traffic_sign"].split(";"))))
                        and not (i == "maxheight" and tags[i] in self.MaxheightExtraValue)):
                    return {
                        "class": 3091,
                        "subclass": 1,
                        "text": T_("Concerns tag: `{0}`", '='.join([i, tags[i]]))
                    }
                elif m and i == "height" and float(m.group(1)) > 500:
                    return {
                        "class": 3092,
                        "subclass": 2,
                        "text": T_("`height={0}` is really tall, consider changing to `ele=*`", m.group(1)),
                        "fix": {"-": ["height"], "+": {"ele": tags["height"]}}
                    }
                elif m and i == "maxspeed" and float(m.group(1)) < 5 and "waterway" not in tags:
                    return {
                        "class": 3092,
                        "subclass": 3,
                        "text": T_('`{0}` is really slow', 'maxspeed=' + m.group(1))
                    }
    def init(self, logger):
        Plugin.init(self, logger)
        self.errors[20601] = self.def_class(
            item=2060,
            level=3,
            tags=['tag', 'addr'],
            title=T_('Misusing addr:interpolation in combination with other tags'))
    def init(self, logger):
        Plugin.init(self, logger)
        self.errors[900] = self.def_class(item = 4030, level = 1, tags = ['tag', 'fix:chair'],
            title = T_('Tag conflict'),
            detail = T_(
'''The object contains two incompatible tags.'''),
            trap = T_(
'''Sometimes the object needs both tags.'''))

        self.CONFLICT = {}
        self.CONFLICT[0] = set(['aerialway', 'aeroway', 'amenity', 'highway', 'railway', 'waterway', 'landuse'])
        self.CONFLICT[1] = set(['aerialway', 'aeroway', 'amenity', 'highway', 'leisure', 'railway', 'natural'])
        self.CONFLICT[2] = set(['aerialway', 'aeroway', 'amenity', 'highway', 'leisure', 'railway', 'waterway', 'place'])
        self.CONFLICT[3] = set(['building', 'place'])
        self.CONFLICT[4] = set(['information', 'place'])
        self.WHITE_LIST = {
            'landuse': [
                ['school', 'amenity', 'school'],
                ['industrial', 'amenity', 'recycling'],
                ['retail', 'amenity', 'marketplace'],
                ['military', 'aeroway', 'aerodrome'],
            ],
            'water': [
                ['pond', 'leisure', 'fishing'],
            ],
            'place': [
                ['square', 'area', 'yes'],
                ['square', 'highway', 'pedestrian'],
            ],
            'highway': [
                ['elevator', 'railway', 'subway_entrance'],
                ['path', 'waterway', 'lock_gate'],
                ['footway', 'waterway', 'lock_gate'],
            ],
            'natural': [
                ['water', 'leisure', 'marina'],
                ['water', 'leisure', 'swimming_area'],
                ['water', 'amenity', 'fountain'], # ?
            ],
            'amenity': [
                ['stables', 'leisure', 'horse_riding'],
                ['drinking_water', 'natural', 'spring'],
                ['drinking_water', 'man_made', 'water_tap'],
                ['shelter', 'highway', 'bus_stop'],
            ],
        }.items()
Example #25
    def init(self, logger):
        Plugin.init(self, logger)
        self.errors[303241] = self.def_class(
            item=3032,
            level=1,
            tags=['tag', 'highway'],
            title=T_('Discordant maxspeed and source:maxspeed or maxspeed:type'))
    def node(self, data, tags):
        if 'name' not in tags:
            return

        stack = []
        for c in tags["name"]:
            if c in self.quotes_j:
                if len(stack) == 0 or stack[-1][0] == c or c not in stack[-1][1]:
                    group = next(q for q in self.quotes if c in q)
                    stack.append([c, group])
                else:
                    p, group = stack.pop()
                    if c not in group:
                        return [{"class": 50704, "subclass": 0, "text": T_("Umbalanced {0} with {1}", p, c)}]

        if len(stack) > 0:
            return [{"class": 50704, "subclass": 1, "text": T_("Umbalanced {0}", "".join(map(lambda q: q[0], stack)))}]
Example #27
    def init(self, logger):
        Plugin.init(self, logger)
        self.errors[3130] = self.def_class(item=3130,
                                           level=3,
                                           tags=['name', 'tag'],
                                           title=T_('Tag name is a brand'))

        self.Brand = self.BRAND.split("\n")
Example #28
    def __init__(self, config, country, lang, logger=None):
        _Analyser_Merge_Wikipedia.__init__(
            self, config, 113, T_(u"Wikipedia, city integration suggestion"),
            "city", country, lang, None, {
                "type": "boundary",
                "boundary": "administrative",
                "admin_level": "8"
            }, ["relations"], 1, logger)
Example #29
    def __init__(self, config, logger=None):
        Analyser_Merge.__init__(self, config, logger)
        place = "SIBRA"
        self.def_class_missing_official(
            item=8040,
            id=101,
            level=3,
            tags=['merge', 'public transport', 'fix:survey', 'fix:picture'],
            title=T_('{0} stop not integrated', place))
        self.def_class_possible_merge(
            item=8041,
            id=103,
            level=3,
            tags=['merge', 'public transport', 'fix:chair'],
            title=T_('{0} stop, integration suggestion', place))

        self.init(
            u"https://transport.data.gouv.fr/datasets/offre-de-transports-sibra-a-annecy-gtfs",
            u"Réseau urbain Sibra",
            GTFS(
                SourceDataGouv(
                    attribution="SIBRA",
                    dataset="5bd9843e634f413220f7f04a",
                    resource="df07e60d-47bf-4cf2-b5d2-ad4af4c7d586")),
            Load("stop_lon", "stop_lat"),
            Conflate(select=Select(types=["nodes", "ways"],
                                   tags=[{
                                       "highway": "bus_stop"
                                   }, {
                                       "public_transport": "stop_position"
                                   }]),
                     conflationDistance=10,
                     osmRef="ref:FR:SIBRA",
                     mapping=Mapping(
                         static1={
                             "highway": "bus_stop",
                             "public_transport": "stop_position",
                             "bus": "yes"
                         },
                         static2={"source": self.source},
                         mapping1={
                             "ref:FR:SIBRA": "stop_id",
                         },
                         mapping2={"name": "stop_name"},
                         text=lambda tags, fields: T_("{0} stop of {1}", place,
                                                      fields["stop_name"]))))
Example #30
    def init(self, logger):
        Plugin.init(self, logger)

        import re
        # From RFC 1738 paragraph 2.1
        self.HasScheme = re.compile(r"^[a-zA-Z0-9.+-]+://")

        self.errors[30931] = self.def_class(
            item=3093,
            level=2,
            tags=['value', 'fix:chair'],
            title=T_('The URL contains a space'))
        self.errors[30932] = self.def_class(
            item=3093,
            level=2,
            tags=['value', 'fix:chair'],
            title=T_('The URL does not have a valid scheme'))
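
The scheme pattern above can be exercised directly; a short sketch with made-up sample values, showing the two conditions that classes 30931 and 30932 describe:

import re

HasScheme = re.compile(r"^[a-zA-Z0-9.+-]+://")

for url in ("https://example.org", "www.example.org", "http://exa mple.org"):
    print(url, "| has scheme:", bool(HasScheme.match(url)), "| has space:", " " in url)
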