from osmapi import OsmApi


def getData(id, geom_type='nod'):
    """Fetch a node, way or relation from the OSM API by id."""
    osm_data = None
    api = OsmApi()
    if geom_type == 'nod':
        osm_data = api.NodeGet(int(id))
    elif geom_type == 'way':
        osm_data = api.WayGet(int(id))
    elif geom_type == 'rel':
        osm_data = api.RelationGet(int(id))
    return osm_data
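# Minimal usage sketch (not part of the original snippet): the IDs below are
# arbitrary examples and are fetched from the default live API.
if __name__ == '__main__':
    print(getData(150935219, geom_type='nod'))
    print(getData(62422, geom_type='rel'))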
import configparser

import wikipedia
import wptools
from osmapi import OsmApi


def main():
    global myOSMAPI
    config = configparser.ConfigParser()
    config.read('config.ini')
    myOSMAPI = OsmApi(username=config.get('OSM', 'OPENSTREETMAP_USERNAME'),
                      password=config.get('OSM', 'OPENSTREETMAP_PASSWORD'))
    myOSMAPI.ChangesetCreate(
        {u"comment": u"district wikidata and wikipedia tags"})
    i = 0
    # read the list of relations that need to be modified
    txt = open("tmplist", 'r')
    for place in txt.readlines():
        try:
            i += 1
            # get relation details
            relJSON = myOSMAPI.RelationGet(place)
            name = relJSON["tag"]["name"]
            print(name)
            # get wikipedia article name
            qid = open("qidandname", 'r')
            sch = wikipedia.search(name + '_district', results=1)
            article = sch[0]
            print(article)
            # get wikidata qid
            n = wptools.page(sch[0].encode('ascii', 'ignore').decode('ascii'),
                             silent=True)
            qid = n.get_wikidata().wikibase
            print(qid)
            # make changes in osm
            newrelJSON = relJSON
            # newrelJSON["tag"]["wikidata"] = qid
            newrelJSON["tag"]["wikipedia"] = "en:" + article
            if i >= 10:
                myOSMAPI.ChangesetClose()
                break
        except Exception:
            print(place + " failed")
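# Expected config.ini layout, inferred from the config.get('OSM', ...) calls
# above (the values are placeholders):
#
#   [OSM]
#   OPENSTREETMAP_USERNAME = your_osm_username
#   OPENSTREETMAP_PASSWORD = your_osm_password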
import time

from osmapi import OsmApi

# Assumes getWay(), getNode() and placesDraw() are defined elsewhere in this module.


def getRelation(id):
    # timer is started
    start_time = time.time()
    MyApi = OsmApi()
    rs = MyApi.RelationGet(id, RelationVersion=-1)
    # print("The relations are : ", rs)
    key = rs['tag'].keys()
    if len(rs['member']) != 0:
        for member in rs['member']:
            if member['type'] == "way":
                resultway = getWay(member['ref'])
                nodes = resultway['nd']
                keys = resultway['tag'].keys()
                detail = ""
                for k in keys:
                    try:
                        # collect all information of the particular way
                        detail = detail + k + " - " + resultway['tag'][k] + "\n"
                    except Exception:
                        print("error")
                # print(detail)
                global f
                f = 0  # a new way is started
                for nd in nodes:
                    getNode(nd, detail)
            if member['type'] == "node":
                resultnode = MyApi.NodeGet(member['ref'])
                keynode = resultnode['tag'].keys()
                detail = ""
                for k in keynode:
                    # collect all information of the particular node
                    detail = detail + k + " - " + resultnode['tag'][k] + "\n"
                # print(detail)
                placesDraw(lat=resultnode['lat'], long=resultnode['lon'],
                           name=detail)
    # print("Time taken in the Process --- %s seconds ---" % (time.time() - start_time))
    return rs
class OsmFix(object):

    def __init__(self, config):
        osm_user = config.get('Osm', 'username')
        osm_pass = config.get('Osm', 'password')
        osm_api = config.get('Osm', 'api')
        osm_app = config.get('Osm', 'appid')
        self.osm = OsmApi(
            api=osm_api,
            appid=osm_app,
            username=osm_user,
            password=osm_pass
        )
        self.kort_api = kort_api.KortApi(config)

    def get_for_type(self, type, id):
        """ Returns the 'getter' of the requested OSM type """
        if type == 'node':
            return self.osm.NodeGet(id)
        if type == 'way':
            return self.osm.WayGet(id)
        if type == 'relation':
            return self.osm.RelationGet(id)

    def update_for_type(self, type, new_values):
        """ Returns the 'update' method of the requested OSM type """
        if type == 'node':
            return self.osm.NodeUpdate(new_values)
        if type == 'way':
            return self.osm.WayUpdate(new_values)
        if type == 'relation':
            return self.osm.RelationUpdate(new_values)

    def apply_kort_fix(self, limit=1, dry=False):
        try:
            for kort_fix in self.kort_api.read_fix(limit):
                try:
                    log.debug("---- Fix from Kort: ----")
                    log.debug("%s" % pprint.pformat(kort_fix))
                    osm_entity = self.get_for_type(
                        kort_fix['osm_type'],
                        kort_fix['osm_id']
                    )
                    if not osm_entity:
                        raise OsmEntityNotFoundError("OSM entity not found")
                    log.debug("---- OSM type before fix ----")
                    log.debug("%s" % pprint.pformat(osm_entity['tag']))
                    error_type = errortypes.Error(
                        kort_fix['error_type'],
                        osm_entity
                    )
                    fixed = error_type.apply_fix(kort_fix)
                    fixed_osm_entity, description = fixed
                    log.debug("---- OSM type after fix ----")
                    log.debug("%s" % pprint.pformat(fixed_osm_entity['tag']))
                except (errortypes.ErrorTypeError,
                        OsmEntityNotFoundError,
                        ValueError) as e:
                    log.warning(
                        "The fix could not be applied: %s, fix: %s"
                        % (str(e), kort_fix)
                    )
                    fixed_osm_entity = None
                if not dry:
                    if fixed_osm_entity is not None:
                        comment = self.gen_changelog_comment(
                            kort_fix,
                            description
                        )
                        self.submit_entity(
                            kort_fix['osm_type'],
                            fixed_osm_entity,
                            comment
                        )
                    self.kort_api.mark_fix(kort_fix['fix_id'])
        except Exception as e:
            log.exception("Failed to apply fix of Kort to OpenStreetMap")
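# Hypothetical wiring (a sketch, not from the original project): build the
# config object the constructor expects and do a dry run that writes nothing
# to OSM. Section/option names mirror the config.get('Osm', ...) calls above;
# the values, including the dev API host, are placeholders.
import configparser

if __name__ == '__main__':
    config = configparser.ConfigParser()
    config['Osm'] = {
        'username': 'your_osm_username',
        'password': 'your_osm_password',
        'api': 'api06.dev.openstreetmap.org',  # assumed: test against the dev API
        'appid': 'kort-to-osm',
    }
    OsmFix(config).apply_kort_fix(limit=5, dry=True)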
class Changeset(object): def __init__(self, id, api='https://api.openstreetmap.org'): self.id = id logger.debug('Using api={}'.format(api)) if api: self.osmapi = OsmApi(api=api) else: self.osmapi = None self.meta = None self.changes = None # History for modified/deleted elements, inner dicts indexed by object id self.history_one_version_back = True self.hist = {'node': {}, 'way':{}, 'relation':{}} # Summary of elemets, created, modified, deleted. '_' versions are summarized across all object types self.summary = {'create' : { 'node': 0, 'way':0, 'relation':0, 'relation_tags':{}}, 'modify' : { 'node': 0, 'way':0, 'relation':0, 'relation_tags':{}}, 'delete' : { 'node': 0, 'way':0, 'relation':0, 'relation_tags':{}}, '_create':0, '_modify':0, '_delete':0} # Tag changes self.tagdiff = self.getEmptyDiffDict() # Tags unchanged, i.e. mostly ID if object geometrically changed self.tags = {} # Textual diff description self.diffs = {} # Simple (no tags) nodes self.simple_nodes = {'create':0, 'modify':0, 'delete':0} self.other_users = None self.mileage = None self.apidebug = False self.datadebug = False @staticmethod def get_timestamp(meta, typeof=None, include_discussion=False): if include_discussion and 'comments_count' in meta and int(meta['comments_count'])>0: typeof = 'comment' cset_ts = reduce(lambda c1,c2: c1 if c1['date']>c2['date'] else c2, meta['discussion'])['date'] else: if not typeof: if 'closed_at' in meta.keys(): typeof = 'closed_at' else: typeof = 'created_at' cset_ts = meta[typeof] if type(cset_ts) is datetime.datetime: # Some osmapi's pass datetime's here without tz instead of a unicode string? timestamp = cset_ts.replace(tzinfo=pytz.utc) else: timestamp = diff.OsmDiffApi.timetxt2datetime(cset_ts) return (typeof, timestamp) def printSummary(self): s = self.summary print '{} elements created: Nodes: {}, ways:{} Relations:{}'.format(s['_create'], s['create']['node'], s['create']['way'], s['create']['relation']) print '{} elements modified: Nodes: {}, ways:{} Relations:{}'.format(s['_modify'], s['modify']['node'], s['modify']['way'], s['modify']['relation']) print '{} elements deleted: Nodes: {}, ways:{} Relations:{}'.format(s['_delete'], s['delete']['node'], s['delete']['way'], s['delete']['relation']) print 'Simple nodes: {}'.format(pprint.pformat(self.simple_nodes)) if (self.tagdiff['create'] or self.tagdiff['modify'] or self.tagdiff['delete']): print 'Tag change stats: {}'.format(pprint.pformat(self.tagdiff)) else: print 'No tags changed' if self.other_users: print 'Modifies objects previously edited by: {}'.format(pprint.pformat(self.other_users)) if self.mileage: print 'Mileage (ways): {} meters'.format(int(self.mileage['_all_create']-self.mileage['_all_delete'])) print 'Mileage (navigable): {} meters'.format(int(self.mileage['_navigable_create']-self.mileage['_navigable_delete'])) for nav_cat in self.mileage['by_type'].keys(): for nav_type in self.mileage['by_type'][nav_cat].keys(): print 'Mileage ({}={}): {} meters'.format(nav_cat, nav_type, int(self.mileage['by_type'][nav_cat][nav_type])) def printDiffs(self): self.buildDiffList() pprint.pprint(self.diffs) def _pluS(self, num): '''Return plural s''' if num==1: return '' return 's' def buildDiffList(self, maxtime=None): logger.debug('Start building diff list') self.startProcessing(maxtime) self.diffs = self.getEmptyObjDict() for modif in self.changes: logger.debug('Processing modif: {}'.format(modif)) self.checkProcessingLimits() etype = modif['type'] data = modif['data'] id = data['id'] version = data['version'] action = 
modif['action'] diff = self.getTagDiff(etype, id, version) label = self.getLabel(etype, id, version) #logger.debug('-- {} {} {} --'.format(action, etype, id)) notes = [] prev_authors = [] entry = (action, label, diff, notes, prev_authors) self.diffs[etype][str(id)] = entry if action == 'modify': old = self.old(etype,id,version-1) if etype=='way': nd_ops = self.diffStat(old['nd'], data['nd']) if nd_ops or diff: if nd_ops: if nd_ops[0]: notes.append(u'added {} node{}'.format(nd_ops[0], self._pluS(nd_ops[0]))) if nd_ops[1]: notes.append(u'removed {} node{}'.format(nd_ops[1], self._pluS(nd_ops[1]))) if old['uid'] != data['uid']: prev_authors.append(old['user']) if etype=='relation': # member is list of dict's: {u'role': u'', u'ref': 1234, u'type': u'way'} ombr = [x['ref'] for x in old['member']] nmbr = [x['ref'] for x in data['member']] m_ops = self.diffStat(ombr, nmbr) if m_ops or diff: if m_ops: if m_ops[0]: notes.append(u'added {} member{}'.format(m_ops[0], self._pluS(m_ops[0]))) if m_ops[1]: notes.append(u'deleted {} member{}'.format(m_ops[1], self._pluS(m_ops[1]))) if old['uid'] != data['uid']: prev_authors.append(old['user']) if not m_ops and ombr!=nmbr: notes.append(u'Reordered members') # TODO: Handle relation role changes (e.g. inner to outer) # TODO: Show relation as modified if member changes (e.g. way has added a node) return self.diffs def diffStat(self, a, b): ''' Given two lists of ids, return tuple with (added, removed) ''' aa = set(a) bb = set(b) d1 = aa-bb d2 = bb-aa if not d1 and not d2: return None return (len(d2), len(d1)) def downloadMeta(self, set_tz=True): if not self.meta: if self.apidebug: logger.debug('osmapi.ChangesetGet({}, include_discussion=True)'.format(self.id)) self.meta = self.osmapi.ChangesetGet(self.id, include_discussion=True) if set_tz: for ts in ['created_at', 'closed_at']: if ts in self.meta: self.meta[ts] = self.meta[ts].replace(tzinfo=pytz.utc) if 'discussion' in self.meta: for disc in self.meta['discussion']: disc['date'] = disc['date'].replace(tzinfo=pytz.utc) if self.datadebug: logger.debug(u'meta({})={}'.format(self.id, self.meta)) def downloadData(self): if not self.changes: if self.apidebug: logger.debug('osmapi.ChangesetDownload({})'.format(self.id)) self.changes = self.osmapi.ChangesetDownload(self.id) if self.datadebug: logger.debug(u'changes({})={}'.format(self.id, self.changes)) def _downloadGeometry(self, overpass_api='https://overpass-api.de/api'): # https://overpass-api.de/api/interpreter?data=[adiff:"2016-07-02T22:23:17Z","2016-07-02T22:23:19Z"];(node(bbox)(changed);way(bbox)(changed););out meta geom(bbox);&bbox=11.4019207,55.8270254,11.4030363,55.8297091 opened = self.get_timestamp(self.meta, 'created_at')[1] - datetime.timedelta(seconds=1) closed = self.get_timestamp(self.meta, 'closed_at')[1] + datetime.timedelta(seconds=1) tfmt = '%Y-%m-%dT%h:%M:%sZ' url = overpass_api+'/interpreter?data=[adiff:"'+ \ opened.strftime(tfmt) + '","' + closed.strftime(tfmt) + \ '"];(node(bbox)(changed);way(bbox)(changed););out meta geom(bbox);&bbox=' + \ '{},{},{},{}'.format(self.meta['min_lon'], self.meta['min_lat'], self.meta['max_lon'], self.meta['max_lat']) #r = requests.get(url, stream=True, headers={'Connection':'close'}) r = requests.get(url) if r.status_code!=200: raise Exception('Overpass error:{}:{}:{}'.format(r.status_code,r.text,url)) #r.raw.decode_content = True def downloadGeometry(self, maxtime=None, way_nodes=True): self.startProcessing(maxtime) for mod in self.changes: self.checkProcessingLimits() etype = mod['type'] data = mod['data'] 
eid = data['id'] version = data['version'] action = mod['action'] if action == 'create': self.hist[etype][eid] = {1: data} else: self.hist[etype][eid] = {version: data} self.old(etype, eid, version-1) if etype == 'way' and action != 'delete': for nid in data['nd']: self.old('node', nid, data['timestamp']) def startProcessing(self, maxtime=None): self.max_processing_time = maxtime self.processing_start = time.time() def checkProcessingLimits(self): if self.max_processing_time: used = time.time()-self.processing_start logger.debug('Used {:.2f}s of {}s to process history'.format(used, self.max_processing_time)) if used > self.max_processing_time: logger.warning('Timeout: Used {:.2f}s of processing time'.format(used)) raise Timeout def unload(self): ch = self.changes self.changes = None del ch hist = self.hist self.hist = None del hist def wayIsNavigable(self, tags): navigable = ['highway', 'cycleway', 'busway'] return set(navigable) & set(tags) def buildSummary(self, mileage=True, maxtime=None): logger.debug('Start building change summary') self.startProcessing(maxtime) self.other_users = {} self.mileage = {'_navigable_create':0, '_navigable_delete':0, '_all_create':0, '_all_delete':0, 'by_type': {}} for modif in self.changes: logger.debug('Processing modif: {}'.format(modif)) self.checkProcessingLimits() etype = modif['type'] data = modif['data'] eid = data['id'] version = data['version'] action = modif['action'] self.summary['_'+action] += 1 self.summary[action][etype] += 1 diff = self.getTagDiff(etype, eid, version) if diff: self.addDiffDicts(self.tagdiff, diff) self.tags = self.getTags(etype, eid, version, self.tags) if etype=='node': if action == 'delete': old = self.old(etype,eid,version-1) if not diff and ('tag' not in old.keys() or not old['tag']): self.simple_nodes[action] += 1 else: if not diff and ('tag' not in data.keys() or not data['tag']): self.simple_nodes[action] += 1 # For modify and delete we summarize affected users if action != 'create': old = self.old(etype,eid,version-1) old_uid = old['uid'] if old_uid != data['uid']: old_uid = str(old_uid) if not old_uid in self.other_users.keys(): if old['user']: usr = old['user'] else: usr = '******' self.other_users[old_uid] = {'user':usr, 'edits':0} self.other_users[old_uid]['edits'] = +1 # FIXME: Since we are only summing created/deleted ways, we ignore edited mileage if (action == 'create' or action == 'delete') and etype=='way': if action == 'create': # If created, we take the latest node version - in special # cases where a node is edited multiple times in the same # diff, this might not be correct nv = -1 nd = data['nd'] tags = data['tag'] else: # For deleted ways, we take the previous version nv = old['timestamp'] nd = old['nd'] tags = old['tag'] d = 0 flon = flat = None for nid in nd: n = self.old('node', nid, nv) #logger.debug('({}, {})'.format(n['lon'], n['lat'])) if flon: d += geotools.haversine(flon, flat, n['lon'], n['lat']) flon, flat = (n['lon'], n['lat']) if action == 'delete': d = -d self.mileage['_all_'+action] += d navigable = self.wayIsNavigable(tags) if navigable: self.mileage['_navigable_'+action] += d nav_cat = navigable.pop() nav_type = tags[nav_cat] if not nav_cat in self.mileage['by_type'].keys(): self.mileage['by_type'][nav_cat] = {} if not nav_type in self.mileage['by_type'][nav_cat].keys(): self.mileage['by_type'][nav_cat][nav_type] = 0 self.mileage['by_type'][nav_cat][nav_type] += d #else: # # Buildings, natural objects etc # logger.debug('*** Not navigable way ({}) mileage: {} {} {}'.format(tags, d, 
self.mileage, navigable)) def getEmptyDiffDict(self): return {'create':{}, 'delete':{}, 'modify':{}} def getEmptyObjDict(self): return {'node':{}, 'way':{}, 'relation':{}} def addDiffDicts(self, into, src): for ac in src.keys(): for k,v in src[ac].iteritems(): into[ac][k] = into[ac].get(k, 0)+v def getTagDiff(self, etype, eid, version): ''' Compute tag diffence between 'version' and previous version ''' diff = self.getEmptyDiffDict() curr = self.old(etype,eid,version) ntags = curr['tag'] if version > 1: old = self.old(etype,eid,version-1) otags = old['tag'] else: old = None otags = {} #logger.debug('Tags curr:{}'.format(ntags)) #logger.debug('Tags old:{}'.format(otags)) for t in ntags.keys(): if t in otags: if ntags[t]!=otags[t]: k = u'{}={} --> {}={}'.format(t, otags[t], t, ntags[t]) diff['modify'][k] = diff['modify'].get(k, 0)+1 else: k = u'{}={}'.format(t, ntags[t]) diff['create'][k] = diff['create'].get(k, 0)+1 for t in otags.keys(): if not t in ntags: k = u'{}={}'.format(t, otags[t]) diff['delete'][k] = diff['delete'].get(k, 0)+1 if not diff['create'] and not diff['delete'] and not diff['modify']: return None return diff def getTags(self, etype, eid, version, curr_tags=None): '''Compute unmodified tags, i.e. tags on objects changed geometrically, but where tags are identical between 'version' and previous version''' if not curr_tags: tags = {} else: tags = curr_tags curr = self.old(etype,eid,version) ntags = curr['tag'] if version > 1: old = self.old(etype,eid,version-1) otags = old['tag'] else: old = None otags = {} #logger.debug('Tags curr:{}'.format(ntags)) #logger.debug('Tags old:{}'.format(otags)) for t in ntags.keys(): if t in otags: if ntags[t]==otags[t]: k = u'{}={}'.format(t, ntags[t]) tags[k] = tags.get(k, 0)+1 return tags def getLabel(self, etype, eid, version): e = self.old(etype,eid,version) if 'tag' in e.keys(): tag = e['tag'] if 'name' in tag.keys(): label = u'name={}'.format(tag['name']) else: label = u'{}<{}>'.format(etype.capitalize(), eid) keytags = ['highway', 'amenity', 'man_made', 'leisure', 'historic', 'landuse', 'type'] for kt in keytags: if kt in tag.keys(): return u'{}={}, {}'.format(kt, tag[kt], label) return u'{}<{}>'.format(etype, eid) # Note: Deleted objects only have history def getElement(self, modif): etype = modif['type'] data = modif['data'] eid = data['id'] version = data['version'] action = modif['action'] if action == 'create': self.hist[etype][eid] = {1: data} else: e = self.old(etype, eid, version-1) def getElementHistory(self, etype, eid, version): logger.debug('GetElementHistory({} idw {} version {})'.format(etype, eid, version)); hv = None if self.history_one_version_back or version<4: if not eid in self.hist[etype].keys(): self.hist[etype][eid] = {} if self.apidebug: logger.debug('cset {} -> osmapi.{}Get({},ver={})'.format(self.id, etype.capitalize(), eid, version)) if etype == 'node': hv = self.osmapi.NodeGet(eid, NodeVersion=version) elif etype == 'way': hv = self.osmapi.WayGet(eid, WayVersion=version) elif etype == 'relation': hv = self.osmapi.RelationGet(eid, RelationVersion=version) if hv: self.hist[etype][eid][version] = hv else: # Possibly deleted element, fall-through logger.warning('Failed to get element history by version: {} id {} version {}'.format(etype, eid, version)) if not hv: if self.apidebug: logger.debug('cset {} -> osmapi.{}History({})'.format(self.id, etype.capitalize(), eid)) if etype == 'node': h = self.osmapi.NodeHistory(eid) elif etype == 'way': h = self.osmapi.WayHistory(eid) elif etype == 'relation': h = 
self.osmapi.RelationHistory(eid) self.hist[etype][eid] = h logger.debug('{} id {} history: {}'.format(etype, eid, h)) def old(self, etype, eid, version, only_visible=True): logger.debug('Get old {} id {} version {}'.format(etype, eid, version)) if not isinstance(version, int): '''Support timestamp versioning. Ways and relations refer un-versioned nodes/ways/relations, i.e. the only way to find the relevant node postion when the node was e.g. edited is to lookup using the timestamp. Using the latest version of the node is node correct if the node was moved subsequently.. ''' ts = diff.OsmDiffApi.timetxt2datetime(version) # First look if we have version very close in time if eid in self.hist[etype]: for v in self.hist[etype][eid].keys(): e = self.hist[etype][eid][v] diffsec = abs((ts-e['timestamp']).total_seconds()) # If timestamp difference is less than two seconds, return the element we have # This will cover e.g. newly created nodes+ways if diffsec<2: return e if not self.osmapi: # If we have no api, return the newest version v = max(self.hist[etype][eid].keys()) e = self.hist[etype][eid][v] if only_visible and not e['visible']: e = self.hist[etype][eid][v-1] return e # Lookup the old node if self.apidebug: logger.debug('cset {} -> osmapi.{}History({})'.format(self.id, etype.capitalize(), eid)) if etype == 'node': self.hist[etype][eid] = self.osmapi.NodeHistory(eid) elif etype == 'way': self.hist[etype][eid] = self.osmapi.WayHistory(eid) elif etype == 'relation': self.hist[etype][eid] = self.osmapi.RelationHistory(eid) k = self.hist[etype][eid].keys() k.sort(reverse=True) version = 1 # Default, if timestamps does not work - should never be needed for v in k: e = self.hist[etype][eid][v] ets = diff.OsmDiffApi.timetxt2datetime(e['timestamp']) if ets<=ts: version = e['version'] break; if version==-1: # Latest version we already have if eid in self.hist[etype].keys(): ks = self.hist[etype][eid].keys() ks.sort() version = ks[-1] logger.debug('version -1 changed to {} (ks={})'.format(version, ks)) for v in range(version, 1, -1): if not only_visible or self.hist[etype][eid][version]['visible']: return self.hist[etype][eid][version] logger.debug('Did not find existing history on {} id {} version {}'.format(etype, eid, version)) if (not eid in self.hist[etype].keys()) or (not version in self.hist[etype][eid].keys()): logger.debug('Do not have element {} id {} version {}'.format(etype, eid, version)) if not eid in self.hist[etype].keys(): logger.debug('Id {} not in {} keys'.format(eid, etype)) elif not version in self.hist[etype][eid].keys(): logger.debug('Version {} not in {}/{} keys'.format(version, etype, eid)) self.getElementHistory(etype, eid, version) ks = self.hist[etype][eid].keys() ks.sort() version = ks[-1] logger.debug('version -1 changed to {} (ks={})'.format(version, ks)) logger.debug('{} id {} version {}: {}'.format(etype, eid, version, self.hist[etype][eid])) elem = self.hist[etype][eid][version] if only_visible and not elem['visible']: if version > 1: # Deleted and then reverted elements will not have lat/lons on the old version logger.debug('Non-visible element found, trying {} id {} version {}'.format(etype, eid, version-1)) elem = self.old(etype, eid, version-1, only_visible) else: logger.error('Non-visible element found: {} id {} version {}'.format(etype, eid, version)) if not ('uid' in elem.keys() and 'user' in elem.keys()): logger.warning('*** Warning, old element type={} id={} v={} elem={}'.format(etype, eid, version, elem)) # API-QUIRK (Anonymous edits, discontinued April 
2009): Not all old # elements have uid. See # e.g. 'http://www.openstreetmap.org/api/0.6/way/8599635/history' # Also, 'created_by' does not seem like a complete substitute #if hasattr(elem, 'create_by'): # user = '******'.format(elem['create_by']) #else: user = None # Insert pseudo-values if not 'uid' in elem.keys(): elem['uid'] = 0 if not 'user' in elem.keys(): elem['user'] = user return elem # def getReferencedElements(self): # ''' Get elements referenced by changeset but not directly modified. ''' # for id,w in self.elems['way'].iteritems(): # self.getWay(id, w) # for id,r in self.elems['relation'].iteritems(): # self.getRelation(id, r) # def getNode(self, id, data=None): # if not data: # if self.apidebug: # logger.debug('osmapi.NodeGet({})'.format(id)) # data = self.osmapi.NodeGet(id) # if not data: # Deleted, get history # self.hist['node'][id] = self.osmapi.NodeHistory(id) # else: # self.elems['node'][id] = data # def getWay(self, id, data=None): # if not data: # if self.apidebug: # logger.debug('osmapi.WayGet({})'.format(id)) # data = self.osmapi.WayGet(id) # if not data: # Deleted, get history # self.hist['way'][id] = self.osmapi.WayHistory(id) # else: # # Api has limitations on how many elements we can request in one multi-request # # probably a char limitation, not number of ids # api_max = 100 # all_nds = data['nd'] # for l in [all_nds[x:x+api_max] for x in xrange(0, len(all_nds), api_max)]: # # We dont know node version - if node has been deleted we are in trouble # if self.apidebug: # logger.debug('osmapi.NodesGet({})'.format(l)) # nds = self.osmapi.NodesGet(l) # for nd in nds.keys(): # self.getNode(nd, nds[nd]) # def getRelation(self, id, data=None): # if not data: # if self.apidebug: # logger.debug('osmapi.RelationGet({})'.format(id)) # data = self.osmapi.RelationGet(id) # if not data: # Deleted, get history # self.hist['relation'][id] = self.osmapi.RelationHistory(id) # else: # for mbr in data['member']: # ref = mbr['ref'] # etype = mbr['type'] # # We dont know version - if way/node has been deleted we are in trouble # if etype == 'node': # self.getNode(ref) # elif etype == 'way': # self.getWay(ref) # elif etype == 'relation': # self.getRelation(ref) def isInside(self, area, load_way_nodes=True): '''Return true if there are node edits in changeset and one or more nodes are within area''' hasnodes = False for modif in self.changes: etype = modif['type'] data = modif['data'] action = modif['action'] if etype=='node': hasnodes = True if action!='delete': if area.contains(data['lon'], data['lat']): return True else: # Deleted node do not have lat/lon id = data['id'] version = data['version'] n = self.old(etype,id,version-1) if area.contains(n['lon'], n['lat']): return True if hasnodes: # Changesset has node edits, but none inside area i.e. most likely # not within area. We could have way/relation changes inside area, # which we will miss (FIXME). return False else: # FIXME: We really do not know because only tags/members on/off # ways/relations where changes. 
Maybe download way/relation nodes # to detect where edit where if load_way_nodes: for modif in self.changes: etype = modif['type'] data = modif['data'] #action = modif['action'] if etype=='way': nd = data['nd'] for nid in nd: n = self.old('node', nid, data['timestamp']) if area.contains(n['lon'], n['lat']): return True return False else: # If we do not load nodes, we assume there are changed within area return True def getGeoJsonDiff(self, include_modified_ways=True): #self.getReferencedElements() g = gj.GeoJson() c_create = '009a00' # Green c_delete = 'ff2200' # Red c_old = 'ffff60' # Yellow c_mod = '66aacc' # Light blue for modif in self.changes: etype = modif['type'] n = modif['data'] id = n['id'] version = n['version'] action = modif['action'] #diff = self.getTagDiff(etype, id, version) f = None if action=='delete': e = self.old(etype,id,version-1) else: e = self.old(etype,id,version) if etype=='node': if action=='modify': oe = self.old(etype,id,version-1) logger.debug('Modify node {} version={} e={}, oe={}'.format(id, version, e, oe)) l = g.addLineString() g.addLineStringPoint(l, e['lon'], e['lat']) g.addLineStringPoint(l, oe['lon'], oe['lat']) g.addColour(l, c_old) g.addProperty(l, 'popupContent', 'Node moved') g.addProperty(l, 'action', action) g.addProperty(l, 'type', etype) #f = g.addPoint(oe['lon'], oe['lat']) #g.addColour(f, c_old) f = g.addPoint(e['lon'], e['lat']) else: f = g.addPoint(e['lon'], e['lat']) if etype=='way' and (include_modified_ways or action!='modify'): f = g.addLineString() nd = e['nd'] for nid in nd: # Using timestamp here means we draw the old way. If # existing points have been moved and new ones added, we # will draw the old way but show points as being moved. n = self.old('node', nid, e['timestamp']) g.addLineStringPoint(f, n['lon'], n['lat']) if f: # Popup text txt = '' e = self.old(etype,id,version) tag = e['tag'] g.addProperty(f, 'action', action) g.addProperty(f, 'type', etype) g.addProperty(f, 'id', id) if action=='delete': g.addColour(f, c_delete) g.addProperty(f, 'tag', {version: tag}) elif action=='create': g.addColour(f, c_create) g.addProperty(f, 'tag', {version: tag}) else: g.addColour(f, c_mod) oe = self.old(etype,id,version-1) g.addProperty(f, 'tag', {version: tag, version-1: oe['tag']}) if self.diffs: diff = self.diffs[etype][str(id)] if diff: if action!='create': # Dont show tags twice d = diff[2] if d: tags = 0 repl = {'create': 'Added tags:', 'modify': 'Modified tags:', 'delete': 'Removed tags:'} for k in ['create', 'modify', 'delete']: if len(d[k].keys()) > 0: txt = self.joinTxt(txt, repl[k]) for kk in d[k].keys(): txt = self.joinTxt(txt, kk) tags += 1 if tags > 0: txt = self.joinTxt(txt, '', new_ph=True) notes = diff[3] if notes: for n in notes: txt = self.joinTxt(txt, n) txt = self.joinTxt(txt, '', new_ph=True) usr = diff[4] if usr: txt = self.joinTxt(txt, u'Affects edits by:', new_ph=True) for u in usr: if not u: u = '(Anonymous)' txt = self.joinTxt(txt, u) if txt != '': txt = self.joinTxt(txt, '') g.addProperty(f, 'popupContent', txt) return g.getData() def joinTxt(self, t1, t2, new_ph=False, pstart='<p>', pend='</p>'): if (t1=='' or t1.endswith(pend)) and t2!='': t1 += pstart+t2.capitalize() else: if t2=='': t1+=pend else: if new_ph: if t1!='': t1 += '.'+pend t1 += pstart+t2.capitalize() else: if not t1.endswith(':'): t1 += ', ' t1 += t2 return t1 def get_elem_elem(self, obj, elem): '''Find elements within elements''' els = elem.split('.')[1:] try: for e in els: logger.debug(u'get e={} obj={}'.format(e,obj)) if type(obj) is dict: obj 
= obj[e] else: obj = getattr(obj, e) logger.debug(u'new obj={}'.format(obj)) return obj except (AttributeError, KeyError): return None def regex_test(self, regex_filter): '''Check if changeset matches regexp. Input regex_filter is a list of dicts. For each dict in list, check if all dict elements match, if a full match is found, return True. I.e. match is OR between list of dicts and AND between elements in each dict. ''' logger.debug('Cset check regex filter: {}'.format(regex_filter)) for rf in regex_filter: matchcnt = 0 for k,v in rf.iteritems(): logger.debug(u"Evaluating: '{}'='{}'".format(k,v)) if k.startswith('.changes'): if self.regex_test_changes(k, v): logger.debug("Match found") matchcnt += 1 else: e = self.get_elem_elem(self, k) logger.debug(u"regex: field '{}'='{}', regex '{}'".format(k,e,v)) if e: m = re.match(v, e) if m: logger.debug(u"Match found on '{}'".format(e)) matchcnt += 1 if matchcnt == len(rf.keys()): logger.debug(u"Found '{}' matches of: '{}'".format(matchcnt, rf)) return True logger.debug(u"No match: '{}' (found {} of {})".format(rf, matchcnt, len(rf.keys()))) return False def regex_test_changes(self, k, v): '''Regex test on changeset changes. Format is: .changes[.action][.element-type].elements where optional '.action' is either '.modify', '.create' or '.delete' and optional '.element-type' is either '.node', '.way' or '.relation' Examples: '.changes.modify.node.tag.name' '.changes.node.tag.name' ''' # FIXME: This code only looks at the new values (e.g. tags on new # version). We need to investigate old version also to detect e.g. deleted tags if not self.changes: return False action = None elemtype = None rg = k.split('.')[2:] if rg[0] in ['modify', 'create', 'delete']: action = rg.pop(0) if rg[0] in ['node', 'way', 'relation']: elemtype = rg.pop(0) logger.debug("Action: '{}', element type '{}'".format(action, elemtype)) for modif in self.changes: if action and action!=modif['action']: continue if elemtype and elemtype!=modif['type']: continue data = modif['data'] logger.debug('Modif {}'.format(data)) field = '.'+'.'.join(rg) e = self.get_elem_elem(data, field) logger.debug("regex: field '{}'='{}', regex '{}'".format(field,e,v)) if e: return re.match(v, e) return False def build_labels(self, label_rules): '''Build list of labels based on regex and area check. Note that both regex and area check can be defined with and AND rule between then, i.e. both must match if both are defined. 
''' labels = [] for dd in label_rules: if dd['label'] in labels: logger.debug('Label already set: {}'.format(dd['label'])) continue # duplicate label match = True if 'regex' in dd: logger.debug('regex test, rule={}'.format(dd)) if self.regex_test(dd['regex']): logger.debug('Regex test OK') else: match = False if 'area_file' in dd: logger.debug('area test, rule={}'.format(dd)) area = poly.Poly() if 'OSMTRACKER_REGION' in os.environ: area_file = os.environ['OSMTRACKER_REGION'] else: area_file = dd['area_file'] area.load(area_file) logger.debug("Loaded area polygon from '{}' with {} points".format(area_file, len(area))) if ('area_check_type' not in dd or dd['area_check_type']=='cset-bbox') and area.contains_chgset(self.meta): logger.debug('Area test OK, changeset bbox') elif set(['min_lon', 'min_lat', 'max_lon', 'max_lat']).issubset(self.meta.keys()) and ('area_check_type' in dd and dd['area_check_type']=='cset-center') and area.contains((float(self.meta['min_lon'])+float(self.meta['max_lon']))/2, (float(self.meta['min_lat'])+float(self.meta['max_lat']))/2): logger.debug('Area test OK, changeset center') else: match = False if match: logger.debug("Adding label '{}'".format(dd['label'])) labels.append(dd['label']) return labels def data_export(self): return {'state': {}, 'summary': self.summary, 'tags': self.tags, 'tagdiff': self.tagdiff, 'simple_nodes': self.simple_nodes, 'diffs': self.diffs, 'other_users': self.other_users, 'mileage_m': self.mileage, 'geometry': self.hist, 'changes': self.changes} def data_import(self, data): self.summary = data['summary'] self.tags = data['tags'] self.tagdiff = data['tagdiff'] self.simple_nodes = data['simple_nodes'] self.diffs = data['diffs'] self.other_users = data['other_users'] self.mileage = data['mileage_m'] self.changes = data['changes'] self.hist = {} # Exporting to JSON causes int keys to be converted to strings for etype in data['geometry'].keys(): self.hist[etype] = {} for eid in data['geometry'][etype].keys(): self.hist[etype][long(eid)] = {} for v in data['geometry'][etype][eid].keys(): self.hist[etype][long(eid)][long(v)] = data['geometry'][etype][eid][v]
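# Usage sketch for the Changeset class above (not from the original module; the
# changeset id is an arbitrary example, and the class as written targets
# Python 2): download metadata and changes, then print a summary and tag diffs.
cset = Changeset(10000000)
cset.downloadMeta()
cset.downloadData()
cset.buildSummary()
cset.printSummary()
cset.printDiffs()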
# Python script to download from OSM the created bus routes in Yogyakarta and create a GTFS file
from osmapi import OsmApi

MyApi = OsmApi()

# Fixes Routes of TransJogja
# TODO: create a web javascript framework to let anybody create the GTFS through web.
routes = [5332612, 5334914, 1913445, 1761302, 5334915, 5334916, 5334918, 5334917]

platforms_id = {}
unique_platforms_id = set()
routes_info = {}

for route_id in routes:
    platforms_id[route_id] = []
    print('Getting route', route_id)
    relation = MyApi.RelationGet(route_id)
    routes_info[route_id] = relation['tag']
    for a in relation['member']:
        if a['role'] == 'platform':
            platforms_id[route_id].append(a['ref'])
            unique_platforms_id.add(a['ref'])
    print(platforms_id[route_id])

import transitfeed

schedule = transitfeed.Schedule()
schedule.AddAgency("Transjogja", "http://iflyagency.com", "Indonesia/Yogyakarta")
service_period = schedule.GetDefaultServicePeriod()
service_period.SetStartDate("20150101")
service_period.SetEndDate("20160101")
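# A possible continuation (a sketch, not part of the original script): fetch the
# platform nodes for coordinates, register them as GTFS stops, and add one trip
# per route. The stop times, headsigns and output file name are placeholders;
# real TransJogja departure times would have to come from the operator.
stops = {}
for platform_id in unique_platforms_id:
    node = MyApi.NodeGet(platform_id)  # assumes platforms are mapped as nodes
    stops[platform_id] = schedule.AddStop(
        lat=node['lat'], lng=node['lon'],
        name=node['tag'].get('name', 'Platform %s' % platform_id))

for route_id in routes:
    route = schedule.AddRoute(
        short_name=routes_info[route_id].get('ref', str(route_id)),
        long_name=routes_info[route_id].get('name', ''),
        route_type="Bus")
    trip = route.AddTrip(schedule, headsign=routes_info[route_id].get('to', ''))
    for seq, platform_id in enumerate(platforms_id[route_id]):
        minutes = 6 * 60 + 2 * seq  # placeholder: one stop every 2 minutes from 06:00
        trip.AddStopTime(stops[platform_id],
                         stop_time='%02d:%02d:00' % (minutes // 60, minutes % 60))

schedule.Validate()
schedule.WriteGoogleTransitFeed('transjogja_gtfs.zip')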
def online_poi_matching(args): data, comm_data = args try: db = POIBase('{}://{}:{}@{}:{}/{}'.format( config.get_database_type(), config.get_database_writer_username(), config.get_database_writer_password(), config.get_database_writer_host(), config.get_database_writer_port(), config.get_database_poi_database())) pgsql_pool = db.pool session_factory = sessionmaker(pgsql_pool) Session = scoped_session(session_factory) session = Session() osm_live_query = OsmApi() for i, row in data.iterrows(): # for i, row in data[data['poi_code'].str.contains('posta')].iterrows(): try: # Try to search OSM POI with same type, and name contains poi_search_name within the specified distance osm_query = db.query_osm_shop_poi_gpd( row.get('poi_lon'), row.get('poi_lat'), comm_data.loc[comm_data['pc_id'] == row.get( 'poi_common_id')]['poi_type'].values[0], row.get('poi_search_name'), row.get('poi_search_avoid_name'), row.get('poi_addr_street'), row.get('poi_addr_housenumber'), row.get('poi_conscriptionnumber'), row.get('poi_city'), row.get('osm_search_distance_perfect'), row.get('osm_search_distance_safe'), row.get('osm_search_distance_unsafe')) # Enrich our data with OSM database POI metadata if osm_query is not None: row['poi_new'] = False # Collect additional OSM metadata. Note: this needs style change during osm2pgsql osm_id = osm_query['osm_id'].values[0] if osm_query.get( 'osm_id') is not None else None osm_node = osm_query.get( 'node').values[0] if osm_query.get( 'node') is not None else None # Set OSM POI coordinates for all kind of geom lat = osm_query.get('lat').values[0] lon = osm_query.get('lon').values[0] if data.at[i, 'poi_lat'] != lat and data.at[i, 'poi_lon'] != lon: logging.info( 'Using new coodinates %s %s instead of %s %s.', lat, lon, data.at[i, 'poi_lat'], data.at[i, 'poi_lon']) data.at[i, 'poi_lat'] = lat data.at[i, 'poi_lon'] = lon if osm_node == 'node': osm_node = OSM_object_type.node elif osm_node == 'way': osm_node = OSM_object_type.way elif osm_node == 'relation': osm_node = OSM_object_type.relation else: logging.warning('Illegal state: %s', osm_query['node'].values[0]) data.at[i, 'osm_id'] = osm_id data.at[i, 'osm_node'] = osm_node # Refine postcode if row['preserve_original_post_code'] is not True: # Current OSM postcode based on lat,long query. 
postcode = query_postcode_osm_external( config.get_geo_prefer_osm_postcode(), session, lon, lat, row.get('poi_postcode')) force_postcode_change = False # TODO: Has to be a setting in app.conf if force_postcode_change is True: # Force to use datasource postcode if postcode != row.get('poi_postcode'): logging.info( 'Changing postcode from %s to %s.', row.get('poi_postcode'), postcode) data.at[i, 'poi_postcode'] = postcode else: # Try to use smart method for postcode check ch_posctode = smart_postcode_check( row, osm_query, postcode) if ch_posctode is not None: data.at[i, 'poi_postcode'] = ch_posctode else: logging.info('Preserving original postcode %s', row.get('poi_postcode')) data.at[i, 'osm_version'] = osm_query['osm_version'].values[0] \ if osm_query['osm_version'] is not None else None data.at[i, 'osm_changeset'] = osm_query['osm_changeset'].values[0] \ if osm_query['osm_changeset'] is not None else None if osm_query['osm_timestamp'] is not None: osm_query['osm_timestamp'] = \ data.at[i, 'osm_timestamp'] = pd.to_datetime(str((osm_query['osm_timestamp'].values[0]))) else: osm_query['osm_timestamp'] = None data.at[i, 'poi_distance'] = osm_query.get( 'distance').values[0] if osm_query.get( 'distance') is not None else None # For OSM way also query node points if osm_node == OSM_object_type.way: logging.info( 'This is an OSM way looking for id %s nodes.', osm_id) # Add list of nodes to the dataframe nodes = db.query_ways_nodes(osm_id) data.at[i, 'osm_nodes'] = nodes elif osm_node == OSM_object_type.relation: logging.info( 'This is an OSM relation looking for id %s nodes.', osm_id) # Add list of relation nodes to the dataframe nodes = db.query_relation_nodes(osm_id) data.at[i, 'osm_nodes'] = nodes logging.info( 'Old %s (not %s) type: %s POI within %s m: %s %s, %s %s (%s)', data.at[i, 'poi_search_name'], data.at[i, 'poi_search_avoid_name'], data.at[i, 'poi_type'], data.at[i, 'poi_distance'], data.at[i, 'poi_postcode'], data.at[i, 'poi_city'], data.at[i, 'poi_addr_street'], data.at[i, 'poi_addr_housenumber'], data.at[i, 'poi_conscriptionnumber']) try: # Download OSM POI way live tags if osm_node == OSM_object_type.way: for rtc in range(0, RETRY): logging.info( 'Downloading OSM live tags to this way: %s.', osm_id) cached_way = db.query_from_cache( osm_id, osm_node) if cached_way is None: live_tags_container = osm_live_query.WayGet( osm_id) if live_tags_container is not None: data.at[ i, 'osm_live_tags'] = live_tags_container.get( 'tag') cache_row = { 'osm_id': int(osm_id), 'osm_live_tags': live_tags_container.get('tag'), 'osm_version': live_tags_container.get('version'), 'osm_user': live_tags_container.get('user'), 'osm_user_id': live_tags_container.get('uid'), 'osm_changeset': live_tags_container.get( 'changeset'), 'osm_timestamp': live_tags_container.get( 'timestamp'), 'osm_object_type': osm_node, 'osm_lat': None, 'osm_lon': None, 'osm_nodes': live_tags_container.get('nd') } get_or_create_cache( session, POI_OSM_cache, **cache_row) # Downloading referenced nodes of the way for way_nodes in live_tags_container[ 'nd']: logging.debug( 'Getting node %s belongs to way %s', way_nodes, osm_id) live_tags_node = osm_live_query.NodeGet( way_nodes) cache_row = { 'osm_id': int(way_nodes), 'osm_live_tags': live_tags_node.get('tag'), 'osm_version': live_tags_node.get('version'), 'osm_user': live_tags_node.get('user'), 'osm_user_id': live_tags_node.get('uid'), 'osm_changeset': live_tags_node.get( 'changeset'), 'osm_timestamp': live_tags_node.get( 'timestamp'), 'osm_object_type': OSM_object_type.node, 
'osm_lat': live_tags_node.get('lat'), 'osm_lon': live_tags_node.get('lon'), 'osm_nodes': None } get_or_create_cache( session, POI_OSM_cache, **cache_row) break else: logging.warning( 'Download of external data has failed.' ) else: data.at[i, 'osm_live_tags'] = cached_way.get( 'osm_live_tags') break session.commit() # Download OSM POI node live tags elif osm_node == OSM_object_type.node: for rtc in range(0, RETRY): logging.info( 'Downloading OSM live tags to this node: %s.', osm_id) cached_node = db.query_from_cache( osm_id, osm_node) if cached_node is None: live_tags_container = osm_live_query.NodeGet( osm_id) if live_tags_container is not None: data.at[ i, 'osm_live_tags'] = live_tags_container.get( 'tag') cache_row = { 'osm_id': int(osm_id), 'osm_live_tags': live_tags_container.get('tag'), 'osm_version': live_tags_container.get('version'), 'osm_user': live_tags_container.get('user'), 'osm_user_id': live_tags_container.get('uid'), 'osm_changeset': live_tags_container.get( 'changeset'), 'osm_timestamp': live_tags_container.get( 'timestamp'), 'osm_object_type': osm_node, 'osm_lat': live_tags_container.get('lat'), 'osm_lon': live_tags_container.get('lon'), 'osm_nodes': None } get_or_create_cache( session, POI_OSM_cache, **cache_row) break else: logging.warning( 'Download of external data has failed.' ) else: data.at[i, 'osm_live_tags'] = cached_node.get( 'osm_live_tags') break session.commit() elif osm_node == OSM_object_type.relation: for rtc in range(0, RETRY): logging.info( 'Downloading OSM live tags to this relation: %s.', osm_id) live_tags_container = osm_live_query.RelationGet( abs(osm_id)) if live_tags_container is not None: data.at[ i, 'osm_live_tags'] = live_tags_container.get( 'tag') break else: logging.warning( 'Download of external data has failed.' ) session.commit() else: logging.warning('Invalid state for live tags.') except Exception as e: logging.warning( 'There was an error during OSM request: %s.', e) logging.exception('Exception occurred') logging.warning('Live tag is: {}'.format( cached_node.get('osm_live_tags'))) # This is a new POI else: # This is a new POI - will add fix me tag to the new items. data.at[i, 'poi_new'] = True # Get the first character of then name of POI and generate a floating number between 0 and 1 # for a PostGIS function: https://postgis.net/docs/ST_LineInterpolatePoint.html # If there is more than one POI in a building this will try to do a different location and # not only on center or not only on edge ib = row.get('poi_name') if ib is not None: ibp = 1 - (((ord(ib[0]) // 16) + 1) / 17) else: ibp = 0.50 # Refine postcode osm_bulding_q = db.query_osm_building_poi_gpd( row.get('poi_lon'), row.get('poi_lat'), row.get('poi_city'), row.get('poi_postcode'), row.get('poi_addr_street'), row.get('poi_addr_housenumber'), in_building_percentage=ibp) if osm_bulding_q is not None: logging.info( 'Relocating POI coordinates to the building with same address: %s %s, %s %s', row.get('poi_lat'), row.get('poi_lon'), osm_bulding_q.get('lat')[0], osm_bulding_q.get('lon')[0]), row['poi_lat'], row['poi_lon'] = osm_bulding_q.get( 'lat')[0], osm_bulding_q.get('lon')[0] else: logging.info( 'The POI is already in its building or there is no building match. 
\ Keeping POI coordinates as is as.') if row['preserve_original_post_code'] is not True: postcode = query_postcode_osm_external( config.get_geo_prefer_osm_postcode(), session, data.at[i, 'poi_lon'], data.at[i, 'poi_lat'], row.get('poi_postcode')) if postcode != row.get('poi_postcode'): logging.info('Changing postcode from %s to %s.', row.get('poi_postcode'), postcode) data.at[i, 'poi_postcode'] = postcode else: logging.info('Preserving original postcode %s', row.get('poi_postcode')) logging.info( 'New %s (not %s) type: %s POI: %s %s, %s %s (%s)', row.get('poi_search_name'), row.get('poi_search_avoid_name'), row.get('poi_type'), row.get('poi_postcode'), row.get('poi_city'), row.get('poi_addr_street'), row.get('poi_addr_housenumber'), row.get('poi_conscriptionnumber')) except Exception as e: logging.error(e) logging.error(row) logging.exception('Exception occurred') session.commit() return data except Exception as e: logging.error(e) logging.exception('Exception occurred')
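# Hypothetical driver (an assumption, not from the original module): the single
# `args` tuple signature suggests mapping online_poi_matching over DataFrame
# chunks in a multiprocessing pool, roughly like this:
from multiprocessing import Pool

import numpy as np
import pandas as pd


def run_matching(poi_df, common_df, workers=4):
    # Split the POI frame into chunks and match each chunk in its own process.
    chunks = [(chunk, common_df) for chunk in np.array_split(poi_df, workers)]
    with Pool(workers) as pool:
        results = pool.map(online_poi_matching, chunks)
    return pd.concat(results)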
class OsmFix(object):

    def __init__(self):
        osm_user = BaseConfig.OSM_USER
        osm_pass = BaseConfig.OSM_PASSWORD
        osm_api = BaseConfig.OSM_API_URL
        self.osm = OsmApi(api=osm_api, appid='Kort',
                          username=osm_user, password=osm_pass)
        self.kort_api = kort_api.KortApi()

    def get_for_type(self, type, id):
        """ Returns the 'getter' of the requested OSM type """
        if type == 'node':
            return self.osm.NodeGet(id)
        if type == 'way':
            return self.osm.WayGet(id)
        if type == 'relation':
            return self.osm.RelationGet(id)

    def update_for_type(self, type, new_values):
        """ Returns the 'update' method of the requested OSM type """
        if type == 'node':
            return self.osm.NodeUpdate(new_values)
        if type == 'way':
            return self.osm.WayUpdate(new_values)
        if type == 'relation':
            return self.osm.RelationUpdate(new_values)

    def apply_kort_fix(self, limit=1, dry=False):
        try:
            for kort_fix in self.kort_api.read_fix():
                try:
                    log.debug("---- Fix from Kort: ----")
                    log.debug("%s" % pprint.pformat(kort_fix))
                    osm_entity = self.get_for_type(kort_fix['osm_type'],
                                                   kort_fix['osm_id'])
                    if not osm_entity:
                        raise OsmEntityNotFoundError("OSM entity not found")
                    log.debug("---- OSM type before fix ----")
                    log.debug("%s" % pprint.pformat(osm_entity['tag']))
                    error_type = errortypes.Error(kort_fix['error_type'],
                                                  osm_entity)
                    fixed = error_type.apply_fix(kort_fix)
                    fixed_osm_entity, description = fixed
                    log.debug("---- OSM type after fix ----")
                    log.debug("%s" % pprint.pformat(fixed_osm_entity['tag']))
                except (errortypes.ErrorTypeError, OsmEntityNotFoundError,
                        ValueError) as e:
                    log.warning("The fix could not be applied: %s, fix: %s"
                                % (str(e), kort_fix))
                    fixed_osm_entity = None
                if not dry:
                    if fixed_osm_entity is not None:
                        comment = self.gen_changelog_comment(
                            kort_fix, description)
                        self.submit_entity(kort_fix['osm_type'],
                                           fixed_osm_entity, comment, kort_fix)
                    self.kort_api.mark_fix(kort_fix['fix_id'])
        except Exception as e:
            log.exception("Failed to apply fix of Kort to OpenStreetMap")

    def gen_changelog_comment(self, kort_fix, change_description):
        comment = (
            u"Change from kort, user: %s (id: %s), "
            u"fix id: %s, error: %s (source: %s), "
            u"description: %s, "
            u"see this users profile for more information: "
            u"http://www.openstreetmap.org/user/kort-to-osm" % (
                kort_fix['username'],
                kort_fix['user_id'],
                kort_fix['fix_id'],
                kort_fix['error_type'],
                kort_fix['source'],
                change_description))
        return comment

    def submit_entity(self, type, entity, comment, kort_fix):
        """ Submits an OSM entity (node, way, relation) to OSM """
        self.osm.ChangesetCreate({
            "comment": comment[:255],
            "mechanical": "yes",
            "kort:username": kort_fix['username'],
            "kort:user_id": str(kort_fix['user_id']),
            "kort:fix_id": str(kort_fix['fix_id']),
            "kort:error_type": kort_fix['error_type'],
            "kort:error_source": kort_fix['source']
        })
        changeset = self.update_for_type(type, entity)
        log.info("%s" % pprint.pformat(changeset))
        self.osm.ChangesetClose()
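# Minimal run sketch (not from the original project; assumes BaseConfig,
# kort_api, errortypes, log and OsmEntityNotFoundError are importable from this
# code base): apply a single Kort fix without writing anything to OSM.
if __name__ == '__main__':
    OsmFix().apply_kort_fix(limit=1, dry=True)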