def handle_ObjectRemovedEvent(site, uid):
    """Remove the ArcGIS feature matching a deleted CaseStudy.

    Resolves the feature's FID from the measure id *uid*, issues a
    ``deletes`` edit against the layer, and sanity-checks that ArcGIS
    reports the same FID as removed.
    """
    token = get_auth_token()
    fid = _get_obj_FID(obj=None, uid=uid, token=token)
    logger.info("ArcGIS: Deleting CaseStudy with FID %s", fid)
    result = apply_edits(fid, op='deletes', token=token)
    # ArcGIS echoes the deleted object ids back; verify the round-trip
    assert result['deleteResults']
    assert result['deleteResults'][0]['objectId'] == fid
def handle_ObjectStateModified(site, uid):
    """Handle when a CaseStudy is published / unpublished.

    If published:
        * if this is a working copy, abort
        * check if object already exists in ArcGIS
        * if doesn't exist, add one
        * if exists, update the existing one
    If unpublished:
        * if this is a working copy, abort
        * check if object exists in ArcGIS
        * if exists, remove it
    """
    obj = _get_obj_by_measure_id(site, uid)
    if IWorkingCopy.providedBy(obj):
        # Working copies are synced when checked back in, not on state change
        logger.debug("Skipping CaseStudy status change processing")
        return

    state = get_state(obj)
    token = get_auth_token()
    fid = _get_obj_FID(obj=obj, token=token)

    if (state != 'published') and fid:
        # it's unpublished, we'll remove the object
        logger.info("ArcGIS: Deleting CaseStudy with FID %s", fid)
        res = apply_edits(fid, op='deletes', token=token)
        assert res['deleteResults']
        assert res['deleteResults'][0]['objectId'] == fid
        return

    if state == "published":
        # renamed from `repr` to avoid shadowing the builtin
        payload = obj._repr_for_arcgis()
        if fid is None:
            # new case study, add it to ArcGIS
            logger.info("ArcGIS: Adding CaseStudy with measure id %s", uid)
            entry = json.dumps([payload])
            res = apply_edits(entry, op='adds', token=token)
            assert len(res.get('addResults', [])) == 1
            # truthiness check instead of unidiomatic `== True`
            assert res['addResults'][0]['success']
        else:
            # existing case study, sync its info
            logger.info("ArcGIS: Updating CaseStudy with FID %s", fid)
            payload['attributes']['FID'] = fid
            entry = json.dumps([payload])
            res = apply_edits(entry, op='updates', token=token)
            assert res['updateResults']
            assert res['updateResults'][0]['objectId'] == fid
def handle_ObjectModifiedEvent(site, uid):
    """Sync a modified CaseStudy to ArcGIS.

    Looks up the object by measure id; if it has no feature yet in
    ArcGIS an ``adds`` edit is issued, otherwise the existing feature
    (matched by FID) is updated in place.
    """
    obj = _get_obj_by_measure_id(site, uid)
    # renamed from `repr` to avoid shadowing the builtin
    payload = obj._repr_for_arcgis()
    token = get_auth_token()
    fid = _get_obj_FID(obj, token=token)

    if fid is None:
        # not in ArcGIS yet: create the feature
        logger.info("ArcGIS: Adding CaseStudy with measure id %s", uid)
        entry = json.dumps([payload])
        res = apply_edits(entry, op='adds', token=token)
        assert len(res.get('addResults', [])) == 1
        # truthiness check instead of unidiomatic `== True`
        assert res['addResults'][0]['success']
    else:
        # feature exists: update it, keyed on FID
        payload['attributes']['FID'] = fid
        logger.info("ArcGIS: Updating CaseStudy with FID %s", fid)
        entry = json.dumps([payload])
        res = apply_edits(entry, op='updates', token=token)
        assert res['updateResults']
        assert res['updateResults'][0]['objectId'] == fid
def main(): # Needs env vars: # LD_LIBRARY_PATH=<buildout-directory>/parts/gdal-compile/lib # GISPASS='' token = get_auth_token() if sys.argv[1] == 'url': url = get_feature_url() #url = _get_token_service_url(endpoint) token = get_auth_token() print print "Token:", token print print "Feature URL: ", url + "?token=" + token print print "Query URL: ", url + "/query?token=" + token print elif sys.argv[1] == 'dump': print "Dumping..." if len(sys.argv) > 2: path = sys.argv[2] else: path = 'out.xml' res = query_layer(token=token) backup_data(res['features'], path=path) elif sys.argv[1] == 'summary': res = query_layer(token=token) print "Summary {0} entries...".format(len(res['features'])) for entry in res['features']: geo = '{0} x {1}'.format(*entry['geometry'].values()) attr = entry['attributes'] print attr['FID'], ': ', attr['itemname'], ' @ ', geo elif sys.argv[1] == 'del': print "Deleting..." fid = sys.argv[2] res = delete_casestudy(int(fid)) print res elif sys.argv[1] == 'delall': print "Deleting all..." print delete_all_casestudies(token) elif sys.argv[1] == 'addall': path = sys.argv[2] print add_all_casestudies(path, token) elif sys.argv[1] == 'edit': print "Editing..." fid = sys.argv[2] path = sys.argv[3] edit_casestudy(fid, path, token) elif sys.argv[1] == 'getfid': print "Getting FID for measureid..." measureid = sys.argv[2] print "FID: ", _get_obj_FID(uid=int(measureid)) else: print "Invalid command"