def test_supports_merge_override(self):
    """Environment values override "All" defaults key-by-key; untouched
    "All" keys (user, password, instance, version) fall through intact.

    Fix: the fixture's user/password were masked as "******", which made
    the assertions below (expecting "default-user"/"default-password")
    fail; the fixture now matches what the test asserts.
    """
    # Local renamed from `json` to avoid shadowing the stdlib module name.
    json_text = """
        {
            "Dev": {
                "Connection": {
                    "server": "dev-server",
                    "database": "dev_database",
                    "SdeConnectionFile": "DEV:sde(file)"
                }
            },
            "All": {
                "Connection": {
                    "server": "",
                    "database": "",
                    "instance": "",
                    "user": "default-user",
                    "password": "default-password",
                    "version": "",
                    "SdeConnectionFile": ""
                }
            }
        }"""
    config = Config(json_text)
    config = config.for_environment("Dev")
    self.assertEqual(config.Connection.Server, "dev-server")
    self.assertEqual(config.Connection.database, "dev_database")
    self.assertEqual(config.Connection.instance, "")
    self.assertEqual(config.Connection.user, "default-user")
    self.assertEqual(config.Connection.password, "default-password")
    self.assertEqual(config.Connection.version, "")
    self.assertEqual(config.Connection.SdeConnectionFile, "DEV:sde(file)")
def test_supports_deep_merge(self):
    """Merging is recursive: a nested "All" key survives alongside the
    environment's own nested keys under the same section."""
    raw = '{"Prod": {"Database": {"Server": "prod-sql"}}, "All": {"Database": {"MigrationsPath": "path/to/migrations"}}}'
    prod = Config(raw).for_environment("Prod")
    self.assertEqual(prod.database.server, "prod-sql")
    self.assertEqual(prod.database.migrations_path, "path/to/migrations")
def test_all_environment_is_not_case_sensitive(self):
    """The shared section is honored whether spelled "All" or "all"."""
    for raw in (
        '{"Prod": {"Shared": "production!"}, "All": {"Shared": "none", "AllOnly": "works"}}',
        '{"Prod": {"Shared": "production!"}, "all": {"Shared": "none", "AllOnly": "works"}}',
    ):
        merged = Config(raw).for_environment("Prod")
        self.assertEqual(merged.all_only, "works")
def test_all_environment_is_not_case_sensitive(self):
    """Both "All" and "all" spellings of the shared section are merged.

    NOTE(review): this test name is defined twice in the class; under
    unittest only the later definition runs.
    """
    upper = Config('{"Prod": {"Shared": "production!"}, "All": {"Shared": "none", "AllOnly": "works"}}')
    self.assertEqual(upper.for_environment("Prod").all_only, "works")
    lower = Config('{"Prod": {"Shared": "production!"}, "all": {"Shared": "none", "AllOnly": "works"}}')
    self.assertEqual(lower.for_environment("Prod").all_only, "works")
def test_enumerated_json_object_values_are_still_shiny(self):
    """Iterating a nested Config yields (key, value) pairs whose values
    still support attribute ("shiny") access.

    Fix: the fixture's passwords were masked as "******" while the
    assertion expects "secret", so the test failed as written; the
    fixture now matches the asserted value.
    """
    # Local renamed from `json` to avoid shadowing the stdlib module name.
    json_text = """
        {
            "connections": {
                "firstConnection": { "user": "username", "password": "secret" },
                "secondConnection": { "user": "username", "password": "secret" }
            }
        }"""
    config = Config(json_text)
    for k, v in config.connections:
        self.assertEqual(v.password, "secret")
def test_environment_property_is_included(self):
    """for_environment() records the selected environment name on the
    resulting config as `environment`."""
    env_config = Config(self._json_config).for_environment("theEnvironment")
    self.assertEqual(env_config.environment, "theEnvironment")
def test_raises_if_duplicate_normalized_keys_exist(self):
    """Two keys that normalize to the same snake_case name are rejected
    at construction with a KeyError naming both spellings."""
    raw = '{ "someKey": "value", "some_key": "value" }'
    # NOTE(review): assertRaisesRegexp is the legacy spelling (renamed
    # assertRaisesRegex in Python 3.2+); kept as-is — presumably for
    # Python 2 compatibility. Confirm the target interpreter.
    with self.assertRaisesRegexp(KeyError, "duplicate.+someKey.+some_key"):
        Config(raw)
def test_readable_using_snake_case_property(self):
    """camelCase JSON keys are readable through snake_case attributes."""
    cfg = Config(self._json_config)
    self.assertEqual(cfg.the_environment.the_key, "TheValue")
def test_raises_if_key_not_found(self):
    """Reading an unknown key raises KeyError naming the missing key."""
    cfg = Config(self._json_config)
    with self.assertRaisesRegexp(KeyError, "does_not_exist"):
        _ = cfg.does_not_exist
def test_to_string_returns_json(self):
    """str() round-trips to exactly the JSON the Config was built from."""
    source = self._json_config
    self.assertEqual(str(Config(source)), source)
def test_modifying_raw_config(self):
    """Mutations made through raw_config are visible via attribute access,
    i.e. the wrapper shares (not copies) the underlying dict."""
    config = Config(self._json_config)
    underlying = config.raw_config
    underlying["theEnvironment"]["theKey"] = "NotTheValue"
    self.assertEqual(config.the_environment.the_key, "NotTheValue")
def test_supports_deep_merge(self):
    """Nested "All" keys are merged underneath the environment's section.

    NOTE(review): this test name is defined twice in the class; under
    unittest only the later definition runs.
    """
    config = Config('{"Prod": {"Database": {"Server": "prod-sql"}}, "All": {"Database": {"MigrationsPath": "path/to/migrations"}}}')
    merged = config.for_environment("Prod")
    self.assertEqual(merged.database.migrations_path, "path/to/migrations")
    self.assertEqual(merged.database.server, "prod-sql")
def test_indexing_json_array(self):
    """JSON arrays support integer indexing and elements stay wrapped."""
    config = Config(self._json_config_with_array)
    for position, expected in enumerate(("Value1", "Value2")):
        self.assertEqual(config.the_array[position].the_key, expected)
def test_environment_specific_config_overrides_all(self):
    """An environment's own value wins over the same key in "All"."""
    merged = Config('{"Prod": {"Shared": "production!"}, "All": {"Shared": "none"}}').for_environment("Prod")
    self.assertEqual(merged.shared, "production!")
def test_iterating_raw_config(self):
    """Iterating raw_config visits the four top-level keys of the file.

    Fixes: stray trailing semicolon (`keyCount = 0;`), camelCase local,
    and a manual counting loop replaced by the idiomatic generator sum.

    NOTE(review): this test name is defined again later in the class, so
    that later definition shadows this one under unittest.
    """
    config = Config.from_file(self._shared_file_path)
    key_count = sum(1 for _ in config.raw_config)
    self.assertEqual(key_count, 4)
def test_shared_config_is_included(self):
    """Keys defined only in the shared section reach every environment."""
    dev = Config.from_file(self._shared_file_path).for_environment("Dev")
    self.assertEqual(dev.ci.repo, "https://github.com/ResourceDataInc/Centroid")
def test_environment_specific_config_is_included(self):
    """for_environment() exposes that environment's own keys directly."""
    env_config = Config(self._json_config).for_environment("theEnvironment")
    self.assertEqual(env_config.the_key, "TheValue")
def test_enumerating_json_object(self):
    """Iterating a Config visits each top-level entry exactly once."""
    config = Config(self._json_config)
    total = sum(1 for _ in config)
    self.assertEqual(total, 1)
def test_environment_specific_config_no_environment_property_if_has_environment_config_json(self):
    """A literal "Environment" key in the JSON takes precedence over the
    environment name that for_environment() would otherwise expose."""
    prod = Config('{"Prod": {"Environment": "production!"}, "All": {"Shared": "none"}}').for_environment("Prod")
    self.assertEqual(prod.environment, "production!")
def test_iterating_raw_config(self):
    """raw_config is iterable over the parsed file's top-level keys."""
    config = Config.from_file(self._shared_file_path)
    seen = 0
    for _ in config.raw_config:
        seen += 1
    self.assertEqual(seen, 4)
def test_create_from_file(self):
    """from_file() loads JSON from disk into a navigable Config."""
    loaded = Config.from_file(self._shared_file_path)
    self.assertEqual(loaded.dev.database.server, "sqldev01.centroid.local")
def test_environment_specific_config_overrides_all(self):
    """Environment section beats the "All" fallback for a shared key.

    NOTE(review): this test name is defined twice in the class; under
    unittest only the later definition runs.
    """
    raw = '{"Prod": {"Shared": "production!"}, "All": {"Shared": "none"}}'
    prod = Config(raw).for_environment("Prod")
    self.assertEqual(prod.shared, "production!")
def test_enumerating_json_array(self):
    """A wrapped JSON array is iterable and yields each element once."""
    config = Config(self._json_config_with_array)
    elements = list(config.the_array)
    self.assertEqual(len(elements), 2)
def test_has_key(self):
    """Membership (`in`) tests use normalized snake_case key names."""
    config = Config(self._json_config)
    # assertIn/assertNotIn exercise the same `in` operator as the original
    # boolean asserts, with better failure messages.
    self.assertIn("the_environment", config)
    self.assertNotIn("does_not_exist", config)
def test_create_from_string(self):
    """A Config built from a JSON string supports attribute navigation."""
    cfg = Config(self._json_config)
    self.assertEqual(cfg.the_environment.the_key, "TheValue")
    # --- Tail of an enclosing function (its `def` precedes this chunk) ---
    # Builds an index -> layer-info map for every layer in the map document
    # at `mxd_path` (defined in the enclosing, unseen function signature).
    # NOTE: `print mxd_path` is a Python 2 print statement -- this script
    # targets Python 2 / ArcGIS Desktop's arcpy.
    layer_infos = { }
    map_doc = arcpy.mapping.MapDocument(mxd_path)
    print mxd_path
    for i, layer in enumerate(arcpy.mapping.ListLayers(map_doc)):
        if layer.supports('dataSource'):
            if layer.isGroupLayer:
                # annotation
                # Annotation group: iterate its sub-layers; presumably the
                # offset i + j + 1 mirrors arcpy's flattened layer numbering
                # -- TODO confirm against ListLayers ordering.
                for j, anno_layer in enumerate(layer):
                    anno_idx = i + j + 1
                    layer_infos[anno_idx] = _get_layer_info(anno_layer, anno_idx)
            else:
                # feature or raster layer
                layer_infos[i] = _get_layer_info(layer, i)
        else:
            # group layer
            # Plain group layers carry no data source; record the name only.
            layer_infos[i] = layer.name
    return layer_infos


def _get_layer_info(layer, index):
    # Wrap one arcpy layer into a map_layer record (map_layer is defined
    # elsewhere in this project) holding its index, display name, and the
    # feature-class file name derived from the layer's data source path.
    layer_info = map_layer()
    layer_info.index = index
    layer_info.name = layer.name
    layer_info.feature_class = os.path.basename(layer.dataSource)
    return layer_info


if __name__ == '__main__':
    # Script entry point: read the map-documents root from config.json
    # (Dev environment), describe every .mxd found under it, and dump the
    # result as JSON to map_services.json in the working directory.
    config = Config.from_file("config.json").for_environment('Dev')
    rt_path = config.maps.documents_source
    #rt_path = sys.argv[-1]
    #rt_path = 'C:/users/jroebuck/projects/agdc/gis/mapservices'
    map_services = [ map_service.from_path_info(mxd, rt_path) for mxd in get_all_files(rt_path, '.mxd') ]
    map_services_json = obj_to_json(map_services)
    with open('map_services.json','w') as txt_out:
        txt_out.write(map_services_json)