def empty_gwc(self):
    """
    update layer group's json for empty gwc to the repository
    """
    if self.status not in [ResourceStatus.PUBLISHED, ResourceStatus.UPDATED]:
        #layer is not published, no need to empty gwc
        return

    json_filename = self.json_filename_abs
    try_set_push_owner("layergroup")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.name
        json_out["workspace"] = self.workspace.name
        json_out["action"] = "empty_gwc"
        json_out["empty_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")
        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Empty GWC of layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
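# Note (not part of the original code): every publish/unpublish method in this module repeats
# the "create the parent dir if required, then dump json" steps. The following is a minimal
# sketch of that pattern only; the helper name write_json_file is hypothetical and the real
# codebase keeps the steps inline as shown above and below.
def write_json_file(json_filename, json_out):
    #create the dir if required
    json_dir = os.path.dirname(json_filename)
    if not os.path.exists(json_dir):
        os.makedirs(json_dir)
    #dump the payload with the same indentation and file mode used elsewhere in this module
    with open(json_filename, "wb") as output:
        json.dump(json_out, output, indent=4)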
def unpublish(self):
    """
    Remove the store's json reference (if it exists) from the repository.
    Return True if the store is removed from the repository; return False if the store does not exist in the repository.
    """
    json_files = [self.json_filename_abs(action) for action in ['publish']]
    #get all existing files.
    json_files = [f for f in json_files if os.path.exists(f)]
    if json_files:
        #file exists, store is published, remove it.
        try_set_push_owner("liveserver")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=json_files)
            hg.commit(include=json_files, addremove=True, user="******",
                      message="Remove live store {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("liveserver", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("liveserver")
        return True
    else:
        return False
def unpublish(self):
    """
    Remove the layer's json reference (if it exists) from the repository.
    Return True if the layer is removed from the repository; return False if the layer does not exist in the repository.
    """
    #remove it from catalogue service
    res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL, self.datasource.workspace.name, self.kmi_name),
                          auth=(settings.CSW_USER, settings.CSW_PASSWORD))
    if res.status_code != 404:
        res.raise_for_status()

    json_files = [self.json_filename_abs(action) for action in ['publish', 'empty_gwc']]
    #get all existing files.
    json_files = [f for f in json_files if os.path.exists(f)]
    if json_files:
        #file exists, layer is published, remove it.
        try_set_push_owner("livelayer")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=json_files)
            hg.commit(include=json_files, addremove=True, user="******",
                      message="Remove live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()

            try_push_to_repository("livelayer", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("livelayer")
        return True
    else:
        return False
def publish(self):
    """
    publish store's json reference (if exists) to the repository
    """
    json_filename = self.json_filename_abs
    try_set_push_owner("wmsserver")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.name
        json_out["capability_url"] = self.get_capability_url
        json_out["username"] = self.user or ""
        json_out["password"] = self.password or ""
        json_out["workspace"] = self.workspace.name
        json_out["publish_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")
        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Update wms store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmsserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmsserver")
def empty_gwc(self):
    """
    update layer's json for empty gwc to the repository
    """
    if self.publish_status.unpublished:
        #layer is not published, no need to empty gwc
        raise ValidationError("The wms layer({0}) is not published before.".format(self.kmi_name))

    json_filename = self.json_filename_abs('empty_gwc')
    try_set_push_owner("livelayer")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.kmi_name
        json_out["workspace"] = self.datasource.workspace.name
        json_out["store"] = self.datasource.name
        json_out["action"] = "empty_gwc"
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Empty GWC of live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
        increase_committed_changes()

        try_push_to_repository("livelayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("livelayer")
def publish(self):
    """
    Only publish the member layers which are already published.
    """
    json_filename = self.json_filename_abs('publish')
    try_set_push_owner("layergroup")
    hg = None
    try:
        json_out = self.update_catalogue_service(extra_datas={"publication_date": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

        layers = []
        for group_layer in LayerGroupLayers.objects.filter(group=self).order_by("order"):
            if group_layer.layer and group_layer.layer.is_published:
                layers.append({"type": "wms_layer", "name": group_layer.layer.name, "store": group_layer.layer.server.name, "workspace": group_layer.layer.server.workspace.name})
            elif group_layer.publish and group_layer.publish.is_published:
                layers.append({"type": "publish", "name": group_layer.publish.name, "workspace": group_layer.publish.workspace.name})
            elif group_layer.sub_group and group_layer.sub_group.is_published:
                layers.append({"type": "group", "name": group_layer.sub_group.name, "workspace": group_layer.sub_group.workspace.name})

        if not layers:
            #layer group is empty, refuse to publish it.
            raise LayerGroupEmpty("Layer group can't be empty.")

        json_out["layers"] = layers
        json_out["srs"] = self.srs or None
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        inclusions = self.get_inclusions()
        dependent_groups = []
        for group in inclusions[2].keys():
            if group.is_published:
                dependent_groups.append({"name": group.name, "workspace": group.workspace.name})
        json_out["dependent_groups"] = dependent_groups

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, user="******", addremove=True,
                  message="Update layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def publish(self):
    """
    publish the layer group's json reference (if it exists) to the repository
    """
    json_filename = self.json_filename_abs
    try_set_push_owner("layergroup")
    hg = None
    try:
        layers = []
        for group_layer in LayerGroupLayers.objects.filter(group=self).order_by("order"):
            if group_layer.layer and group_layer.layer.is_published:
                layers.append({"type": "wms_layer", "name": group_layer.layer.layer_name, "store": group_layer.layer.server.name, "workspace": group_layer.layer.server.workspace.name})
            elif group_layer.publish:
                layers.append({"type": "publish", "name": group_layer.publish.name, "workspace": group_layer.publish.workspace.name})
            elif group_layer.sub_group and group_layer.sub_group.is_published:
                layers.append({"type": "group", "name": group_layer.sub_group.name, "workspace": group_layer.sub_group.workspace.name})

        if not layers:
            #layer group is empty, refuse to publish it.
            raise LayerGroupEmpty("Layer group can't be empty.")

        json_out = {}
        json_out["layers"] = layers
        json_out["name"] = self.name
        json_out["title"] = self.title or ""
        json_out["abstract"] = self.abstract or ""
        json_out["workspace"] = self.workspace.name
        json_out["srs"] = self.srs
        json_out["publish_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")

        inclusions = self.get_inclusions()
        dependent_groups = []
        for group in inclusions[2].keys():
            if group.is_published:
                dependent_groups.append({"name": group.name, "workspace": group.workspace.name})
        json_out["dependent_groups"] = dependent_groups

        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], user="******", addremove=True,
                  message="Update layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def unpublish(self):
    try_set_push_owner("wmsserver")
    hg = None
    try:
        meta_data = {}
        meta_data["name"] = self.name
        meta_data["workspace"] = self.workspace.name

        #write meta data file
        file_name = "{}.meta.json".format(self.name)
        meta_file = os.path.join(BorgConfiguration.UNPUBLISH_DIR, self.workspace.publish_channel.name, self.workspace.name, "stores", file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = 'remove'
        json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        json_filename = self.json_filename_abs('unpublish')
        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Unpublish wms store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmsserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmsserver")
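# Note (not part of the original code): file_md5 is referenced above and below but not defined
# in this section. A minimal sketch of what such a helper is assumed to do, i.e. return the hex
# MD5 digest of a file's contents; the real implementation in the codebase may differ.
import hashlib

def file_md5(path, block_size=65536):
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        #read in fixed-size chunks so large meta files don't have to fit in memory
        for block in iter(lambda: f.read(block_size), b""):
            md5.update(block)
    return md5.hexdigest()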
def publish(self):
    """
    publish store's json reference (if exists) to the repository
    """
    try_set_push_owner("liveserver")
    hg = None
    try:
        meta_data = {}
        meta_data["name"] = self.name
        meta_data["host"] = self.host
        meta_data["port"] = self.port
        meta_data["database"] = self.db_name
        meta_data["user"] = self.user
        meta_data["passwd"] = self.password
        meta_data["schema"] = self.schema
        meta_data["workspace"] = self.workspace.name
        if self.geoserver_setting:
            meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)

        #write meta data file
        file_name = "{}.{}.meta.json".format(self.workspace.name, self.name)
        meta_file = os.path.join(BorgConfiguration.LIVE_STORE_DIR, file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = 'publish'
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        json_filename = self.json_filename_abs('publish')
        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Update live store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("liveserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("liveserver")
def publish(self):
    """
    publish layer's json reference (if exists) to the repository
    """
    json_filename = self.json_filename_abs('publish')
    try_set_push_owner("livelayer")
    hg = None
    try:
        meta_data = self.update_catalogue_service(md5=True, extra_datas={"publication_date": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

        #write meta data file
        file_name = "{}.{}.meta.json".format(self.datasource.workspace.name, self.kmi_name)
        meta_file = os.path.join(BorgConfiguration.LIVE_LAYER_DIR, file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = "publish"
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, addremove=True, user="******",
                  message="update live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
        increase_committed_changes()

        try_push_to_repository("livelayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("livelayer")
def empty_gwc(self):
    """
    update layer's json for empty gwc to the repository
    """
    if self.publish_status.unpublished:
        #layer is not published, no need to empty gwc
        raise ValidationError("The wms layer({0}) is not published before.".format(self.name))

    json_filename = self.json_filename_abs('empty_gwc')
    try_set_push_owner("wmslayer")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.kmi_name
        json_out["workspace"] = self.server.workspace.name
        json_out["store"] = self.server.name
        json_out["action"] = "empty_gwc"
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Empty GWC of wms layer {}.{}".format(self.server.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmslayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmslayer")
def publish(self):
    """
    publish layer's json reference (if exists) to the repository
    """
    json_filename = self.json_filename_abs
    try_set_push_owner("wmslayer")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.layer_name
        json_out["native_name"] = self.name
        json_out["title"] = self.layer_title
        json_out["abstract"] = self.layer_abstract
        json_out["workspace"] = self.server.workspace.name
        json_out["store"] = self.server.name
        json_out["publish_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")

        from application.models import Application_Layers
        json_out["applications"] = ["{0}:{1}".format(o.application, o.order) for o in Application_Layers.objects.filter(wmslayer=self)]

        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="update wms layer {}.{}".format(self.server.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmslayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmslayer")
def unpublish(self):
    """
    Remove the layer group's json reference (if it exists) from the repository.
    Return True if the layer group is removed from the repository; return False if it does not exist in the repository.
    """
    json_filename = self.json_filename_abs
    if os.path.exists(json_filename):
        #file exists, layer group is published, remove it.
        try_set_push_owner("layergroup")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=[json_filename])
            hg.commit(include=[json_filename], addremove=True, user="******",
                      message="Remove layer group {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("layergroup", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("layergroup")
        return True
    else:
        return False
def unpublish(self):
    """
    unpublish layer group
    """
    json_files = [self.json_filename_abs(action) for action in ['publish', 'empty_gwc']]
    #get all existing files.
    json_files = [f for f in json_files if os.path.exists(f)]
    if json_files:
        #file exists, layer group is published, remove it.
        try_set_push_owner("layergroup")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=json_files)
            hg.commit(include=json_files, addremove=True, user="******",
                      message="Remove layer group {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("layergroup", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("layergroup")
        return True
    else:
        return False
def publish(self):
    """
    Only publish the member layers which are already published.
    """
    json_filename = self.json_filename_abs('publish')
    try_set_push_owner("layergroup")
    hg = None
    try:
        json_out = self.update_catalogue_service(extra_datas={"publication_date": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

        layers = []
        for group_layer in LayerGroupLayers.objects.filter(group=self).order_by("order"):
            if group_layer.layer and group_layer.layer.is_published:
                layers.append({"type": "wms_layer", "name": group_layer.layer.kmi_name, "store": group_layer.layer.server.name, "workspace": group_layer.layer.server.workspace.name})
            elif group_layer.publish and group_layer.publish.is_published:
                layers.append({"type": "publish", "name": group_layer.publish.name, "workspace": group_layer.publish.workspace.name})
            elif group_layer.sub_group and group_layer.sub_group.is_published:
                layers.append({"type": "group", "name": group_layer.sub_group.name, "workspace": group_layer.sub_group.workspace.name})

        if not layers:
            #layer group is empty, refuse to publish it.
            raise LayerGroupEmpty("Layer group can't be empty.")

        json_out["layers"] = layers
        json_out["srs"] = self.srs or None
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        inclusions = self.get_inclusions()
        dependent_groups = []
        for group in inclusions[2].keys():
            if group.is_published:
                dependent_groups.append({"name": group.name, "workspace": group.workspace.name})
        json_out["dependent_groups"] = dependent_groups

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, user="******", addremove=True,
                  message="Update layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def unpublish(self):
    """
    unpublish layer group
    """
    #remove it from catalogue service
    res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL, self.workspace.name, self.name),
                          auth=(settings.CSW_USER, settings.CSW_PASSWORD),
                          verify=settings.CSW_CERT_VERIFY)
    if res.status_code != 404:
        res.raise_for_status()

    publish_file = self.json_filename_abs('publish')
    publish_json = None
    if os.path.exists(publish_file):
        with open(publish_file, "r") as f:
            publish_json = json.loads(f.read())
    else:
        publish_json = {}

    json_file = self.json_filename_abs('unpublish')
    json_out = None

    try_set_push_owner("layergroup")
    hg = None
    try:
        if publish_json.get("action", "publish") != "remove":
            json_out = {}
            json_out["name"] = self.name
            json_out["workspace"] = self.workspace.name
            json_out["spatial_data"] = True
            json_out["channel"] = self.workspace.publish_channel.name
            json_out["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
            json_out['action'] = "remove"

            #retrieve meta data from the last publish task
            meta_json = publish_json
            if "meta" in publish_json and "file" in publish_json["meta"]:
                meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
                if os.path.exists(meta_file):
                    with open(meta_file, "r") as f:
                        meta_json = json.loads(f.read())
                else:
                    meta_json = {}

            for key in ["name", "workspace", "channel", "spatial_data", "sync_geoserver_data"]:
                if key in meta_json:
                    json_out[key] = meta_json[key]
        else:
            json_out = publish_json

        json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_file)):
            os.makedirs(os.path.dirname(json_file))

        with open(json_file, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_file)
        hg.commit(include=json_files, user="******", addremove=True,
                  message="Unpublish layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def unpublish(self):
    publish_file = self.json_filename_abs('publish')
    publish_json = None
    if os.path.exists(publish_file):
        with open(publish_file, "r") as f:
            publish_json = json.loads(f.read())
    else:
        publish_json = {}

    json_file = self.json_filename_abs('unpublish')
    json_out = None

    try_set_push_owner("wmsserver")
    hg = None
    try:
        if publish_json.get("action", "publish") != "remove":
            json_out = {}
            json_out["name"] = self.name
            json_out["workspace"] = self.workspace.name
            json_out["channel"] = self.workspace.publish_channel.name
            json_out['action'] = 'remove'
            json_out["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data

            #retrieve meta data from the last published task
            meta_json = publish_json
            if "meta" in publish_json and "file" in publish_json["meta"]:
                meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
                if os.path.exists(meta_file):
                    with open(meta_file, "r") as f:
                        meta_json = json.loads(f.read())
                else:
                    meta_json = {}

            for key in ["name", "workspace", "channel", "sync_geoserver_data"]:
                if key in meta_json:
                    json_out[key] = meta_json[key]
        else:
            json_out = publish_json

        json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_file)):
            os.makedirs(os.path.dirname(json_file))

        with open(json_file, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_file], addremove=True, user="******",
                  message="Unpublish wms store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmsserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmsserver")
def publish(self):
    """
    publish store's json reference (if exists) to the repository
    """
    try_set_push_owner("wmsserver")
    hg = None
    try:
        meta_data = {}
        meta_data["name"] = self.name
        meta_data["capability_url"] = self.get_capability_url
        meta_data["channel"] = self.workspace.publish_channel.name
        meta_data["username"] = self.user or ""
        meta_data["password"] = self.password or ""
        meta_data["workspace"] = self.workspace.name
        meta_data["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
        if self.geoserver_setting:
            meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)

        #write meta data file
        file_name = "{}.{}.meta.json".format(self.workspace.name, self.name)
        meta_file = os.path.join(BorgConfiguration.WMS_STORE_DIR, file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = 'publish'
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        json_filename = self.json_filename_abs('publish')
        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Update wms store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmsserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmsserver")
def publish(self):
    """
    publish layer's json reference (if exists) to the repository
    """
    json_filename = self.json_filename_abs('publish')
    try_set_push_owner("wmslayer")
    hg = None
    try:
        meta_data = self.update_catalogue_service(extra_datas={"publication_date": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

        #write meta data file
        file_name = "{}.{}.meta.json".format(self.server.workspace.name, self.kmi_name)
        meta_file = os.path.join(BorgConfiguration.WMS_LAYER_DIR, file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = "publish"
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, addremove=True, user="******",
                  message="update wms layer {}.{}".format(self.server.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("wmslayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmslayer")
def unpublish(self):
    """
    unpublish layer group
    """
    #remove it from catalogue service
    res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL, self.workspace.name, self.name),
                          auth=(settings.CSW_USER, settings.CSW_PASSWORD))
    if res.status_code != 404:
        res.raise_for_status()

    json_filename = self.json_filename_abs('unpublish')

    try_set_push_owner("layergroup")
    hg = None
    try:
        meta_data = {}
        #add extra data to meta data
        meta_data["workspace"] = self.workspace.name
        meta_data["name"] = self.name
        meta_data["native_name"] = self.name
        meta_data["auth_level"] = self.workspace.auth_level
        meta_data["spatial_data"] = True
        meta_data["channel"] = self.workspace.publish_channel.name
        meta_data["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data

        #write meta data file
        file_name = "{}.meta.json".format(self.name)
        meta_file = os.path.join(BorgConfiguration.UNPUBLISH_DIR, self.workspace.publish_channel.name, self.workspace.name, "layergroups", file_name)
        #create the dir if required
        if not os.path.exists(os.path.dirname(meta_file)):
            os.makedirs(os.path.dirname(meta_file))

        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        json_out = {}
        json_out['meta'] = {"file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file), "md5": file_md5(meta_file)}
        json_out['action'] = "remove"
        json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        #create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        #remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        #get all existing files.
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, user="******", addremove=True,
                  message="Unpublish layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()

        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
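# Note (not part of the original code): every method above wraps its repository work in the same
# try_set_push_owner / hg.commit / increase_committed_changes / try_push_to_repository /
# hg.close / try_clear_push_owner bookkeeping. The context manager below is a hedged sketch of
# how that sequence could be factored out; the name borg_state_commit is hypothetical and the
# helper functions are assumed to behave exactly as they are used in this module.
from contextlib import contextmanager

@contextmanager
def borg_state_commit(owner):
    try_set_push_owner(owner)
    hg = None
    try:
        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        #the caller performs hg.remove/hg.commit on the yielded client inside the with-block
        yield hg
        increase_committed_changes()
        try_push_to_repository(owner, hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner(owner)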