Example #1
    def execute(self, job, previous_state):
        """
        dump the full table data into download folder
        """
        if 'data' in job.metadict:
            del job.metadict['data']

        #create the dir if required
        if not os.path.exists(job.dump_dir):
            #dump dir does not exist, create it
            os.makedirs(job.dump_dir)

        file_name = job.publish.table_name + ".db"
        dump_file = os.path.join(job.dump_dir, file_name)
        cmd = self.dump_cmd + [
            "-t", job.publish.workspace.publish_data_schema + "." +
            job.publish.table_name, "-f", dump_file
        ]

        cursor = connection.cursor()
        if not previous_state.is_error_state:
            #a table with the same name may have been published by a previous job; drop it if it exists
            cursor.execute('drop table if exists "{0}"."{1}" cascade'.format(
                job.publish.workspace.publish_data_schema,
                job.publish.table_name))
        #move table to publish schema for dump
        cursor.execute('alter table "{0}"."{1}" set schema {2}'.format(
            job.publish.workspace.schema, job.publish.table_name,
            job.publish.workspace.publish_data_schema))
        try:
            #import ipdb;ipdb.set_trace()
            output = subprocess.Popen(cmd,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE,
                                      env=self.env).communicate()
            logger.debug("execute ({0})\nstdin:{1}\nstdout:{2}".format(
                cmd, output[0], output[1]))
        finally:
            #move table back to original schema
            cursor.execute('alter table "{0}"."{1}" set schema "{2}"'.format(
                job.publish.workspace.publish_data_schema,
                job.publish.table_name, job.publish.workspace.schema))

        if output[1].strip():
            return (HarvestStateOutcome.failed, output[1])
        else:
            job.metadict['data'] = {
                "file":
                "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, dump_file),
                "md5":
                file_md5(dump_file)
            }
            return (HarvestStateOutcome.succeed, None)
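Every example here records an "md5" entry via file_md5, a helper that is not shown in these snippets. A minimal sketch of such a helper, assuming it simply returns the hex MD5 digest of the file contents read in chunks (the real implementation may differ):

    import hashlib

    def file_md5(path, chunk_size=1024 * 1024):
        # hypothetical reconstruction of the file_md5 helper used in these examples
        digest = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()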
Example #2
    def unpublish(self):
        try_set_push_owner("wmsserver")
        hg = None
        try:
            meta_data = {}
            meta_data["name"] = self.name
            meta_data["workspace"] = self.workspace.name

            #write meta data file
            file_name = "{}.meta.json".format(self.name)
            meta_file = os.path.join(BorgConfiguration.UNPUBLISH_DIR,
                                     self.workspace.publish_channel.name,
                                     self.workspace.name, "stores", file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file, "wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {
                "file":
                "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),
                "md5":
                file_md5(meta_file)
            }
            json_out['action'] = 'remove'
            json_out["remove_time"] = timezone.localtime(
                timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

            json_filename = self.json_filename_abs('unpublish')
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)

            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],
                      addremove=True,
                      user="******",
                      message="Unpublish wms store {}.{}".format(
                          self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("wmsserver", hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("wmsserver")
Example #3
    def execute(self, job, previous_state):
        p = job.publish
        meta_data = p.update_catalogue_service(
            style_dump_dir=job.dump_dir,
            md5=True,
            extra_datas={
                "publication_date":
                datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
            })

        #write meta data file
        file_name = "{}.meta.json".format(p.table_name)
        meta_file = os.path.join(job.dump_dir, file_name)
        with open(meta_file, "wb") as output:
            json.dump(meta_data, output, indent=4)

        job.metadict['meta'] = {
            "file": "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX,
                                  meta_file),
            "md5": file_md5(meta_file)
        }

        return (HarvestStateOutcome.succeed, None)
Example #4
    def publish(self):
        """
         publish layer's json reference (if exists) to the repository,
        """
        json_filename = self.json_filename_abs('publish')
        try_set_push_owner("livelayer")
        hg = None
        try:
            meta_data = self.update_catalogue_service(md5=True,extra_datas={"publication_date":datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

            #write meta data file
            file_name = "{}.{}.meta.json".format(self.datasource.workspace.name,self.kmi_name)
            meta_file = os.path.join(BorgConfiguration.LIVE_LAYER_DIR,file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file,"wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
            json_out['action'] = "publish"
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
        
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
        
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

            #remove other related json files
            json_files = [ self.json_filename_abs(action) for action in [ 'empty_gwc' ] ]
            #get all existing files.
            json_files = [ f for f in json_files if os.path.exists(f) ]
            if json_files:
                hg.remove(files=json_files)

            json_files.append(json_filename)
            hg.commit(include=json_files,addremove=True, user="******", message="update live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
                
            try_push_to_repository("livelayer",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("livelayer")
Example #5
    def update_catalogue_service(self,md5=False,extra_datas=None):
        meta_data = self.builtin_metadata
        if extra_datas:
            meta_data.update(extra_datas)
        bbox = meta_data.get("bounding_box",None)
        crs = meta_data.get("crs",None)
        #update catalog service
        res = requests.post("{}/catalogue/api/records/?style_content=true".format(settings.CSW_URL),json=meta_data,auth=(settings.CSW_USER,settings.CSW_PASSWORD))
        if 400 <= res.status_code < 600 and res.content:
            res.reason = "{}({})".format(res.reason,res.content)
        res.raise_for_status()
        meta_data = res.json()

        #process styles
        styles = meta_data.get("styles",[])
        #filter out qml and lyr styles
        sld_styles = [s for s in meta_data.get("styles",[]) if s["format"].lower() == "sld"]
        meta_data["styles"] = {}
        style_dump_dir = BorgConfiguration.LIVE_LAYER_DIR
        if not os.path.exists(style_dump_dir):
            os.makedirs(style_dump_dir)

        for style in sld_styles:
            if style["default"]:
                #default sld file
                meta_data["default_style"] = style["name"]
            #write the style into file system
            style_file = os.path.join(style_dump_dir,"{}.{}.{}.sld".format(self.datasource.workspace.name,self.kmi_name,style["name"]))
            with open(style_file,"wb") as f:
                f.write(style["raw_content"].decode("base64"))
            if md5:
                meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"],"md5":file_md5(style_file)}
            else:
                meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"]}

        #add extra data to meta data
        meta_data["workspace"] = self.datasource.workspace.name
        meta_data["schema"] = self.datasource.schema
        meta_data["name"] = self.kmi_name
        meta_data["table"] = self.table
        meta_data["datastore"] = self.datasource.name
        meta_data["auth_level"] = self.datasource.workspace.auth_level
        meta_data["preview_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, BorgConfiguration.PREVIEW_DIR)
        meta_data["spatial_data"] = SpatialTable.check_spatial(self.spatial_type)
        meta_data["spatial_type"] = SpatialTable.get_spatial_type_desc(self.spatial_type)

        meta_data["channel"] = self.datasource.workspace.publish_channel.name
        meta_data["sync_geoserver_data"] = self.datasource.workspace.publish_channel.sync_geoserver_data

        if self.geoserver_setting:
            meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)
                
        #bbox
        if "bounding_box" in meta_data:
            del meta_data["bounding_box"]
        meta_data["bbox"] = bbox
        meta_data["crs"] = crs

        return meta_data
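Note that style["raw_content"].decode("base64") only works on Python 2 strings. On Python 3 the same step would go through the base64 module; a self-contained sketch, assuming raw_content is a base64-encoded SLD body as above (write_style_file is a name invented here for illustration):

    import base64

    def write_style_file(style_file, raw_content):
        # Python 3 equivalent of style["raw_content"].decode("base64"):
        # decode the base64-encoded style body and write it to the .sld file
        with open(style_file, "wb") as f:
            f.write(base64.b64decode(raw_content))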
Example #6
    def publish(self):
        """
         publish store's json reference (if exists) to the repository,
        """
        try_set_push_owner("liveserver")
        hg = None
        try:
            meta_data = {}
            meta_data["name"] = self.name
            meta_data["host"] = self.host
            meta_data["port"] = self.port
            meta_data["database"] = self.db_name
            meta_data["user"] = self.user
            meta_data["passwd"] = self.password
            meta_data["schema"] = self.schema
            meta_data["workspace"] = self.workspace.name
        
            if self.geoserver_setting:
                meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)

            #write meta data file
            file_name = "{}.{}.meta.json".format(self.workspace.name,self.name)
            meta_file = os.path.join(BorgConfiguration.LIVE_STORE_DIR,file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file,"wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
            json_out['action'] = 'publish'
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
        
            json_filename = self.json_filename_abs('publish')
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
        
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],addremove=True, user="******", message="Update live store {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()
                
            try_push_to_repository("liveserver",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("liveserver")
Example #7
    def unpublish(self):
        """
        unpublish layer group
        """
        #remove it from catalogue service
        res = requests.delete("{}/catalogue/api/records/{}:{}/".format(
            settings.CSW_URL, self.workspace.name, self.name),
                              auth=(settings.CSW_USER, settings.CSW_PASSWORD))
        if res.status_code != 404:
            res.raise_for_status()

        json_filename = self.json_filename_abs('unpublish')

        try_set_push_owner("layergroup")
        hg = None
        try:
            meta_data = {}
            #add extra data to meta data
            meta_data["workspace"] = self.workspace.name
            meta_data["name"] = self.name
            meta_data["native_name"] = self.name
            meta_data["auth_level"] = self.workspace.auth_level
            meta_data["spatial_data"] = True

            meta_data["channel"] = self.workspace.publish_channel.name
            meta_data[
                "sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data

            #write meta data file
            file_name = "{}.meta.json".format(self.name)
            meta_file = os.path.join(BorgConfiguration.UNPUBLISH_DIR,
                                     self.workspace.publish_channel.name,
                                     self.workspace.name, "layergroups",
                                     file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file, "wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {
                "file":
                "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),
                "md5":
                file_md5(meta_file)
            }
            json_out['action'] = "remove"
            json_out["remove_time"] = timezone.localtime(
                timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)

            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

            #remove other related json files
            json_files = [
                self.json_filename_abs(action) for action in ['empty_gwc']
            ]
            #get all existing files.
            json_files = [f for f in json_files if os.path.exists(f)]
            if json_files:
                hg.remove(files=json_files)

            json_files.append(json_filename)
            hg.commit(include=json_files,
                      user="******",
                      addremove=True,
                      message="Unpublish layer group {}.{}".format(
                          self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("layergroup", hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("layergroup")
Example #8
    def publish(self):
        """
         publish layer's json reference (if exists) to the repository,
        """
        json_filename = self.json_filename_abs('publish')
        try_set_push_owner("wmslayer")
        hg = None
        try:
            meta_data = self.update_catalogue_service(
                extra_datas={
                    "publication_date":
                    datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
                })

            #write meta data file
            file_name = "{}.{}.meta.json".format(self.server.workspace.name,
                                                 self.kmi_name)
            meta_file = os.path.join(BorgConfiguration.WMS_LAYER_DIR,
                                     file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file, "wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {
                "file":
                "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),
                "md5":
                file_md5(meta_file)
            }
            json_out['action'] = "publish"
            json_out["publish_time"] = timezone.localtime(
                timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)

            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

            #remove other related json files
            json_files = [
                self.json_filename_abs(action) for action in ['empty_gwc']
            ]
            #get all existing files.
            json_files = [f for f in json_files if os.path.exists(f)]
            if json_files:
                hg.remove(files=json_files)

            json_files.append(json_filename)
            hg.commit(include=json_files,
                      addremove=True,
                      user="******",
                      message="update wms layer {}.{}".format(
                          self.server.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("wmslayer", hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("wmslayer")
Example #9
    def publish(self):
        """
         publish store's json reference (if exists) to the repository,
        """
        try_set_push_owner("wmsserver")
        hg = None
        try:
            meta_data = {}
            meta_data["name"] = self.name
            meta_data["capability_url"] = self.get_capability_url
            meta_data["channel"] = self.workspace.publish_channel.name
            meta_data["username"] = self.user or ""
            meta_data["password"] = self.password or ""
            meta_data["workspace"] = self.workspace.name

            meta_data[
                "sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data

            if self.geoserver_setting:
                meta_data["geoserver_setting"] = json.loads(
                    self.geoserver_setting)

            #write meta data file
            file_name = "{}.{}.meta.json".format(self.workspace.name,
                                                 self.name)
            meta_file = os.path.join(BorgConfiguration.WMS_STORE_DIR,
                                     file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))

            with open(meta_file, "wb") as output:
                json.dump(meta_data, output, indent=4)

            json_out = {}
            json_out['meta'] = {
                "file":
                "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),
                "md5":
                file_md5(meta_file)
            }
            json_out['action'] = 'publish'
            json_out["publish_time"] = timezone.localtime(
                timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

            json_filename = self.json_filename_abs('publish')
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))

            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)

            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],
                      addremove=True,
                      user="******",
                      message="Update wms store {}.{}".format(
                          self.workspace.name, self.name))
            increase_committed_changes()

            try_push_to_repository("wmsserver", hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("wmsserver")
    def execute(self,job,previous_state):
        p = job.publish
        meta_data = p.update_catalogue_service(style_dump_dir=job.dump_dir,md5=True,extra_datas={"publication_date":datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})

        #write meta data file
        file_name = "{}.meta.json".format(p.table_name)
        meta_file = os.path.join(job.dump_dir,file_name)
        with open(meta_file,"wb") as output:
            json.dump(meta_data, output, indent=4)

        job.metadict['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}

        return (HarvestStateOutcome.succeed,None)
    def execute(self,job,previous_state):
        """
        dump the full table data into download folder
        """
        if 'data' in job.metadict:
            del job.metadict['data']

        #create the dir if required
        if not os.path.exists(job.dump_dir):
            #dump dir does not exist, create it
            os.makedirs(job.dump_dir)

        file_name = job.publish.table_name + ".tar"
        dump_file = os.path.join(job.dump_dir,file_name)
        cmd = self.dump_cmd + ["-t", job.publish.workspace.publish_data_schema + "." + job.publish.table_name, "-f", dump_file]

        cursor=connection.cursor()
        if not previous_state.is_error_state:
            #a table with the same name may have been published by a previous job; drop it if it exists
            cursor.execute('drop table if exists "{0}"."{1}" cascade'.format(job.publish.workspace.publish_data_schema,job.publish.table_name))
        #move table to publish schema for dump
        cursor.execute('alter table "{0}"."{1}" set schema {2}'.format(job.publish.workspace.schema,job.publish.table_name,job.publish.workspace.publish_data_schema))
        try:
            #import ipdb;ipdb.set_trace()
            output = subprocess.Popen(cmd,stdout=subprocess.PIPE,stderr=subprocess.PIPE, env=self.env).communicate()
            logger.debug("execute ({0})\nstdin:{1}\nstdout:{2}".format(cmd,output[0],output[1]))
        finally:
            #move table back to original schema
            cursor.execute('alter table "{0}"."{1}" set schema "{2}"'.format(job.publish.workspace.publish_data_schema,job.publish.table_name,job.publish.workspace.schema))

        if output[1].strip():
            return (HarvestStateOutcome.failed,output[1])
        else:
            job.metadict['data'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, dump_file),"md5":file_md5(dump_file)}
            return (HarvestStateOutcome.succeed,None)
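self.dump_cmd and self.env are configured outside these snippets. A plausible setup, assuming pg_dump with the password supplied through the environment, might look like the following; every connection value here is a placeholder, not taken from the original code:

    import os

    # hypothetical dump command; execute() appends "-t", "<schema>.<table>", "-f", "<dump_file>"
    dump_cmd = [
        "pg_dump",
        "-h", "localhost",   # database host (placeholder)
        "-p", "5432",        # database port (placeholder)
        "-U", "borg",        # database user (placeholder)
        "-d", "borg",        # database name (placeholder)
        "-F", "t",           # tar archive format, matching the .tar dump file above
        "-w",                # never prompt for a password
    ]
    env = dict(os.environ, PGPASSWORD="secret")  # placeholder password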