Example #1
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    if input.find("@") >= 0:
        old_id = input
    else:
        old_id = input + "@" + mapset

    if output.find("@") >= 0:
        new_id = output
    else:
        new_id = output + "@" + mapset
        
    # Do not overwrite yourself
    if new_id == old_id:
        return
        

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.dataset_factory(type, old_id)

    if new_id.split("@")[1] != mapset:
        grass.fatal(_("Space time %s dataset <%s> can not be renamed. "
                      "Mapset of the new identifier differs from the current "
                      "mapset.") % (stds.get_new_map_instance(None).get_type(), 
                                    old_id))
        
    if stds.is_in_db(dbif=dbif) == False:
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") % (
            stds.get_new_map_instance(None).get_type(), old_id))

    # Check if the new id is in the database
    new_stds = tgis.dataset_factory(type, new_id)

    if new_stds.is_in_db(dbif=dbif) == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to rename Space time %s dataset <%s>. Name <%s> "
                      "is in use, please use the overwrite flag.") % (
            stds.get_new_map_instance(None).get_type(), old_id, new_id))
    
    # Remove an already existing space time dataset
    if new_stds.is_in_db(dbif=dbif) == True:
        new_stds.delete(dbif=dbif)
        
    stds.select(dbif=dbif)
    stds.rename(ident=new_id, dbif=dbif)
    stds.update_command_string(dbif=dbif)
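
Every main() in this collection reads the module-level options and flags dictionaries. Those are not defined inside the snippets: in a GRASS Python module they are filled in by grass.script.parser(), which parses the interface comments at the top of the file. Below is a minimal, hedged sketch of that entry-point boilerplate; the interface description and the body of main() are placeholders for illustration, not part of the original snippet.

#!/usr/bin/env python3
# Sketch of the standard GRASS module entry point assumed by the examples.

# %module
# % description: Placeholder temporal module
# % keyword: temporal
# %end
# %option G_OPT_STDS_INPUT
# %end

import sys

import grass.script as grass
import grass.temporal as tgis


def main():
    # options and flags are module-level globals set below by grass.parser()
    name = options["input"]
    tgis.init()
    stds = tgis.open_old_stds(name, "strds")
    stds.print_info()


if __name__ == "__main__":
    options, flags = grass.parser()
    sys.exit(main())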
Example #2
def main():

    name = options["input"]
    type_ = options["type"]
    shellstyle = flags['g']
    system = flags['d']
    history = flags['h']

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle:
        #      0123456789012345678901234567890
        print(" +------------------- Temporal DBMI backend information ----------------------+")
        print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(
            tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(
            tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s"%(row[0], row[1]))
        print(" +----------------------------------------------------------------------------+")
        return
    elif system:
        print("dbmi_python_interface=\'" + str(dbif.get_dbmi().__name__) + "\'")
        print("dbmi_string=\'" + str(tgis.get_tgis_database_string()) + "\'")
        print("sql_template_path=\'" + str(tgis.get_sql_template_path()) + "\'")
        if rows:
            for row in rows:
                print("%s=\'%s\'"%(row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, id_)

    if dataset.is_in_db(dbif) == False:
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id_))

    dataset.select(dbif)

    if history and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle == True:
        dataset.print_shell_info()
    else:
        dataset.print_info()
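
Stripped of the flag handling, the lookup above follows a pattern that recurs throughout this page: initialize the temporal framework, qualify the name with the current mapset, build the dataset object with tgis.dataset_factory(), confirm it with is_in_db(), then select() its metadata. A condensed sketch of just that pattern, assuming a running GRASS session; the helper name print_stds_info and its defaults are made up for illustration.

import grass.script as grass
import grass.temporal as tgis


def print_stds_info(name, type_="strds"):
    """Look up a space time dataset and print its metadata."""
    # Make sure the temporal database exists
    tgis.init()
    dbif, connection_state_changed = tgis.init_dbif(None)

    # Qualify the name with the current mapset if necessary
    if "@" not in name:
        name = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, name)  # strds, stvds or str3ds

    if not dataset.is_in_db(dbif):
        dbif.close()
        grass.fatal("Dataset <%s> not found in temporal database" % name)

    dataset.select(dbif)  # load metadata from the temporal database
    dataset.print_info()

    if connection_state_changed:
        dbif.close()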
Example #3
def main():
    # Load metadata library
    from mdlib.mdgrass import GrassMD

    if not options['output']:
        destination = None
        output_name = None
    else:
        destination, output_name = os.path.split(options['output'])

    name = options["input"]
    type_ = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = "{}@{}".format(name, grass.gisenv()["MAPSET"])

    dataset = tgis.dataset_factory(type_, id_)

    if not dataset.is_in_db(dbif):
        grass.fatal(_("Dataset <%s> not found in temporal database") % id_)

    md = GrassMD(id_, type=type_)
    md.createTemporalISO()
    md.saveXML(
        path=destination,
        xml_out_name=output_name,
        overwrite=os.getenv('GRASS_OVERWRITE', False),
    )
Example #4
    def _gatherInformation(self, timeseries, etype, timeseriesList, infoDict):
        """Get info about timeseries and check topology (raises GException)"""
        id = validateTimeseriesName(timeseries, etype)
        sp = tgis.dataset_factory(etype, id)
        # Insert content from db
        sp.select()
        # Get ordered map list
        maps = sp.get_registered_maps_as_objects()

        if not sp.check_temporal_topology(maps):
            raise GException(_("Topology of Space time dataset %s is invalid." % id))

        timeseriesList.append(id)
        infoDict[id] = {}
        infoDict[id]['etype'] = etype
        infoDict[id]['temporal_type'] = sp.get_temporal_type()
        if sp.is_time_relative():
            infoDict[id]['unit'] = sp.get_relative_time_unit()
        infoDict[id]['granularity'] = sp.get_granularity()
        infoDict[id]['map_time'] = sp.get_map_time()
        infoDict[id]['maps'] = maps
Example #5
    def _gatherInformation(self, timeseries, etype, timeseriesList, infoDict):
        """Get info about timeseries and check topology (raises GException)"""
        id = validateTimeseriesName(timeseries, etype)
        sp = tgis.dataset_factory(etype, id)
        # Insert content from db
        sp.select()
        # Get ordered map list
        maps = sp.get_registered_maps_as_objects()

        if not sp.check_temporal_topology(maps):
            raise GException(_("Topology of Space time dataset %s is invalid." % id))

        timeseriesList.append(id)
        infoDict[id] = {}
        infoDict[id]["etype"] = etype
        infoDict[id]["temporal_type"] = sp.get_temporal_type()
        if sp.is_time_relative():
            infoDict[id]["unit"] = sp.get_relative_time_unit()
        infoDict[id]["granularity"] = sp.get_granularity()
        infoDict[id]["map_time"] = sp.get_map_time()
        infoDict[id]["maps"] = maps
Example #6
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    type = options["type"]
    temporal_type = options["temporaltype"]
    columns = options["columns"]
    order = options["order"]
    where = options["where"]
    separator = gscript.separator(options["separator"])
    outpath = options["output"]
    colhead = flags['c']

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.dataset_factory(type, None)
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    first = True

    if gscript.verbosity() > 0 and not outpath:
        sys.stderr.write("----------------------------------------------\n")

    if outpath:
        outfile = open(outpath, 'w')

    for ttype in temporal_type.split(","):
        if ttype == "absolute":
            time = "absolute time"
        else:
            time = "relative time"

        stds_list = tgis.get_dataset_list(type, ttype, columns, where, order, dbif=dbif)

        # Use the correct order of the mapsets, hence first the current mapset, then
        # alphabetic ordering
        mapsets = tgis.get_tgis_c_library_interface().available_mapsets()

        # Print for each mapset separately
        for key in mapsets:
            if key in stds_list.keys():
                rows = stds_list[key]

                if rows:
                    if gscript.verbosity() > 0 and not outpath:
                        if issubclass(sp.__class__, tgis.AbstractMapDataset):
                            sys.stderr.write(_("Time stamped %s maps with %s available in mapset <%s>:\n")%
                                                     (sp.get_type(), time, key))
                        else:
                            sys.stderr.write(_("Space time %s datasets with %s available in mapset <%s>:\n")%
                                                     (sp.get_new_map_instance(None).get_type(), time, key))

                    # Print the column names if requested
                    if colhead and first:
                        output = ""
                        count = 0
                        for key in rows[0].keys():
                            if count > 0:
                                output += separator + str(key)
                            else:
                                output += str(key)
                            count += 1
                        if outpath:
                            outfile.write("{st}\n".format(st=output))
                        else:
                            print(output)
                        first = False

                    for row in rows:
                        output = ""
                        count = 0
                        for col in row:
                            if count > 0:
                                output += separator + str(col)
                            else:
                                output += str(col)
                            count += 1
                        if outpath:
                            outfile.write("{st}\n".format(st=output))
                        else:
                            print(output)
    if outpath:
        outfile.close()
    dbif.close()
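
The listing loop above builds each output line by concatenating column values with the separator by hand. Assuming the separator resolves to a single character (as it does for the usual comma, pipe, tab and space values returned by gscript.separator()), the same row writing could also be expressed with Python's csv module. This is only an alternative sketch, not how the module above is written, and the helper name write_rows is invented for illustration.

import csv
import sys


def write_rows(rows, separator, outfile=None):
    """Write a header line and the data rows as delimited text."""
    target = outfile if outfile is not None else sys.stdout
    writer = csv.writer(target, delimiter=separator, lineterminator="\n")
    if rows:
        # Column names first, then one line per record
        writer.writerow(list(rows[0].keys()))
        for row in rows:
            writer.writerow([str(col) for col in row])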
Example #7
def main():

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correctly
    if output.find("@") >= 0:
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(_("Output space time dataset <%s> must be located in this mapset") % (output))
        output_id = output
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Space time datasets to merge must have the same temporal type"))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s>: "
                      "please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output, type,
                                   first.get_temporal_type(),
                                   "Merged space time dataset",
                                   "Merged space time dataset",
                                   "mean", dbif=dbif, overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset 
    # are not registered again
    if output_exists == True:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)

            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue

                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists == True:
        output_stds.update_command_string(dbif=dbif)
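
The function above is the body of a merge tool; when the goal is simply to combine existing space time datasets from another script, it is usually enough to call the corresponding module through grass.script rather than re-implement this logic. A small usage sketch, assuming the snippet belongs to t.merge and that the named datasets already exist in the current mapset (the dataset names are placeholders):

import grass.script as grass

# Merge two existing STRDS into a new one; requires a running GRASS session.
grass.run_command(
    "t.merge",
    inputs="precip_2020,precip_2021",
    output="precip_2020_2021",
    type="strds",
    overwrite=True,
)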
Example #8
    def _getSTVDData(self, timeseries):
        """Load data and read properties
        :param list timeseries: a list of timeseries
        """

        mode = None
        unit = None
        cats = None
        attribute = self.attribute.GetValue()
        if self.cats.GetValue() != '':
            cats = self.cats.GetValue().split(',')
        if cats and self.poi:
            GMessage(message=_("Both coordinates and categories are set; "
                               "coordinates will be used. To use categories, "
                               "remove the text from the coordinate form"))
        if not attribute or attribute == '':
            GError(parent=self,
                   showTraceback=False,
                   message=_("With Vector temporal dataset you have to select"
                             " an attribute column"))
            return
        columns = ','.join(['name', 'start_time', 'end_time', 'id', 'layer'])
        for series in timeseries:
            name = series[0]
            fullname = name + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, fullname)
            if not sp.is_in_db(dbif=self.dbif):
                GError(message=_("Dataset <%s> not found in temporal "
                                 "database") % (fullname),
                       parent=self,
                       showTraceback=False)
                return
            sp.select(dbif=self.dbif)

            rows = sp.get_registered_maps(dbif=self.dbif,
                                          order="start_time",
                                          columns=columns,
                                          where=None)

            self.timeDataV[name] = OrderedDict()
            self.timeDataV[name]['temporalDataType'] = etype
            self.timeDataV[name]['temporalType'] = sp.get_temporal_type()
            self.timeDataV[name]['granularity'] = sp.get_granularity()

            if mode is None:
                mode = self.timeDataV[name]['temporalType']
            elif self.timeDataV[name]['temporalType'] != mode:
                GError(parent=self,
                       showTraceback=False,
                       message=_(
                           "Datasets have different temporal type ("
                           "absolute x relative), which is not allowed."))
                return
            self.timeDataV[name]['unit'] = None  # only with relative
            if self.timeDataV[name]['temporalType'] == 'relative':
                start, end, self.timeDataV[name][
                    'unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeDataV[name]['unit']
                elif self.timeDataV[name]['unit'] != unit:
                    GError(message=_("Datasets have different time unit which"
                                     " is not allowed."),
                           parent=self,
                           showTraceback=False)
                    return
            if self.poi:
                self.plotNameListV.append(name)
                # TODO set an appropriate distance, right now a big one is set
                # to return the closer point to the selected one
                out = grass.vector_what(map='pois_srvds',
                                        coord=self.poi.coords(),
                                        distance=10000000000000000)
                if len(out) != len(rows):
                    GError(parent=self,
                           showTraceback=False,
                           message=_("Different number of vector layers and "
                                     "maps in the vector temporal dataset"))
                    return
                for i in range(len(rows)):
                    row = rows[i]
                    values = out[i]
                    if str(row['layer']) == str(values['Layer']):
                        lay = "{map}_{layer}".format(map=row['name'],
                                                     layer=values['Layer'])
                        self.timeDataV[name][lay] = {}
                        self.timeDataV[name][lay]['start_datetime'] = row[
                            'start_time']
                        self.timeDataV[name][lay]['end_datetime'] = row[
                            'start_time']
                        self.timeDataV[name][lay]['value'] = values[
                            'Attributes'][attribute]
            else:
                wherequery = ''
                cats = self._getExistingCategories(rows[0]['name'], cats)
                totcat = len(cats)
                ncat = 1
                for cat in cats:
                    if ncat == 1 and totcat != 1:
                        wherequery += '{k}={c} or'.format(c=cat, k="{key}")
                    elif ncat == 1 and totcat == 1:
                        wherequery += '{k}={c}'.format(c=cat, k="{key}")
                    elif ncat == totcat:
                        wherequery += ' {k}={c}'.format(c=cat, k="{key}")
                    else:
                        wherequery += ' {k}={c} or'.format(c=cat, k="{key}")

                    catn = "cat{num}".format(num=cat)
                    self.plotNameListV.append("{na}+{cat}".format(na=name,
                                                                  cat=catn))
                    self.timeDataV[name][catn] = OrderedDict()
                    ncat += 1
                for row in rows:
                    lay = int(row['layer'])
                    catkey = self._parseVDbConn(row['name'], lay)
                    if not catkey:
                        GError(parent=self,
                               showTraceback=False,
                               message=_(
                                   "No connection between vector map {vmap} "
                                   "and layer {la}".format(vmap=row['name'],
                                                           la=lay)))
                        return
                    vals = grass.vector_db_select(
                        map=row['name'],
                        layer=lay,
                        where=wherequery.format(key=catkey),
                        columns=attribute)
                    layn = "lay{num}".format(num=lay)
                    for cat in cats:
                        catn = "cat{num}".format(num=cat)
                        if layn not in self.timeDataV[name][catn].keys():
                            self.timeDataV[name][catn][layn] = {}
                        self.timeDataV[name][catn][layn][
                            'start_datetime'] = row['start_time']
                        self.timeDataV[name][catn][layn]['end_datetime'] = row[
                            'end_time']
                        self.timeDataV[name][catn][layn]['value'] = vals[
                            'values'][int(cat)][0]
        self.unit = unit
        self.temporalType = mode
        return
Example #9
    def _getSTRDdata(self, timeseries):
        """Load data and read properties
        :param list timeseries: a list of timeseries
        """
        if not self.poi:
            GError(parent=self,
                   message=_("Invalid input coordinates"),
                   showTraceback=False)
            return
        mode = None
        unit = None
        columns = ','.join(['name', 'start_time', 'end_time'])
        for series in timeseries:
            name = series[0]
            fullname = name + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, fullname)
            if not sp.is_in_db(dbif=self.dbif):
                GError(message=_("Dataset <%s> not found in temporal "
                                 "database") % (fullname),
                       parent=self)
                return
            sp.select(dbif=self.dbif)

            minmin = sp.metadata.get_min_min()
            self.plotNameListR.append(name)
            self.timeDataR[name] = OrderedDict()

            self.timeDataR[name]['temporalDataType'] = etype
            self.timeDataR[name]['temporalType'] = sp.get_temporal_type()
            self.timeDataR[name]['granularity'] = sp.get_granularity()

            if mode is None:
                mode = self.timeDataR[name]['temporalType']
            elif self.timeDataR[name]['temporalType'] != mode:
                GError(parent=self,
                       message=_("Datasets have different temporal"
                                 " type (absolute x relative), "
                                 "which is not allowed."))
                return

            # check topology
            maps = sp.get_registered_maps_as_objects(dbif=self.dbif)
            self.timeDataR[name]['validTopology'] = sp.check_temporal_topology(
                maps=maps, dbif=self.dbif)

            self.timeDataR[name]['unit'] = None  # only with relative
            if self.timeDataR[name]['temporalType'] == 'relative':
                start, end, self.timeDataR[name][
                    'unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeDataR[name]['unit']
                elif self.timeDataR[name]['unit'] != unit:
                    GError(parent=self,
                           message=_("Datasets have different "
                                     "time unit which is not "
                                     "allowed."))
                    return

            rows = sp.get_registered_maps(columns=columns,
                                          where=None,
                                          order='start_time',
                                          dbif=self.dbif)
            for row in rows:
                self.timeDataR[name][row[0]] = {}
                self.timeDataR[name][row[0]]['start_datetime'] = row[1]
                self.timeDataR[name][row[0]]['end_datetime'] = row[2]
                r = RasterRow(row[0])
                r.open()
                val = r.get_value(self.poi)
                r.close()
                if val == -2147483648 and val < minmin:
                    self.timeDataR[name][row[0]]['value'] = None
                else:
                    self.timeDataR[name][row[0]]['value'] = val

        self.unit = unit
        self.temporalType = mode
        return
Example #10
def main():
    # lazy imports
    import grass.temporal as tgis

    name = options["input"]
    type_ = options["type"]
    shellstyle = flags["g"]
    system = flags["d"]
    history = flags["h"]

    # Make sure the temporal database exists
    tgis.init()

    dbif, connection_state_changed = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle and not history:
        #      0123456789012345678901234567890
        print(
            " +------------------- Temporal DBMI backend information ----------------------+"
        )
        print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s" % (row[0], row[1]))
        print(
            " +----------------------------------------------------------------------------+"
        )
        return
    elif system and not history:
        print("dbmi_python_interface='" + str(dbif.get_dbmi().__name__) + "'")
        print("dbmi_string='" + str(tgis.get_tgis_database_string()) + "'")
        print("sql_template_path='" + str(tgis.get_sql_template_path()) + "'")
        if rows:
            for row in rows:
                print("%s='%s'" % (row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, id_)

    if not dataset.is_in_db(dbif):
        grass.fatal(
            _("Dataset <{n}> of type <{t}> not found in temporal database").format(
                n=id_, t=type_
            )
        )

    dataset.select(dbif)

    if history and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle:
        dataset.print_shell_info()
    else:
        dataset.print_info()
Example #11
def main():

    name = options["input"]
    type_ = options["type"]
    shellstyle = flags['g']
    system = flags['s']
    history = flags['h']

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle:
        #      0123456789012345678901234567890
        print(" +------------------- Temporal DBMI backend information ----------------------+")
        print(" | DBMI Python interface:...... " + str(
            dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(
            tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(
            tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s" % (row[0], row[1]))
        print(" +----------------------------------------------------------------------------+")
        return
    elif system:
        print("dbmi_python_interface='" + str(dbif.get_dbmi().__name__) + "'")
        print("dbmi_string='" + str(tgis.get_tgis_database_string()) + "'")
        print("sql_template_path='" + str(tgis.get_sql_template_path()) + "'")
        if rows:
            for row in rows:
                print("%s='%s'" % (row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, id_)

    if dataset.is_in_db(dbif) == False:
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id_))

    dataset.select(dbif)

    if history and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle == True:
        dataset.print_shell_info()
    else:
        dataset.print_info()
Example #12
    def _getLabelsAndMaps(self, timeseries):
        """Returns time labels and map names (done by sampling)
        for both interval and point data.
        """
        sp = tgis.dataset_factory(self.timeseriesInfo[timeseries]['etype'],
                                  timeseries)
        if sp.is_in_db() is False:
            raise GException(
                _("Space time dataset <%s> not found.") % timeseries)
        sp.select()

        listOfMaps = []
        timeLabels = []
        granNum, unit = self.GetGranularity()
        if self.temporalType == TemporalType.ABSOLUTE:
            if self.granularityMode == GranularityMode.ONE_UNIT:
                gran = '%(one)d %(unit)s' % {'one': 1, 'unit': unit}
            else:
                gran = '%(num)d %(unit)s' % {'num': granNum, 'unit': unit}

        elif self.temporalType == TemporalType.RELATIVE:
            unit = self.timeseriesInfo[timeseries]['unit']
            if self.granularityMode == GranularityMode.ONE_UNIT:
                gran = 1
            else:
                gran = granNum
        # start sampling - now it can be used for both interval and point data
        # after instance, there can be a gap or an interval
        # if it is a gap we remove it and put there the previous instance instead
        # however the first gap must be removed to avoid duplication
        maps = sp.get_registered_maps_as_objects_by_granularity(gran=gran)
        if maps and len(maps) > 0:
            lastTimeseries = None
            followsPoint = False  # indicates that we are just after finding a point
            afterPoint = False  # indicates that we are after finding a point
            for mymap in maps:
                if isinstance(mymap, list):
                    if len(mymap) > 0:
                        map = mymap[0]
                else:
                    map = mymap

                series = map.get_id()

                start, end = map.get_temporal_extent_as_tuple()
                if self.timeseriesInfo[timeseries]['map_time'] == 'point':
                    # point data
                    listOfMaps.append(series)
                    afterPoint = True
                    followsPoint = True
                    lastTimeseries = series
                    end = None
                else:
                    end = end
                    # interval data
                    if series:
                        # map exists, stop point mode
                        listOfMaps.append(series)
                        afterPoint = False
                    else:
                        # check point mode
                        if afterPoint:
                            if followsPoint:
                                # skip this one, already there
                                followsPoint = False
                                continue
                            else:
                                # append the last one (of point time)
                                listOfMaps.append(lastTimeseries)
                                end = None
                        else:
                            # append series which is None
                            listOfMaps.append(series)
                timeLabels.append((start, end, unit))

        return timeLabels, listOfMaps
Example #13
def main():
    # lazy imports
    import grass.temporal as tgis

    strdsin = options["strds"]
    rasterin = options["raster"]
    strdsout = options["output"]
    bandname = options["bandname"]

    type_ = "strds"

    # make sure the temporal database exists
    tgis.init()

    dbif, connection_state_changed = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if strdsin.find("@") >= 0:
        strdsid_ = strdsin
    else:
        strdsid_ = strdsin + "@" + grass.gisenv()["MAPSET"]

    if rasterin.find("@") >= 0:
        rasterid_ = rasterin
    else:
        rasterid_ = rasterin + "@" + grass.gisenv()["MAPSET"]

    datasetin = tgis.dataset_factory(type_, strdsid_)

    if not datasetin.is_in_db(dbif):
        grass.fatal(
            _("Dataset <{n}> of type <{t}> not found in temporal database").
            format(n=strdsid_, t=type_))

    datasetin.select(dbif)

    start_time = datasetin.temporal_extent.get_start_time()
    end_time = datasetin.temporal_extent.get_end_time()

    # create a new strds using the old strds as template

    # specs of input strds
    sp = tgis.open_old_stds(strdsid_, "strds", dbif)
    ttype, stype, title, descr = sp.get_initial_values()
    dbif.close()

    # t.create, use specs of input strds
    grass.run_command('t.create',
                      type='strds',
                      output=strdsout,
                      temporaltype=ttype,
                      semantictype=stype,
                      title=title,
                      description=descr)

    # register the raster map in the new strds
    rlistfile = grass.tempfile(create=False)
    fd = open(rlistfile, "w")
    if bandname:
        fd.write("%s|%s|%s|%s\n" %
                 (rasterid_, str(start_time), str(end_time), bandname))
    else:
        fd.write("%s|%s|%s\n" % (rasterid_, str(start_time), str(end_time)))
    fd.close()
    # t.register to create new strds
    grass.run_command('t.register', input=strdsout, file=rlistfile)
    grass.try_remove(rlistfile)
Example #14
def main():

    # Get the options
    file = options["file"]
    input = options["input"]
    maps = options["maps"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    if maps and file:
        grass.fatal(_(
            "%s= and %s= are mutually exclusive") % ("input", "file"))

    if not maps and not file:
        grass.fatal(_("%s= or %s= must be specified") % ("input", "file"))

    mapset = grass.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # In case a space time dataset is specified
    if input:
        sp = tgis.open_old_stds(input, type, dbif)

    maplist = []

    dummy = tgis.RasterDataset(None)

    # Map names as comma separated string
    if maps is not None and maps != "":
        if maps.find(",") == -1:
            maplist = [maps, ]
        else:
            maplist = maps.split(",")

        # Build the maplist
        for count in range(len(maplist)):
            mapname = maplist[count]
            mapid = dummy.build_id(mapname, mapset)
            maplist[count] = mapid

    # Read the map list from file
    if file:
        fd = open(file, "r")

        line = True
        while True:
            line = fd.readline()
            if not line:
                break

            mapname = line.strip()
            mapid = dummy.build_id(mapname, mapset)
            maplist.append(mapid)

    num_maps = len(maplist)
    update_dict = {}
    count = 0

    statement = ""

    # Unregister already registered maps
    grass.message(_("Unregister maps"))
    for mapid in maplist:
        if count%10 == 0:
            grass.percent(count, num_maps, 1)

        map = tgis.dataset_factory(type, mapid)

        # Unregister map if in database
        if map.is_in_db(dbif) == True:
            # Unregister from a single dataset
            if input:
                # Collect SQL statements
                statement += sp.unregister_map(
                    map=map, dbif=dbif, execute=False)

            # Unregister from temporal database
            else:
                # We need to update all datasets after the removal of maps
                map.metadata.select(dbif)
                datasets = map.get_registered_stds(dbif)
                # Store all unique dataset ids in a dictionary
                if datasets:
                    for dataset in datasets:
                        update_dict[dataset] = dataset
                # Collect SQL statements
                statement += map.delete(dbif=dbif, update=False, execute=False)
        else:
            grass.warning(_("Unable to find %s map <%s> in temporal database" %
                            (map.get_type(), map.get_id())))

        count += 1

    # Execute the collected SQL statements
    if statement:
        dbif.execute_transaction(statement)

    grass.percent(num_maps, num_maps, 1)

    # Update space time datasets
    if input:
        grass.message(_("Unregister maps from space time dataset <%s>"%(input)))
    else:
        grass.message(_("Unregister maps from the temporal database"))

    if input:
        sp.update_from_registered_maps(dbif)
        sp.update_command_string(dbif=dbif)
    elif len(update_dict) > 0:
        count = 0
        for key in update_dict.keys():
            id = update_dict[key]
            sp = tgis.open_old_stds(id, type, dbif)
            sp.update_from_registered_maps(dbif)
            grass.percent(count, len(update_dict), 1)
            count += 1

    dbif.close()
Example #15
    def _getLabelsAndMaps(self, timeseries):
        """Returns time labels and map names (done by sampling)
        for both interval and point data.
        """
        sp = tgis.dataset_factory(self.timeseriesInfo[timeseries]['etype'], timeseries)
        if sp.is_in_db() is False:
            raise GException(_("Space time dataset <%s> not found.") % timeseries)
        sp.select()

        listOfMaps = []
        timeLabels = []
        granNum, unit = self.GetGranularity()
        if self.temporalType == TemporalType.ABSOLUTE:
            if self.granularityMode == GranularityMode.ONE_UNIT:
                gran = '%(one)d %(unit)s' % {'one': 1, 'unit': unit}
            else:
                gran = '%(num)d %(unit)s' % {'num': granNum, 'unit': unit}

        elif self.temporalType == TemporalType.RELATIVE:
            unit = self.timeseriesInfo[timeseries]['unit']
            if self.granularityMode == GranularityMode.ONE_UNIT:
                gran = 1
            else:
                gran = granNum
        # start sampling - now it can be used for both interval and point data
        # after instance, there can be a gap or an interval
        # if it is a gap we remove it and put there the previous instance instead
        # however the first gap must be removed to avoid duplication
        maps = sp.get_registered_maps_as_objects_by_granularity(gran=gran)
        if maps and len(maps) > 0:
            lastTimeseries = None
            followsPoint = False  # indicates that we are just after finding a point
            afterPoint = False  # indicates that we are after finding a point
            for mymap in maps:
                if isinstance(mymap, list):
                    if len(mymap) > 0:
                        map = mymap[0]
                else:
                    map = mymap

                series = map.get_id()

                start, end = map.get_temporal_extent_as_tuple()
                if self.timeseriesInfo[timeseries]['map_time'] == 'point':
                    # point data
                    listOfMaps.append(series)
                    afterPoint = True
                    followsPoint = True
                    lastTimeseries = series
                    end = None
                else:
                    end = end
                    # interval data
                    if series:
                        # map exists, stop point mode
                        listOfMaps.append(series)
                        afterPoint = False
                    else:
                        # check point mode
                        if afterPoint:
                            if followsPoint:
                                # skip this one, already there
                                followsPoint = False
                                continue
                            else:
                                # append the last one (of point time)
                                listOfMaps.append(lastTimeseries)
                                end = None
                        else:
                            # append series which is None
                            listOfMaps.append(series)
                timeLabels.append((start, end, unit))

        return timeLabels, listOfMaps
Example #16
File: frame.py  Project: caomw/grass
    def _getData(self, timeseries):
        """Load data and read properties

        :param list timeseries: a list of timeseries
        """
        self.timeData = OrderedDict()
        mode = None
        unit = None
        columns = ','.join(['name', 'start_time', 'end_time'])
        for series in timeseries:
            name = series[0]
            fullname = name + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, fullname)
            if not sp.is_in_db(dbif=self.dbif):
                GError(self, message=_("Dataset <%s> not found in temporal "
                                       "database") % (fullname))
                return
            sp.select(dbif=self.dbif)

            self.timeData[name] = OrderedDict()

            self.timeData[name]['temporalDataType'] = etype
            self.timeData[name]['temporalType'] = sp.get_temporal_type()
            self.timeData[name]['granularity'] = sp.get_granularity()
            if mode is None:
                mode = self.timeData[name]['temporalType']
            elif self.timeData[name]['temporalType'] != mode:
                GError(parent=self, message=_("Datasets have different temporal"
                                              " type (absolute x relative), "
                                              "which is not allowed."))
                return

            # check topology
            maps = sp.get_registered_maps_as_objects(dbif=self.dbif)
            self.timeData[name]['validTopology'] = sp.check_temporal_topology(maps=maps, dbif=self.dbif)

            self.timeData[name]['unit'] = None  # only with relative
            if self.timeData[name]['temporalType'] == 'relative':
                start, end, self.timeData[name]['unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeData[name]['unit']
                elif self.timeData[name]['unit'] != unit:
                    GError(self, _("Datasets have different time unit which "
                                   "is not allowed."))
                    return

            rows = sp.get_registered_maps(columns=columns, where=None,
                                          order='start_time', dbif=self.dbif)
            for row in rows:
                self.timeData[name][row[0]] = {}
                self.timeData[name][row[0]]['start_datetime'] = row[1]
                self.timeData[name][row[0]]['end_datetime'] = row[2]
                r = RasterRow(row[0])
                r.open()
                val = r.get_value(self.poi)
                r.close()
                self.timeData[name][row[0]]['value'] = val
        self.unit = unit
        self.temporalType = mode
        return
Example #17
    def _getSTVDData(self, timeseries):
        """Load data and read properties
        :param list timeseries: a list of timeseries
        """

        mode = None
        unit = None
        cats = None
        attribute = self.attribute.GetValue()
        if self.cats.GetValue() != '':
            cats = self.cats.GetValue().split(',')
        if cats and self.poi:
            GMessage(message=_("Both coordinates and categories are set; "
                               "coordinates will be used. To use categories, "
                               "remove the text from the coordinate form"))
        if not attribute or attribute == '':
            GError(parent=self, showTraceback=False,
                   message=_("With Vector temporal dataset you have to select"
                             " an attribute column"))
            return
        columns = ','.join(['name', 'start_time', 'end_time', 'id', 'layer'])
        for series in timeseries:
            name = series[0]
            fullname = name + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, fullname)
            if not sp.is_in_db(dbif=self.dbif):
                GError(message=_("Dataset <%s> not found in temporal "
                                 "database") % (fullname), parent=self,
                                 showTraceback=False)
                return
            sp.select(dbif=self.dbif)

            rows = sp.get_registered_maps(dbif=self.dbif, order="start_time",
                                          columns=columns, where=None)

            self.timeDataV[name] = OrderedDict()
            self.timeDataV[name]['temporalDataType'] = etype
            self.timeDataV[name]['temporalType'] = sp.get_temporal_type()
            self.timeDataV[name]['granularity'] = sp.get_granularity()

            if mode is None:
                mode = self.timeDataV[name]['temporalType']
            elif self.timeDataV[name]['temporalType'] != mode:
                GError(parent=self, showTraceback=False,
                       message=_("Datasets have different temporal type ("
                                 "absolute x relative), which is not allowed."))
                return
            self.timeDataV[name]['unit'] = None  # only with relative
            if self.timeDataV[name]['temporalType'] == 'relative':
                start, end, self.timeDataV[name]['unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeDataV[name]['unit']
                elif self.timeDataV[name]['unit'] != unit:
                    GError(message=_("Datasets have different time unit which"
                                     " is not allowed."), parent=self,
                           showTraceback=False)
                    return
            if self.poi:
                self.plotNameListV.append(name)
                # TODO set an appropriate distance, right now a big one is set
                # to return the closer point to the selected one
                out = grass.vector_what(map='pois_srvds',
                                        coord=self.poi.coords(),
                                        distance=10000000000000000)
                if len(out) != len(rows):
                    GError(parent=self, showTraceback=False,
                           message=_("Different number of vector layers and "
                                     "maps in the vector temporal dataset"))
                    return
                for i in range(len(rows)):
                    row = rows[i]
                    values = out[i]
                    if str(row['layer']) == str(values['Layer']):
                        lay = "{map}_{layer}".format(map=row['name'],
                                                     layer=values['Layer'])
                        self.timeDataV[name][lay] = {}
                        self.timeDataV[name][lay]['start_datetime'] = row['start_time']
                        self.timeDataV[name][lay]['end_datetime'] = row['start_time']
                        self.timeDataV[name][lay]['value'] = values['Attributes'][attribute]
            else:
                wherequery = ''
                cats = self._getExistingCategories(rows[0]['name'], cats)
                totcat = len(cats)
                ncat = 1
                for cat in cats:
                    if ncat == 1 and totcat != 1:
                        wherequery += '{k}={c} or'.format(c=cat, k="{key}")
                    elif ncat == 1 and totcat == 1:
                        wherequery += '{k}={c}'.format(c=cat, k="{key}")
                    elif ncat == totcat:
                        wherequery += ' {k}={c}'.format(c=cat, k="{key}")
                    else:
                        wherequery += ' {k}={c} or'.format(c=cat, k="{key}")

                    catn = "cat{num}".format(num=cat)
                    self.plotNameListV.append("{na}+{cat}".format(na=name,
                                                                  cat=catn))
                    self.timeDataV[name][catn] = OrderedDict()
                    ncat += 1
                for row in rows:
                    lay = int(row['layer'])
                    catkey = self._parseVDbConn(row['name'], lay)
                    if not catkey:
                        GError(parent=self, showTraceback=False,
                           message=_("No connection between vector map {vmap} "
                                     "and layer {la}".format(vmap=row['name'],
                                                              la=lay)))
                        return
                    vals = grass.vector_db_select(map=row['name'], layer=lay,
                                                  where=wherequery.format(key=catkey),
                                                  columns=attribute)
                    layn = "lay{num}".format(num=lay)
                    for cat in cats:
                        catn = "cat{num}".format(num=cat)
                        if layn not in self.timeDataV[name][catn].keys():
                            self.timeDataV[name][catn][layn] = {}
                        self.timeDataV[name][catn][layn]['start_datetime'] = row['start_time']
                        self.timeDataV[name][catn][layn]['end_datetime'] = row['end_time']
                        self.timeDataV[name][catn][layn]['value'] = vals['values'][int(cat)][0]
        self.unit = unit
        self.temporalType = mode
        return
Example #18
    def _getData(self, timeseries):
        """Load data and read properties"""
        self.timeData = {}
        mode = None
        unit = None

        for series in timeseries:
            name = series[0] + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, name)
            if not sp.is_in_db(dbif=self.dbif):
                GError(
                    self,
                    message=_("Dataset <%s> not found in temporal database") %
                    (name))
                return

            sp.select(dbif=self.dbif)

            self.timeData[name] = {}
            self.timeData[name]['elementType'] = series[2]
            self.timeData[name][
                'temporalType'] = sp.get_temporal_type()  # abs/rel

            if mode is None:
                mode = self.timeData[name]['temporalType']
            elif self.timeData[name]['temporalType'] != mode:
                GError(
                    parent=self, message=_(
                        "Datasets have different temporal type "
                        "(absolute x relative), which is not allowed."))
                return

            # check topology
            maps = sp.get_registered_maps_as_objects(dbif=self.dbif)
            self.timeData[name]['validTopology'] = sp.check_temporal_topology(
                maps=maps, dbif=self.dbif)

            self.timeData[name][
                'temporalMapType'] = sp.get_map_time()  # point/interval
            self.timeData[name]['unit'] = None  # only with relative
            if self.timeData[name]['temporalType'] == 'relative':
                start, end, self.timeData[name][
                    'unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeData[name]['unit']
                elif self.timeData[name]['unit'] != unit:
                    GError(
                        self, _("Datasets have different time unit which is not allowed."))
                    return

            self.timeData[name]['start_datetime'] = []
            # self.timeData[name]['start_plot'] = []
            self.timeData[name]['end_datetime'] = []
            # self.timeData[name]['end_plot'] = []
            self.timeData[name]['names'] = []
            self.timeData[name]['north'] = []
            self.timeData[name]['south'] = []
            self.timeData[name]['west'] = []
            self.timeData[name]['east'] = []

            columns = ','.join(['name', 'start_time', 'end_time',
                                'north', 'south', 'west', 'east'])

            rows = sp.get_registered_maps(columns=columns, where=None,
                                          order='start_time', dbif=self.dbif)
            if not rows:
                GError(
                    parent=self,
                    message=_("Dataset <{name}> is empty").format(
                        name=series[0] +
                        '@' +
                        series[1]))
                return
            for row in rows:
                mapName, start, end, north, south, west, east = row
                self.timeData[name]['start_datetime'].append(start)
                self.timeData[name]['end_datetime'].append(end)
                self.timeData[name]['names'].append(mapName)
                self.timeData[name]['north'].append(north)
                self.timeData[name]['south'].append(south)
                self.timeData[name]['west'].append(west)
                self.timeData[name]['east'].append(east)

        self.temporalType = mode
        self.unit = unit
Example #19
def main():

    # Get the options
    type = options["type"]
    temporal_type = options["temporaltype"]
    columns = options["columns"]
    order = options["order"]
    where = options["where"]
    separator = gscript.separator(options["separator"])
    outpath = options["output"]
    colhead = flags['c']

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.dataset_factory(type, None)
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()
    first = True

    if gscript.verbosity() > 0 and not outpath:
        sys.stderr.write("----------------------------------------------\n")

    # Open the output file once, before looping over the temporal types,
    # so that it is not truncated on every pass
    if outpath:
        outfile = open(outpath, 'w')

    for ttype in temporal_type.split(","):
        if ttype == "absolute":
            time = "absolute time"
        else:
            time = "relative time"

        stds_list = tgis.get_dataset_list(type, ttype, columns, where, order, dbif=dbif)

        # Use the correct order of the mapsets, hence first the current mapset,
        # then alphabetic ordering
        mapsets = tgis.get_tgis_c_library_interface().available_mapsets()

        # Print for each mapset separately
        for key in mapsets:
            if key in stds_list.keys():
                rows = stds_list[key]

                if rows:
                    if gscript.verbosity() > 0 and not outpath:
                        if issubclass(sp.__class__, tgis.AbstractMapDataset):
                            sys.stderr.write(_("Time stamped %s maps with %s available in mapset <%s>:\n") %
                                             (sp.get_type(), time, key))
                        else:
                            sys.stderr.write(_("Space time %s datasets with %s available in mapset <%s>:\n") %
                                             (sp.get_new_map_instance(None).get_type(), time, key))

                    # Print the column names if requested
                    if colhead and first:
                        output = ""
                        count = 0
                        for key in rows[0].keys():
                            if count > 0:
                                output += separator + str(key)
                            else:
                                output += str(key)
                            count += 1
                        if outpath:
                            outfile.write("{st}\n".format(st=output))
                        else:
                            print(output)
                        first = False

                    for row in rows:
                        output = ""
                        count = 0
                        for col in row:
                            if count > 0:
                                output += separator + str(col)
                            else:
                                output += str(col)
                            count += 1
                        if outpath:
                            outfile.write("{st}\n".format(st=output))
                        else:
                            print(output)
    if outpath:
        outfile.close()
    dbif.close()
Example #20
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    inputs = options["inputs"]
    output = options["output"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    #Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]

    inputs_split = inputs.split(",")
    input_ids = []

    for input in inputs_split:
        if input.find("@") >= 0:
            input_ids.append(input)
        else:
            input_ids.append(input + "@" + mapset)

    # Set the output name correctly
    if output.find("@") >= 0:
        out_mapset = output.split("@")[1]
        if out_mapset != mapset:
            grass.fatal(_("Output space time dataset <%s> must be located in this mapset") % (output))
        output_id = output
    else:
        output_id = output + "@" + mapset

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds_list = []
    first = None

    for id in input_ids:
        stds = tgis.open_old_stds(id, type, dbif)
        if first is None:
            first = stds

        if first.get_temporal_type() != stds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Space time datasets to merge must have the same temporal type"))

        stds_list.append(stds)

    # Do nothing if nothing to merge
    if first is None:
        dbif.close()
        return

    # Check if the new id is in the database
    output_stds = tgis.dataset_factory(type, output_id)
    output_exists = output_stds.is_in_db(dbif=dbif)

    if output_exists == True and grass.overwrite() == False:
        dbif.close()
        grass.fatal(_("Unable to merge maps into space time %s dataset <%s>: "
                      "please use the overwrite flag.") %
                    (stds.get_new_map_instance(None).get_type(), output_id))

    if not output_exists:
        output_stds = tgis.open_new_stds(output, type,
                                   first.get_temporal_type(),
                                   "Merged space time dataset",
                                   "Merged space time dataset",
                                   "mean", dbif=dbif, overwrite=False)
    else:
        output_stds.select(dbif=dbif)

    registered_output_maps = {}
    # Maps that are already registered in an existing dataset 
    # are not registered again
    if output_exists == True:
        rows = output_stds.get_registered_maps(columns="id", dbif=dbif)
        if rows:
            for row in rows:
                registered_output_maps[row["id"]] = row["id"]

    for stds in stds_list:
        # Avoid merging of already registered maps
        if stds.get_id() != output_stds.get_id():
            maps = stds.get_registered_maps_as_objects(dbif=dbif)

            if maps:
                for map in maps:
                    # Jump over already registered maps
                    if map.get_id() in registered_output_maps:
                        continue

                    map.select(dbif=dbif)
                    output_stds.register_map(map=map, dbif=dbif)
                    # Update the registered map list
                    registered_output_maps[map.get_id()] = map.get_id()

    output_stds.update_from_registered_maps(dbif=dbif)

    if output_exists == True:
        output_stds.update_command_string(dbif=dbif)
Example #21
File: frame.py  Project: rkrug/grass-ci
    def _getData(self, timeseries):
        """Load data and read properties"""
        self.timeData = {}
        mode = None
        unit = None

        for series in timeseries:
            name = series[0] + '@' + series[1]
            etype = series[2]
            sp = tgis.dataset_factory(etype, name)
            if not sp.is_in_db(dbif=self.dbif):
                GError(
                    self,
                    message=_("Dataset <%s> not found in temporal database") %
                    (name))
                return

            sp.select(dbif=self.dbif)

            self.timeData[name] = {}
            self.timeData[name]['elementType'] = series[2]
            self.timeData[name][
                'temporalType'] = sp.get_temporal_type()  # abs/rel

            if mode is None:
                mode = self.timeData[name]['temporalType']
            elif self.timeData[name]['temporalType'] != mode:
                GError(
                    parent=self, message=_(
                        "Datasets have different temporal type "
                        "(absolute x relative), which is not allowed."))
                return

            # check topology
            maps = sp.get_registered_maps_as_objects(dbif=self.dbif)
            self.timeData[name]['validTopology'] = sp.check_temporal_topology(
                maps=maps, dbif=self.dbif)

            self.timeData[name][
                'temporalMapType'] = sp.get_map_time()  # point/interval
            self.timeData[name]['unit'] = None  # only with relative
            if self.timeData[name]['temporalType'] == 'relative':
                start, end, self.timeData[name][
                    'unit'] = sp.get_relative_time()
                if unit is None:
                    unit = self.timeData[name]['unit']
                elif self.timeData[name]['unit'] != unit:
                    GError(
                        self, _("Datasets have different time unit which is not allowed."))
                    return

            self.timeData[name]['start_datetime'] = []
            # self.timeData[name]['start_plot'] = []
            self.timeData[name]['end_datetime'] = []
            # self.timeData[name]['end_plot'] = []
            self.timeData[name]['names'] = []
            self.timeData[name]['north'] = []
            self.timeData[name]['south'] = []
            self.timeData[name]['west'] = []
            self.timeData[name]['east'] = []

            columns = ','.join(['name', 'start_time', 'end_time',
                                'north', 'south', 'west', 'east'])

            rows = sp.get_registered_maps(columns=columns, where=None,
                                          order='start_time', dbif=self.dbif)
            if not rows:
                GError(
                    parent=self,
                    message=_("Dataset <{name}> is empty").format(
                        name=series[0] +
                        '@' +
                        series[1]))
                return
            for row in rows:
                mapName, start, end, north, south, west, east = row
                self.timeData[name]['start_datetime'].append(start)
                self.timeData[name]['end_datetime'].append(end)
                self.timeData[name]['names'].append(mapName)
                self.timeData[name]['north'].append(north)
                self.timeData[name]['south'].append(south)
                self.timeData[name]['west'].append(west)
                self.timeData[name]['east'].append(east)

        self.temporalType = mode
        self.unit = unit