def init_connection(self):
     """Lazily fetch the global state and borrow an aiopg cursor (coroutine).

     No-op when the state has already been initialised; the cursor is
     only created once per instance.
     """
     if self.state is not None:
         return
     istsos.debug("Initializing cursor for aiopg connection")
     self.state = yield from setting.get_state()
     if self.cur is not None:
         return
     # Keep a reference to the pool's context manager so the cursor
     # stays alive for the lifetime of this object.
     self.context_manager = (yield from self.state.pool.cursor())
     self.cur = self.context_manager._cur
    def process(self, request):
        """Check the insertObservation consistency.

        Validates that the observation structure respects the INSPIRE
        O&M design pattern implied by the offering's declared systemType.
        Raises a plain Exception describing the first violated constraint.
        """
        istsos.debug("Check the insertObservation consistency")
        offering = request['offerings'][0]
        observation = request["observation"]

        # ==================================================================
        # in istSOS if a systemType has been declared, then the observation
        # structure should reflect the declared systemType, following
        # the INSPIRE O&M design patterns
        # http://inspire.ec.europa.eu/documents/Data_Specifications/D2.9_O&M_Guidelines_v2.0rc3.pdf
        # ==================================================================
        typedef = ('http://www.opengis.net/def/'
                   'samplingFeatureType/OGC-OM/2.0/')

        # ===============================
        # = INSPIRE SpecimenObservation =
        # ===============================
        # Feature of interest type must be of type SF_Specimen
        if (offering["systemType"] == "insitu-fixed-specimen"
                and observation["foi_type"] != "%sSF_Specimen" % typedef):
            raise Exception("SpecimenObservation design pattern requires "
                            "SF_Specimen featureOfInterest type")

        # result of a specimen must be single phenomenonTime instant otherwise
        # SpecimenTimeSeriesObservation should be used
        # NOTE(review): these two checks run for EVERY observation, not only
        # for the "insitu-fixed-specimen" systemType — confirm this is
        # intended, since the comments describe a specimen-only constraint.
        # (Typo fixed in messages: "rwequires" -> "requires".)
        if "timeInstant" not in observation["phenomenonTime"]:
            raise Exception("SpecimenObservation design pattern requires "
                            "a phenomenonTime of type timeInstant")
        if len(observation["result"]) > 1:
            raise Exception("SpecimenObservation design pattern requires "
                            "a single observation record")
 def commit(self):
     """COMMIT the open transaction exactly once (coroutine).

     begin() is a hard precondition; repeated calls after a successful
     commit are silent no-ops.
     """
     if self._begin is False:
         raise Exception("begin must be called first")
     if self._commit is not False:
         return
     istsos.debug("Committing transaction")
     yield from self.cur.execute("COMMIT;")
     self._commit = True
# Example #4
    def process(self, request):
        """Build and attach an OfferingsFilter for the current request.

        GET getObservation requests take the comma-separated ``offering``
        query parameter; POST insertObservation requests take the
        ``sos_2_0:offering`` element from the XML body.
        """
        flt = None

        if request.is_get_observation():
            if request['method'] == 'GET':
                raw = request.get_parameter('offering')
                if raw is not None:
                    names = raw.split(',')
                    if len(names) > 0:
                        tmpl = OfferingsFilter.get_template()
                        tmpl['offerings'] = names
                        flt = OfferingsFilter(json_source=tmpl)

        elif request.is_insert_observation():
            if request['method'] == 'POST':
                offering_name = request.get_xml().find(
                    './/sos_2_0:offering', request.ns).text.strip()
                flt = OfferingsFilter(
                    json_source={"offerings": [offering_name]})

        if flt is None:
            istsos.debug("Offering filter NOT set")
        else:
            request.set_filter(flt)
            istsos.debug("Offerings filter: %s" % flt["offerings"])
 def rollback(self):
     """Issue ROLLBACK once, if a transaction is actually open (coroutine)."""
     # Nothing to roll back without a cursor or a started transaction.
     if self.cur is None or self._begin is False:
         return
     if self._rollback is not False:
         return
     istsos.debug("Rolling back transaction")
     yield from self.cur.execute("ROLLBACK;")
     self._rollback = True
    def process(self, request):
        """Insert the featureOfInterest carried by the request (coroutine).

        Raises InvalidParameterValue when a feature of interest with the
        same identifier is already stored.
        """
        dbmanager = yield from self.init_connection()
        yield from self.begin()
        cur = dbmanager.cur

        foi = request['featureOfInterest']

        # Check if foi with given identifier already exists
        yield from cur.execute(
            """
            SELECT EXISTS(
                SELECT 1
                FROM fois
                WHERE identifier = %s
            ) AS exists;
        """, (foi['identifier'], ))
        rec = yield from cur.fetchone()
        if rec[0] is True:
            # Typo fixed: "indentifier" -> "identifier" (parameter name
            # and user-facing message).
            raise InvalidParameterValue(
                "identifier", ("Feature of interest identifier"
                               " '%s' already inserted" % foi['identifier']))
        if foi['type'] == setting._SAMPLING_POINT:
            istsos.debug("Creating a SAMPLING_POINT")
            yield from self.create_sampling_point(cur, foi)

        yield from self.commit()
# Example #7
def __get_proxy(istsos_package, action_module, **kwargs):
    """Dynamically import an action class and instantiate it (coroutine).

    Tries the loader-specific module path first
    (``istsos.<package>.<loader type>.<file>``), falling back to the
    generic path (``istsos.<package>.<file>``) when that import fails.
    """
    from istsos import setting
    import importlib
    state = yield from setting.get_state()
    # Module file names follow the lowerCamelCase of the class name.
    fileName = action_module[0].lower() + action_module[1:]
    module = 'istsos.%s.%s.%s' % (
        istsos_package,
        state.config["loader"]["type"],
        fileName
    )

    istsos.debug("Importing %s.%s" % (module, action_module))
    try:
        m = importlib.import_module(module)
    except Exception:
        # Fall back to the loader-independent module location.
        module = 'istsos.%s.%s' % (
            istsos_package,
            fileName
        )
        m = importlib.import_module(module)

    m = getattr(m, action_module)
    # Bug fix: **kwargs is always a dict, never None, so the original
    # "kwargs is not None" test was a no-op; test for non-empty instead.
    if kwargs:
        return m(**kwargs)
    return m()
# Example #8
    def process(self, request):
        """Depending on the selected procedures call its specific retriever to
        query the relatives observed data.

        :param dict state: must contain an object with the queried procedures
        """

        if 'offerings' not in request:
            # A request can also lead to an empty response
            return

        # @todo check if this helps
        if False:  # len(request['offerings']) > 1:
            istsos.debug("Running retrieval in parallel")
            tasks = []
            for offering in request['offerings']:
                tasks.append(self.__get_data(offering, request))
            yield from asyncio.gather(*tasks)
            return

        istsos.debug("Running retrieval sequentially")
        # Dispatch on the requested responseFormat.
        response_format = request.get_filter("responseFormat")
        if response_format in setting._responseFormat['vega']:
            yield from self.__get_kvp(request['offerings'], request)
        elif response_format in setting._responseFormat['array']:
            yield from self.__get_array(request['offerings'], request)
        elif response_format in setting._responseFormat['array2']:
            yield from self.__get_array_2(request['offerings'], request)
        else:
            for offering in request['offerings']:
                yield from self.__get_data(offering, request)
# Example #9
 def __init__(self, **kwargs):
     """Initialise the composite with an empty list of child actions."""
     super(CompositeAction, self).__init__(**kwargs)
     istsos.debug(
         "Constructing %s.%s" % (self.__module__, self.__class__.__name__))
     # Child actions executed in order by execute().
     self.actions = []
 def begin(self):
     """Open a transaction, creating the cursor first if needed (coroutine).

     The transaction is started only once; further calls just log a
     debug message.
     """
     if self.cur is None:
         yield from self.init_cursor()
     if self._begin is False:
         istsos.debug("Beginning transaction")
         yield from self.cur.execute("BEGIN;")
         self._begin = True
     else:
         # Typo fixed in debug message: "Transation" -> "Transaction".
         istsos.debug("Transaction already started")
    def process(self, request):
        """Check the insertObservation consistency.

        Verifies that every observation type and every observed property
        declared with insertSensor is used by the observation, and that
        the observation does not reference undeclared ones.
        """
        istsos.debug("Check the insertObservation consistency")
        offering = request['offerings'][0]
        observation = request["observation"]

        observation_types = []
        for observation_type in offering['observation_type']:
            observation_types.append(observation_type['definition'])
        observation_types_usage_check = list(observation_types)

        observable_properties = []
        for observable_property in offering['observable_property']:
            observable_properties.append(observable_property['definition'])

        # Checking the observation type usage. W\ istSOS all the observation
        # types declared with the insertSensor must be used on each
        # insertObservation
        for observation_type in observation['type']:
            if observation_type not in observation_types:
                # Bug fix: the original message interpolated the undefined
                # name ``observedProperty`` here, which raised a NameError
                # instead of the intended exception.
                raise Exception("observationType (%s) is not provided by "
                                "procedure %s." %
                                (observation_type, observation['procedure']))
            elif observation_type in observation_types_usage_check:
                observation_types_usage_check.pop(
                    observation_types_usage_check.index(observation_type))

        # Checking the observed property usage. W\ istSOS all the observation
        # property declared with the insertSensor must be used on each
        # insertObservation
        for observedProperty in observation['observedProperty']:
            if observedProperty not in observable_properties:
                raise Exception("observedProperty (%s) is not observed by "
                                "procedure %s." %
                                (observedProperty, observation['procedure']))
            observable_properties.pop(
                observable_properties.index(observedProperty))

        # Checking if an observable property is omitted
        if len(observable_properties) > 0:
            raise Exception(
                "With a insertObservation operation all the observable "
                "properties must be used. Missing: %s." %
                (", ".join(observable_properties)))

        # After all the observations are looped check if the procedure
        # have omitted the usage of one or more observation type
        if len(observation_types_usage_check) > 0:
            raise Exception(
                "With a insertObservation operation all the observation "
                "types must be used. Missing: %s." %
                (", ".join(observation_types_usage_check)))
# Example #12
 def execute(self, request):
     """Run the action life-cycle and record the elapsed time (coroutine).

     Sequence: before -> process -> after -> close_connection. Any
     exception is handed to on_exception rather than propagated.
     """
     istsos.debug("Executing: %s" % self.__class__.__name__)
     started_at = time.time()
     try:
         yield from self.before(request)
         yield from self.process(request)
         yield from self.after(request)
         yield from self.close_connection()
     except Exception as ex:
         yield from self.on_exception(request, ex)
     self.time = time.time() - started_at
     self.update_observers(request)
 def commit(self):
     """Commit can be called only once and only by root action
     """
     root = self.get_root()
     if root is not None:
         # Not the root: defer the commit to the end of the chain.
         istsos.debug("Commit will be executed at the chain's end by %s" %
                      root.__class__.__name__)
         root.commit_requested = True
     else:
         istsos.debug("%s is committing now" % self.__class__.__name__)
         yield from self.dbmanager.commit()
         self.commit_requested = False
 def execute(self, request):
     """Run this action and every child action, then finalise (coroutine).

     Commits at the end of the chain when a child requested it; any
     exception is handed to on_exception rather than propagated.
     """
     started_at = time.time()
     istsos.debug("Executing %s" % self.__class__.__name__)
     try:
         yield from self.before(request)
         yield from self.process(request)
         for child in self.actions:
             yield from child.execute(request)
         yield from self.after(request)
         if self.commit_requested:
             yield from self.commit()
         yield from self.close_connection()
     except Exception as ex:
         yield from self.on_exception(request, ex)
         # raise ex  # propagate the exception
     self.time = time.time() - started_at
     self.update_observers(request)
def get_plugin(plugin, name, **kwargs):
    """Dynamically import a plugin class and instantiate it.

    Looks for ``istsos.plugins.<plugin>.<fileName>.<name>`` first, then
    falls back to ``istsos.plugins.<plugin>.<fileName>``.
    """
    # Leftover print() debugging removed; istsos.debug below already
    # records the import attempt.
    import importlib
    # Module file names follow the lowerCamelCase of the class name.
    fileName = name[0].lower() + name[1:]
    module = 'istsos.plugins.%s.%s.%s' % (plugin, fileName, name)
    istsos.debug("Importing Plugin %s: %s.%s" % (plugin, fileName, name))
    try:
        m = importlib.import_module(module)
    except Exception:
        # Fall back to the shorter module location.
        module = 'istsos.plugins.%s.%s' % (plugin, fileName)
        m = importlib.import_module(module)

    m = getattr(m, name)
    # Bug fix: **kwargs is always a dict, never None, so the original
    # "kwargs is not None" test was a no-op; test for non-empty instead.
    if kwargs:
        return m(**kwargs)
    return m()
    def __download_file(self, request, data, headers=None):
        """Write the observation rows to a CSV file on disk.

        File name and target directory come from the ``download_file``
        filter; defaults are the offerings filter value and the plugin's
        download directory. No-op when the filter is absent.
        """
        if request.get_filter("download_file") is None:
            return
        download_filter = request.get_filter("download_file")

        if 'file_name' in download_filter:
            file_name = download_filter['file_name']
        else:
            file_name = request.get_filter("offerings")

        if 'location' in download_filter:
            download_location = download_filter['location']
        else:
            download_location = 'istsos/plugins/unit_con_pint/download_file/'

        file_detail = "%s%s.csv" % (download_location, file_name)
        # Bug fix: the file handle was previously opened inline and never
        # closed; use a context manager. newline='' is the documented
        # requirement for csv.writer file objects.
        with open(file_detail, "w", newline='') as fh:
            writer = csv.writer(fh)
            if headers is not None:
                writer.writerow(headers)
            writer.writerows(data)
        istsos.debug("%s.csv download location is %s"
                     % (file_name, download_location))
# Example #17
    def process(self, request):
        """Parse an insertSensor request into an Offering object.

        Reads the procedure uniqueID, offering name, feature-of-interest
        name and type, observable properties and observation types from
        the SensorML 1.0.1 / SOS 2.0 XML body and stores the resulting
        Offering in ``request['offering']``. No-op for other requests.
        """
        if request.is_insert_sensor():

            # Preparing data dictionary
            data = Offering.get_template()

            # If procedure description is an sensorML 1.0.1 then check if the
            # procedure identifier is given.
            for identifier in request.get_xml().iterfind(
                    './/sml_1_0_1:identifier', request.ns):
                if identifier.get('name') == 'uniqueID':
                    value = identifier.find('.//sml_1_0_1:value', request.ns)
                    data['procedure'] = value.text.strip()
                    break

            istsos.debug("Procedure uniqueID: %s" % data['procedure'])

            # If procedure description is an sensorML 1.0.1 then check if the
            # offering identifier is given.
            # NOTE: no break here — the last matching capability wins.
            for capability in request.get_xml().iterfind(
                    './/sml_1_0_1:capabilities', request.ns):
                if capability.get('name') == 'offering':
                    value = capability.find('.//swe_1_0_1:value', request.ns)
                    data['name'] = value.text.strip()
                if capability.get('name') == 'featuresOfInterest':
                    value = capability.find('.//swe_1_0_1:value', request.ns)
                    data['foi_name'] = value.text.strip()
                    istsos.debug("Foi name: %s" % data['foi_name'])

            istsos.debug("Offering uniqueID: %s" % data['name'])

            # Reading and adding the Observable Properties(s)
            for observableProperty in request.get_xml().iterfind(
                    './/swes_2_0:observableProperty', request.ns):
                data['observable_property'].append(
                    {"definition": observableProperty.text.strip()})

            istsos.debug("Observed properties: %s" %
                         len(data['observable_property']))

            # Reading and adding the Observation Type(s)
            for observationType in request.get_xml().iterfind(
                    './/sos_2_0:observationType', request.ns):
                data['observation_type'].append(
                    {"definition": observationType.text.strip()})

            istsos.debug("Observation types: %s" %
                         len(data['observation_type']))

            # Reading and setting the feature of interest type
            foi_type = request.get_xml().find(
                './/sos_2_0:featureOfInterestType', request.ns)
            if foi_type is not None:
                data['foi_type'] = foi_type.text.strip()
                istsos.debug("Feature of Interest Type: %s" %
                             data['foi_type'].replace(setting._foidef, ''))
            # The following triple-quoted block is disabled systemType
            # detection logic, kept as reference (it is a bare string
            # literal, so it has no runtime effect).
            """for classifier in request.get_xml().iterfind(
                    './/sml_1_0_1:classifier', request.ns):
                if classifier.get('name') == 'systemType':
                    value = classifier.find('.//sml_1_0_1:value', request.ns)
                    data['systemType'] = value.text.strip()
                    break

            if data['systemType'] is None:
                # Guess from other configuration elements
                if setting._GEOMETRY_OBSERVATION in data['observation_type'] \
                        and setting._SAMPLING_CURVE == data['foi_type']:
                    data['systemType'] = setting._INSITU_MOBILE_POINT

                elif setting._GEOMETRY_OBSERVATION not in data[
                        'observation_type'] \
                        and setting._SAMPLING_POINT == data['foi_type']:
                    data['systemType'] = setting._INSITU_MOBILE_POINT

                elif setting._SAMPLING_SPECIMEN == data['foi_type']:
                    data['systemType'] = setting._INSITU_FIXED_SPECIMEN

                else:
                    data['systemType'] = setting._INSITU_FIXED_POINT

            istsos.debug(
                "System Type: %s" % data['systemType'])"""

            request['offering'] = Offering(json_source=data)
    def __get_kvp(self, offerings, request):
        """Fetch observations for the 'vega' responseFormat (coroutine).

        Builds one UNION sub-select per offering table (columns the
        offering does not provide are padded with NULL), aggregates by
        end_time, and serialises the rows to JSON directly in PostgreSQL.
        The resulting JSON array is stored in ``request['observations']``.
        """
        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur
        op_filter = request.get_filter('observedProperties')
        # tables maps "_<offering name>" -> list of its selected columns;
        # columns is the global ordered list across all offerings.
        tables = {}
        columns = []

        for offering in request['offerings']:
            tName = "_%s" % offering['name'].lower()
            if offering.is_complex():
                tables[tName] = []
                for op in offering['observable_properties']:
                    if op['type'] == setting._COMPLEX_OBSERVATION:
                        continue
                    else:
                        # observedProperty filters are applied here excluding
                        # the observed properties columns from the query
                        if op_filter is not None and (
                                op['definition'] not in op_filter):
                            continue
                        columns.append(op['column'])
                        # columns_qi.append('%s_qi' % op['column'])
                        tables[tName].append(op['column'])

            elif offering.is_array():
                raise Exception("Not implemented yet")
            else:
                tables[tName] = []
                for op in offering['observable_properties']:
                    # observedProperty filters are applied here excluding
                    # the observed properties columns from the query
                    if op_filter is not None and (
                            op['definition'] not in op_filter):
                        continue
                    columns.append(op['column'])
                    # columns_qi.append('%s_qi' % op['column'])
                    tables[tName].append(op['column'])

        # SQL fragments: one aggregate/placeholder/JSON key per column,
        # indexed positionally as c0, c1, ...
        unions = []
        unionSelect = []
        jsonKeys = []
        unionColumns = []
        for idx in range(0, len(columns)):
            unionSelect.append(
                "SUM(c%s)::text as c%s" % (idx, idx)
            )
            unionColumns.append(
                "NULL::double precision as c%s" % (idx)
            )
            jsonKeys.append("""
                "%s": ' || COALESCE(c%s, 'null') || '
            """ % (
                columns[idx],
                idx
            ))

        unionSelect = ", ".join(unionSelect)

        # Translate the request's temporal filter into a WHERE clause
        # with %s placeholders; params collects the bound values.
        temporal = []
        where = []
        params = []
        if request.get_filters() is not None:
            keys = list(request.get_filters())
            for key in keys:
                fltr = request.get_filters()[key]
                if key == 'temporal':
                    if fltr['fes'] == 'during':
                        temporal.append("""
                            begin_time >= %s::timestamp with time zone
                        AND
                            end_time <= %s::timestamp with time zone
                        """)
                        params.extend(fltr['period'])

                    elif fltr['fes'] == 'equals':
                        temporal.append("""
                            begin_time = end_time
                        AND
                            begin_time = %s::timestamp with time zone
                        """)
                        params.append(fltr['instant'])

                    where.append(
                        "(%s)" % (' OR '.join(temporal))
                    )

        # One sub-select per offering table: its own columns are selected,
        # every other position stays the NULL placeholder.
        for table in tables.keys():
            off_cols = tables[table]
            cols = unionColumns.copy()
            for col in off_cols:
                cols[
                    columns.index(col)
                ] = unionColumns[columns.index(col)].replace(
                    "NULL::double precision",
                    col
                )
            uSql = """
                SELECT
                    end_time, %s
                FROM
                    data.%s
            """ % (
                ", ".join(cols), table
            )
            if len(where) > 0:
                uSql += "WHERE %s" % (
                    'AND'.join(where)
                )
            unions.append("(%s)" % uSql)

        jsonSql = """
            SELECT array_to_json(
                array_agg(('{
                    "e": "' || to_char(
                        end_time, 'YYYY-MM-DD"T"HH24:MI:SSZ')
                        || '",
                    %s
                }')::json)
            )
            FROM
        """ % (
            ", ".join(jsonKeys),
        )

        sql = """
            SET enable_seqscan=false;
            SET SESSION TIME ZONE '+00:00';
            %s
            (
                SELECT end_time, %s
                FROM (
                    %s
                ) a
                GROUP BY end_time
                ORDER BY end_time
            ) b
        """ % (
            jsonSql,
            unionSelect,
            " UNION ".join(unions)
        )
        # print('Length of Uninons')
        # print(len(unions))
        # print(unions)
        # The temporal filter appears once per unioned sub-select, hence
        # params is repeated len(unions) times.
        istsos.debug(
            (
                yield from cur.mogrify(sql, tuple(params*len(unions)))
            ).decode("utf-8")
        )
        # print('observation.py')
        # print(sql)
        # yield from cur.execute(sql, tuple(params*2)
        yield from cur.execute(sql, tuple(params*len(unions)))
        rec = yield from cur.fetchone()
        # print(rec)
        # print('This is successful')
        request['observations'] = rec[0]
        # recs = yield from cur.fetchall()
        istsos.debug("Data is fetched!")
 def add(self, action):
     """Append a child action and register this composite as its parent."""
     istsos.debug(
         "Adding %s.%s" % (action.__module__, action.__class__.__name__))
     self.actions.append(action)
     action.set_parent(self)
# Example #20
    def __get_array_2(self, offerings, request):
        """Fetch observations for the 'array2' responseFormat (coroutine).

        Same UNION-per-offering strategy as __get_kvp, but aggregates each
        column into a PostgreSQL array and returns the data column-wise:
        ``request['observations']`` maps column name -> list of values and
        ``request['headers']`` describes each column.
        """
        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur
        op_filter = request.get_filter('observedProperties')
        # tables maps "_<offering name>" -> list of its selected columns;
        # columns is the global ordered list; headers starts with the
        # phenomenon-time column description.
        tables = {}
        columns = []
        headers = [{
            "type": "datetime",
            "name": "Phenomenon Time",
            "column": "e"
        }]

        for offering in request['offerings']:
            tName = "_%s" % offering['name'].lower()
            if offering.is_complex():
                tables[tName] = []
                for op in offering['observable_properties']:
                    if op['type'] == setting._COMPLEX_OBSERVATION:
                        continue
                    else:
                        # observedProperty filters are applied here excluding
                        # the observed properties columns from the query
                        if op_filter is not None and (op['definition']
                                                      not in op_filter):
                            continue
                        columns.append(op['column'])
                        # columns_qi.append('%s_qi' % op['column'])
                        tables[tName].append(op['column'])
                        headers.append({
                            "type": "number",
                            "name": op['name'],
                            "definition": op['definition'],
                            "offering": offering['name'],
                            "uom": op['uom'],
                            "column": op['column']
                        })

            elif offering.is_array():
                raise Exception("Not implemented yet")
            else:
                tables[tName] = []
                for op in offering['observable_properties']:
                    # observedProperty filters are applied here excluding
                    # the observed properties columns from the query
                    if op_filter is not None and (op['definition']
                                                  not in op_filter):
                        continue
                    columns.append(op['column'])
                    # columns_qi.append('%s_qi' % op['column'])
                    tables[tName].append(op['column'])
                    headers.append({
                        "type": "number",
                        "name": op['name'],
                        "definition": op['definition'],
                        "offering": offering['name'],
                        "uom": op['uom'],
                        "column": op['column']
                    })

        # SQL fragments: per-column aggregates and NULL placeholders,
        # indexed positionally as c0, c1, ...; the first jsonKeys entry
        # aggregates the timestamps.
        unions = []
        unionSelect = []
        jsonKeys = [
            "array_agg(to_char(end_time, 'YYYY-MM-DD\"T\"HH24:MI:SSZ'))"
        ]
        unionColumns = []
        for idx in range(0, len(columns)):
            unionSelect.append("SUM(c%s) as c%s" % (idx, idx))
            unionColumns.append("NULL::double precision as c%s" % (idx))
            jsonKeys.append("array_agg(c%s)" % (idx))

        unionSelect = ", ".join(unionSelect)

        # Translate the request's temporal filter into a WHERE clause
        # with %s placeholders; params collects the bound values.
        temporal = []
        where = []
        params = []
        if request.get_filters() is not None:
            keys = list(request.get_filters())
            for key in keys:
                fltr = request.get_filters()[key]
                if key == 'temporal':
                    if fltr['fes'] == 'during':
                        temporal.append("""
                            begin_time >= %s::timestamp with time zone
                        AND
                            end_time <= %s::timestamp with time zone
                        """)
                        params.extend(fltr['period'])

                    elif fltr['fes'] == 'equals':
                        temporal.append("""
                            begin_time = end_time
                        AND
                            begin_time = %s::timestamp with time zone
                        """)
                        params.append(fltr['instant'])

                    where.append("(%s)" % (' OR '.join(temporal)))

        # One sub-select per offering table: its own columns are selected,
        # every other position stays the NULL placeholder.
        for table in tables.keys():
            off_cols = tables[table]
            cols = unionColumns.copy()
            for col in off_cols:
                cols[columns.index(col)] = unionColumns[columns.index(
                    col)].replace("NULL::double precision", col)
            uSql = """
                SELECT
                    end_time, %s
                FROM
                    data.%s
            """ % (", ".join(cols), table)
            if len(where) > 0:
                uSql += "WHERE %s" % ('AND'.join(where))
            unions.append("(%s)" % uSql)

        jsonSql = """
            SELECT %s
            FROM
        """ % (", ".join(jsonKeys), )

        sql = """
            SET enable_seqscan=false;
            SET SESSION TIME ZONE '+00:00';
            %s
            (
                SELECT end_time, %s
                FROM (
                    %s
                ) a
                GROUP BY end_time
                ORDER BY end_time
            ) b
        """ % (jsonSql, unionSelect, " UNION ".join(unions))

        # istsos.debug(
        #     (
        #         yield from cur.mogrify(sql, tuple(params*len(unions)))
        #     ).decode("utf-8")
        # )

        # The temporal filter appears once per unioned sub-select, hence
        # params is repeated len(unions) times.
        yield from cur.execute(sql, tuple(params * len(unions)))
        rec = yield from cur.fetchone()
        # rec holds one aggregated array per header, in header order.
        request['observations'] = {}
        for idx in range(0, len(headers)):
            header = headers[idx]
            request['observations'][header['column']] = rec[idx]
        request['headers'] = headers
        istsos.debug("Data is fetched!")
# Example #21
    def __get_data(self, offering, request):

        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur

        table_name = "data._%s" % offering['name'].lower()

        columns = []
        # columns_qi = []
        op_filter = request.get_filter('observedProperties')

        observation = Observation.get_template({
            "offering":
            offering['name'],
            "procedure":
            offering['procedure']
        })

        if offering.is_complex():
            observation["type"] = setting._COMPLEX_OBSERVATION
            op = offering.get_complex_observable_property()
            observation["observedProperty"] = \
                ObservedPropertyComplex.get_template({
                    "def": op['definition'],
                    "name": op['name'],
                    "type": op['type'],
                    "uom": op['uom']
                })
            for op in offering['observable_properties']:
                if op['type'] == setting._COMPLEX_OBSERVATION:
                    continue
                else:
                    # observedProperty filters are applied here excluding
                    # the observed properties columns from the query
                    if op_filter is not None and (op['definition']
                                                  not in op_filter):
                        continue

                    observation["observedProperty"]['fields'].append(
                        ObservedProperty.get_template({
                            "def": op['definition'],
                            "name": op['name'],
                            "type": op['type'],
                            "uom": op['uom']
                        }))
                    columns.append(op['column'])
                    # columns_qi.append('%s_qi' % op['column'])

        elif offering.is_array():
            raise Exception("Not implemented yet")

        else:
            for op in offering['observable_properties']:
                observation["type"] = op['type']
                # observedProperty filters are applied here excluding
                # the observed properties columns from the query
                if op_filter is not None and (op['definition']
                                              not in op_filter):
                    continue

                observation["observedProperty"] = \
                    ObservedProperty.get_template({
                        "def": op['definition'],
                        "name": op['name'],
                        "type": op['type'],
                        "uom": op['uom']
                    })
                columns.append(op['column'])
                # columns_qi.append('%s_qi' % op['column'])

        observation["phenomenonTime"] = {
            "timePeriod": {
                "begin": "",
                "end": ""
            }
        }

        if request.get_filter(
                "responseFormat") in setting._responseFormat['vega']:
            fastSql = """
                SELECT array_to_json(
                    array_agg(('{
                        "o": "%s",
                        "b": "' || to_char(
                            begin_time, 'YYYY-MM-DD"T"HH24:MI:SS+02:00')
                            || '",
                        "e": "' || to_char(
                            end_time, 'YYYY-MM-DD"T"HH24:MI:SS+02:00')
                            || '",
                        "r": "' || to_char(
                            result_time, 'YYYY-MM-DD"T"HH24:MI:SS+02:00')
                            || '",
                        "a": "' || %s || '"
                    }')::json)
                )
                FROM (
            """ % (
                offering['name'],
                columns[0],
            )

        else:
            fastSql = """
                SELECT
                    array_to_json(
                        array_agg(('{
                            "offering": "%s",
                            "procedure": "%s",
                            "type": %s,
                            "featureOfInterest": "ciao",
                            "phenomenonTime": {
                                "timePeriod": {
                                    "begin": "' || begin_time || '",
                                    "end": "' || end_time || '"
                                }
                            },
                            "resultTime": {
                                "timeInstant": {
                                    "instant": "' || result_time || '"
                                }
                            },
                            "result": "' || %s || '",
                            "observedProperty": %s
                        }')::json)
                )
                FROM (
                """ % (offering['name'], offering['procedure'],
                       json.dumps(observation["type"]), columns[0],
                       json.dumps(observation["observedProperty"]))

        sql = """
            SELECT
                begin_time,
                end_time,
                result_time,
                %s""" % (", ".join(columns)) + """
            FROM %s
            """ % table_name
        temporal = []
        where = []
        params = []
        if request.get_filters() is not None:
            keys = list(request.get_filters())
            for key in keys:
                fltr = request.get_filters()[key]
                if key == 'temporal':
                    if fltr['fes'] == 'during':
                        temporal.append("""
                            begin_time >= %s::timestamp with time zone
                        AND
                            end_time <= %s::timestamp with time zone
                        """)
                        params.extend(fltr['period'])

                    elif fltr['fes'] == 'equals':
                        temporal.append("""
                            begin_time = end_time
                        AND
                            begin_time = %s::timestamp with time zone
                        """)
                        params.append(fltr['instant'])

                    where.append("(%s)" % (' OR '.join(temporal)))

        if len(where) > 0:
            sql += "WHERE %s" % ('AND'.join(where))

        sql = """
            SET enable_seqscan=false;
            SET SESSION TIME ZONE '+02:00';
            %s
            %s
             ORDER BY begin_time ) t
        """ % (fastSql, sql)

        istsos.debug((yield from cur.mogrify(sql,
                                             tuple(params))).decode("utf-8"))

        yield from cur.execute(sql, tuple(params))
        rec = yield from cur.fetchone()
        request['observations'] += rec[0]
        # recs = yield from cur.fetchall()
        istsos.debug("Data is fetched!")
    def process(self, request):
        """Load all the offerings relative to the given filter.

        Queries the offerings table joined with its observed properties,
        optionally narrowed by the ``specimen`` and ``observedProperties``
        filters, then hydrates one Offering object per row (including its
        observation types and observable properties) and appends it to
        ``request['offeringsList']``.
        """

        # Lazily initialize (or reuse) the shared aiopg cursor.
        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur

        sql = """
SELECT DISTINCT
    offerings.id,
    offering_name,
    procedure_name,
    foi_type,
    sampled_foi,
    data_table_exists,
    pt_begin,
    pt_end,
    rt_begin,
    rt_end,
    config
FROM
    offerings,
    off_obs_prop,
    observed_properties
WHERE
    id_opr = observed_properties.id
AND
    id_off = offerings.id
        """

        where = []
        if request.get_filters() is not None:
            keys = list(request.get_filters())
            for key in keys:
                # 'specimen' restricts results to sampling-specimen
                # offerings; the interpolated value is a module-level
                # constant, not user input.
                if key == 'specimen':
                    where.append("foi_type = '%s'" %
                                 setting._SAMPLING_SPECIMEN)
                # 'observedProperties' becomes an OR-ed list of definition
                # matches; each user-supplied value is escaped via mogrify.
                if key == 'observedProperties' and \
                        len(request.get_filter(key)) > 0:
                    ops = []
                    for op in request.get_filter(key):
                        ops.append((yield from
                                    cur.mogrify("observed_properties.def = %s",
                                                (op, ))).decode("utf-8"))
                    where.append("(%s)" % " OR ".join(ops))

        if len(where) > 0:
            sql += "AND %s" % ('\nAND '.join(where))

        istsos.debug((yield from cur.mogrify(sql)).decode("utf-8"))
        yield from cur.execute(sql)
        recs = yield from cur.fetchall()

        for res in recs:

            # Column order mirrors the SELECT above:
            # 0=id, 1=offering_name, 2=procedure_name, 3=foi_type,
            # 4=sampled_foi, 5=data_table_exists, 6=pt_begin, 7=pt_end,
            # 8=rt_begin, 9=rt_end, 10=config
            off = Offering.get_template({
                'id': res[0],
                'offering': res[1],
                'procedure': res[2],
                'foi_type': res[3],
                'sampled_foi': res[4],
                'config': res[10]
            })

            table = res[5]

            # Time spans are only set when both bounds are present.
            if res[6] is not None and res[7] is not None:
                off["phenomenon_time"] = {
                    "begin": res[6].isoformat(),
                    "end": res[7].isoformat()
                }

            if res[8] is not None and res[9] is not None:
                off["result_time"] = {
                    "begin": res[8].isoformat(),
                    "end": res[9].isoformat()
                }

            # Collect the observation types registered for this offering.
            yield from cur.execute(
                """
                SELECT
                    observation_type
                FROM
                    off_obs_type
                WHERE
                    id_off = %s;""", (res[0], ))

            observation_types = yield from cur.fetchall()

            for observation_type in observation_types:
                off['observation_types'].append(observation_type[0])

            # Observable properties are only loaded when the offering's
            # data table exists.
            if table:
                yield from cur.execute(
                    """
                    SELECT
                        off_obs_prop.id,
                        observed_properties.name,
                        observed_properties.def,
                        uoms.name,
                        observation_type
                    FROM
                        off_obs_prop
                    INNER JOIN observed_properties
                        ON id_opr = observed_properties.id
                    LEFT JOIN uoms
                        ON id_uom = uoms.id
                    WHERE
                        id_off = %s
                    ORDER BY
                        off_obs_prop.id
                """, (res[0], ))

                r_obs = yield from cur.fetchall()

                for obs_prop in r_obs:
                    op = ObservableProperty.get_template({
                        "id":
                        obs_prop[0],
                        "name":
                        obs_prop[1],
                        "definition":
                        obs_prop[2],
                        "uom":
                        obs_prop[3],
                        # "type": obs_prop[3]
                    })
                    # observation_type may be NULL (LEFT JOIN side effects
                    # aside, not every property has a type configured yet).
                    if obs_prop[4] is not None:
                        op['type'] = obs_prop[4]

                    off['observable_properties'].append(ObservableProperty(op))

            request['offeringsList'].append(Offering(off))
    def after(self, request):
        """Serialize ``request['observations']`` into a SOS 2.0 response.

        Builds a <sos:GetObservationResponse> document containing one
        om:OM_Observation element per observation dict, then stores the
        resulting XML string (with XML declaration) in
        ``request['response']``.
        """
        response = etree.XML("""<sos:GetObservationResponse
            xmlns:sos="http://www.opengis.net/sos/2.0"
            xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xmlns:swe="http://www.opengis.net/swe/2.0"
            xmlns:swes="http://www.opengis.net/swes/2.0"
            xmlns:gml="http://www.opengis.net/gml/3.2"
            xmlns:ogc="http://www.opengis.net/ogc"
            xmlns:om="http://www.opengis.net/om/2.0"
            xmlns:xlink="http://www.w3.org/1999/xlink">
        </sos:GetObservationResponse>""")

        istsos.debug("Preparing %s observations" %
                     len(request['observations']))
        if len(request['observations']) > 0:
            ns = request.ns
            data = etree.SubElement(
                response,
                '{%s}observationData' % ns['sos_2_0'],
            )
            # oid numbers the observations; it is also reused to build
            # unique gml:id values for referenced time instants.
            oid = 0
            for observation in request['observations']:
                oid += 1
                # Preparing default metadata elements
                omObs = etree.SubElement(data,
                                         '{%s}OM_Observation' % ns['om_2_0'])
                omObs.set("{%s}id" % ns['gml_3_2'], str(oid))
                etree.SubElement(omObs, '{%s}type' % ns['om_2_0']).set(
                    "{%s}href" % ns['xlink'], observation['type'])
                # Adding om:phenomenonTime
                phenomenonTime = etree.SubElement(
                    omObs, '{%s}phenomenonTime' % ns['om_2_0'])

                if 'timeInstant' in observation['phenomenonTime']:
                    timeInstantId = "p%s" % oid
                    # timeResultId = "r%s" % oid
                    timeInstant = etree.SubElement(
                        phenomenonTime, '{%s}TimeInstant' % ns['gml_3_2'])
                    etree.SubElement(
                        timeInstant,
                        '{%s}timePosition' % ns['gml_3_2']).text = observation[
                            'phenomenonTime']['timeInstant']['instant']

                    # When phenomenon and result time coincide, emit the
                    # instant once and reference it via xlink:href.
                    if observation[
                            'phenomenonTime']['timeInstant']['instant'] == \
                            observation[
                            'resultTime']['timeInstant']['instant']:
                        timeInstant.set("{%s}id" % ns['gml_3_2'],
                                        timeInstantId)
                        etree.SubElement(omObs,
                                         '{%s}resultTime' % ns['om_2_0']).set(
                                             "{%s}href" % ns['xlink'],
                                             '#%s' % timeInstantId)
                    else:
                        # Adding om:resultTime
                        resultTime = etree.SubElement(
                            omObs, '{%s}resultTime' % ns['om_2_0'])
                        timeInstant = etree.SubElement(
                            resultTime, '{%s}TimeInstant' % ns['gml_3_2'])
                        # timeInstant.set(
                        #    "{%s}id" % ns['gml_3_2'], timeInstantId)
                        etree.SubElement(
                            timeInstant, '{%s}timePosition' %
                            ns['gml_3_2']).text = observation['resultTime'][
                                'timeInstant']['instant']

                else:
                    # Phenomenon time is a period: emit begin/end positions.
                    timePeriod = etree.SubElement(
                        phenomenonTime, '{%s}TimePeriod' % ns['gml_3_2'])
                    # timePeriod.set(
                    #    "{%s}id" % ns['gml_3_2'], timeInstantId)
                    etree.SubElement(
                        timePeriod, '{%s}beginPosition' %
                        ns['gml_3_2']).text = observation['phenomenonTime'][
                            'timePeriod']['begin']
                    etree.SubElement(
                        timePeriod, '{%s}endPosition' % ns['gml_3_2']
                    ).text = observation['phenomenonTime']['timePeriod']['end']

                    # Adding om:resultTime
                    resultTime = etree.SubElement(
                        omObs, '{%s}resultTime' % ns['om_2_0'])
                    timeInstant = etree.SubElement(
                        resultTime, '{%s}TimeInstant' % ns['gml_3_2'])
                    # timeInstant.set(
                    #    "{%s}id" % ns['gml_3_2'], timeInstantId)
                    etree.SubElement(
                        timeInstant,
                        '{%s}timePosition' % ns['gml_3_2']).text = observation[
                            'resultTime']['timeInstant']['instant']

                # Adding om:procedure
                etree.SubElement(omObs, '{%s}procedure' % ns['om_2_0']).set(
                    "{%s}href" % ns['xlink'], observation["procedure"])

                # Adding om:observedProperty
                etree.SubElement(omObs,
                                 '{%s}observedProperty' % ns['om_2_0']).set(
                                     "{%s}href" % ns['xlink'],
                                     observation["observedProperty"]["def"])

                # Adding om:featureOfInterest
                etree.SubElement(omObs,
                                 '{%s}featureOfInterest' % ns['om_2_0']).set(
                                     "{%s}href" % ns['xlink'],
                                     observation["featureOfInterest"])

                # Adding om:result
                omresult = etree.SubElement(omObs, '{%s}result' % ns['om_2_0'])

                # Complex observations serialize their result values as a
                # swe:DataRecord with one swe:field per observed property.
                if observation['type'] == setting._COMPLEX_OBSERVATION:
                    omresult.set(
                        "{%s}type" % ns['xsi'],
                        setting.get_observation_type(
                            setting._COMPLEX_OBSERVATION)['type'])

                    dataRecord = etree.SubElement(
                        omresult, '{%s}DataRecord' % ns['swe_2_0'])

                    opFields = observation.get_field_list()
                    for idx in range(0, len(opFields)):
                        opField = opFields[idx]

                        field = etree.SubElement(dataRecord,
                                                 '{%s}field' % ns['swe_2_0'])

                        quantity = etree.SubElement(
                            field, '{%s}Quantity' % ns['swe_2_0'])

                        quantity.set("definition", opField["def"])

                        etree.SubElement(quantity,
                                         '{%s}uom' % ns['swe_2_0']).set(
                                             "code", opField['uom'])

                        # Field order matches the result list order.
                        etree.SubElement(
                            quantity, '{%s}value' % ns['swe_2_0']
                        ).text = "%s" % observation["result"][idx]

                # NOTE(review): array observations are not serialized yet.
                elif observation['type'] == setting._ARRAY_OBSERVATION:
                    pass
                else:
                    # Simple (scalar) observation: uom + typed text result.
                    omresult.set("uom", observation["observedProperty"]["uom"])
                    omresult.set(
                        "{%s}type" % ns['xsi'],
                        setting.get_observation_type(
                            observation["observedProperty"]['type'])['type'])
                    omresult.text = str(observation["result"])

        # request['response'] = self.get_classic_response(request)
        # request['response'] = self.get_array_response(request)
        request['response'] = (
            '<?xml version="1.0" encoding="UTF-8"?>\n%s') % etree.tostring(
                response, encoding='unicode', method='xml')
 def close(self):
     """Close the database cursor by exiting its pool context manager.

     :raises Exception: if no cursor was ever opened.
     """
     if self.cur is None:
         raise Exception("Cursor is not opened")
     istsos.debug("Closing database connection")
     # NOTE(review): __exit__ is invoked with no arguments; the standard
     # context-manager protocol expects (exc_type, exc_val, exc_tb).
     # aiopg's pool-cursor context manager appears to tolerate this —
     # confirm against the aiopg version in use.
     self.context_manager.__exit__()
# Example #25 (score: 0)
    def add_field(self, offering, observedProperty, cur):
        """Attach an observed property's storage to an offering.

        Ensures the unit of measure exists in the ``uoms`` table
        (inserting it if missing), updates the matching ``off_obs_prop``
        row (observation type, column name, uom), and adds the value and
        quality-index columns to the offering's measures table in the
        ``data`` schema.

        :param offering: cached Offering entity; its observable property
            is also updated in place (uom, column, type).
        :param observedProperty: dict with at least 'def', 'type', 'uom'.
        :param cur: database cursor used for all queries.
        :raises Exception: if the observation type is unknown.
        """
        istsos.debug("Adding field: %s" % observedProperty['def'])

        # Getting offering's observable property
        observable_property = offering.get_observable_property(
            observedProperty['def'])

        # Get id from off_obs_prop table
        id_obp = observable_property['id']

        uom = observedProperty["uom"]

        # If uom is given, check if exists, otherwise insert
        # the new unit of measure into the uoms table
        if uom is not None:

            # Updating cache object
            observable_property['uom'] = uom

            # query the uom
            yield from cur.execute(
                """
                SELECT id
                FROM uoms
                WHERE name = %s
            """, (uom, ))
            rec = yield from cur.fetchone()
            if rec is None:
                # Uom does not exist, insert the new uom
                yield from cur.execute(
                    """
                    INSERT INTO uoms(name)
                    VALUES (%s) RETURNING id;
                """, (uom, ))
                rec = yield from cur.fetchone()

            id_uom = rec[0]

            istsos.debug("uom: %s#%s" % (uom, id_uom))

            # Now update the configuration into the
            # off_obs_prop table, if the UOM is given
            yield from cur.execute(
                """
                UPDATE off_obs_prop
                    SET
                        observation_type=%s,
                        col_name=%s,
                        id_uom=%s
                WHERE id=%s;
            """, (observedProperty['type'], "_%s" % id_obp, id_uom, id_obp))

        # uom is not given, id_uom in off_obs_prop will be null
        else:
            yield from cur.execute(
                """
                UPDATE off_obs_prop
                    SET
                        observation_type=%s,
                        col_name=%s
                WHERE id=%s;
            """, (observedProperty['type'], "_%s" % id_obp, id_obp))

        # Adding missing columns in the measures table
        # of this offering
        # Map the observation type onto a SQL column type; complex
        # observations own no column of their own (sqlType stays None).
        sqlType = None
        if observedProperty['type'] in [
                setting._CATEGORY_OBSERVATION, setting._TEXT_OBSERVATION
        ]:
            sqlType = "character varying"

        elif observedProperty['type'] == setting._COUNT_OBSERVATION:
            sqlType = "integer"

        elif observedProperty['type'] == setting._MESAUREMENT_OBSERVATION:
            sqlType = "double precision"

        elif observedProperty['type'] == setting._TRUTH_OBSERVATION:
            sqlType = "boolean"

        elif observedProperty['type'] == setting._GEOMETRY_OBSERVATION:
            sqlType = "geometry"

        elif observedProperty['type'] == setting._COMPLEX_OBSERVATION:
            sqlType = None

        else:
            raise Exception("Observation type '%s' unknown" %
                            observedProperty['type'])

        if sqlType is not None:
            # DDL cannot take bound parameters, so values are interpolated.
            # NOTE(review): offering name and off_obs_prop id are assumed
            # to originate from registered entities, not raw user input —
            # confirm at the callers.
            yield from cur.execute(
                """
                ALTER TABLE data._%s
                    ADD COLUMN _%s %s;
                ALTER TABLE data._%s
                    ADD COLUMN _%s_qi integer;
            """ %
                (offering['name'], id_obp, sqlType, offering['name'], id_obp))
            # Updating missing parameters
            observable_property['column'] = '_%s' % id_obp

        observable_property['type'] = observedProperty['type']
# Example #26 (score: 0)
 def after(self, request):
     """Warn when the created Offering entity was left without an id."""
     if "offering" not in request:
         return
     if "id" in request['offering']:
         return
     istsos.debug(
         "OfferingCreator shall set the id in the Offering entity, "
         "but it looks like it is not."
     )
    def __get_array(self, offerings, request):
        """Fetch observations as a single time-indexed 2D array.

        Builds one sub-query per offering data table, UNIONs them,
        groups by ``end_time`` (each union member fills only its own
        columns, the others stay NULL and are collapsed with SUM), and
        aggregates everything into one ``array_agg`` result that is
        stored in ``request['observations']``; the matching column
        metadata is stored in ``request['headers']``.

        If the request JSON carries an ``in_unit`` key, each value is
        converted with the postgresql-unit extension
        (``value * '<uom>'::unit @@ '<in_unit>'``).

        :param offerings: unused; offerings are read from
            ``request['offerings']``.
        :param request: istSOS request object.
        :raises Exception: for array offerings (not implemented yet).
        """
        convert_from_unit = ''
        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur

        # Optional filter on observed property definitions.
        op_filter = request.get_filter('observedProperties')

        tables = {}     # data-table name -> its selected value columns
        columns = []    # flat list of every selected value column
        headers = [{
            "type": "time",
            "name": "Phenomenon Time",
            "column": "datetime"
        }]

        for offering in request['offerings']:
            tName = "_%s" % offering['name'].lower()
            if offering.is_complex():
                tables[tName] = []
                for op in offering['observable_properties']:
                    # The complex "container" property owns no column.
                    if op['type'] == setting._COMPLEX_OBSERVATION:
                        continue
                    # observedProperty filters are applied here excluding
                    # the observed properties columns from the query
                    if op_filter is not None and (
                            op['definition'] not in op_filter):
                        continue
                    columns.append(op['column'])
                    tables[tName].append(op['column'])
                    # NOTE(review): only the last matching property's uom
                    # is kept, so unit conversion assumes a single (or
                    # uniform) source unit across the selected columns.
                    convert_from_unit = op['uom']
                    headers.append({
                        "type": "number",
                        "name": op['name'],
                        "definition": op['definition'],
                        "offering": offering['name'],
                        "uom": op['uom']
                    })

            elif offering.is_array():
                raise Exception("Not implemented yet")

            else:
                tables[tName] = []
                for op in offering['observable_properties']:
                    # observedProperty filters are applied here excluding
                    # the observed properties columns from the query
                    if op_filter is not None and (
                            op['definition'] not in op_filter):
                        continue
                    columns.append(op['column'])
                    tables[tName].append(op['column'])
                    convert_from_unit = op['uom']
                    headers.append({
                        "type": "number",
                        "name": op['name'],
                        "definition": op['definition'],
                        "offering": offering['name'],
                        "uom": op['uom']
                    })

        unions = []
        unionSelect = []
        jsonKeys = []
        unionColumns = []

        # One aliased slot (c0, c1, ...) per selected column: each UNION
        # member fills only its own offering's slots and leaves the rest
        # NULL, so SUM() per end_time stitches the union rows together.
        for idx in range(0, len(columns)):
            unionSelect.append(
                "SUM(c%s)::text as c%s" % (idx, idx)
            )
            unionColumns.append(
                "NULL::double precision as c%s" % (idx)
            )
            jsonKeys.append("COALESCE(c%s, 'null')" % (idx))

        unionSelect = ", ".join(unionSelect)

        temporal = []
        where = []
        params = []
        if request.get_filters() is not None:
            keys = list(request.get_filters())
            for key in keys:
                fltr = request.get_filters()[key]
                if key == 'temporal':
                    if fltr['fes'] == 'during':
                        temporal.append("""
                            begin_time >= %s::timestamp with time zone
                        AND
                            end_time <= %s::timestamp with time zone
                        """)
                        params.extend(fltr['period'])

                    elif fltr['fes'] == 'equals':
                        temporal.append("""
                            begin_time = end_time
                        AND
                            begin_time = %s::timestamp with time zone
                        """)
                        params.append(fltr['instant'])

                    where.append(
                        "(%s)" % (' OR '.join(temporal))
                    )

        # One sub-query per data table, substituting the table's own
        # columns into its NULL slots.
        for table in tables.keys():
            off_cols = tables[table]
            cols = unionColumns.copy()
            for col in off_cols:
                if 'in_unit' in request['json']:
                    # SECURITY(review): 'in_unit' comes straight from the
                    # client JSON and is interpolated into the SQL text;
                    # validate/escape it (e.g. via cur.mogrify or a
                    # whitelist of unit names) before trusting it here.
                    To_unit = request['json']['in_unit']
                    convert_unit = """%s*'%s'::unit@@'%s' """ % (
                        col, convert_from_unit, To_unit)
                    cols[
                        columns.index(col)
                    ] = unionColumns[columns.index(col)].replace(
                        "NULL::double precision",
                        convert_unit
                    )
                else:
                    cols[
                        columns.index(col)
                    ] = unionColumns[columns.index(col)].replace(
                        "NULL::double precision",
                        col
                    )

            uSql = """
                SELECT
                    end_time, %s
                FROM
                    data.%s
            """ % (
                ", ".join(cols), table
            )

            if len(where) > 0:
                uSql += "WHERE %s" % (
                    'AND'.join(where)
                )
            unions.append("(%s)" % uSql)

        jsonSql = """
            SELECT array_agg(
                ARRAY[
                    to_char(end_time, 'YYYY-MM-DD"T"HH24:MI:SSZ'),
                    %s
                ]
            )
            FROM
        """ % (
            ", ".join(jsonKeys),
        )

        sql = """
            SET enable_seqscan=false;
            SET SESSION TIME ZONE '+00:00';
            %s
            (
                SELECT end_time, %s
                FROM (
                    %s
                ) a
                GROUP BY end_time
                ORDER BY end_time
            ) b
        """ % (
            jsonSql,
            unionSelect,
            " UNION ".join(unions)
        )

        # The temporal filter placeholders repeat once per UNION member.
        istsos.debug(
            (
                yield from cur.mogrify(sql, tuple(params * len(unions)))
            ).decode("utf-8")
        )

        yield from cur.execute(sql, tuple(params * len(unions)))
        rec = yield from cur.fetchone()
        request['observations'] = rec[0]
        request['headers'] = headers
        istsos.debug("Data is fetched!")
 def close_connection(self):
     """Close the pooled DB manager once this composite has no root left."""
     if self.get_root() is not None:
         return
     if self.dbmanager is None:
         return
     istsos.debug("%s is closing now" % self.__class__.__name__)
     yield from self.dbmanager.close()
     self.dbmanager = None
    def after(self, request):
        """Bulk-insert the observations of ``self.data`` into the
        offering's measure table and update its phenomenon time period.

        Expects ``request['offerings']`` to contain the target offering
        and ``self.data['observations']`` to hold rows shaped as
        ``[sampling_time, value1, value2, ...]`` (rows are assumed to be
        in chronological order — TODO confirm with callers).

        Rows older than the offering's current phenomenon end time, or
        with a wrong number of values, are skipped with a debug message.

        :param request: the istSOS request/state dict; on success
            ``request['response']`` is set to a template Response.
        :raises Exception: if the offering is not registered, is of an
            unsupported type (specimen / not fixed), or a sampling time
            cannot be parsed.
        """
        # Check emptiness BEFORE indexing the list, otherwise an
        # IndexError would mask the meaningful error message.
        if len(request['offerings']) == 0:
            raise Exception(
                "Offering \"%s\" not registered" % self.data['offering'])

        offering = request['offerings'][0]

        if offering['foi_type'] == setting._SAMPLING_SPECIMEN:
            raise Exception(
                "Offering type \"speciment\" not yet supported")

        if not offering['fixed']:
            raise Exception(
                "Not fixed Offering not yet supported")

        # One value column plus one quality-index ("_qi") column per
        # scalar observed property; complex properties have no column
        # of their own.
        columns = []
        for op in offering['observable_properties']:
            if not op['type'] == setting._COMPLEX_OBSERVATION:
                columns.extend([
                    op['column'],
                    "%s_qi" % op['column']
                ])

        # Current end of the offering's phenomenon time period; None when
        # the offering has no data yet. Used to reject out-of-order rows.
        # (The begin position is not needed here.)
        ep = None
        if offering['phenomenon_time'] is not None:
            ep = istsos.str2date(
                offering['phenomenon_time']['timePeriod']['end']
            )

        # Expected number of values per row: a complex observation groups
        # all of its child properties into a single row.
        obsCnt = 1
        if setting._COMPLEX_OBSERVATION in offering['observation_types']:
            obsCnt = len(offering['observable_properties']) - 1

        dbmanager = yield from self.init_connection()
        cur = dbmanager.cur

        rows = self.data['observations']
        values = []
        # Sampling time of the last ACCEPTED row: this is what the
        # offering's pt_end must be advanced to (using the last popped
        # time regardless of acceptance would let a skipped row move
        # the period forward).
        last_accepted_time = None
        for row in rows:
            # Pre-bind so the error message below never hits an unbound
            # name when row.pop(0) itself fails (empty row).
            sampling_time = None
            try:
                sampling_time = row.pop(0)
                sampling_time_dt = istsos.str2date(sampling_time)

            except Exception as dtex:
                raise Exception(
                    "Procedure %s, Sampling time (%s) "
                    "wrong format" % (
                        offering['name'], sampling_time
                    )
                ) from dtex

            params = [
                str(uuid.uuid1()).replace('-', ''),
                sampling_time,
                sampling_time,
                sampling_time
            ]

            # Check time consistency: if an end position exists, the new
            # measures must not be before it.
            if ep is not None and sampling_time_dt < ep:
                # non blocking exception: skip row
                istsos.debug("Skipping observation: %s" % row)
                continue

            if len(row) != obsCnt:
                istsos.debug(
                    "Observations count mismatch (%s!=%s)" % (
                        len(row), obsCnt
                    )
                )
                continue

            params = params + row
            # Pre-render one "(obs_id, times..., val, qi, ...)" tuple per
            # row; every value gets a default quality index of 100.
            values.append(
                (
                    yield from cur.mogrify(
                        (
                            '(%s, %s, %s, %s, ' +
                            ', '.join(
                                ["%s, 100"] * obsCnt
                            ) + ')'
                        ),
                        tuple(params)
                    )
                ).decode("utf-8")
            )
            last_accepted_time = sampling_time

        if len(values) > 0:

            yield from self.begin()

            # Build the INSERT statement once and reuse it for both the
            # debug output and the execution (previously duplicated).
            # The values are already mogrified literals, so no parameters
            # are passed at execute time.
            insert_sql = ("""
                INSERT INTO data._%s(
                    obs_id,
                    begin_time,
                    end_time,
                    result_time,
                    %s
                )
                """ % (
                    offering['name'].lower(),
                    ", ".join(columns)
                )) + (
                    " VALUES %s" % ", ".join(values)
                )

            # NOTE(review): leftover debug print kept for parity;
            # consider switching to istsos.debug.
            print(insert_sql)

            yield from cur.execute(insert_sql)

            if offering['phenomenon_time'] is not None:
                yield from cur.execute("""
                    UPDATE public.offerings
                    SET
                        pt_end=%s::TIMESTAMPTZ
                    WHERE id = %s;
                """, (last_accepted_time, offering['id']))

            else:
                yield from cur.execute("""
                    UPDATE public.offerings
                    SET
                        pt_end=%s::TIMESTAMPTZ,
                        pt_begin=%s::TIMESTAMPTZ
                    WHERE id = %s;
                """, (last_accepted_time, last_accepted_time,
                      offering['id']))

            yield from self.commit()

        request['response'] = Response(
            json_source=Response.get_template()
        )