Example #1
    def check_message(self, connector, host, secret_key, resource, parameters):
        self.start_check(resource)

        md = download_metadata(connector, host, secret_key, resource['id'])
        if get_extractor_metadata(md, self.extractor_info['name']) and not self.overwrite:
            self.log_skip(resource,"metadata indicates it was already processed")
            return CheckMessage.ignore
        return CheckMessage.download
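
All of these check_message examples return values from pyclowder's CheckMessage to tell the extractor framework whether to fetch the resource before process_message runs. A minimal stand-in class, sketched here so the snippets can be read or unit-tested outside a running Clowder deployment (the literal values are an assumption; in practice import the real class from pyclowder.utils):

    class CheckMessage:
        # Stand-in for pyclowder.utils.CheckMessage (values are assumptions).
        ignore = 0    # skip this message entirely
        download = 1  # download the resource, then invoke process_message
        bypass = 2    # invoke process_message without downloading
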
Example #2
    def check_message(self, connector, host, secret_key, resource, parameters):
        if resource['name'].startswith(
                'ir_fullfield') and resource['name'].endswith(".tif"):
            # Check metadata to verify we have what we need
            md = download_metadata(connector, host, secret_key, resource['id'])
            if get_extractor_metadata(
                    md, self.extractor_info['name']) and not self.overwrite:
                self.log_skip(resource,
                              "metadata indicates it was already processed")
                return CheckMessage.ignore
            return CheckMessage.download
        else:
            self.log_skip(resource,
                          "filename pattern not matched for %s" % resource['name'])
            return CheckMessage.ignore
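
The gate above is a plain prefix/suffix test on the filename. Pulled out as a standalone helper it can be exercised without a Clowder connection; a hypothetical sketch (matches_ir_fullfield is not part of the original extractor):

    def matches_ir_fullfield(filename):
        # Same test as the check_message gate above.
        return filename.startswith('ir_fullfield') and filename.endswith('.tif')

    assert matches_ir_fullfield('ir_fullfield_2017-05-05.tif')
    assert not matches_ir_fullfield('rgb_fullfield_2017-05-05.tif')
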
Example #3
    def check_message(self, connector, host, secret_key, resource, parameters):
        self.start_check(resource)

        if resource['name'].startswith('rgb_fullfield') and resource[
                'name'].endswith('_mask.tif'):
            # Check metadata to verify we have what we need
            md = download_metadata(connector, host, secret_key, resource['id'])
            if get_extractor_metadata(
                    md, self.extractor_info['name'],
                    self.extractor_info['version']) and not self.overwrite:
                self.log_skip(resource,
                              "metadata indicates it was already processed")
                return CheckMessage.ignore
            return CheckMessage.download
        else:
            self.log_skip(resource,
                          "expected filename mismatch: %s" % resource['name'])
            return CheckMessage.ignore
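
Unlike Examples #1, #2 and #4, this check also passes self.extractor_info['version'], so a file is re-processed whenever a newer extractor version runs. A rough sketch of such a version-aware lookup, assuming metadata records carry an agent block as in Example #5 (the field names here are assumptions, not the actual terrautils implementation):

    def find_extractor_record(all_md, name, version=None):
        # Scan attached metadata for a record left by the named extractor;
        # when a version is given, require an exact match so that extractor
        # upgrades invalidate earlier results.
        for md in all_md:
            agent = md.get('agent', {})
            if name in agent.get('name', ''):
                if version is None or agent.get('version') == version:
                    return md
        return None
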
Example #4
    def check_message(self, connector, host, secret_key, resource, parameters):
        if "rulechecked" in parameters and parameters["rulechecked"]:
            return CheckMessage.download
        self.start_check(resource)

        if resource['name'].endswith('_left.tif') or resource['name'].endswith(
                '_right.tif'):
            # Check metadata to verify we have what we need
            md = download_metadata(connector, host, secret_key, resource['id'])
            if get_extractor_metadata(
                    md, self.extractor_info['name']) and not self.overwrite:
                self.log_skip(resource,
                              "metadata indicates it was already processed")
                return CheckMessage.ignore
            return CheckMessage.download
        else:
            self.log_skip(resource, "not left/right geotiff")
            return CheckMessage.ignore
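
The first two lines of Example #4 give an upstream rule checker veto power: a truthy parameters['rulechecked'] skips both the filename and the metadata gates. Condensed into a pure function for illustration (decide is hypothetical and omits the metadata check):

    def decide(parameters, filename):
        if parameters.get("rulechecked"):
            return CheckMessage.download   # trust the upstream rule checker
        if filename.endswith('_left.tif') or filename.endswith('_right.tif'):
            return CheckMessage.download   # stereo geotiff, process it
        return CheckMessage.ignore         # anything else is skipped
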
Example #5
    def process_message(self, connector, host, secret_key, resource,
                        parameters):
        self.start_message()

        stream_name = 'Energy Farm Observations'
        disp_name = self.sensors.get_display_name()
        if 'Weather CEN' in resource['name']:
            curr_sens = disp_name + ' - CEN'
            stream_name += ' CEN'
            main_coords = [-88.199801, 40.062051, 0]
        elif 'WeatherNE' in resource['name']:
            curr_sens = disp_name + ' - NE'
            stream_name += ' NE'
            main_coords = [-88.193298, 40.067379, 0]
        elif 'WeatherSE' in resource['name']:
            curr_sens = disp_name + ' - SE'
            stream_name += ' SE'
            main_coords = [-88.193573, 40.056910, 0]
        geom = {"type": "Point", "coordinates": main_coords}

        # Get sensor or create if not found
        sensor_data = get_sensor_by_name(connector, host, secret_key,
                                         curr_sens)
        if not sensor_data:
            sensor_id = create_sensor(connector, host, secret_key, curr_sens,
                                      geom, {
                                          "id": "Met Station",
                                          "title": "Met Station",
                                          "sensorType": 4
                                      }, "Urbana")
        else:
            sensor_id = sensor_data['id']

        # Get stream or create if not found
        stream_data = get_stream_by_name(connector, host, secret_key,
                                         stream_name)
        if not stream_data:
            stream_id = create_stream(connector, host, secret_key, stream_name,
                                      sensor_id, geom)
        else:
            stream_id = stream_data['id']

        # Check existing metadata for the last processed time, then resume
        # parsing the file from after that point.
        allmd = download_metadata(connector, host, secret_key, resource['id'])
        last_processed_time = 0
        datapoint_count = 0
        for md in allmd:
            if 'content' in md and 'last processed time' in md['content']:
                last_processed_time = md['content']['last processed time']
                if 'datapoints_created' in md['content']:
                    datapoint_count = md['content']['datapoints_created']
                else:
                    datapoint_count = 0
                delete_metadata(connector, host, secret_key, resource['id'],
                                md['agent']['name'].split("/")[-1])

        # Parse the file and collect all records newer than last_processed_time.
        ISO_8601_UTC_OFFSET = dateutil.tz.tzoffset("-07:00", -7 * 60 * 60)
        records = parse_file(resource["local_paths"][0],
                             last_processed_time,
                             utc_offset=ISO_8601_UTC_OFFSET)
        # Add props to each record.
        for record in records:
            record['properties']['source_file'] = resource['id']
            record['stream_id'] = str(stream_id)

        total_dp = 0
        datapoint_list = []
        for record in records:
            datapoint_list.append({
                "start_time": record['start_time'],
                "end_time": record['end_time'],
                "type": "Point",
                "geometry": record['geometry'],
                "properties": record['properties']
            })
            if len(datapoint_list) >= self.batchsize:  # flush a full batch
                create_datapoints(connector, host, secret_key, stream_id,
                                  datapoint_list)
                total_dp += len(datapoint_list)
                datapoint_list = []
        if len(datapoint_list) > 0:
            create_datapoints(connector, host, secret_key, stream_id,
                              datapoint_list)
            total_dp += len(datapoint_list)

        # Mark the file as processed; if no new records were parsed, carry
        # the previous last-processed time forward so it is not lost.
        metadata = build_metadata(
            host, self.extractor_info, resource['id'], {
                "last processed time":
                    records[-1]["end_time"] if records else last_processed_time,
                "datapoints_created": datapoint_count + total_dp
            }, 'file')
        upload_metadata(connector, host, secret_key, resource['id'], metadata)

        self.end_message()
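
The datapoint loop above is a flush-on-threshold batching pattern that keeps each create_datapoints call to a bounded payload. The same idea as a reusable helper, sketched under the assumption that any list-accepting uploader can be passed in (upload_in_batches is not part of the original code):

    def upload_in_batches(records, batch_size, upload_fn):
        # Accumulate records and flush via upload_fn every batch_size items;
        # a final partial batch is flushed at the end. Returns the total count.
        total, batch = 0, []
        for record in records:
            batch.append(record)
            if len(batch) >= batch_size:
                upload_fn(batch)
                total += len(batch)
                batch = []
        if batch:
            upload_fn(batch)
            total += len(batch)
        return total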