Esempio n. 1
0
 def measure(self):
     """Simulated measurement: ~30% chance of an ERROR result, otherwise
     a random OK result with identical pm10/pm25 values."""
     _logger.info("mocked measure")
     if random.randint(0, 10) > 7:
         return Result(ResultState.ERROR)
     value = random.randint(1, 300) / 10
     return Result(ResultState.OK, pm10=value, pm25=value)
    def test_loop_actor_on_hold(self):
        """On hold with a switch actor: sensor is never opened or measured,
        only switched off; DEACTIVATED messages are published instead."""
        cycles = 3

        proc = MockProcess()
        proc._mqtt_out_actor = "_mqtt_channel_sensor_switch"
        proc.test_open(loop_count=cycles)

        # Force every loop iteration into the "on hold" branch with the actor enabled.
        params = proc.create_dummy_loop_params()
        params.on_hold = True
        params.use_switch_actor = True
        proc._determine_loop_params = MagicMock(return_value=params)

        proc.run()

        # The sensor itself must stay untouched except for the final close().
        self.assertEqual(0, proc.test_sensor.open.call_count)
        self.assertEqual(0, proc.test_sensor.measure.call_count)
        self.assertEqual(1, proc.test_sensor.close.call_count)  # finally

        self.assertEqual(len(proc.mqtt_messages), cycles * 2 + 1)
        expected = Result(ResultState.DEACTIVATED, timestamp=proc._now()).create_message()
        for payload in proc.mqtt_messages:
            self.assertIn(payload, [expected, SwitchSensor.OFF.value])
Esempio n. 3
0
 def __init__(self, segments, settings, segment_generator, effects):
     """Store the collaborators, derive the breath pause and segment
     selector, then run init()."""
     self.segments = segments
     self.settings = settings
     self.effects = effects
     self.breath_pause = segment_generator.generate_breath_pause()
     self.segment_selector = SegmentSelector(segments)
     self.logger = Logger()
     self.result = Result()
     self.init()
    def test_max_time(self):
        """With low dust values the adaptive interval equals the configured maximum."""
        max_interval = random.random() * 1000

        proc = MockProcess()
        proc._time_interval_max = max_interval
        proc._last_result = Result(ResultState.OK, pm10=1, pm25=1, timestamp=proc.now)

        self.assertEqual(proc._calc_interval_time(), max_interval)
Esempio n. 5
0
    def test_create_message(self):
        """JSON serialization: ERROR result (null values) and OK result with values."""
        now = datetime.datetime(2020, 1, 1, 2, 2, 3, tzinfo=datetime.timezone.utc)

        cases = [
            (
                Result(ResultState.ERROR, timestamp=now),
                '{"PM10": null, "PM25": null, "STATE": "ERROR", "TIMESTAMP": "2020-01-01T02:02:03+00:00"}',
            ),
            (
                Result(ResultState.OK, pm10=0.1, pm25=0.2, timestamp=now),
                '{"PM10": 0.1, "PM25": 0.2, "STATE": "OK", "TIMESTAMP": "2020-01-01T02:02:03+00:00"}',
            ),
        ]
        for result, expected in cases:
            self.assertEqual(result.create_message(), expected)
    def test_middle(self):
        """Dust exactly between the adaptive bounds yields the mid-point interval."""
        upper_time, lower_time = 300, 100

        proc = MockProcess()
        proc._time_interval_max = upper_time
        proc._time_interval_min = lower_time

        mid_dust = (proc.DEFAULT_ADAPTIVE_DUST_UPPER + proc.DEFAULT_ADAPTIVE_DUST_LOWER) / 2
        proc._last_result = Result(ResultState.OK, pm10=mid_dust, pm25=1, timestamp=proc.now)

        self.assertAlmostEqual(proc._calc_interval_time(), (upper_time + lower_time) / 2)
    def test_min_time(self):
        """Dust above the adaptive upper bound forces the minimum interval."""
        interval_max = 200 + random.random() * 1000
        interval_min = interval_max / 5

        proc = MockProcess()
        proc._time_interval_max = interval_max
        proc._time_interval_min = interval_min
        proc._last_result = Result(ResultState.OK, pm25=1, timestamp=proc.now,
                                   pm10=proc.DEFAULT_ADAPTIVE_DUST_UPPER + 1)

        self.assertEqual(proc._calc_interval_time(), interval_min)
Esempio n. 8
0
    def measure(self):
        """Query the sensor once and return a Result (pm10/pm25).

        Returns:
            Result(OK) with the measured values, or Result(ERROR) while a
            transient failure is still being tolerated.
        Raises:
            SensorError: if the sensor was not opened/warmed up, or once the
            number of consecutive errors reaches `_abort_after_n_errors`.
        """
        if self._sensor is None:
            raise SensorError("sensor was not opened!")
        if not self._warmup:
            raise SensorError("sensor was not warmed up before measurement!")

        try:
            measurement = self._sensor.query()
        except SerialException as ex:
            self._error_ignored += 1
            # ">=" keeps the abort threshold consistent with the invalid-measurement
            # branch below (was ">", which silently tolerated one extra serial error).
            if self._error_ignored >= self._abort_after_n_errors:
                raise SensorError(ex)

            _logger.error("self._sensor.query() failed, but ignore %s of %s!",
                          self._error_ignored, self._abort_after_n_errors)
            _logger.exception(ex)
            return Result(ResultState.ERROR)
        else:
            # query() may return None (no data) or a (pm25, pm10) pair.
            if measurement is None:
                pm25, pm10 = None, None
            else:
                pm25, pm10 = measurement

            if not self.check_measurement(pm10=pm10, pm25=pm25):
                self._error_ignored += 1
                if self._error_ignored >= self._abort_after_n_errors:
                    raise SensorError(
                        f"{self._error_ignored} wrong measurments!")

                _logger.warning(
                    "wrong measurment (ignore %s of %s): pm25=%s; pm10=%s!",
                    self._error_ignored, self._abort_after_n_errors, pm25,
                    pm10)
                return Result(ResultState.ERROR)
            else:
                # a valid measurement resets the consecutive-error counter
                self._error_ignored = 0
                return Result(ResultState.OK, pm10=pm10, pm25=pm25)
Esempio n. 9
0
def retrieve_results(n_percentile):
    """Rank documents per query by accumulated TF-IDF and write a TREC result file.

    Args:
        n_percentile: fraction (0..1] of the most relevant words kept per
            document before scoring; also encoded in the output file name.

    Fixes vs. the original: the ranking loop variable no longer shadows (and
    clobbers) the `document_scores` dict, and only the top 1000 entries are
    iterated instead of the whole sorted list.
    """
    search_queries = parse_trec('documents/irg_queries.trec')
    search_collections = parse_trec('documents/irg_collection_clean.trec')

    print('======= Statistics =======')
    print(f'Queries: {len(search_queries)}')
    print(f'Collections: {len(search_collections)}')
    print(f'Removal of {int((1-n_percentile)*100)}%-ile')
    print('==========================')

    # TF-IDF scoring, query by query
    document_results = []
    for search_query_id, search_query_text in search_queries.items():
        print(
            f'Current query id: {search_query_id}, text: "{search_query_text}"'
        )
        terms = search_query_text.split(' ')
        documents = keep_n_percentile_most_relevant_words(search_collections,
                                                          search_query_text,
                                                          n=n_percentile)
        # Accumulate the TF-IDF score of every query term per document.
        search_texts_collection = TextCollection(documents.values())
        document_scores = {}
        for document_id, document_text in documents.items():
            document_scores[document_id] = sum(
                search_texts_collection.tf_idf(term, document_text)
                for term in terms)

        # Keep only the 1000 best-scoring documents per query (TREC convention).
        ranking = sorted(document_scores.items(), key=lambda kv: kv[1],
                         reverse=True)
        for rank, (document_id, score) in enumerate(ranking[:1000], start=1):
            document_results.append(
                Result(search_query_id, document_id, rank, score))

    result_writer(document_results,
                  f'IE_result_keep_{int(n_percentile*100)}_percentile.trec')
    print('Done')
Esempio n. 10
0
 def dummy_measure(self):
     """Return a fixed OK result (pm10 = pm25 = 1) for tests."""
     fixed = Result(ResultState.OK, pm10=1, pm25=1)
     return fixed
Esempio n. 11
0

####################### SET MODEL TRAINING #############################
# Index of the training-sample size: '0'..'2' map to 50D, 100D and 200D;
# '3'..'5' are also available but use generation-based scaling.
sampleSize = '0'
# Loss function: "WCategoricalCrossentropy" is the expected loss described in
# the thesis; "categorical_crossentropy" is the plain alternative.
loss='WCategoricalCrossentropy'



######################THESE PARAMETERS ARE FIXED #########################
# Fixed ES configuration flags -- presumably selects the CMA-ES module
# variants used during data gathering; TODO confirm against the Suite API.
esconfig = [0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1] 

timeSteps = 2  # the number of time steps for the LSTM network
# Precision column to read; per the original note this is the "negative 2"
# precision value (column "ert-2").
precision_value = 'ert-2'

result = Result(ert_column=precision_value)

# load the performance and ELA files generated from data gathering
performance = pd.read_csv("./perf/DataGathering_performance.csv")
ela = pd.read_csv("./perf/DataGathering_elaFeatures.csv")

result.addPerformance(performance)
result.addELA(ela)

# NOTE: this can fail if the training sample does not contain at least two
# time steps, which happens when CMA-ES finds the optimum before the second
# checkpoint.
Xtrain, Ytrain = result.createTrainSet(dataset=sampleSize, algorithm=None, reset=False, interface=None, RNN=timeSteps)


model = Models(Xtrain,Ytrain,_shuffle=False)
model.trainLSTM(stepSize=timeSteps, size=sampleSize, loss=loss, precision_value=precision_value)
Esempio n. 12
0
from src.result import Result


def format_result(entry):
    """Render one result entry as a single space-separated TREC line."""
    fields = (entry.query_id, entry.iteration, entry.doc_number,
              entry.rank, entry.score, entry.system)
    return ' '.join(str(field) for field in fields)


def result_writer(results, file):
    """Append all formatted result lines to *file*, one per line.

    Uses a context manager so the file handle is always closed (the original
    left the handle from `open()` unclosed).
    """
    lines = [format_result(entry) for entry in results]
    with open(file, 'a+') as out:
        out.write('\n'.join(lines) + '\n')


if __name__ == '__main__':
    # Demo: format and print two sample results.
    samples = [
        Result(10, 'docNumber1', 1, 2.5),
        Result(10, 'docNumber2', 2, 2.2),
    ]
    for sample in samples:
        print(format_result(sample))
Esempio n. 13
0
    def run(self):
        """Run the sensor main loop until shutdown; always closes the sensor.

        Implements a per-interval state machine (SensorState) advanced once
        per `_time_step` tick: START -> (SWITCHING_ON) -> CONNECTING ->
        WARMING_UP -> COOLING_DOWN -> WAITING_FOR_RESET, or the on-hold path
        that switches the sensor off / sends it to sleep instead of measuring.
        """
        first_meassurement = True  # used to skip the very first DEACTIVATED message
        loop_params = None
        state = SensorState.START

        try:
            self._wait_for_mqtt_connection()

            self._reset_timer()  # better testing
            while not self._shutdown:

                # A new interval begins: refresh MQTT state and loop parameters.
                if state == SensorState.START:
                    self._process_mqtt_messages()
                    loop_params = self._determine_loop_params()

                if loop_params.on_hold:
                    if state == SensorState.START:
                        if loop_params.use_switch_actor:
                            self._switch_sensor(SwitchSensor.OFF)
                            state = SensorState.SWITCHED_OFF
                        else:
                            self._sensor.open(warm_up=False)  # prepare for sending to sleep!
                            state = SensorState.COOLING_DOWN

                        # skip the first deactivation message, hopefully all subscriptions are complete the next time
                        if not first_meassurement or not loop_params.missing_subscriptions:
                            self._handle_result(loop_params, Result(ResultState.DEACTIVATED))
                else:
                    if state == SensorState.START:
                        if loop_params.use_switch_actor:
                            self._switch_sensor(SwitchSensor.ON)
                            state = SensorState.SWITCHING_ON
                        else:
                            state = SensorState.CONNECTING

                    # give the external power switch time before talking to the sensor
                    if state == SensorState.SWITCHING_ON and self._time_counter >= loop_params.tlim_switching_on:
                        state = SensorState.CONNECTING

                    if state == SensorState.CONNECTING:
                        self._sensor.open(warm_up=True)
                        state = SensorState.WARMING_UP

                    # measure exactly once per interval, after the warm-up time elapsed
                    if state == SensorState.WARMING_UP and self._time_counter >= loop_params.tlim_warming_up:
                        result = self._sensor.measure()
                        self._handle_result(loop_params, result)
                        state = SensorState.COOLING_DOWN

                # on hold: close immediately; otherwise wait out the cool-down time
                if state == SensorState.COOLING_DOWN and \
                        (self._time_counter >= loop_params.tlim_cool_down or loop_params.on_hold):
                    self._sensor.close(sleep=loop_params.sensor_sleep)
                    state = SensorState.WAITING_FOR_RESET

                if self._time_counter >= loop_params.tlim_interval:  # any state
                    first_meassurement = False
                    self._reset_timer()
                    state = SensorState.START

                self._wait(self._time_step)

        finally:
            # always release the sensor, whatever terminated the loop
            self.close()
Esempio n. 14
0
                   baseBudget=10000,
                   dimensions=[2, 3],
                   esconfig=esconfig,
                   function=i,
                   performance=performance,
                   pflacco=True,
                   localSearch=None)
    suite.runDataGathering()
    performance.saveToCSVPerformance('DataGathering')
    performance.saveToCSVELA('DataGathering')

####################### Data Preprocessing #############################
# Index of the training-sample size: '0'..'2' map to 50D, 100D and 200D;
# '3'..'5' are also available but use generation-based scaling.
sampleSize = '2'
timeSteps = 2  # the number of time steps for the LSTM network

result = Result()

# load the performance and ELA files generated from data gathering
performance = pd.read_csv("./perf/DataGathering_performance.csv")
ela = pd.read_csv("./perf/DataGathering_elaFeatures.csv")

result.addPerformance(performance)
result.addELA(ela)

# NOTE: this can fail if the training sample does not contain at least two
# time steps, which happens when CMA-ES finds the optimum before the second
# checkpoint.
Xtrain, Ytrain = result.createTrainSet(dataset=sampleSize,
                                       algorithm=None,
                                       reset=False,
                                       interface=None,
                                       RNN=timeSteps)