Example #1
class GoalConnector(CsvConnector):
    _csv2model_fields = dict(accountName='account',
                             goalName='name',
                             startDate=Converter.to_datetime('start_date',
                                                             fmt='%d.%m.%Y'),
                             total=Converter.to_rubles('total'))
    _pointer_filename = 'pathToGoalsCsv'
Example #2
    def test_weather_metrics(self, city):
        """Compare temperature readings between metric and imperial units"""
        imperial_weather = self.weather_api.get_imperial_weather_by_name(city)
        assert imperial_weather.status_code == 200

        metric_weather = self.weather_api.get_metric_weather_by_name(city)
        assert metric_weather.status_code == 200

        cels_temp = metric_weather.json()['main']['temp']
        fahr_temp = imperial_weather.json()['main']['temp']

        converted_temp = Converter.fahrenheit_to_celsius(fahr_temp)

        assert cels_temp == converted_temp

        cels_temp_min = metric_weather.json()['main']['temp_min']
        fahr_temp_min = imperial_weather.json()['main']['temp_min']

        converted_temp_min = Converter.celsius_to_fahrenheit(cels_temp_min)

        assert fahr_temp_min == converted_temp_min

        cels_temp_max = metric_weather.json()['main']['temp_max']
        fahr_temp_max = imperial_weather.json()['main']['temp_max']

        converted_temp_max = Converter.fahrenheit_to_celsius(fahr_temp_max)

        assert cels_temp_max == converted_temp_max
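
The test above relies on Converter.fahrenheit_to_celsius and Converter.celsius_to_fahrenheit, which are not shown in this listing. A minimal sketch of those helpers, assuming nothing beyond the two method names the test calls:

class Converter:
    @staticmethod
    def fahrenheit_to_celsius(fahrenheit):
        # standard conversion: C = (F - 32) * 5/9
        return (fahrenheit - 32) * 5.0 / 9.0

    @staticmethod
    def celsius_to_fahrenheit(celsius):
        # standard conversion: F = C * 9/5 + 32
        return celsius * 9.0 / 5.0 + 32

In practice the exact-equality assertions would likely need a tolerance such as pytest.approx, since the two API readings are rounded independently.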
Example #3
    def _extract_blockchain_and_open_transactions_block_from_file_content(
            self, file_content):

        block_chain = Converter.de_serialize_file_object(file_content[0][:-1])
        transactions_block = Converter.de_serialize_file_object(
            file_content[1])
        return block_chain, transactions_block
Example #4
    def __init__(self, backend, on_receive, on_receive_interval):
        self.backend = backend
        self.on_receive = on_receive
        self.on_receive_interval = on_receive_interval
        self.storage = Storage()
        if self.storage.fetch_status() != "disconnected":
            self.storage.update_status("disconnected")
        self.loop = asyncio.new_event_loop()
        self.converter = Converter()
Example #5
    def sign_transaction(self):

        signer = PKCS1_v1_5.new(
            RSA.importKey(Converter.string_to_binary(self.private_key)))

        self._create_payload_hash()

        signature = signer.sign(self.payload_hash)

        return Converter.binary_to_string(signature)
Example #6
    def _generate_private_and_public_keys(self):
        """Generate an RSA key pair and return both keys as DER-encoded strings."""
        private_key, public_key = KeysGen.gen_private_and_public_keys()

        private_key_str = Converter.binary_to_string(
            private_key.exportKey(format="DER"))
        public_key_str = Converter.binary_to_string(
            public_key.exportKey(format="DER"))

        return private_key_str, public_key_str
Example #7
class TransactionConnector(CsvConnector):
    _csv2model_fields = dict(
        date=Converter.to_datetime('date', fmt='%Y-%m-%d'),
        outcomeAccountName='outcome_account',
        outcome=Converter.to_rubles('outcome'),
        incomeAccountName='income_account',
        income=Converter.to_rubles('income'),
    )
    _header_line_no = 4
    _pointer_filename = 'pathToTransactionsCsv'
    _use_all_csv_fields = False
Example #8
    def data_loader(file_name='dataset/NetInfo/acc/info.dict', quantize=False):
        import json

        with open(file_name, 'r') as f:
            lst = json.load(f)
        X_all = []
        y_all = []
        converter = Converter()
        for k, v in lst.items():
            dic = json.loads(k)
            tmp = converter.spec2feature(dic, quantize)
            X_all.append(tmp)
            y_all.append(v / 100.)
        return X_all, y_all
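
A hypothetical call of the loader above; the info.dict file must already exist at the default path baked into the signature:

X_all, y_all = data_loader(quantize=False)
print(len(X_all), 'feature vectors,', len(y_all), 'accuracy targets')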
Example #9
    def sign(self, hashval, privkey, pub):
        h = hash_function(privkey)
        pub_key = Converter().hexToByte(pub)
        a = 2**(BITS_SIZE - 2) + sum(2**i * BitOp().bit(h, i)
                                     for i in range(3, BITS_SIZE - 2))
        r = self.__hint(
            ''.join([h[i]
                     for i in range(BITS_SIZE // 8, BITS_SIZE // 4)]) + hashval)
        R = self.ed.scalar_multiplication(self.B, r)
        S = (r + self.__hint(self.__encodepoint(R) + pub_key + hashval) *
             a) % self.ed.l
        encoded = self.__encodepoint(R) + self.__encodeint(S)
        return encoded, Converter().byteToHex(encoded)
Example #10
    def save_data(self):
        """Save blockchain and transaction to file"""

        # TODO: saving to a file will be replaced by saving to a database
        try:

            FileHandler.clear_file_content()
            FileHandler.write(Converter.to_json(self._create_savable_block()))
            FileHandler.write("\n")
            FileHandler.write(
                Converter.to_json(
                    self._convert_object_in_transaction_block_to_json()))

        except IOError:
            print('Saving failed!')
Example #11
    def test_converter_creation(self, text_handler, russian_rules,
                                russian_constants, russian_lookups):
        converter = Converter(text_handler, russian_rules, russian_constants,
                              russian_lookups)
        assert type(converter) == Converter
        assert type(converter.text_handler) == TextHandler
        assert type(converter.rules) == dict
Example #12
    def test_calling_to_rubles_converter_converts_strings_to_currency_obj(
            self):
        converter = Converter.to_rubles('some_cash')
        result = converter('123')

        assert isinstance(result, Money)
        assert int(result.amount) == 123
        assert result.currency == 'RUR'
Example #13
    def verify_transaction_signature(self):

        public_key = self._get_public_key(self.transaction)
        verifier = PKCS1_v1_5.new(public_key)
        pay_load_hash = self._create_payload_hash(self.transaction)

        return verifier.verify(
            pay_load_hash,
            Converter.string_to_binary(self.transaction.signature))
Example #14
    def test_calling_to_datetime_converter_converts_string_to_datetime(self):
        converter = Converter.to_datetime('bogus', fmt='%Y-%m-%d %H:%M:%S')
        result = converter('2017-02-02 17:18:19')

        assert result == datetime(year=2017,
                                  month=2,
                                  day=2,
                                  hour=17,
                                  minute=18,
                                  second=19)
Example #15
    def validate(self, signature, message, publickey):
        pub_k = Converter().hexToByte(publickey)
        sign = signature
        if len(sign) != BITS_SIZE // 4:
            raise Exception("signature length is wrong")
        if len(pub_k) != BITS_SIZE // 8:
            raise Exception("public-key length is wrong")
        R = self.__decodepoint(sign[0:BITS_SIZE // 8])
        A = self.__decodepoint(pub_k)
        S = self.__decodeint(sign[BITS_SIZE // 8:BITS_SIZE // 4])
        h = self.__hint(self.__encodepoint(R) + pub_k + message)
        vef = self.ed.scalar_multiplication(self.B, S)
        sig = self.ed.edwards(R, self.ed.scalar_multiplication(A, h))
        if vef != sig:
            raise SignatureError(vef, sig)
        else:
            return True, "Valid"
Example #16
# Code for "APQ: Joint Search for Network Architecture, Pruning and Quantization Policy"
# CVPR 2020
# Tianzhe Wang, Kuan Wang, Han Cai, Ji Lin, Zhijian Liu, Song Han
# {usedtobe, kuanwang, hancai, jilin, zhijian, songhan}@mit.edu

import torch
import os
import torch.nn as nn
import numpy as np
from utils.converter import Converter

cvt = Converter()


def preparation(quantize=False, file_name=None, all=False, data_size=None):
    from utils.converter import Converter

    def data_loader(file_name='dataset/NetInfo/acc/info.dict', quantize=False):
        import json

        with open(file_name, 'r') as f:
            lst = json.load(f)
        X_all = []
        y_all = []
        converter = Converter()
        for k, v in lst.items():
            dic = json.loads(k)
            tmp = converter.spec2feature(dic, quantize)
            X_all.append(tmp)
            y_all.append(v / 100.)
        return X_all, y_all
Example #17
    def publickey(self, privkey):
        h = hash_function(privkey)
        # derive the secret scalar via Ed25519-style clamping of the hash bits
        a = 2**(BITS_SIZE - 2) + sum(2**i * BitOp().bit(h, i)
                                     for i in range(3, BITS_SIZE - 2))
        A = self.ed.scalar_multiplication(self.B, a)
        return Converter().byteToHex(self.__encodepoint(A))
Example #18
    logging.info("Starting training with parameters: {0}".format(vars(args)))
    """##### Loading the dataset"""

    if args.semeval:
        raw_train = SemEvalDatasetReader(files.semeval_train)
        raw_test = SemEvalDatasetReader(files.semeval_test)
    else:
        if not path.exists(files.evalita_train):
            raw_train, raw_test = EvalitaDatasetReader(files.evalita).split()
        else:
            raw_train = EvalitaDatasetReader(files.evalita_train)
            raw_test = EvalitaDatasetReader(files.evalita_test)
    raw_train, raw_val = raw_train.split(test_size=0.1)

    converter = Converter(sequence_max_length=args.max_seq_length)

    X_train = converter.texts_to_sequences(raw_train.X)
    Y_train = raw_train.Y
    X_val = converter.texts_to_sequences(raw_val.X)
    Y_val = raw_val.Y
    X_test = converter.texts_to_sequences(raw_test.X)
    Y_test = raw_test.Y
    Y_dictionary = raw_train.Y_dictionary
    Y_class_weights = len(Y_train) / np.power(np.bincount(Y_train), 1.1)
    Y_class_weights *= 1.0 / np.min(Y_class_weights)
    logging.info("Class weights: %s" % str(Y_class_weights))

    del raw_train
    del raw_val
    del raw_test
Example #19
class Storage:
    sqlite = None
    schema_version = 2

    def __init__(self):
        self.parameters = {
            "database": data_path + "/data.db",
            "isolation_level": None,
        }
        self.converter = Converter()

    def connect(self):
        connection = sqlite3.connect(**self.parameters)
        connection.row_factory = self.row_factory
        return connection

    def row_factory(self, cursor, row):
        dictionary = {}
        for index, column in enumerate(cursor.description):
            dictionary[column[0]] = row[index]
        return dictionary

    def init(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "SELECT name FROM sqlite_master WHERE type = 'table'")
            tables = []
            for row in cursor.fetchall():
                tables.append(row["name"])

            schema_version = self.schema_version
            if "version" not in tables:
                cursor.execute("CREATE TABLE version (version INTEGER)")
                cursor.execute("INSERT INTO version VALUES (%s)" %
                               self.schema_version)
            else:
                schema_version = int(
                    cursor.execute("SELECT version FROM version").fetchone()
                    ["version"])

            if "status" not in tables:
                cursor.execute("CREATE TABLE status (status TEXT)")
                cursor.execute("INSERT INTO status VALUES ('disconnected')")

            if "logs" not in tables:
                cursor.execute(("CREATE TABLE logs ("
                                "id INTEGER PRIMARY KEY,"
                                "message TEXT"
                                ")"))

            if "measurements" not in tables:
                cursor.execute(("CREATE TABLE measurements ("
                                "id INTEGER PRIMARY KEY,"
                                "name TEXT,"
                                "timestamp INTEGER,"
                                "voltage REAL,"
                                "current REAL,"
                                "power REAL,"
                                "temperature REAL,"
                                "data_plus REAL,"
                                "data_minus REAL,"
                                "mode_id INTEGER,"
                                "mode_name TEXT,"
                                "accumulated_current INTEGER,"
                                "accumulated_power INTEGER,"
                                "accumulated_time INTEGER,"
                                "resistance REAL,"
                                "session_id INTEGER"
                                ")"))

            if "sessions" not in tables:
                cursor.execute(("CREATE TABLE sessions ("
                                "id INTEGER PRIMARY KEY,"
                                "version TEXT,"
                                "name TEXT,"
                                "timestamp INTEGER"
                                ")"))

            if schema_version == 1:
                logging.info(
                    "migrating database to new version, this may take a while..."
                )

                self.backup()

                cursor.execute(
                    ("ALTER TABLE measurements ADD session_id INTEGER"))

                cursor.execute(
                    "DELETE FROM measurements WHERE name = '' OR name IS NULL")

                query = cursor.execute(
                    "SELECT name, MIN(timestamp) AS timestamp FROM measurements WHERE session_id IS NULL GROUP BY name ORDER BY MIN(id)"
                )
                rows = query.fetchall()
                for row in rows:
                    session_name = row["name"]
                    cursor.execute(
                        "INSERT INTO sessions (name, timestamp) VALUES (?, ?)",
                        (session_name, row["timestamp"]))
                    session_id = cursor.lastrowid
                    cursor.execute(
                        "UPDATE measurements SET session_id = ? WHERE name = ?",
                        (session_id, session_name))

                cursor.execute("UPDATE version SET version = 2")

    def store_measurement(self, data):
        if data is None:
            return

        columns = []
        placeholders = []
        values = []
        for name, value in data.items():
            columns.append(name)
            placeholders.append(":" + name)
            values.append(value)

        columns = ", ".join(columns)
        placeholders = ", ".join(placeholders)
        values = tuple(values)

        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "INSERT INTO measurements (" + columns + ") VALUES (" +
                placeholders + ")", values)

    def destroy_measurements(self, session):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute("DELETE FROM measurements WHERE session_id = ?",
                           (session, ))
            cursor.execute("DELETE FROM sessions WHERE id = ?", (session, ))

    def fetch_sessions(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            return cursor.execute(
                "SELECT * FROM sessions ORDER BY timestamp DESC").fetchall()

    def fetch_measurements_count(self, session):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "SELECT COUNT(id) AS count FROM measurements WHERE session_id = ?",
                (session, ))
            return int(cursor.fetchone()["count"])

    def fetch_measurements(self, session, limit=None, offset=None):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            sql = "SELECT * FROM measurements WHERE session_id = ? ORDER BY timestamp ASC"
            if limit is None or offset is None:
                cursor.execute(sql, (session, ))
            else:
                cursor.execute(sql + " LIMIT ?, ?", (session, offset, limit))
            items = cursor.fetchall()

        for index, item in enumerate(items):
            items[index] = self.converter.convert(item)

        return items

    def fetch_last_measurement_by_name(self, name):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "SELECT * FROM measurements WHERE name = ? ORDER BY timestamp DESC LIMIT 1",
                (name, ))
            return cursor.fetchone()

    def fetch_last_measurement(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "SELECT * FROM measurements ORDER BY timestamp DESC LIMIT 1")
            return cursor.fetchone()

    def get_selected_session(self, selected):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            if selected == "":
                session = cursor.execute(
                    "SELECT * FROM sessions ORDER BY timestamp DESC LIMIT 1"
                ).fetchone()
            else:
                session = cursor.execute("SELECT * FROM sessions WHERE id = ?",
                                         (selected, )).fetchone()

        return session

    def log(self, message):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute("INSERT INTO logs (message) VALUES (?)",
                           (message, ))

    def fetch_log(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute("SELECT message FROM logs")

            log = ""
            for row in cursor.fetchall():
                log += row["message"]

        return log

    def clear_log(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "DELETE FROM logs WHERE id NOT IN (SELECT id FROM logs ORDER BY id DESC LIMIT 250)"
            )

    def update_status(self, status):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute("UPDATE status SET status = ?", (status, ))

    def fetch_status(self):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute("SELECT status FROM status")
            return cursor.fetchone()["status"]

    def create_session(self, name, version):
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            cursor.execute(
                "INSERT INTO sessions (name, version, timestamp) VALUES (?, ?, ?)",
                (name, version, time()))
            return cursor.lastrowid

    def backup(self):
        path = self.parameters["database"]
        backup_path = "%s.backup-%s" % (
            path, pendulum.now().format("YYYY-MM-DD_HH-mm-ss"))
        if os.path.exists(path):
            shutil.copy(path, backup_path)
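
A hypothetical usage sketch for the Storage class above; data_path and the measurement fields come from the surrounding project, so the concrete values below are assumptions:

storage = Storage()
storage.init()  # creates missing tables, migrates a v1 schema to v2
session_id = storage.create_session("bench-1", "v2")
storage.store_measurement({
    "name": "bench-1",
    "timestamp": int(time()),
    "voltage": 5.01,
    "current": 0.47,
    "session_id": session_id,
})
print(storage.fetch_measurements_count(session_id))  # -> 1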
Example #20
class Daemon:
    running = None
    thread = None
    storage = None
    config = None
    interface = None
    buffer = None
    buffer_expiration = None

    def __init__(self, backend, on_receive, on_receive_interval):
        self.backend = backend
        self.on_receive = on_receive
        self.on_receive_interval = on_receive_interval
        self.storage = Storage()
        if self.storage.fetch_status() != "disconnected":
            self.storage.update_status("disconnected")
        self.loop = asyncio.new_event_loop()
        self.converter = Converter()

    def start(self):
        self.running = True
        if self.thread is None:
            self.thread = Thread(target=self.run)
        if not self.thread.is_alive():
            self.thread.start()

    def stop(self):
        self.log("Disconnecting")
        self.running = False
        if self.interface:
            self.interface.disconnect()
        while self.thread and self.thread.is_alive():
            sleep(0.1)
        self.emit("disconnected")
        self.thread = None

    def run(self):
        self.storage = Storage()
        self.config = Config()

        self.interface = Wrapper()

        try:
            self.log("Connecting")
            self.retry(self.interface.connect)
            self.emit("connected")
            self.log("Connected")

            name = self.config.read("name")
            interval = float(self.config.read("rate"))
            version = self.config.read("version")
            session_id = self.storage.create_session(name, version)
            while self.running:
                begin = timer()
                data = self.retry(self.interface.read)

                if isinstance(data, str):
                    if data in ["disconnected", "connected"]:
                        self.disconnect()
                        return
                    raise Exception(data)
                else:
                    self.log(json.dumps(data))
                    if data:
                        data["session_id"] = session_id
                        self.update(data, version)
                    self.storage.store_measurement(data)

                measurement_runtime = timer() - begin
                sleep_time = interval - measurement_runtime
                if sleep_time > 0:
                    sleep(sleep_time)

        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            logging.exception(sys.exc_info()[0])
            self.emit("log", traceback.format_exc())
            self.emit("log-error")
        finally:
            self.disconnect()

    def disconnect(self):
        self.interface.disconnect()
        self.emit("disconnected")
        self.log("Disconnected")
        self.thread = None

    def update(self, data, version):
        format = Format(version)

        table = []
        for name in format.table_fields:
            callback = getattr(format, name)
            table.append(callback(data))

        graph = {}
        for name in format.graph_fields:
            if name == "timestamp":
                callback = getattr(format, name)
                value = callback(data)
            else:
                value = data[name]
            graph[name] = value

        graph = self.converter.convert(graph)

        if self.on_receive:
            if not self.buffer:
                self.buffer = []

            data["timestamp"] = int(data["timestamp"])
            self.buffer.append(data)

            execute = True
            if self.on_receive_interval:
                execute = False
                if not self.buffer_expiration or self.buffer_expiration <= time():
                    execute = True
                    self.buffer_expiration = time() + self.on_receive_interval

            if execute:
                payload = json.dumps(self.buffer)
                self.buffer = None
                payload_file = os.path.join(
                    os.getcwd(), "on-receive-payload-%s.json" % time())
                with open(payload_file, "w") as file:
                    file.write(payload)
                command = self.on_receive + " \"" + payload_file + "\""
                subprocess.Popen(command, shell=True, env={})

        self.emit("update", json.dumps({
            "table": table,
            "graph": graph,
        }))

    def retry(self, callback):
        timeout = time() + 60
        count = 10
        reconnect = False
        while True:
            try:
                if reconnect:
                    self.interface.disconnect()
                    self.interface.connect()
                    # noinspection PyUnusedLocal
                    reconnect = False

                return callback()
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                count -= 1
                logging.exception(sys.exc_info()[0])
                if timeout <= time() or count <= 0:
                    raise
                else:
                    self.log("operation failed, retrying")
                    self.emit("log", traceback.format_exc())
                    reconnect = True

    def emit(self, event, data=None):
        if event == "log":
            self.storage.log(data)
        elif event in [
                "connecting", "connected", "disconnecting", "disconnected"
        ]:
            self.storage.update_status(event)
        self.backend.emit(event, data)

    def log(self, message):
        prefix = pendulum.now().format("YYYY-MM-DD HH:mm:ss") + " - "
        self.emit("log", prefix + message + "\n")
Example #21
    def __init__(self):
        self.parameters = {
            "database": data_path + "/data.db",
            "isolation_level": None,
        }
        self.converter = Converter()
Example #22
    def __init__(self, latency_predictor: LatencyPredictor,
                 accuracy_predictor: AccuracyPredictor):
        self.latency_predictor = latency_predictor
        self.accuracy_predictor = accuracy_predictor
        self.converter = Converter()
Example #23
def converter(text_handler, russian_rules, russian_constants, russian_lookups):
    return Converter(text_handler, russian_rules, russian_constants,
                     russian_lookups)
Example #24
    def _get_public_key(self, transaction):
        return RSA.importKey(Converter.string_to_binary(transaction.sender))
Example #25
    def test_to_datetime_returns_converter_instance(self):
        assert isinstance(Converter.to_datetime('start_date', fmt='%Y-%m-%d'),
                          Converter)
Example #26
    def test_to_rubles_converts_empty_string_to_zero(self):
        converter = Converter.to_rubles('rubles')
        result = converter('')

        assert int(result.amount) == 0
Example #27
    def test_to_rubles_handles_decimal_comma_correctly(self):
        converter = Converter.to_rubles('money')
        result = converter('3,62')

        assert result.amount == Decimal('3.62')
Example #28
class EvolutionFinder:
    def __init__(self, latency_predictor: LatencyPredictor,
                 accuracy_predictor: AccuracyPredictor):
        self.latency_predictor = latency_predictor
        self.accuracy_predictor = accuracy_predictor
        self.converter = Converter()

    def random_spec(self, constraint):
        while True:
            spec = self.converter.random_spec()
            if not self.converter.is_valid(spec):
                continue
            lat = self.latency_predictor.predict_lat(spec)
            if lat <= constraint:
                return spec, lat

    def mutate_spec(self, spec, constraint):
        while True:
            identity = []
            new_spec = copy.deepcopy(spec)
            block_mutation_prob = 0.1
            father = spec

            for i in range(21):
                depth = i % 4 + 1
                stg = i // 4
                if random.random() < block_mutation_prob:
                    self.converter.change_spec(new_spec, i)

                if depth > father['d'][stg]:
                    identity.append(1)
                else:
                    identity.append(0)
            bad = False
            for i in range(21):
                depth = i % 4 + 1
                stg = i // 4
                if depth == 3 and identity[i]:
                    if not identity[i + 1]:
                        bad = True
                if not identity[i]:
                    new_spec['d'][stg] = max(new_spec['d'][stg], depth)

            if not self.converter.is_valid(new_spec):
                continue
            lat = self.latency_predictor.predict_lat(new_spec)
            if not bad and lat <= constraint:
                return new_spec, lat

    def crossover_spec(self, spec1, spec2, constraint):
        while True:
            new_spec = copy.deepcopy(spec1)
            identity = []
            for i in range(21):
                depth = i % 4 + 1
                stg = i // 4
                father = copy.deepcopy(
                    spec1) if random.random() < 0.5 else copy.deepcopy(spec2)

                new_spec['ks'][i] = father['ks'][i]
                new_spec['e'][i] = father['e'][i]
                for it in range(4):  # quantization policy
                    qname = self.converter.num2qname[it]
                    new_spec[qname][i] = father[qname][i]

                if depth > father['d'][stg]:
                    identity.append(1)
                else:
                    identity.append(0)
            bad = False
            for i in range(21):
                depth = i % 4 + 1
                stg = i // 4
                if depth == 3 and identity[i]:
                    if not identity[i + 1]:
                        bad = True
                if not identity[i]:
                    new_spec['d'][stg] = max(new_spec['d'][stg], depth)
            if not self.converter.is_valid(new_spec):
                continue
            lat = self.latency_predictor.predict_lat(new_spec)
            if not bad and lat <= constraint:
                return new_spec, lat

    def run_evolution_search(self,
                             max_time_budget=1000,
                             population_size=100,
                             mutation_numbers=50,
                             constraint=120):
        """Run a single roll-out of regularized evolution to a fixed time budget."""
        times, best_valids, best_tests = [0.0], [-100], [-100]
        population = []  # (validation, spec, latency) tuples
        child_pool = []
        lat_pool = []
        best_info = None
        print('Generate random population...')
        for _ in range(population_size):
            spec, lat = self.random_spec(constraint)
            child_pool.append(spec)
            lat_pool.append(lat)

        accs = self.accuracy_predictor.predict_accuracy(child_pool)
        for i in range(mutation_numbers):
            population.append((accs[i].item(), child_pool[i], lat_pool[i]))
        print('Start Evolution...')
        iter = 0
        # After the population is seeded, proceed with evolving the population.
        while True:
            parents_size = population_size // 4
            parents = sorted(population,
                             key=lambda x: x[0])[::-1][:parents_size]
            acc = parents[0][0]
            if iter > 0 and iter % 100 == 1:
                print('Iter: {} Acc: {}'.format(iter - 1, parents[0][0]))

            times.append(iter)
            if acc > best_valids[-1]:
                best_valids.append(acc)
                best_info = parents[0]
            else:
                best_valids.append(best_valids[-1])
            if iter > max_time_budget:
                break
            # sample = random_combination(population, tournament_size)[::-1]
            # best_spec = sorted(sample, key=lambda i: i[0])[-1][1]
            population = parents
            child_pool = []
            lat_pool = []

            for i in range(mutation_numbers):
                par_spec = population[np.random.randint(parents_size)][1]
                # Mutate
                new_spec, lat = self.mutate_spec(par_spec, constraint)
                child_pool.append(new_spec)
                lat_pool.append(lat)

            for i in range(mutation_numbers):
                par_spec1 = population[np.random.randint(parents_size)][1]
                par_spec2 = population[np.random.randint(parents_size)][1]
                # Crossover
                new_spec, lat = self.crossover_spec(par_spec1, par_spec2,
                                                    constraint)
                child_pool.append(new_spec)
                lat_pool.append(lat)

            accs = self.accuracy_predictor.predict_accuracy(child_pool)
            for i in range(mutation_numbers):
                population.append((accs[i].item(), child_pool[i], lat_pool[i]))
            iter = iter + 1

        return times, best_valids, best_info
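
A hypothetical driver for the finder above; constructing the LatencyPredictor and AccuracyPredictor instances is project-specific and not shown in this listing:

finder = EvolutionFinder(latency_predictor, accuracy_predictor)
times, best_valids, best_info = finder.run_evolution_search(
    max_time_budget=1000, population_size=100,
    mutation_numbers=50, constraint=120)
acc, spec, lat = best_info  # (validation accuracy, spec, predicted latency)
print('best accuracy %.4f at latency %.1f' % (acc, lat))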
Example #29
    def hash_block(block):
        """Return the SHA-256 hash of a block's JSON representation."""
        hashed_block = block.to_json()
        hashed_block['transactions'] = \
            Transaction.convert_all_transaction_block_to_ordered_dict(
                hashed_block['transactions'])
        return HashImplementer.hash_string_using_sha256(
            Converter.to_string(hashed_block))
Example #30
from models import Elit
from utils.converter import Converter
from utils.operators.basic import Operator as op
from utils.operators.modified import Operator as op_m
from utils.policies import Policy
from utils.test_functions import levi

import numpy as np

converter = Converter(type_='grey')
crossover = op.crossover()
mutator = op_m.Mutate.multi(3)
policies = {
    'include': Policy.elitarium(),
    'exclude': Policy.elitarium(),
    'parents': Policy.random()
}

n = 100
eps = 1

model = Elit(converter=converter,
             policies=policies,
             mutator=mutator,
             crossover=crossover)
model.run(*(levi()),
          epochs=101,
          n=20,
          eps=eps,
          optimize='min',
          t=10)
Example #31
    def test_to_datetime_initializes_converter_w_field_name_and_callable(self):
        converter = Converter.to_datetime('start_date', fmt='%d.%m.%Y')

        assert converter.model_field_name == 'start_date'
        assert callable(converter._convert)
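
Taken together, the tests in Examples #12, #14, #25, #26, #27 and #31 pin down the contract of the Converter factories. Below is a minimal sketch consistent with those assertions; the real implementation is not part of this listing, so everything beyond the tested names (to_rubles, to_datetime, model_field_name, _convert) is an assumption:

from collections import namedtuple
from datetime import datetime
from decimal import Decimal

# Stand-in for the real Money type: the tests only touch .amount and .currency.
Money = namedtuple('Money', ['amount', 'currency'])


class Converter:
    def __init__(self, model_field_name, convert):
        self.model_field_name = model_field_name
        self._convert = convert

    def __call__(self, raw_value):
        return self._convert(raw_value)

    @classmethod
    def to_datetime(cls, model_field_name, fmt):
        # '2017-02-02 17:18:19' with fmt='%Y-%m-%d %H:%M:%S'
        # -> datetime(2017, 2, 2, 17, 18, 19)
        return cls(model_field_name,
                   lambda value: datetime.strptime(value, fmt))

    @classmethod
    def to_rubles(cls, model_field_name):
        def convert(value):
            # empty string -> 0; decimal comma ('3,62') -> Decimal('3.62')
            amount = Decimal(value.replace(',', '.')) if value else Decimal(0)
            return Money(amount, 'RUR')
        return cls(model_field_name, convert)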