Example #1
 def test_restore_backup(self, tmpfile):
     # Build a small database, then close it and keep a byte-for-byte copy.
     ddb = Database(provider="sqlite", filename=str(tmpfile))
     init_onetable(ddb)
     ddb.disconnect()
     bck_db = tmpfile.read_bytes()
     # "AZEZRT ERTERT" is deliberately invalid SQL, so the migration must fail.
     m = MakeMigrations(tmpfile, Version("1.3.2"),
                        {"1.3.2": "AZEZRT ERTERT"})
     assert not m(lambda x: True, lambda x: True)
     # On failure the original file is restored from the backup, unchanged.
     assert tmpfile.read_bytes() == bck_db
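
 # A companion sketch, not from the original suite: it reuses the same
 # fixtures (tmpfile is assumed to be a pathlib.Path, plus init_onetable,
 # Version and MakeMigrations) and checks that a failed migration also
 # leaves a "failed_migrate" copy next to the restored file
 # (see MakeMigrations.restore_backup in Example #6).
 def test_failed_migration_keeps_failed_copy(self, tmpfile):
     ddb = Database(provider="sqlite", filename=str(tmpfile))
     init_onetable(ddb)
     ddb.disconnect()
     m = MakeMigrations(tmpfile, Version("1.3.2"),
                        {"1.3.2": "AZEZRT ERTERT"})
     assert not m(lambda x: True, lambda x: True)
     assert any("failed_migrate" in p.name for p in tmpfile.parent.iterdir())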
Example #2
 def check_migrations(self, check_cb: Callable) -> bool:
     """
     Check migration with cb.
     Test are done on another Database
     :return: True if success
     """
     if not check_cb:
         return True
     logger.info("Checking migrations...")
     f = tempfile.NamedTemporaryFile(delete=False)
     f.close()
     shutil.copy(self.tmp_file, f.name)
     check_db = Database(provider="sqlite", filename=f.name)
     res = check_cb(check_db)
     check_db.disconnect()  # otherwise unlink fails on Windows
     os.unlink(f.name)
     return res
Example #3
class Config(object):
    '''
    Main configuration here.

    If a property is considered *read-only*, we use the ``property``
    decorator for it.

    This class is responsible for setting up the user config dir.
    On Unix it is ``$HOME/.iottalk``;
    on Windows it is ``$USERPROFILE/_iottalk``.
    '''

    __gateway_port = 17000
    __beacon_port = 1900
    __http_port = 9992
    __ip = '127.0.0.1'
    __uuid = ''
    debug = False
    __db_conf = {
        'type': 'sqlite',
        'url': 'iottalk.db',
        'host': 'localhost',
        'port': -1,
        'user': '',
        'passwd': '',
    }
    __db = None
    __userdir = ''
    __mqtt = {
        'scheme': 'mqtt',
        'host': 'localhost',
        'port': 1883,
    }

    def __init__(self):
        self.setup_userdir()

    @property
    def userdir(self):
        if self.__userdir:
            return self.__userdir

        if utils.is_posix():
            self.__userdir = os.path.join(os.environ['HOME'],
                                          '.iottalk')
        elif utils.is_win():
            self.__userdir = os.path.join(os.environ['USERPROFILE'],
                                          '_iottalk')
        else:
            raise OSError('Unsupported os type "{}"'.format(os.name))

        return self.__userdir

    def setup_userdir(self):
        path = self.userdir

        if os.path.exists(path) and not os.path.isdir(path):
            raise OSError('Path "{}" is not a dir'.format(path))
        elif os.path.exists(path) and os.path.isdir(path):
            return

        os.mkdir(path)

    @property
    def gateway_port(self):
        return self.__gateway_port

    @property
    def beacon_port(self):
        return self.__beacon_port

    @property
    def beacon_url(self):
        return 'udp://{}:{}'.format(self.ip, self.beacon_port)

    @property
    def ip(self):
        return self.__ip

    @property
    def uuid(self):
        '''
        .. todo:: load the uuid from the config file.
        '''
        if not self.__uuid:
            self.__uuid = uuid4()
        return self.__uuid

    @property
    def http_port(self):
        return self.__http_port

    @property
    def db(self):
        '''
        :return: The Pony ORM db instance, without a db provider bound
        '''
        if self.__db:
            return self.__db

        self.__db = Database()
        return self.__db

    @property
    def db_conf(self):
        '''
        The db connection configuration.
        Here is the schema::

            {
                'type': str,
                'url': str,
                'host': str,
                'port': int,
                'user': str,
                'passwd': str,
            }

        >>> config.db_conf = {'type': 'answer', 'port': 42}
        >>> assert config.db_conf['type'] == 'answer'
        >>> config.db_conf['port']
        42
        '''
        return self.__db_conf.copy()

    @db_conf.setter
    def db_conf(self, value):
        '''
        :param dict value: the update dictionary

        We accept a subset of the keys, with the following schema::

            {
                'type': str,
                'url': str,
                'host': str,
                'port': int,
                'user': str,
                'passwd': str,
            }

        :raise ValueError: if we get any invalid key.
        :raise TypeError: if any value has the wrong type.
        '''
        key_set = ('type', 'url', 'host', 'port', 'user', 'passwd')

        for key, val in value.items():
            if key not in key_set:
                raise ValueError('Invalid key: {!r}'.format(key))
            if key != 'port' and not isinstance(val, string_types):
                raise TypeError('{!r} must be a string'.format(key))
            elif key == 'port' and not isinstance(val, int):
                raise TypeError("'port' must be an int")

        self.__db_conf.update(value)

    @property
    def available_protos(self):
        '''
        .. todo::
            should auto-detect the working server
        '''
        return ('mqtt', 'zmq', 'websocket')

    @property
    def feature_cates(self):
        '''
        The list of feature categories

        .. deprecated::
        '''
        return ('sight', 'hearing', 'feeling', 'motion', 'other')

    def __del__(self):
        if self.__db is not None and self.__db.provider is not None:
            self.__db.disconnect()

    @property
    def mqtt_conf(self):
        return self.__mqtt.copy()
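
# A minimal usage sketch (not part of the original module); it only exercises
# the public surface shown above. Note that constructing Config() creates the
# user config dir ($HOME/.iottalk or %USERPROFILE%/_iottalk) as a side effect.
if __name__ == '__main__':
    config = Config()
    print(config.userdir)        # e.g. /home/alice/.iottalk (path is illustrative)
    print(config.beacon_url)     # udp://127.0.0.1:1900

    # db_conf accepts a partial update and validates keys and value types.
    config.db_conf = {'type': 'mysql', 'port': 3306}
    assert config.db_conf['type'] == 'mysql'

    try:
        config.db_conf = {'bogus': 'key'}
    except ValueError as err:
        print(err)               # Invalid key: 'bogus'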
Example #4
class BandwidthDatabase:
    """
    Simple database that stores bandwidth transactions in Tribler as a work graph.
    """
    CURRENT_DB_VERSION = 9
    MAX_HISTORY_ITEMS = 100  # The maximum number of history items to store.

    def __init__(self,
                 db_path: Path,
                 my_pub_key: bytes,
                 store_all_transactions: bool = False) -> None:
        """
        Sets up the persistence layer ready for use.
        :param db_path: The full path of the database.
        :param my_pub_key: The public key of the user operating the database.
        :param store_all_transactions: Whether we store all pairwise transactions in the database. This is disabled by
        default and used for data collection purposes.
        """
        self.db_path = db_path
        self.my_pub_key = my_pub_key
        self.store_all_transactions = store_all_transactions
        create_db = str(db_path) == ":memory:" or not self.db_path.is_file()
        self.database = Database()

        # This function is called internally by Pony on startup, though pylint
        # cannot detect it with static analysis.
        # pylint: disable=unused-variable
        @self.database.on_connect(provider='sqlite')
        def sqlite_disable_sync(_, connection):
            cursor = connection.cursor()
            cursor.execute("PRAGMA journal_mode = WAL")
            cursor.execute("PRAGMA synchronous = 1")
            cursor.execute("PRAGMA temp_store = 2")
            # pylint: enable=unused-variable

        self.MiscData = misc.define_binding(self.database)
        self.BandwidthTransaction = db_transaction.define_binding(self)
        self.BandwidthHistory = history.define_binding(self)

        self.database.bind(provider='sqlite',
                           filename=str(db_path),
                           create_db=create_db,
                           timeout=120.0)
        self.database.generate_mapping(create_tables=create_db)

        if create_db:
            with db_session:
                self.MiscData(name="db_version",
                              value=str(self.CURRENT_DB_VERSION))

    @db_session
    def has_transaction(self, transaction: BandwidthTransactionData) -> bool:
        """
        Return whether a transaction is persisted to the database.
        :param transaction: The transaction to check.
        :return: A boolean value, indicating whether we have the transaction in the database or not.
        """
        return self.BandwidthTransaction.exists(
            public_key_a=transaction.public_key_a,
            public_key_b=transaction.public_key_b,
            sequence_number=transaction.sequence_number)

    @db_session
    def get_my_latest_transactions(
            self,
            limit: Optional[int] = None) -> List[BandwidthTransactionData]:
        """
        Return the latest transactions involving you.
        :param limit: An optional integer, to limit the number of results returned. Pass None to get all results.
        :return: A list containing the latest transactions involving you.
        """
        results = []
        db_txs = select(tx for tx in self.BandwidthTransaction
                        if tx.public_key_a == self.my_pub_key or tx.public_key_b == self.my_pub_key)\
            .limit(limit)
        for db_tx in db_txs:
            results.append(BandwidthTransactionData.from_db(db_tx))
        return results

    @db_session
    def get_latest_transaction(
            self, public_key_a: bytes,
            public_key_b: bytes) -> BandwidthTransactionData:
        """
        Return the latest transaction between two parties, or None if no such transaction exists.
        :param public_key_a: The public key of the party transferring the bandwidth.
        :param public_key_b: The public key of the party receiving the bandwidth.
        :return: The latest transaction between the two specified parties, or None if no such transaction exists.
        """
        db_obj = self.BandwidthTransaction.get(public_key_a=public_key_a,
                                               public_key_b=public_key_b)
        return BandwidthTransactionData.from_db(db_obj) if db_obj else None

    @db_session
    def get_latest_transactions(
            self,
            public_key: bytes,
            limit: Optional[int] = 100) -> List[BandwidthTransactionData]:
        """
        Return the latest transactions of a given public key, or an empty list if no transactions exist.
        :param public_key: The public key of the party transferring the bandwidth.
        :param limit: The number of transactions to return. (Default: 100)
        :return: The latest transactions of the specified public key, or an empty list if no transactions exist.
        """
        db_txs = select(tx for tx in self.BandwidthTransaction
                        if public_key in (tx.public_key_a, tx.public_key_b))\
            .limit(limit)
        return [BandwidthTransactionData.from_db(db_txn) for db_txn in db_txs]

    @db_session
    def get_total_taken(self, public_key: bytes) -> int:
        """
        Return the total amount of bandwidth taken by a given party.
        :param public_key: The public key of the peer of which we want to determine the total taken.
        :return: The total amount of bandwidth taken by the specified peer, in bytes.
        """
        return sum(transaction.amount
                   for transaction in self.BandwidthTransaction
                   if transaction.public_key_a == public_key)

    @db_session
    def get_total_given(self, public_key: bytes) -> int:
        """
        Return the total amount of bandwidth given by a given party.
        :param public_key: The public key of the peer of which we want to determine the total given.
        :return: The total amount of bandwidth given by the specified peer, in bytes.
        """
        return sum(transaction.amount
                   for transaction in self.BandwidthTransaction
                   if transaction.public_key_b == public_key)

    @db_session
    def get_balance(self, public_key: bytes) -> int:
        """
        Return the bandwidth balance (total given - total taken) of a specific peer.
        :param public_key: The public key of the peer of which we want to determine the balance.
        :return: The bandwidth balance of the specified peer, in bytes.
        """
        return self.get_total_given(public_key) - self.get_total_taken(
            public_key)

    def get_my_balance(self) -> int:
        """
        Return your bandwidth balance, which is the total amount given minus the total amount taken.
        :return: Your bandwidth balance, in bytes.
        """
        return self.get_balance(self.my_pub_key)

    @db_session
    def get_num_peers_helped(self, public_key: bytes) -> int:
        """
        Return the number of unique peers that a peer with the provided public key has helped.
        :param public_key: The public key of the peer of which we want to determine this number.
        :return: The number of unique peers helped by the specified peer.
        """
        result = list(
            select(
                count(g.public_key_b) for g in self.BandwidthTransaction
                if g.public_key_a == public_key))
        return result[0]

    @db_session
    def get_num_peers_helped_by(self, public_key: bytes) -> int:
        """
        Return the number of unique peers that a peer with the provided public key has been helped by.
        :param public_key: The public key of the peer of which we want to determine this number.
        :return: The number of unique peers that have helped the specified peer.
        """
        result = list(
            select(
                count(g.public_key_a) for g in self.BandwidthTransaction
                if g.public_key_b == public_key))
        return result[0]

    @db_session
    def get_history(self) -> List:
        """
        Get the history of your bandwidth balance as an ordered list.
        :return: A list in which each item contains a timestamp and a balance.
        """
        history = []
        for history_item in self.BandwidthHistory.select().order_by(
                self.BandwidthHistory.timestamp):
            history.append({
                "timestamp": history_item.timestamp,
                "balance": history_item.balance
            })

        return history

    def shutdown(self) -> None:
        """
        Shutdown the database.
        """
        self.database.disconnect()
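
# A minimal usage sketch (an assumption, not part of the original module): open
# an in-memory BandwidthDatabase and query the empty ledger; on a fresh database
# every aggregate is zero and the history is empty.
if __name__ == "__main__":
    from pathlib import Path  # also imported by the surrounding module

    db = BandwidthDatabase(Path(":memory:"), my_pub_key=b"my_key")
    assert db.get_my_balance() == 0
    assert db.get_total_taken(b"some_peer") == 0
    assert db.get_history() == []
    db.shutdown()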
Example #5
class GenerateDatabase:
    def __init__(self, version: str, dest_path: Path):

        self.version = version
        self.dest_path = dest_path
        self.sqlite = self.dest_path / (self.version + ".sqlite")
        self.sql = self.dest_path / (self.version + ".sql")
        self.db = Database()
        self.f = Faker(self.db)

        # setup temp file
        file = tempfile.NamedTemporaryFile(delete=False)
        file.close()
        self.tmp_path = Path(file.name)
        self.db.bind(provider="sqlite", filename=file.name)
        init_models(self.db)
        self.db.generate_mapping(create_tables=True)

    def __call__(self):
        self.build()
        self.finalize()

    @property
    def sqlite_and_sql(self):
        return self.sqlite.is_file() and self.sql.is_file()

    def build(self):
        getattr(self, "version_" + self.version.replace(".", "_"))()
        self.db.disconnect()

    def finalize(self):

        if self.sqlite_and_sql:
            self.compare_schema()
        else:
            self.store_db_and_schema()

    def compare_schema(self):
        actual_schema = self.sql.read_text()
        assert (
            Schema(self.db).schema == actual_schema
        ), f"Les schémas actuel et stocké ne correspondent plus. version: {self.version}"

    def store_db_and_schema(self):
        shutil.move(self.tmp_path, self.sqlite)
        schema = Schema(self.sqlite)
        schema.version = self.version
        schema.to_file(self.sql)

    """
    Sous cette marque, on définie les fonctions pour les versions
    """

    def generate_items(self):
        """genere au moins 1 élément de chaque table"""
        self.f.f_annotationDessin(
        )  # User, Annee,  GroupeMatiere, Matiere, ACtivite, Section, Page , Annotation
        self.f.f_tableauCell()  # TableauSection, TableauCell
        self.f.f_friseLegende()  # FriseSection, ZoneFrise, FriseLegende
        self.f.f_configuration()  # configuration

    def version_1_3_0(self):
        self.generate_items()
        self.f.f_annotationDessin(points="""[{"x": 0.3, "y": 0.4}]""")

    def version_1_4_0(self):
        self.generate_items()

    def version_1_5_0(self):
        self.generate_items()
        self.f.f_traduction(content="coucou", locale="fr_FR")

    def version_1_6_0(self):
        self.generate_items()
        self.f.f_traduction(content="hello", locale="fr_FR")
Example #6
class MakeMigrations:
    def __init__(
            self,
            filename: Union[str, Path],  # path to the database file
            actual_version: Union[
                Version, str] = None,  # current version, as declared in the sources
            migrations: dict = None,  # pool of available migrations
    ):
        self.actual_version = (actual_version if isinstance(
            actual_version, Version) else Version(actual_version))

        self.old_file = Path(filename)  # database to migrate

        # create a temporary copy on which the migrations are applied
        tmp = tempfile.NamedTemporaryFile(suffix=".sqlite", delete=False)
        tmp.close()
        self.tmp_file = Path(tmp.name)
        shutil.copy(self.old_file, self.tmp_file)  # duplicate the database

        # migration tooling
        self.tmp_db = Database(provider="sqlite", filename=tmp.name)
        self.schema = Schema(file=self.tmp_db)
        if self.schema.version == self.actual_version:
            logger.info(f"version {self.actual_version}: No migration needed")
            return
        self.migrator = Migrator(self.tmp_db, self.actual_version, migrations)
        logger.info(
            f"starting migrations from version {self.schema.version} to {self.actual_version}"
        )

    def make_migrations(self):
        self.migrator()

    def check_migrations(self, check_cb: Callable) -> bool:
        """
        Check migration with cb.
        Test are done on another Database
        :return: True if success
        """
        if not check_cb:
            return True
        logger.info("Checking migrations...")
        f = tempfile.NamedTemporaryFile(delete=False)
        f.close()
        shutil.copy(self.tmp_file, f.name)
        check_db = Database(provider="sqlite", filename=f.name)
        res = check_cb(check_db)
        check_db.disconnect()  # otherwise unlink fails on Windows
        os.unlink(f.name)
        return res

    def generate_new_mapping(self, generate_cb: Callable):
        """
        Generate new mapping using pony models, and apply some more migrations.
        We rely on exceptions being raised in case of error.
        """
        if not generate_cb:
            return
        logger.info("Generating new mapping...")
        generate_cb(self.tmp_db)
        self.tmp_db.generate_mapping(create_tables=True)
        self.tmp_db.disconnect()

    def _backup_name(self):

        old_schema = Schema(file=self.old_file)
        backup_name = (
            f"mycartable_backup-from_{old_schema.version}"
            f"-to_{self.actual_version}-{datetime.now().isoformat().replace(':','_')}"
        )
        return backup_name

    def backup_old(self):
        backup_file = self.old_file.parent / self._backup_name()
        shutil.copy(self.old_file, backup_file)
        return backup_file

    def move_tmp_to_old(self):
        self.tmp_file.replace(self.old_file)

    def restore_backup(self, backup_file: Path):
        fail_name = backup_file.name.replace("backup", "failed_migrate")
        if self.old_file.is_file():
            self.old_file.replace(self.old_file.parent / fail_name)
        backup_file.replace(self.old_file)

    def __call__(self,
                 check_cb: Callable = None,
                 generate_cb: Callable = None) -> bool:
        """
        check_cb: voir check_migrations
        generate_db: voir generate_new_mapping
        - on fait la sauvegarde
        - réalisation les migrations sur la base temporaire
        - on vérifie que les données sont compatible avec les schema des sources
        - on remplace l'ancienne par la nouvelle

        :return: True sinon False

        """
        if not hasattr(self, "migrator"):
            # no migrator == same version: nothing to migrate, do nothing and succeed
            return True

        backup_file = self.backup_old()

        try:
            self.make_migrations()
            self.generate_new_mapping(generate_cb)
            if not self.check_migrations(check_cb):
                raise RuntimeError("check_migrations failed")
            self.move_tmp_to_old()
        except Exception as err:
            logger.exception(err)
            self.restore_backup(backup_file)
            return False
        return True
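
# A hedged end-to-end sketch (the file name, version string and migration SQL
# are illustrative only): migrate an on-disk database to the version declared
# in the sources, validating the result with a check callback and re-binding
# the Pony models with a generate callback before the old file is replaced.
def run_migrations(db_file: Path) -> bool:
    migrations = {
        "1.6.0": "ALTER TABLE configuration ADD COLUMN extra TEXT",  # hypothetical SQL
    }

    def check_cb(db: Database) -> bool:
        # Run sanity queries against the migrated copy here.
        return True

    def generate_cb(db: Database) -> None:
        # Re-declare the Pony entities on the migrated database,
        # e.g. init_models(db) as in GenerateDatabase above.
        pass

    make = MakeMigrations(db_file, actual_version="1.6.0", migrations=migrations)
    return make(check_cb=check_cb, generate_cb=generate_cb)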