def _update_state(database, block_num, address, resource):
    try:
        # update state table
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        key = address_binary
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value
        data = {
            "block_updated": int(block_num),
            "updated_at": r.now(),
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            **resource,
        }
        table_query = database.get_table("state")
        query = table_query.get(key).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data).merge(
                    {
                        "address": key,
                        "block_created": int(block_num),
                        "created_at": r.now(),
                    }
                ),
                doc.merge(data),
            )
        )
        result = database.run_query(query)
        if result["inserted"] != 1 and not result["replaced"]:
            LOGGER.warning("error updating state table:\n%s\n%s", result, query)

        key = [address_binary, int(block_num)]
        data["address"] = key
        if result["inserted"] == 1:
            data["block_created"] = int(block_num)
            data["created_at"] = r.now()
        elif result["replaced"] == 1:
            LOGGER.warning(result)

        table_query = database.get_table("state_history")
        query = table_query.get(key).replace(data)
        result = database.run_query(query)
        if result["inserted"] != 1 and not result["replaced"]:
            LOGGER.warning("error updating state_history table:\n%s\n%s", result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
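The query above is the driver's insert-or-merge idiom: `replace` is handed a function, and `r.branch` checks whether the stored document is missing (`doc == None`) to decide between inserting the full record and merging the delta into the existing one. A minimal standalone sketch of the same idiom, assuming the classic `rethinkdb` driver module, a plain `r.connect()` connection, and a hypothetical `items` table:

import rethinkdb as r

conn = r.connect(host="localhost", port=28015, db="test")

def upsert(table, key, data):
    # Insert `data` when the document is missing; otherwise merge it in,
    # stamping created_at only on the first insert.
    return r.table(table).get(key).replace(
        lambda doc: r.branch(
            doc == None,  # noqa pylint: disable=singleton-comparison
            r.expr(data).merge({"id": key, "created_at": r.now()}),
            doc.merge(data),
        )
    ).run(conn)

result = upsert("items", "abc123", {"count": 1, "updated_at": r.now()})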
Example #2
def process(rec, database):
    """ Process inbound queue records
    """
    try:
        if "batch" not in rec or not rec["batch"]:
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            return

        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batcher.batch_to_list(batch=batch)
        status = ClientSync().send_batches_get_status(batch_list=batch_list)
        if status[0]["status"] == "COMMITTED":
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                query = (
                    database.get_table("metadata").get(
                        rec["address"]).replace(lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge({
                                "metadata": rec["metadata"],
                                "updated_at": r.now()
                            }),
                        )))
                result = database.run_query(query)
                if (not result["inserted"]
                        and not result["replaced"]) or result["errors"] > 0:
                    LOGGER.warning("error updating metadata record:\n%s\n%s",
                                   result, query)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("changelog").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
        else:
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("%s exception processing inbound record:\n%s",
                         type(err).__name__, rec)
        LOGGER.exception(err)
Example #3
def set_status(conn, row, status):
    return conn.db().table('tasks').get(row['id']).update({
        'status': status,
        'time': r.now().to_epoch_time()
    })
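`r.now()` only builds a ReQL term; it is evaluated against the server's clock when the query runs, and `.to_epoch_time()` converts the resulting TIME value into seconds since the Unix epoch. A quick sketch of the difference, assuming a raw driver connection `conn`:

import time
import rethinkdb as r

# Nothing is evaluated until .run(); the timestamp comes from the server.
server_epoch = r.now().to_epoch_time().run(conn)  # float, server clock
client_epoch = time.time()                        # float, client clock
print(server_epoch, client_epoch)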
    def test_filter_newer_than_now(self, conn):
        table = r.db('d').table('people')

        new = r.row["last_updated"] >= r.now()
        result = table.filter(new).run(conn)
        result = list(result)
        assertEqual(1, len(result))
    def test_filter_older_than_now(self, conn):
        table = r.db('d').table('people')

        old = r.row["last_updated"] <= r.now()
        result = table.filter(old).run(conn)
        result = list(result)
        assertEqual(2, len(result))
    def test_add_seconds(self, conn):
        """
        Find entries from 3 hours ago to now
        """
        table = r.db('d').table('people')

        three_hours = 10_800
        old = r.row["last_updated"].lt(r.now().add(three_hours))
        result = table.filter(old).run(conn)
        result = list(result)
        assertEqual(2, len(result))  # present and future
    def test_subtract_seconds(self, conn):
        """
        Find entries older than 3 hours
        """
        table = r.db('d').table('people')

        three_hours = 10_800
        old = r.row["last_updated"].lt(r.now().sub(three_hours))
        result = table.filter(old).run(conn)
        result = list(result)
        assertEqual("past", result[0]['id'])
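The last two tests rely on ReQL time arithmetic: `.add(n)` and `.sub(n)` on a TIME term shift it by `n` seconds. A standalone sketch of the same kind of window query, assuming a raw driver connection `conn` and the `people` table used above:

import rethinkdb as r

three_hours = 3 * 60 * 60  # ReQL time arithmetic works in seconds

# Documents whose last_updated falls within the past three hours.
recent = (
    r.db('d').table('people')
    .filter(r.row["last_updated"] >= r.now().sub(three_hours))
    .run(conn)
)
print(list(recent))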
Example #8
    async def release(self, email: Union[str, None]):
        """
        An admin can provide an empty email.

        Raises:
            ReleaseError
        """
        device = await db.table("devices").get(self.udid).run()
        if not device:
            raise ReleaseError("device not exist")
        if email and device.get('userId') != email:
            raise ReleaseError("device is not owned by you")

        if not device.get("using"):  # already released
            return

        # Update database
        await self.update({
            "using": False,
            "userId": None,
            "colding": True,
            "usingDuration": r.row["usingDuration"].default(0).add(r.now().sub(r.row["usingBeganAt"]))
        }) # yapf: disable

        # The device has to cool down first (the provider cleans it up and checks it)
        source = device2source(device)
        if not source:  # the device is offline
            return

        async def cold_device():
            from tornado.httpclient import HTTPError
            from tornado.httpclient import AsyncHTTPClient, HTTPRequest
            http_client = AsyncHTTPClient()
            secret = source.get('secret', '')
            if not source.get('url'):
                await self.update({"colding": False})
                return

            source_id = source.get("id")
            from .provider import ProviderHeartbeatWSHandler
            await ProviderHeartbeatWSHandler.release(source_id, device['udid'])

            try:
                url = source['url'] + "/cold?" + urllib.parse.urlencode(
                    dict(udid=device['udid'], secret=secret))
                request = HTTPRequest(url, method="POST", body='')
                await http_client.fetch(request)
            except HTTPError as e:
                logger.error("device [%s] release error: %s", self.udid, e)
                await self.update({"colding": False})

        IOLoop.current().add_callback(cold_device)
def _handle_delta(database, delta):
    """ Handle state changes
    """
    try:
        # Check for and resolve forks
        delta.block_num = int(delta.block_num)
        old_block = database.fetch("blocks", delta.block_num)
        if old_block is not None:
            if old_block["block_id"] != delta.block_id:
                drop_results = database.drop_fork(delta.block_num)
                if drop_results["deleted"] == 0:
                    LOGGER.warning(
                        "Failed to drop forked resources since block: %s",
                        str(delta.block_num),
                    )
            else:
                return

        # Parse changes and update database
        update = get_updater(database, delta.block_num)
        remove = get_remover(database, delta.block_num)
        for change in delta.state_changes:
            if addresser.family.is_family(change.address):
                if not change.value:
                    remove(change.address)
                else:
                    resources = data_to_dicts(change.address, change.value)
                    for resource in resources:
                        update(change.address, resource)

        # Add new block to database
        new_block = {
            "block_num": int(delta.block_num),
            "block_id": delta.block_id,
            "previous_block_id": delta.previous_block_id,
            "state_root_hash": delta.state_root_hash,
            "block_datetime": r.now(),
        }
        block_results = database.insert("blocks", new_block)
        if block_results["inserted"] == 0:
            LOGGER.warning(
                "Failed to insert block #%s: %s", str(delta.block_num), delta.block_id
            )

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("%s error handling delta:", type(err))
        LOGGER.exception(err)
def check_targets(self):
    conn = connect()
    for target in t_targets().run(conn):
        kwargs = {
            "attempts": 1,
            "timeout": period.total_seconds(),
            "result_format": "binary",
            "method": target["method"],
        }
        if kwargs["method"] == "POST":
            kwargs["data"] = target["body"]

        add_report_query = lambda msg: t_reports().insert(
            {
                "target_id": target["id"],
                "date": r.now(),
                "message": msg
            })
        http_query = r.http(target["url"], **kwargs).default(lambda x: x)
        add_report_query(http_query).run(conn)
    conn.close()
Example #11
def _remove_state(database, block_num, address):
    """ Update the state, state_history and metadata tables
    """
    try:
        # update state table
        now = r.now()
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value

        state = database.get_table("state")
        state_history = database.get_table("state_history")

        query = state.get(address_binary).delete(return_changes=True)
        result = database.run_query(query)
        if result["errors"] > 0:
            LOGGER.warning("error deleting from state table:\n%s\n%s", result, query)
        if result["deleted"] and "changes" in result and result["changes"]:
            query = state_history.insert(result["changes"][0]["old_val"])
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning(
                    "error inserting into state_history table:\n%s\n%s", result, query
                )

        if not related_id:
            query = database.get_table("metadata").get(address_binary).delete()
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error removing metadata record:\n%s\n%s", result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("remove_state %s error:", type(err))
        LOGGER.warning(err)
Example #12
    def test_get_now(self):
        now = r.now()
        assert type(now) == ast.Now
Example #13
def _update_state(database, block_num, address, resource):
    """ Update the state, state_history and metadata tables
    """
    try:
        # update state table
        now = r.now()
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value

        state = database.get_table("state")
        state_history = database.get_table("state_history")

        data = {
            "address": address_binary,
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            "block_created": int(block_num),
            "block_num": int(block_num),
            "updated_date": now,
            **resource,
        }
        delta = {"block_num": int(block_num), "updated_at": now, **resource}
        query = state.get(address_binary).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data),
                doc.merge(delta),
            ),
            return_changes=True,
        )
        result = database.run_query(query)
        if result["errors"] > 0:
            LOGGER.warning("error updating state table:\n%s\n%s", result,
                           query)
        if result["replaced"] and "changes" in result and result["changes"]:
            query = state_history.insert(result["changes"][0]["old_val"])
            # data["address"] = [address_binary, int(block_num)]
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating state_history table:\n%s\n%s",
                               result, query)

        if not related_id:
            data["address"] = address_binary
            del data["related_type"]
            del data["relationship_type"]
            del data["related_id"]
            query = (
                database.get_table("metadata").get(address_binary).replace(
                    lambda doc: r.branch(
                        # pylint: disable=singleton-comparison
                        (doc == None),  # noqa
                        r.expr(data),
                        doc.merge(delta),
                    )))
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating metadata record:\n%s\n%s",
                               result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
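Passing `return_changes=True` to the `replace` above makes the write result carry the old and new versions of each modified document, which is how the previous state ends up in `state_history`. A minimal sketch of that pattern on its own, assuming a raw driver connection `conn` and hypothetical `docs` / `docs_history` tables:

import rethinkdb as r

result = r.table("docs").get("abc123").replace(
    lambda doc: doc.merge({"updated_at": r.now()}),
    return_changes=True,
).run(conn)

if result["replaced"] and result["changes"]:
    # old_val holds the document as it looked before the replace.
    # (Assumes the document already existed; a missing one would
    # surface in result["errors"] instead.)
    old_version = result["changes"][0]["old_val"]
    r.table("docs_history").insert(old_version).run(conn)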
Example #14
def new_task(conn, event, task):
    return conn.db().table('tasks').insert({
        **task,
        'event': event,
        'status': 'ready',
        'time': r.now().to_epoch_time()
    })
Example #15
    def _get_payload(self, name, value, tags, id_):
        payload = super(RethinkBackend, self)._get_payload(name, value, tags)
        payload['timestamp'] = r.now()
        if id_:
            payload['id'] = id_
        return payload