Example #1
def rethinkdb_date_greater(greater_date, comparison_date, relaxed_interval):
    """Build a ReQL expression that is true when greater_date falls on or after
    comparison_date. Dates are [year, month, day] arrays; an 'XX' month or day
    is treated as unknown and resolves to relaxed_interval."""
    return r.branch(
        # Compare years first.
        r.lt(greater_date[0], comparison_date[0]), False,
        r.eq(greater_date[0], comparison_date[0]),
        r.branch(
            # Years equal: compare months, treating 'XX' as unknown.
            r.eq(greater_date[1], 'XX').or_(r.eq(comparison_date[1], 'XX')),
            relaxed_interval,
            r.lt(greater_date[1], comparison_date[1]), False,
            r.eq(greater_date[1], comparison_date[1]),
            r.branch(
                # Months equal: compare days, treating 'XX' as unknown.
                r.eq(greater_date[2], 'XX').or_(r.eq(comparison_date[2], 'XX')),
                relaxed_interval,
                r.lt(greater_date[2], comparison_date[2]), False,
                True),
            True),
        True)
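A minimal usage sketch (not from the original source): assuming documents keep their date as a ['YYYY', 'MM', 'DD'] array in a hypothetical 'date' field, with 'XX' marking an unknown month or day, the helper above can drive a filter. The table name and the import style (which varies by driver version) are illustrative.

import rethinkdb as r

def viruses_after(cutoff, relaxed_interval=True):
    # Keep rows whose date compares at or after the cutoff; 'XX' components
    # resolve to relaxed_interval rather than being compared literally.
    return r.table('viruses').filter(
        lambda doc: rethinkdb_date_greater(doc['date'], r.expr(cutoff),
                                           relaxed_interval))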
Example #2
def rethinkdb_updater_overwrite(id, old_doc, new_doc):
    """Merge old_doc and new_doc key by key. New values win by default, but
    'sequences' is unioned, 'number_sequences' is recomputed from that union,
    and the timestamp/inclusion-date fields keep their old values."""
    return new_doc.keys().set_union(old_doc.keys()).map(lambda key: r.branch(
        # Key only in the old document: keep the old value.
        old_doc.keys().contains(key).and_(new_doc.keys().contains(key).not_()),
        [key, old_doc[key]],
        # Key only in the new document: take the new value.
        new_doc.keys().contains(key).and_(old_doc.keys().contains(key).not_()),
        [key, new_doc[key]],
        # Key in both documents: apply the per-field rules.
        r.branch(
            key.eq('sequences'),
            [key, old_doc['sequences'].set_union(new_doc['sequences'])],
            key.eq('number_sequences'),
            [key, old_doc['sequences'].set_union(new_doc['sequences']).count()],
            key.eq('timestamp').or_(key.eq('virus_inclusion_date')).or_(
                key.eq('sequence_inclusion_date')),
            [key, old_doc[key]],
            [key, new_doc[key]]))).coerce_to('object')
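A usage sketch (illustrative names, not from the original source): the updater is meant to run inside replace(), where the driver supplies the stored row as old_doc. It assumes the row already exists, since old_doc.keys() would fail on a missing document.

import rethinkdb as r

def overwrite_virus(conn, doc_id, new_doc):
    # Replace the stored document with the field-by-field merge defined above.
    return r.table('viruses').get(doc_id).replace(
        lambda old_doc: rethinkdb_updater_overwrite(doc_id, old_doc,
                                                    r.expr(new_doc))).run(conn)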
Example #3
def _update_legacy(database, block_num, address, resource, data_type):
    """ Update the legacy sync tables (expansion by object type name)
    """
    try:
        data = {
            "id": address,
            "start_block_num": int(block_num),
            "end_block_num": int(sys.maxsize),
            **resource,
        }

        table_query = database.get_table(TABLE_NAMES[data_type])
        query = table_query.get(address).replace(lambda doc: r.branch(
            # pylint: disable=singleton-comparison
            (doc == None),  # noqa
            r.expr(data),
            doc.merge(resource),
        ))
        result = database.run_query(query)
        if result["errors"] > 0:
            LOGGER.warning("error updating legacy state table:\n%s\n%s",
                           result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("_update_legacy %s error:", type(err))
        LOGGER.warning(err)
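Example #3 and several of the later examples rely on the same upsert idiom: replace() combined with r.branch(doc == None, ...) inserts the document when the key is missing and merges into it otherwise. A condensed sketch, with placeholder table, key, and data:

import rethinkdb as r

def upsert(conn, table, key, data):
    # Insert data when no row exists for key; otherwise merge it into the row.
    return r.table(table).get(key).replace(
        lambda doc: r.branch(
            doc == None,  # noqa -- ReQL null check, not a Python comparison
            r.expr(data),
            doc.merge(data))).run(conn)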
Example #4
def _update_legacy(database, block_num, address, resource):
    """ Update the legacy sync table that corresponds to the address type
    """
    try:
        data_type = addresser.get_address_type(address)
        if data_type in TABLE_NAMES:
            data = {
                "id": address,
                "start_block_num": int(block_num),
                "end_block_num": int(sys.maxsize),
                **resource,
            }

            table_query = database.get_table(TABLE_NAMES[data_type])
            query = table_query.get(address).replace(lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data),
                doc.merge(resource),
            ))
            result = database.run_query(query)
            if (not result["inserted"]
                    and not result["replaced"]) or result["errors"] > 0:
                LOGGER.warning("error updating legacy state table:\n%s\n%s",
                               result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("_update_legacy %s error:", type(err))
        LOGGER.warning(err)
Example #5
def _update_state(database, block_num, address, resource):
    """ Update the state and state_history tables
    """
    try:
        # update state table
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        key = address_binary
        keys = {"address": address_binary}
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value
        data = {
            "block_updated": int(block_num),
            "updated_at": r.now(),
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            **resource,
        }
        table_query = database.get_table("state")
        query = table_query.get(key).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data).merge(
                    {
                        "address": key,
                        "block_created": int(block_num),
                        "created_at": r.now(),
                    }
                ),
                doc.merge(data),
            )
        )
        result = database.run_query(query)
        if not result["inserted"] == 1 and not result["replaced"]:
            LOGGER.warning("error updating state table:\n%s\n%s", result, query)

        key = [address_binary, int(block_num)]
        data["address"] = key
        if result["inserted"] == 1:
            data["block_created"] = int(block_num)
            data["created_at"] = r.now()
        elif result["replaced"] == 1:
            LOGGER.warning(result)

        table_query = database.get_table("state_history")
        query = table_query.get(key).replace(data)
        result = database.run_query(query)
        if not result["inserted"] == 1 and not result["replaced"]:
            LOGGER.warning("error updating state_history table:\n%s\n%s", result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
Example #6
def process(rec, database):
    """ Process inbound queue records
    """
    try:
        if "batch" not in rec or not rec["batch"]:
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            return

        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batcher.batch_to_list(batch=batch)
        status = ClientSync().send_batches_get_status(batch_list=batch_list)
        if status[0]["status"] == "COMMITTED":
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                query = (
                    database.get_table("metadata").get(
                        rec["address"]).replace(lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge({
                                "metadata": rec["metadata"],
                                "updated_at": r.now()
                            }),
                        )))
                result = database.run_query(query)
                if (not result["inserted"]
                        and not result["replaced"]) or result["errors"] > 0:
                    LOGGER.warning("error updating metadata record:\n%s\n%s",
                                   result, query)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("changelog").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
        else:
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("%s exception processing inbound record:\n%s",
                         type(err).__name__, rec)
        LOGGER.exception(err)
Example #7
def test_branch_1(self, conn):
    expected = [{
        'id': 'one',
        'value': 5,
        'over_20': False
    }, {
        'id': 'three',
        'value': 22,
        'over_20': True
    }, {
        'id': 'two',
        'value': 12,
        'over_20': False
    }, {
        'id': 'four',
        'value': 31,
        'over_20': True
    }]
    result = r.db('x').table('t').map(
        r.branch(r.row['value'] > 20, r.row.merge({'over_20': True}),
                 r.row.merge({'over_20': False}))).run(conn)
    result = list(result)
    assertEqUnordered(expected, result)
Example #8
def _update_state(database, block_num, address, resource):
    """ Update the state, state_history and metadata tables
    """
    try:
        # update state table
        now = r.now()
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value

        state = database.get_table("state")
        state_history = database.get_table("state_history")

        data = {
            "address": address_binary,
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            "block_created": int(block_num),
            "block_num": int(block_num),
            "updated_date": now,
            **resource,
        }
        delta = {"block_num": int(block_num), "updated_at": now, **resource}
        query = state.get(address_binary).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data),
                doc.merge(delta),
            ),
            return_changes=True,
        )
        result = database.run_query(query)
        if result["errors"] > 0:
            LOGGER.warning("error updating state table:\n%s\n%s", result,
                           query)
        if result["replaced"] and "changes" in result and result["changes"]:
            query = state_history.insert(result["changes"][0]["old_val"])
            # data["address"] = [address_binary, int(block_num)]
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating state_history table:\n%s\n%s",
                               result, query)

        if not related_id:
            data["address"] = address_binary
            del data["related_type"]
            del data["relationship_type"]
            del data["related_id"]
            query = (
                database.get_table("metadata").get(address_binary).replace(
                    lambda doc: r.branch(
                        # pylint: disable=singleton-comparison
                        (doc == None),  # noqa
                        r.expr(data),
                        doc.merge(delta),
                    )))
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating metadata record:\n%s\n%s",
                               result, query)

    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
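Note the difference from the earlier _update_state: instead of writing a history row keyed by [address, block_num] on every update, this version passes return_changes=True to replace() and archives the previous document (changes[0]["old_val"]) into state_history only when an existing row was replaced.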