Example 1
def match_new_io(base_io, new_io, match_map=None, copy=True):
    """
    Produce notifications for new I/O data arriving on top of existing
    (base) I/O data.

    Args:
        base_io:    The existing (base) I/O data being added to, possibly
                    referenced by the arriving (new) I/O data, and used to
                    complete the data being notified about. It may already
                    include the new I/O data — anything present in the new
                    data is treated as "new" regardless.
        new_io:     The arriving (new) I/O data being added to the existing
                    (base) data. May reference the existing I/O data, and
                    may already be present in it; everything in it is
                    treated as "new" regardless.
        match_map:  The map of subscription match functions: a dictionary
                    keyed by OO data object list names, with each value a
                    list of (subscription name, match function) tuples.

                    Each function must accept an object from the
                    corresponding object list in OO data, and return an
                    iterable producing
                    kcidb.monitor.output.NotificationMessage objects, or
                    None, which is equivalent to an empty iterable.

                    Defaults to a dictionary of matching functions from all
                    kcidb.subscriptions.* modules, each named
                    "match_<OBJ_NAME>", where "<OBJ_NAME>" is an object
                    list name without the "s" ending.
        copy:       True to copy the data before referencing/modifying it,
                    False to allow referencing and modifying it in place.
                    Optional, default is True.

    Returns:
        The list of notifications: kcidb.monitor.output.Notification
        objects.
    """
    assert LIGHT_ASSERTS or io.schema.is_valid(base_io)
    assert LIGHT_ASSERTS or io.schema.is_valid(new_io)

    # Fold the new data into the base; the new data is always copied here,
    # since we still need it separately below
    combined_io = io.merge(base_io, [new_io],
                           copy_target=copy,
                           copy_sources=True)
    # Switch both datasets to the object-oriented representation
    combined_oo = oo.from_io(combined_io, copy=False)
    arrived_oo = oo.from_io(new_io, copy=copy)
    # Drop objects with missing parents from the combined data
    complete_oo = oo.remove_orphans(combined_oo)
    # Delist everything except loaded/modified objects, keeping references
    visible_oo = oo.apply_mask(complete_oo, arrived_oo)
    # Produce the notifications
    return match_oo(visible_oo, match_map)
Example 2
def notify_main():
    """Execute the kcidb-notify command-line tool"""
    sys.excepthook = misc.log_and_print_excepthook
    parser = misc.ArgumentParser(
        description='kcidb-notify - Generate notifications for new I/O data')
    parser.add_argument('base',
                        metavar='BASE_FILE',
                        nargs='?',
                        help='Path to a JSON file with base I/O data')
    args = parser.parse_args()

    # Start with empty base data, then fold in the base file, if supplied
    base = io.new()
    if args.base is not None:
        try:
            with open(args.base, "r") as json_file:
                reports = [
                    io.schema.validate(datum)
                    for datum in misc.json_load_stream_fd(json_file.fileno())
                ]
                base = io.merge(base,
                                reports,
                                copy_target=False,
                                copy_sources=False)
        except (jq.JSONParseError,
                jsonschema.exceptions.ValidationError) as err:
            raise Exception("Failed reading base file") from err

    # Stream new data from stdin, emitting NUL-terminated rendered
    # notification messages, and growing the base with each piece
    try:
        for data in misc.json_load_stream_fd(sys.stdin.fileno()):
            data = io.schema.validate(data)
            for notification in subscriptions.match_new_io(base, data):
                sys.stdout.write(notification.render().as_string(
                    policy=email.policy.SMTPUTF8))
                sys.stdout.write("\x00")
                sys.stdout.flush()
            base = io.merge(base, [data],
                            copy_target=False, copy_sources=False)
    except (jq.JSONParseError, jsonschema.exceptions.ValidationError) as err:
        raise Exception("Failed reading new I/O data") from err
Example 3
def merge_main():
    """Execute the kcidb-merge command-line tool"""
    sys.excepthook = misc.log_and_print_excepthook
    parser = misc.OutputArgumentParser(
        description='kcidb-merge - Upgrade and merge I/O data sets')
    args = parser.parse_args()

    # Validate every dataset arriving on stdin before merging
    datasets = [
        io.schema.validate(datum)
        for datum in misc.json_load_stream_fd(sys.stdin.fileno())
    ]
    # Merge everything into a fresh empty dataset and output the result
    result = io.merge(io.new(), datasets,
                      copy_target=False, copy_sources=False)
    misc.json_dump(result, sys.stdout, indent=args.indent, seq=args.seq)
Example 4
def kcidb_load_queue(event, context):
    """
    Pull multiple KCIDB data messages from the LOAD_QUEUE_SUBSCRIBER queue
    and load them into the database, provided the database stayed
    unmodified for at least DATASET_LOAD_PERIOD.
    """
    # Bail out if the database was touched too recently
    current_time = datetime.datetime.now(datetime.timezone.utc)
    modified_time = DB_CLIENT.get_last_modified()
    LOGGER.debug("Now: %s, Last modified: %s", current_time, modified_time)
    if modified_time and current_time - modified_time < DATASET_LOAD_PERIOD:
        LOGGER.info("Database too fresh, exiting")
        return

    # Pull a batch of messages, bailing out if there are none
    messages = kcidb_load_queue_msgs(LOAD_QUEUE_SUBSCRIBER,
                                     LOAD_QUEUE_MSG_MAX,
                                     LOAD_QUEUE_OBJ_MAX,
                                     LOAD_QUEUE_TIMEOUT_SEC)
    if not messages:
        LOGGER.info("Pulled nothing, exiting")
        return
    LOGGER.info("Pulled %u messages", len(messages))

    # Build a merged dataset referencing the pulled pieces
    LOGGER.debug("Merging %u messages...", len(messages))
    merged = kcidb_io.merge(kcidb_io.new(),
                            (message[1] for message in messages),
                            copy_target=False,
                            copy_sources=False)
    LOGGER.info("Merged %u messages", len(messages))

    # Load the merged dataset into the database
    object_count = kcidb_io.get_obj_num(merged)
    LOGGER.debug("Loading %u objects...", object_count)
    DB_CLIENT.load(merged)
    LOGGER.info("Loaded %u objects", object_count)

    # Acknowledge only after the data is safely loaded
    for message in messages:
        LOAD_QUEUE_SUBSCRIBER.ack(message[0])
    LOGGER.debug("ACK'ed %u messages", len(messages))

    # Forward every loaded piece to the "loaded" topic
    for message in messages:
        LOADED_QUEUE_PUBLISHER.publish(message[1])
    LOGGER.debug("Forwarded %u messages", len(messages))