Example #1
    def dump_lattice(self, value, typ=None, causal_dependencies=None):
        # Use None rather than a mutable default argument for the dict.
        if causal_dependencies is None:
            causal_dependencies = {}

        # If no lattice type is given, infer one from the value's Python type.
        if not typ:
            if isinstance(value, set):
                return self.dump_lattice(value, SetLattice)
            elif isinstance(value, dict):
                return self.dump_lattice(value, MapLattice)
            elif isinstance(value, list):
                return self.dump_lattice(value, OrderedSetLattice)
            else:
                return self.dump_lattice(value, LWWPairLattice)

        if typ == SetLattice:
            # Serialize each element individually, then wrap the results.
            result = set()
            for v in value:
                result.add(self.dump(v))

            result = SetLattice(result)
        elif typ == MapLattice:
            # Recursively wrap each value in its appropriate lattice type.
            result = {}
            for key in value:
                result[key] = self.dump_lattice(value[key])

            result = MapLattice(result)
        elif typ == OrderedSetLattice:
            result = list()
            for v in value:
                result.append(self.dump(v))

            result = OrderedSetLattice(ListBasedOrderedSet(result))
        elif typ == LWWPairLattice:
            # Pair the serialized value with a freshly generated timestamp.
            result = LWWPairLattice(generate_timestamp(0), self.dump(value))
        elif typ == SingleKeyCausalLattice:
            # We assume that we will use the default vector clock for causal
            # metadata.
            data = SetLattice({self.dump(value)})
            result = SingleKeyCausalLattice(DEFAULT_VC, data)
        elif typ == MultiKeyCausalLattice:
            # We assume that we will use the default vector clock for causal
            # metadata.
            data = SetLattice({self.dump(value)})
            result = MultiKeyCausalLattice(DEFAULT_VC,
                                           MapLattice(causal_dependencies),
                                           data)
        else:
            raise ValueError(f'Unexpected lattice type: {typ}')

        return result
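
For context, here is a brief usage sketch of the method above. It assumes the enclosing class is the serializer that defines both dump_lattice and dump; the Serializer class name and the instance name are assumptions for illustration, not confirmed by the snippet:

# Hedged usage sketch: `serializer` is assumed to be an instance of the
# class that defines dump_lattice; the first four lattice types below are
# inferred from the Python type of the value.
serializer = Serializer()

set_lat = serializer.dump_lattice({1, 2, 3})    # -> SetLattice
map_lat = serializer.dump_lattice({'a': 1})     # -> MapLattice
ord_lat = serializer.dump_lattice([1, 2, 3])    # -> OrderedSetLattice
lww_lat = serializer.dump_lattice('hello')      # -> LWWPairLattice

# The causal lattice types are never inferred; they must be requested
# explicitly, optionally with a map of causal dependencies.
causal = serializer.dump_lattice('hello', MultiKeyCausalLattice,
                                 causal_dependencies={})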
Example #2
def _exec_dag_function_causal(pusher_cache, kvs, triggers, function, schedule,
                              user_lib):
    # Causal-mode execution handles one schedule and one trigger set at a
    # time, so unwrap the single-element lists.
    schedule = schedule[0]
    triggers = triggers[0]

    fname = schedule.target_function
    fargs = list(schedule.arguments[fname].values)

    key_version_locations = {}
    dependencies = {}

    for trigger in triggers:
        fargs += list(trigger.arguments.values)

        # Combine the locations of upstream cached key versions from all
        # triggers.
        for addr in trigger.version_locations:
            if addr in key_version_locations:
                key_version_locations[addr].extend(
                    trigger.version_locations[addr].key_versions)
            else:
                key_version_locations[addr] = list(
                    trigger.version_locations[addr].key_versions)

        # Combine the dependency sets from all triggers.
        for dependency in trigger.dependencies:
            vc = VectorClock(dict(dependency.vector_clock), True)
            key = dependency.key

            if key in dependencies:
                dependencies[key].merge(vc)
            else:
                dependencies[key] = vc

    fargs = [serializer.load(arg) for arg in fargs]

    result = _exec_func_causal(kvs, function, fargs, user_lib, schedule,
                               key_version_locations, dependencies)

    this_ref = None
    for ref in schedule.dag.functions:
        if ref.name == fname:
            this_ref = ref  # There must be a match.

    success = True
    if this_ref.type == MULTIEXEC:
        if serializer.dump(result) in this_ref.invalid_results:
            return False, False

    # Create a new trigger with the schedule ID and results of this execution.
    new_trigger = _construct_trigger(schedule.id, fname, result)

    # Serialize the key version location information into this new trigger.
    for addr in key_version_locations:
        new_trigger.version_locations[addr].key_versions.extend(
            key_version_locations[addr])

    # Serialize the set of dependency versions for causal metadata.
    for key in dependencies:
        dep = new_trigger.dependencies.add()
        dep.key = key
        dependencies[key].serialize(dep.vector_clock)

    # Forward the new trigger to each downstream function; if there are no
    # downstream connections, this function is a sink of the DAG.
    is_sink = True
    for conn in schedule.dag.connections:
        if conn.source == fname:
            is_sink = False
            new_trigger.target_function = conn.sink

            dest_ip = schedule.locations[conn.sink]
            sckt = pusher_cache.get(sutils.get_dag_trigger_address(dest_ip))
            sckt.send(new_trigger.SerializeToString())

    if is_sink:
        logging.info('DAG %s (ID %s) completed in causal mode; result at %s.' %
                     (schedule.dag.name, schedule.id, schedule.output_key))

        # Compute the output's vector clock: if the output key is already a
        # dependency, bump this client's counter in its clock; otherwise
        # start a fresh clock at 1 for this client.
        vector_clock = {}
        okey = schedule.output_key
        if okey in dependencies:
            prev_count = 0
            if schedule.client_id in dependencies[okey]:
                prev_count = dependencies[okey][schedule.client_id]

            dependencies[okey].update(schedule.client_id, prev_count + 1)
            dependencies[okey].serialize(vector_clock)
            del dependencies[okey]
        else:
            vector_clock = {schedule.client_id: 1}

        # Serialize result into a MultiKeyCausalLattice.
        vector_clock = VectorClock(vector_clock, True)
        result = serializer.dump(result)
        dependencies = MapLattice(dependencies)
        lattice = MultiKeyCausalLattice(vector_clock, dependencies,
                                        SetLattice({result}))

        succeed = kvs.causal_put(schedule.output_key, lattice,
                                 schedule.client_id)
        # Retry the causal put until it succeeds.
        while not succeed:
            succeed = kvs.causal_put(schedule.output_key, lattice,
                                     schedule.client_id)

        # Issue requests to all upstream caches involved in this request,
        # asking them to garbage collect the key versions pinned for its
        # execution context.
        for cache_addr in key_version_locations:
            gc_address = utils.get_cache_gc_address(cache_addr)
            sckt = pusher_cache.get(gc_address)
            sckt.send_string(schedule.client_id)

    return is_sink, [success]
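
The dependency-combining loop above leans on VectorClock.merge. The sketch below illustrates the assumed merge semantics (a pointwise maximum over per-client counters) with MiniVectorClock, a hypothetical stand-in for the real VectorClock class from the Anna client library:

# MiniVectorClock is illustrative only; the real VectorClock's merge is
# assumed to take the pointwise maximum of per-client counters.
class MiniVectorClock:
    def __init__(self, clock):
        self.clock = dict(clock)

    def merge(self, other):
        # Keep the larger counter for every client seen in either clock.
        for client, count in other.clock.items():
            self.clock[client] = max(self.clock.get(client, 0), count)

# Two triggers both report a dependency on key 'k'; merging them yields the
# elementwise maximum, mirroring the loop over trigger.dependencies above.
dependencies = {}
for key, clock in [('k', {'c1': 1}), ('k', {'c1': 2, 'c2': 1})]:
    vc = MiniVectorClock(clock)
    if key in dependencies:
        dependencies[key].merge(vc)
    else:
        dependencies[key] = vc

print(dependencies['k'].clock)  # {'c1': 2, 'c2': 1}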
Example #3
    def _deserialize(self, tup):
        if tup.lattice_type == LWW:
            # Deserialize last-writer-wins lattices
            val = LWWValue()
            val.ParseFromString(tup.payload)

            return LWWPairLattice(val.timestamp, val.value)
        elif tup.lattice_type == SET:
            # Deserialize unordered-set lattices
            s = SetValue()
            s.ParseFromString(tup.payload)

            result = set()
            for k in s.values:
                result.add(k)

            return SetLattice(result)
        elif tup.lattice_type == ORDERED_SET:
            # Deserialize ordered-set lattices
            res = ListBasedOrderedSet()
            val = SetValue()
            val.ParseFromString(tup.payload)
            for v in val.values:
                res.insert(v)

            return OrderedSetLattice(res)

        elif tup.lattice_type == SINGLE_CAUSAL:
            # Deserialize single-key causal lattices
            val = SingleKeyCausalValue()
            val.ParseFromString(tup.payload)

            # Deserialize the vector_clock stored in the Protobuf into a
            # MapLattice, where each value is a MaxIntLattice of the VC
            # counter.
            vc = VectorClock(val.vector_clock, True)

            # Create a SetLattice with the value(s) stored by this lattice.
            values = set()
            for v in val.values:
                values.add(v)

            return SingleKeyCausalLattice(vc, SetLattice(values))

        elif tup.lattice_type == MULTI_CAUSAL:
            # Deserialize multi-key causal lattices
            val = MultiKeyCausalValue()
            val.ParseFromString(tup.payload)

            # Deserialize the vector_clock stored in the Protobuf into a
            # MapLattice, where each value is a MaxIntLattice of the VC
            # counter.
            vc = VectorClock(val.vector_clock, True)

            # Deserialize the set of dependencies of this key into a MapLattice
            # where the keys are names of other KVS keys and the values are
            # MapLattices that have the vector clocks for those keys.
            dep_map = {}
            for kv in val.dependencies:
                key = kv.key
                dep_map[key] = VectorClock(kv.vector_clock, True)

            # Create a SetLattice with the value(s) stored by this lattice.
            values = set()
            for v in val.values:
                values.add(v)

            dependencies = MapLattice(dep_map)
            value = SetLattice(values)

            return MultiKeyCausalLattice(vc, dependencies, value)
        else:
            raise ValueError('Unsupported lattice type cannot be '
                             'deserialized: ' + str(tup.lattice_type))
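
To round this out, a hedged sketch of feeding the LWW branch above. FakeTuple is a hypothetical stand-in for the tuple the KVS returns (the real object is a protobuf message, but _deserialize only reads these two fields), and `serializer` is again assumed to be an instance of the class defining _deserialize:

from collections import namedtuple

# Hypothetical stand-in for the KVS response tuple.
FakeTuple = namedtuple('FakeTuple', ['lattice_type', 'payload'])

# Build an LWWValue payload using the same fields the LWW branch reads.
lww = LWWValue()
lww.timestamp = 42
lww.value = b'hello'

tup = FakeTuple(lattice_type=LWW, payload=lww.SerializeToString())
lattice = serializer._deserialize(tup)  # -> LWWPairLattice(42, b'hello')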