Example #1
def simulate_data_upload(env: Environment, replica: FunctionReplica):
    """
    Simulates a function replica uploading data to its storage node: a plain
    bandwidth delay if the storage node is co-located with the replica,
    otherwise a network flow along the route through the topology.
    """
    node = replica.node.ether_node
    func = replica
    started = env.now

    if 'data.skippy.io/sends-to-storage' not in func.pod.spec.labels:
        return

    # FIXME: storage
    size = parse_size_string(
        func.pod.spec.labels['data.skippy.io/sends-to-storage'])
    path = func.pod.spec.labels['data.skippy.io/sends-to-storage/path']

    storage_node_name = env.cluster.get_storage_nodes(path)[0]
    logger.debug('%.2f replica %s uploading data %s to %s', env.now, node,
                 path, storage_node_name)

    if storage_node_name == node.name:
        # FIXME this is essentially a disk read and not a network connection
        yield env.timeout(size / 1.25e+8)  # 1.25e+8 = 1 GBit/s
        return

    storage_node = env.cluster.get_node(storage_node_name)
    route = env.topology.route_by_node_name(node.name, storage_node.name)
    flow = SafeFlow(env, size, route)
    yield flow.start()
    for hop in route.hops:
        env.metrics.log_network(size, 'data_upload', hop)
    env.metrics.log_flow(size, env.now - started, route.source,
                         route.destination, 'data_upload')
Example #2
def from_str(memory, cpu):
    """
    Parses Kubernetes-style resource strings into a Resources value.

    :param memory: memory request, e.g. "64Mi"
    :param cpu: cpu request in millicores, e.g. "250m"
    :return: a Resources instance with cpu in millicores and memory in bytes
    """
    return Resources(int(cpu.rstrip('m')), parse_size_string(memory))
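All of the snippets on this page funnel human-readable size labels through ether.util.parse_size_string to obtain byte counts. A minimal usage sketch, assuming only that the function accepts the labels already used in these examples (such as '64Mi' and '56M') and returns an integer number of bytes; the exact unit factors are defined by the ether library.

from ether.util import parse_size_string

# Convert the size labels used in the examples above into byte counts.
for label in ('64Mi', '56M', '128M'):
    print(label, '->', parse_size_string(label), 'bytes')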
Example #3
    def register_images(self, env: Environment):
        containers: docker.ContainerRegistry = env.container_registry
        for image, size, arch in self.images:
            containers.put(ImageProperties(image, parse_size_string(size), arch=arch))

        for name, tag_dict in containers.images.items():
            for tag, images in tag_dict.items():
                logging.info('%s, %s, %s', name, tag, images)
Example #4
    def setup(self, env: Environment):
        containers: docker.ContainerRegistry = env.container_registry

        containers.put(
            ImageProperties('resnet50-inference-cpu',
                            parse_size_string('56M'),
                            arch='arm32'))
        containers.put(
            ImageProperties('resnet50-inference-cpu',
                            parse_size_string('56M'),
                            arch='x86'))
        containers.put(
            ImageProperties('resnet50-inference-cpu',
                            parse_size_string('56M'),
                            arch='aarch64'))

        containers.put(
            ImageProperties('resnet50-training-cpu',
                            parse_size_string('128M'),
                            arch='arm32'))
        containers.put(
            ImageProperties('resnet50-training-cpu',
                            parse_size_string('128M'),
                            arch='x86'))
        containers.put(
            ImageProperties('resnet50-training-cpu',
                            parse_size_string('128M'),
                            arch='aarch64'))

        # log all the images in the container registry
        for name, tag_dict in containers.images.items():
            for tag, images in tag_dict.items():
                logger.info('%s, %s, %s', name, tag, images)
Example #5
from ether.util import parse_size_string
from skippy.core.storage import DataItem

resnet_train_bucket = 'bucket_resnet50_train'
resnet_pre_bucket = 'bucket_resnet50_pre'
resnet_model_bucket = 'bucket_resnet50_model'
speech_bucket = 'bucket_speech'
mobilenet_bucket = 'mobilenet_bucket'

resnet_train_bucket_item = DataItem(resnet_train_bucket, 'raw_data',
                                    parse_size_string('58M'))
resnet_pre_bucket_item = DataItem(resnet_pre_bucket, 'raw_data',
                                  parse_size_string('14M'))
resnet_model_bucket_item = DataItem(resnet_model_bucket, 'model',
                                    parse_size_string('103M'))
speech_model_tflite_bucket_item = DataItem(speech_bucket, 'model_tflite',
                                           parse_size_string('48M'))
speech_model_gpu_bucket_item = DataItem(speech_bucket, 'model_gpu',
                                        parse_size_string('188M'))
mobilenet_model_tflite_bucket_item = DataItem(mobilenet_bucket, 'model_tflite',
                                              parse_size_string('4M'))
mobilenet_model_tpu_bucket_item = DataItem(mobilenet_bucket, 'model_tpu',
                                           parse_size_string('4M'))

bucket_names = [
    resnet_model_bucket, resnet_train_bucket, resnet_pre_bucket,
    mobilenet_bucket, speech_bucket
]

data_items = [
    resnet_train_bucket_item, resnet_pre_bucket_item, resnet_model_bucket_item,
    speech_model_tflite_bucket_item, speech_model_gpu_bucket_item,
    mobilenet_model_tflite_bucket_item, mobilenet_model_tpu_bucket_item
]
Example #6
def create_node(name: str, cpus: int, mem: str, arch: str,
                labels: Dict[str, str]) -> Node:
    capacity = Capacity(cpu_millis=cpus * 1000, memory=parse_size_string(mem))
    return Node(name, capacity=capacity, arch=arch, labels=labels)
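Finally, a short self-contained usage sketch of create_node. Node and Capacity belong to the surrounding project, so the dataclasses below are hypothetical stand-ins that carry only the fields the function actually sets; the node name and labels are likewise made up for illustration.

from dataclasses import dataclass, field
from typing import Dict

from ether.util import parse_size_string


@dataclass
class Capacity:
    # Hypothetical stand-in for the project's Capacity class.
    cpu_millis: int
    memory: int


@dataclass
class Node:
    # Hypothetical stand-in carrying only the fields create_node sets.
    name: str
    capacity: Capacity
    arch: str
    labels: Dict[str, str] = field(default_factory=dict)


def create_node(name: str, cpus: int, mem: str, arch: str,
                labels: Dict[str, str]) -> Node:
    capacity = Capacity(cpu_millis=cpus * 1000, memory=parse_size_string(mem))
    return Node(name, capacity=capacity, arch=arch, labels=labels)


node = create_node('node_0', cpus=4, mem='2048M', arch='arm32', labels={'role': 'edge'})
print(node)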