Example #1
import time

from lithops import FunctionExecutor


def benchmark(workers, memory, loopcount, matn):
    iterable = [(loopcount, matn) for i in range(workers)]

    fexec = FunctionExecutor(runtime_memory=memory)
    start_time = time.time()
    worker_futures = fexec.map(compute_flops, iterable)
    results = fexec.get_result()
    end_time = time.time()

    worker_stats = [f.stats for f in worker_futures]
    total_time = end_time - start_time

    print("Total time:", round(total_time, 3))
    est_flops = workers * 2 * loopcount * matn**3
    print('Estimated GFLOPS:', round(est_flops / 1e9 / total_time, 4))

    res = {
        'start_time': start_time,
        'total_time': total_time,
        'est_flops': est_flops,
        'worker_stats': worker_stats,
        'results': results
    }

    return res
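# compute_flops is not defined in this snippet. Below is a minimal sketch,
# assuming it multiplies two matn x matn NumPy matrices loopcount times,
# which is what the 2 * loopcount * matn**3 estimate above implies; the
# actual benchmark kernel may differ.
import numpy as np


def compute_flops(loopcount, matn):
    A = np.random.rand(matn, matn)
    B = np.random.rand(matn, matn)

    start = time.time()
    for _ in range(loopcount):
        c = np.sum(np.dot(A, B))   # one matn x matn matrix product per iteration
    elapsed = time.time() - start

    # 2 * n^3 floating-point operations per matrix product
    return 2 * loopcount * matn ** 3 / elapsed / 1e9  # GFLOPS of this worker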
Example #2
import time
import hashlib

from lithops import FunctionExecutor


def read(backend, bucket_name, number, keylist_raw, read_times):

    blocksize = 1024 * 1024

    def read_object(key_name, storage):
        m = hashlib.md5()
        bytes_read = 0
        print(key_name)

        start_time = time.time()
        for unused in range(read_times):
            fileobj = storage.get_object(bucket_name, key_name, stream=True)
            try:
                buf = fileobj.read(blocksize)
                while len(buf) > 0:
                    bytes_read += len(buf)
                    # if bytes_read % (blocksize * 10) == 0:
                    #     mb_rate = bytes_read / (time.time() - start_time) / 1e6
                    #     print('POS: ' + str(bytes_read) + ' MB Rate: ' + str(mb_rate))
                    m.update(buf)
                    buf = fileobj.read(blocksize)
            except Exception as e:
                print(e)
        end_time = time.time()
        mb_rate = bytes_read / (end_time - start_time) / 1e6
        print('MB Rate: ' + str(mb_rate))

        return {
            'start_time': start_time,
            'end_time': end_time,
            'mb_rate': mb_rate,
            'bytes_read': bytes_read
        }

    if number == 0:
        keynames = keylist_raw
    else:
        keynames = [keylist_raw[i % len(keylist_raw)] for i in range(number)]

    fexec = FunctionExecutor(backend=backend, runtime_memory=1024)
    start_time = time.time()
    worker_futures = fexec.map(read_object, keynames)
    results = fexec.get_result()
    end_time = time.time()

    total_time = end_time - start_time
    worker_stats = [f.stats for f in worker_futures]

    res = {
        'start_time': start_time,
        'total_time': total_time,
        'worker_stats': worker_stats,
        'results': results
    }

    return res
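# A hypothetical invocation of the read benchmark; the backend name, bucket
# name and key list are placeholders, and the listed keys are assumed to
# already exist in the bucket. number=0 reads every key exactly once, while a
# positive number cycles through the key list until that many reads are issued.
if __name__ == '__main__':
    keynames = ['benchmark_0001', 'benchmark_0002']  # placeholder object keys
    res = read(backend='aws_lambda', bucket_name='my-benchmark-bucket',
               number=0, keylist_raw=keynames, read_times=1)
    print(res['total_time'])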
Example #3
class FuturesList(list):
    """A list of futures that also supports chaining further map/map_reduce calls."""

    def _create_executor(self):
        if not self.executor:
            from lithops import FunctionExecutor
            self.executor = FunctionExecutor(config=self.config)

    def _extend_futures(self, fs):
        # Only the newest stage should produce output; every future ever created
        # is kept in alt_list so wait()/get_result() can still track all of them.
        for fut in self:
            fut._produce_output = False
        if not hasattr(self, 'alt_list'):
            self.alt_list = []
            self.alt_list.extend(self)
        self.alt_list.extend(fs)
        self.clear()
        self.extend(fs)

    def map(self, map_function, sync=False, **kwargs):
        self._create_executor()
        if sync:
            self.executor.wait(self)
        fs = self.executor.map(map_function, self, **kwargs)
        self._extend_futures(fs)
        return self

    def map_reduce(self, map_function, reduce_function, sync=False, **kwargs):
        self._create_executor()
        if sync:
            self.executor.wait(self)
        fs = self.executor.map_reduce(map_function, self, reduce_function,
                                      **kwargs)
        self._extend_futures(fs)
        return self

    def wait(self, **kwargs):
        self._create_executor()
        fs_tt = self.alt_list if hasattr(self, 'alt_list') else self
        return self.executor.wait(fs_tt, **kwargs)

    def get_result(self, **kwargs):
        self._create_executor()
        fs_tt = self.alt_list if hasattr(self, 'alt_list') else self
        return self.executor.get_result(fs_tt, **kwargs)

    def __reduce__(self):
        # Drop the executor reference before pickling so the futures list
        # itself can be serialized (e.g. when passed as input to another map)
        self.executor = None
        return super().__reduce__()
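# A minimal usage sketch (function names and data are illustrative): the
# FuturesList returned by FunctionExecutor.map() can itself be mapped again,
# so each stage's results become the next stage's input, and get_result()
# returns only the output of the last stage.
from lithops import FunctionExecutor


def double(x):
    return x * 2


def add_one(x):
    return x + 1


fexec = FunctionExecutor()
futures = fexec.map(double, [1, 2, 3])   # returns a FuturesList
futures.map(add_one)                     # chains a second stage on the results
print(futures.get_result())              # -> [3, 5, 7]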
Example #4
import time
import uuid

from lithops import FunctionExecutor


def write(backend, bucket_name, mb_per_file, number, key_prefix):
    def write_object(key_name, storage):
        bytes_n = mb_per_file * 1024**2
        d = RandomDataGenerator(bytes_n)
        print(key_name)
        start_time = time.time()
        storage.put_object(bucket_name, key_name, d)
        end_time = time.time()

        mb_rate = bytes_n / (end_time - start_time) / 1e6
        print('MB Rate: ' + str(mb_rate))

        return {
            'start_time': start_time,
            'end_time': end_time,
            'mb_rate': mb_rate
        }

    # create list of random keys
    keynames = [
        key_prefix + str(uuid.uuid4().hex.upper()) for unused in range(number)
    ]

    fexec = FunctionExecutor(backend=backend, runtime_memory=1024)
    start_time = time.time()
    worker_futures = fexec.map(write_object, keynames)
    results = fexec.get_result()
    end_time = time.time()

    worker_stats = [f.stats for f in worker_futures]
    total_time = end_time - start_time

    res = {
        'start_time': start_time,
        'total_time': total_time,
        'worker_stats': worker_stats,
        'bucket_name': bucket_name,
        'keynames': keynames,
        'results': results
    }

    return res
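# RandomDataGenerator is not defined in this snippet. A minimal sketch,
# assuming a file-like object that serves the requested number of
# pseudo-random bytes through read(), which is enough for
# storage.put_object() to stream the payload; the helper used by the real
# benchmark may differ.
import os


class RandomDataGenerator:
    def __init__(self, size):
        self.size = size        # total number of bytes to produce
        self.position = 0       # bytes served so far

    def read(self, chunk_size=-1):
        remaining = self.size - self.position
        if remaining <= 0:
            return b''          # signal end-of-stream
        if chunk_size < 0 or chunk_size > remaining:
            chunk_size = remaining
        self.position += chunk_size
        return os.urandom(chunk_size)   # random payload of the requested length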
Example #5
from lithops import FunctionExecutor, Storage

BUCKET_NAME = 'lithops-sample-data'  # change-me


def my_function(obj_id, storage):
    print(obj_id)

    data = storage.get_cloudobject(obj_id)

    return data.decode()


if __name__ == '__main__':

    obj_key = 'cloudobject1.txt'
    storage = Storage()
    obj_id = storage.put_cloudobject('Hello World!', BUCKET_NAME, obj_key)
    print(obj_id)

    fexec = FunctionExecutor()
    fexec.call_async(my_function, obj_id)
    print(fexec.get_result())

    obj_key = 'cloudobject2.txt'
    storage = fexec.storage
    obj_id = storage.put_cloudobject('Hello World!', BUCKET_NAME, obj_key)
    print(obj_id)

    fexec.call_async(my_function, obj_id)
    print(fexec.get_result())