Example #1
 def load_from_store(self):
     if not self._store:
         raise AttributeError("No datastore defined!")
     if not self._store.has_blob('all_keys'):
         return False
     data = Serializer.deserialize(self._store.get_blob('all_keys'))
     self.load_from_data(data)
     return True
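The load_from_store above (and save_to_store in Example #5) only assume a blob store with has_blob/get_blob/store_blob and a Serializer exposing serialize/deserialize. Those implementations are not shown here; a minimal stand-in sketch, using pickle as an assumed backend:

import pickle


class Serializer(object):
    # Stand-in for the Serializer used in these snippets; pickle is an
    # assumption, any bytes round-trip would do.
    @staticmethod
    def serialize(data):
        return pickle.dumps(data)

    @staticmethod
    def deserialize(blob):
        return pickle.loads(blob)


class DictBlobStore(object):
    # In-memory store exposing the interface the snippets call.
    def __init__(self):
        self._blobs = {}

    def has_blob(self, key):
        return key in self._blobs

    def get_blob(self, key):
        return self._blobs[key]

    def store_blob(self, data, key):
        # (data, key) argument order matches store_blob(data, 'all_keys')
        # in Example #5.
        self._blobs[key] = data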
Example #2
    def load_map_file(self):
        # Reset the in-memory indexes before reloading from disk.
        self._map = {}
        self._reverse_map = defaultdict(list)
        self._blob_sizes = defaultdict(int)
        self._current_blob_key = None

        map_filename = self._map_filename()
        if os.path.exists(map_filename):
            with open(map_filename, "rb") as map_file:
                self._map = Serializer.deserialize(map_file.read())

            # Rebuild the reverse map (blob_key -> store keys) and the
            # per-blob byte counts from the persisted map.
            for store_key, (blob_key, start, stop) in self._map.items():
                if store_key not in self._reverse_map[blob_key]:
                    self._reverse_map[blob_key].append(store_key)
                self._blob_sizes[blob_key] += stop - start

            # Keep appending new writes to the smallest existing blob.
            smallest_size = None
            for blob_key, size in self._blob_sizes.items():
                if smallest_size is None or size < smallest_size:
                    self._current_blob_key = blob_key
                    smallest_size = size
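For reference, the persisted map deserializes to a dict of store_key -> (blob_key, start, stop) tuples. A self-contained sketch with made-up keys, showing how the reverse map and per-blob sizes are derived:

from collections import defaultdict

# Hypothetical map contents: store_key -> (blob_key, start, stop).
_map = {
    'user:1': ('blob-a', 0, 120),
    'user:2': ('blob-a', 120, 340),
    'user:3': ('blob-b', 0, 80),
}

reverse_map = defaultdict(list)   # blob_key -> [store_key, ...]
blob_sizes = defaultdict(int)     # blob_key -> total bytes referenced
for store_key, (blob_key, start, stop) in _map.items():
    reverse_map[blob_key].append(store_key)
    blob_sizes[blob_key] += stop - start

print(dict(blob_sizes))  # {'blob-a': 340, 'blob-b': 80}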
Example #3
 def __init__(self):
     # Batch objects 1000 at a time, pickling each batch.
     super(IaBatchedSerializer, self).__init__(PickleSerializer(), 1000)
Example #4
import json
import sys
import os

from tabulate import tabulate

from config import RAW_DATA, LOOPS, SERIALIZED_DATA_DIR, DESERIALIZED_DATA_DIR
from serializers import JsonSerializer, PickleSerializer, XmlSerializer, ProtobufSerializer, AvroSerializer, \
    YamlSerializer, MsgpackSerializer
from utils import execute_and_profile

_serializers = [
    JsonSerializer('json', is_binary=False),
    PickleSerializer('pickle', is_binary=True),
    XmlSerializer('xml', is_binary=False),
    ProtobufSerializer('protobuf', is_binary=True),
    AvroSerializer('avro', is_binary=True),
    YamlSerializer('yaml', is_binary=False),
    MsgpackSerializer('msgpack', is_binary=True)
]


def _print_results(enc_table, dec_table):
    # Sort each results table by elapsed seconds, then prepend a header row.
    enc_table.sort(key=lambda x: x[1])
    enc_table.insert(0, ['Package', 'Seconds', 'Size'])

    dec_table.sort(key=lambda x: x[1])
    dec_table.insert(0, ['Package', 'Seconds'])

    print("\nEncoding Test (%d loops)" % LOOPS)
    print(tabulate(enc_table, headers="firstrow"))

    print("\nDecoding Test (%d loops)" % LOOPS)
    print(tabulate(dec_table, headers="firstrow"))
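A usage sketch for _print_results; the rows below are invented placeholders, since the real script fills both tables via execute_and_profile:

# Hypothetical [package, seconds, size] / [package, seconds] rows.
enc_table = [
    ['pickle', 0.91, 2214],
    ['json', 1.53, 3490],
]
dec_table = [
    ['json', 1.12],
    ['pickle', 0.48],
]
_print_results(enc_table, dec_table)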
Example #5
 def save_to_store(self):
     if not self._store:
         raise AttributeError("No datastore defined!")
     saved_data = self.save_to_data(in_place=True)
     data = Serializer.serialize(saved_data)
     self._store.store_blob(data, 'all_keys')
Example #6
 def write_map_file(self):
     # Persist the current store_key -> (blob_key, start, stop) map to disk.
     map_filename = self._map_filename()
     with open(map_filename, "wb") as map_file:
         map_file.write(Serializer.serialize(self._map))
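write_map_file is the inverse of load_map_file in Example #2; both call a _map_filename helper that is not shown. A hypothetical version, assuming the store keeps its files under a single root directory:

import os

class BlobStore(object):
    # Illustration-only stub; the real class also carries the map state
    # shown in the examples above.
    def __init__(self, root_dir):
        self._root_dir = root_dir

    def _map_filename(self):
        # Hypothetical layout: the map file sits alongside the blob files.
        return os.path.join(self._root_dir, 'keys.map')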