Example no. 1
 def _load_from_cache(self, cache_file_name, defines, dtype, engine, suffix,
                      cache_is_valid, template_kwargs):
     include_dirs = self._get_include_dirs(engine)
     try:
         with tarfile.open(cache_file_name, "r:gz") as tar:
            # Compare the cached source against a freshly generated one;
            # any mismatch invalidates the whole cache entry.
            cached_source = tar.extractfile("source." + suffix).read()
             src, my_defines = self._generate_source(
                 defines, include_dirs, dtype, suffix, template_kwargs)
             real_source = src.encode("utf-8")
             if cached_source != real_source:
                 return None, None
            # Every include dependency must also match its cached copy.
            for dep in set(self._scan_include_dependencies(suffix)):
                 cached_source = tar.extractfile(
                     os.path.basename(dep)).read()
                 with open(dep, "rb") as fr:
                     real_source = fr.read()
                 if cached_source != real_source:
                     return None, None
            # Unpickle the cached binaries and check validity and format.
            cache = pickle.loads(tar.extractfile("binaries.pickle").read())
             if not cache_is_valid(cache):
                 return None, None
             bins = cache["binaries"]
             if not isinstance(bins, bytes) and (
                     not isinstance(bins, list) or len(bins) == 0 or
                     not isinstance(bins[0], bytes)):
                 self.warning("Cached binaries have an invalid format")
                 return None, None
            return bins, my_defines
     except Exception as e:
         self.debug("Failed to load %s: %s", cache_file_name, e)
         return None, None
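The cache archive read above is a gzipped tar file containing the generated source as "source.<suffix>", one member per include dependency (stored under its base name), and "binaries.pickle", a pickled dictionary with a "binaries" key. Below is a minimal sketch of a writer that produces a compatible archive; the name save_to_cache and its parameter list are assumptions for illustration, and any additional keys expected by cache_is_valid are omitted.

 import io
 import os
 import pickle
 import tarfile

 def save_to_cache(cache_file_name, suffix, source, dependency_paths, binaries):
     """Sketch: write a cache archive in the layout _load_from_cache expects."""
     with tarfile.open(cache_file_name, "w:gz") as tar:
         def add_bytes(name, data):
             # Add an in-memory blob to the archive under the given name.
             info = tarfile.TarInfo(name)
             info.size = len(data)
             tar.addfile(info, io.BytesIO(data))
         add_bytes("source." + suffix, source.encode("utf-8"))
         for dep in dependency_paths:
             # Dependencies are stored by base name, matching the lookup above.
             tar.add(dep, arcname=os.path.basename(dep))
         add_bytes("binaries.pickle", pickle.dumps({"binaries": binaries}))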
Example no. 2
 def fill_minibatch(self):
     # Map each requested sample to (chunk_number, chunk_offset, position in
     # the minibatch) and sort so that every chunk is read only once.
     chunks_map = [
         self.get_address(sample) + (i,) for i, sample in
         enumerate(self.minibatch_indices.mem[:self.minibatch_size])]
     chunks_map.sort()
     prev_chunk_number = -1
     chunk = None
     for chunk_number, chunk_offset, index in chunks_map:
         # Load and decompress a chunk only when it changes between samples.
         if prev_chunk_number != chunk_number:
             prev_chunk_number = chunk_number
             self.file.seek(self.offset_table[chunk_number])
             buffer = self.file.read(self.offset_table[chunk_number + 1] -
                                     self.offset_table[chunk_number])
             chunk = pickle.loads(self.decompress(buffer))
         # Each chunk unpickles to a (data, labels) pair.
         mb_data, mb_labels = chunk
         self.minibatch_data[index] = mb_data[chunk_offset]
         if self.has_labels:
             self.minibatch_labels[index] = mb_labels[chunk_offset]
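This method relies on self.get_address, self.offset_table and self.decompress, none of which are shown here: each sample index maps to a (chunk_number, chunk_offset) pair, the offset table gives the byte range of a chunk's compressed pickle, and sorting chunks_map by chunk number ensures each chunk is read and decompressed only once. A minimal sketch of get_address, assuming fixed-size chunks (the attribute samples_per_chunk is hypothetical), could look like this:

 def get_address(self, sample_index):
     # Sketch only: assumes every chunk holds exactly self.samples_per_chunk
     # consecutive samples, so the mapping is plain integer arithmetic.
     return (sample_index // self.samples_per_chunk,
             sample_index % self.samples_per_chunk)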
Example no. 3
 def messageReceived(self, message):
     self.graphics.debug("Received %d bytes", len(message[0]))
     # Strip the leading "graphics" marker, then decompress the payload.
     raw_data = snappy.decompress(message[0][len('graphics'):])
     obj = pickle.loads(raw_data)
     self.graphics.update(obj, raw_data)
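Each incoming frame appears to consist of the literal prefix b"graphics" followed by a snappy-compressed pickle of the graphics state. A sketch of the matching sender side is shown below; pack_graphics_update is a hypothetical helper, not part of the original code.

 import pickle

 import snappy

 def pack_graphics_update(obj):
     # Build a payload that messageReceived above can decode: the b"graphics"
     # prefix followed by the snappy-compressed pickle of the object.
     return b"graphics" + snappy.compress(pickle.dumps(obj))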