def fetch(self, context):
    """
    Gets the queryset with the query arguments provided by `get_queryargs()`.
    If `use_cache` is set, the fetched result is dumped to a pickle file and
    reloaded from there as long as the cache is not out of date.
    """
    queryargs = self.get_queryargs(context)
    if self.use_cache:
        # derive a unique cache id from `queryargs`
        cache_id = hashlib.md5(repr(queryargs).encode('utf-8')).hexdigest()
        cache_pickle = os.path.join(self.app.data_dirname,
                                    self.cache_dirname, cache_id)
        if os.path.exists(cache_pickle) and \
                not self.is_expired(cache_pickle):
            logger.warning("Using cached queryset '%s'." % cache_id)
            # pickle files must be opened in binary mode
            with open(cache_pickle, 'rb') as f:
                queryset = pickle.load(f)
        else:
            queryset = self.fetcher.fetch(**queryargs)
            makeparents(cache_pickle)
            with open(cache_pickle, 'wb') as f:
                pickle.dump(queryset, f)
    else:
        queryset = self.fetcher.fetch(**queryargs)
    return queryset

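# `is_expired()` is not shown in the snippet above. A minimal sketch, assuming
# the cache is invalidated purely by file age and that a `cache_max_age`
# attribute (in seconds) exists on the same object -- both names are
# hypothetical, not taken from the original source:
import os
import time

def is_expired(self, cache_pickle):
    """Return True if the cached pickle is older than `cache_max_age` seconds."""
    age = time.time() - os.path.getmtime(cache_pickle)
    return age > self.cache_max_age
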
def export(self, root):
    filepath = os.path.join(root, self.normpath)
    makeparents(filepath)
    # replace the original extension with the configured output suffix
    filename, _ = os.path.splitext(filepath)
    filepath = filename + self.output_suffix
    with open(filepath, 'w') as f:
        f.write(self.to_json())

def write(self, data, filepath):
    # serialize directory creation so concurrent writers do not race;
    # the lock is also released if `makeparents` raises
    with lock:
        makeparents(filepath)
    with open(filepath, 'wb') as f:
        f.write(data)

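# The `lock` above is assumed to be a module-level lock shared by all writers.
# A minimal sketch of that assumption, using a threading lock (the original
# source may use a different synchronization primitive):
import threading

lock = threading.Lock()
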
def draw(self, model, context):
    data_dir = os.path.join(self.app.data_dirname, context['title'])
    img_name = os.path.join(data_dir, model.filepath)
    dst_name = os.path.join(data_dir, 'masks', model.filepath)
    makeparents(dst_name)
    # render the polygon annotations as a grayscale mask under 'masks/'
    draw.draw_polygons(img_name, dst_name, model.datalist,
                       self.PALLET, grayscale=True)

def write(response):
    if not response:
        stat.nfail += 1
    else:
        data, name = response
        dst_file = os.path.join(dirpath, name)
        makeparents(dst_file)
        with open(dst_file, 'w') as f:
            f.write(data)
        stat.nok += 1

def dump(self, data_model):
    # create the parent directory that will hold the json output
    makeparents(data_model.dest_filepath)
    filename, _ = os.path.splitext(data_model.dest_filepath)
    with open(filename + data_model.output_suffix, 'w') as f:
        f.write(data_model.to_string())

def dump(self):
    makeparents(self.cache_path)
    # pickle requires a binary file handle
    with open(self.cache_path, 'wb') as f:
        pickle.dump(self.pool_table, f)

def dump(self, model, dst):
    filepath = os.path.join(dst, model.normpath)
    makeparents(filepath)
    filename, _ = os.path.splitext(filepath)
    with open(filename + model.output_suffix, 'w') as f:
        f.write(model.to_string())

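# Every snippet above calls `makeparents(path)` before opening `path` for
# writing. The helper itself is not shown here; a minimal sketch, assuming it
# simply creates any missing parent directories and tolerates ones that
# already exist (Python 3 `os.makedirs` with `exist_ok=True`):
import os

def makeparents(filepath):
    """Create the parent directory of `filepath` if it does not exist yet."""
    parent = os.path.dirname(filepath)
    if parent:
        os.makedirs(parent, exist_ok=True)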