async def set_spec(self, spec: dict):
    """Replace the active spec, persist it if configured, and notify listeners.

    Args:
        spec: the new specification dictionary.
    """
    # Store a private copy so later mutation of the caller's dict cannot
    # silently change our recorded state.
    self.current_spec = dict(spec)
    # Persist only when a save path was configured.
    if self.save_file:
        with open(self.save_file, "w") as f:
            json.dump(spec, f)
    # Broadcast the update to subscribers.
    self.dispatcher.emit("spec_update", spec)
def store_stock():
    """Download the stock list, keep only NYSE listings, and save to STOCK_FILE.

    Raises:
        RuntimeError: if the stock endpoint does not answer with HTTP 200.
    """
    with requests.Session() as session:
        session.auth = AUTH
        response = session.get(url=URL_STOCK)
        # `assert` is stripped under `python -O`; validate explicitly so the
        # check survives optimized runs.
        if response.status_code != 200:
            raise RuntimeError(response.text)
        stock = response.json()
    # Keep NYSE listings only.
    stock = [s for s in stock if s['exchange'] == 'NYSE']
    # print, not pprint: pprint of a plain string emits its quoted repr.
    print(f'Stock size: {len(stock)}')
    with open(STOCK_FILE, 'w') as stock_io:
        json.dump(stock, stock_io, indent=2)
def write_output(top_rated_categories, top_n_categories):
    """Write the first top_n_categories rows to output_file as JSON.

    Args:
        top_rated_categories: iterable of rows; each row's first two fields
            are emitted as a [category, rating] pair.
        top_n_categories: maximum number of rows to include.

    The result is written to the module-level `output_file` path as
    ``{"result": [[cat, rating], ...]}``.
    """
    from itertools import islice

    # islice replaces the manual counter/break loop and stops after
    # top_n_categories rows without materializing the whole input.
    answer = {
        "result": [[row[0], row[1]]
                   for row in islice(top_rated_categories, top_n_categories)]
    }
    # 'w' truncates/creates just like the original 'w+'; the read capability
    # of 'w+' was never used.
    with open(output_file, 'w') as fp:
        json.dump(answer, fp)
def addFile(self, fh, hash: str = None, metadata: dict = None, allow_replace=False):
    """
    Adds the file stream to the stash as a blob.

    The stream is spooled to a temporary file while hashing, then moved
    into place. The temporary file is always cleaned up on failure.

    Args:
        fh: file open for read (binary)
        hash: pre-computed SHA256 hex digest, or None to compute from fh
        metadata: Optional dictionary of metadata, written as a sidecar .json
        allow_replace: If True, then ok to replace existing file

    Raises:
        ValueError: if fh yields no bytes, or the entry exists and
            allow_replace is False.

    Returns:
        fldr_dest, sha256, path_to_file
    """
    sha = hashlib.sha256()
    nbytes = 0
    tmpfile_name = None
    try:
        with tempfile.NamedTemporaryFile(delete=False) as tmp_dest:
            tmpfile_name = tmp_dest.name
            fbuf = fh.read(self.BLOCK_SIZE)
            while len(fbuf) > 0:
                # Skip hashing when the caller supplied a precomputed hash.
                if hash is None:
                    sha.update(fbuf)
                tmp_dest.write(fbuf)
                nbytes += len(fbuf)
                fbuf = fh.read(self.BLOCK_SIZE)
        if nbytes <= 0:
            raise ValueError("No content in provided file handle")
        if hash is None:
            hash = sha.hexdigest()
        fldr_dest = self.pathFromHash(hash)
        abs_fldr = os.path.join(self.root_path, fldr_dest)
        os.makedirs(abs_fldr, exist_ok=True)
        f_base = os.path.join(abs_fldr, hash)
        f_dest = f"{f_base}.{self.EXTENSION}"
        if os.path.exists(f_dest) and not allow_replace:
            # f-string: the original passed %-style args to ValueError,
            # which never formats them.
            raise ValueError(f"Entry already exists: {hash}")
        shutil.move(tmpfile_name, f_dest)
        tmpfile_name = None  # moved into place; nothing left to clean up
    finally:
        # Remove the spool file on any failure path (the original leaked it
        # when the "No content" ValueError was raised).
        if tmpfile_name is not None and os.path.exists(tmpfile_name):
            os.unlink(tmpfile_name)
    if metadata is not None:
        with open(f"{f_base}.json", "w") as fout:
            json.dump(metadata, fout, indent=2)
    self.L.debug("wrote %s bytes to %s", nbytes, f_dest)
    return fldr_dest, hash, os.path.join(fldr_dest, f"{hash}.{self.EXTENSION}")
def add(self, b: bytes, hash: str = None, metadata: dict = None, allow_replace=False):
    """
    Add a blob file to the stash.

    Raises an error if the blob already exists and allow_replace is False

    Args:
        b: bytes
        hash: pre-computed SHA256 hash or None
        metadata: optional dict to be written as json
        allow_replace: OK to replace existing hash, otherwise raise ValueError

    Returns:
        fldr_dest, sha256, path_to_file
    """
    if hash is None:
        sha = hashlib.sha256()
        sha.update(b)
        hash = sha.hexdigest()
    else:
        # Normalize caller-supplied digests so lookups are case-insensitive.
        hash = hash.strip().lower()
    fldr_dest = self.pathFromHash(hash)
    abs_fldr = os.path.join(self.root_path, fldr_dest)
    os.makedirs(abs_fldr, exist_ok=True)
    f_base = os.path.join(abs_fldr, hash)
    f_dest = f"{f_base}.{self.EXTENSION}"
    if os.path.exists(f_dest) and not allow_replace:
        # f-string: the original passed %-style args to ValueError,
        # which never formats them.
        raise ValueError(f"Entry already exists: {hash}")
    with open(f_dest, "wb") as fout:
        fout.write(b)
    if metadata is not None:
        with open(f"{f_base}.json", "w") as fout:
            json.dump(metadata, fout, indent=2)
    self.L.debug("wrote %s bytes to %s", len(b), f_dest)
    return fldr_dest, hash, os.path.join(fldr_dest, f"{hash}.{self.EXTENSION}")