def test_open_file(self):
    path = MultiplexedPath(self.folder)
    with self.assertRaises(FileNotFoundError):
        path.read_bytes()
    with self.assertRaises(FileNotFoundError):
        path.read_text()
    with self.assertRaises(FileNotFoundError):
        path.open()

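# Context for the test above: MultiplexedPath models the union of several
# directories, so byte/text reads on it always raise FileNotFoundError.
# A minimal standalone sketch (assumed setup; the class lives in
# importlib.readers on Python 3.10 and importlib.resources.readers on 3.11+):
import pathlib
import tempfile

try:
    from importlib.resources.readers import MultiplexedPath  # 3.11+
except ImportError:
    from importlib.readers import MultiplexedPath  # 3.10

with tempfile.TemporaryDirectory() as folder:
    path = MultiplexedPath(pathlib.Path(folder))
    try:
        path.read_bytes()
    except FileNotFoundError as exc:
        print(exc)  # a MultiplexedPath is never itself a readable file
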
def sha256(path: pathlib.Path) -> str:
    if not path.exists():
        return ""
    try:
        return hashlib.sha256(path.read_bytes()).hexdigest()[:6]
    except FileNotFoundError:
        return ""

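# Quick usage sketch for the helper above (file names here are illustrative);
# note it returns only the first six hex digits, and '' for missing files:
import hashlib
import pathlib

demo = pathlib.Path('example.bin')
demo.write_bytes(b'hello')
print(sha256(demo))                     # first 6 hex chars, here '2cf24d'
print(sha256(pathlib.Path('missing')))  # '' when the file does not exist
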
def read_file_binary(filename: str) -> bytes:
    """Get the contents of a binary file contained with qutebrowser.

    Args:
        filename: The filename to open as string.

    Return:
        The file contents as a bytes object.
    """
    path = _resource_path(filename)
    return path.read_bytes()

def read_file_binary(filename: str) -> bytes:
    """Get the contents of a binary file contained with qutebrowser.

    Args:
        filename: The filename to open as string.

    Return:
        The file contents as a bytes object.
    """
    path = _path(filename)
    with _keyerror_workaround():
        return path.read_bytes()

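# The _resource_path/_path helpers in the two snippets above are not shown.
# A plausible sketch using importlib.resources (an assumption, not
# qutebrowser's actual implementation):
from importlib import resources

def _resource_path(filename: str):
    # Resolve a file shipped inside the qutebrowser package tree.
    return resources.files('qutebrowser') / filename
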
def _read(self, dirs, filename, binary=False):
    '''Return text/binary contents of a file, None if file does not exist.'''
    # Archive-relative directory prefix (empty at the archive root).
    dirpath = '/'.join(dirs) + '/' if len(dirs) else ''
    path = zipfile.Path(self._zipfile, dirpath + filename)
    if path.exists():
        if binary:
            return path.read_bytes()
        with path.open() as f:
            return '\n'.join(f.readlines())
    return None

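# Several snippets here lean on zipfile.Path (Python 3.8+), which exposes
# archive members through a pathlib-like API. A self-contained demo:
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('docs/readme.txt', 'hello from the archive')

member = zipfile.Path(zipfile.ZipFile(buf), 'docs/readme.txt')
print(member.exists())      # True
print(member.read_bytes())  # b'hello from the archive'
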
def copy_to(widget, images, sym=False):
    paths = [
        Path(index.data(Qt.UserRole)[0])
        for index in images if index.data(300) is not None
    ]
    folder = Path(
        QFileDialog.getExistingDirectory(widget, 'Open Directory',
                                         str(PATH.parent),
                                         QFileDialog.ShowDirsOnly))
    for path in paths:
        name = folder / path.name
        if sym and not name.exists():
            name.symlink_to(path)
        else:
            name.write_bytes(path.read_bytes())

def copy_to(widget, images, sym=False):
    paths = [
        Path(index.data(Qt.UserRole)[0])
        for index in images if index.data(300) is not None
    ]
    folder = Path(
        QFileDialog.getExistingDirectory(widget, 'Open Directory',
                                         getenv('COPY_DIR', '*')))
    for path in paths:
        name = folder / path.name
        if sym and not name.exists():
            name.symlink_to(path)
        else:
            name.write_bytes(path.read_bytes())
    set_key(r'GUI\.env', 'COPY_DIR', str(folder))
    load_dotenv(r'GUI\.env')

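# The variant above persists the chosen folder with python-dotenv. A minimal
# sketch of that round trip (the .env path and value are illustrative):
from os import getenv
from dotenv import load_dotenv, set_key

set_key('.env', 'COPY_DIR', '/tmp/output')  # write COPY_DIR into .env
load_dotenv('.env')                         # load it into os.environ
print(getenv('COPY_DIR', '*'))              # '/tmp/output'
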
def _copy_files(self, src, dest):
    for path in zipfile.Path(self._zipfile, src).iterdir():
        # Write each archive member under its own name inside dest.
        with open(os.path.join(dest, path.name), 'wb') as f:
            f.write(path.read_bytes())

def _copy_file(self, filename, src, dest):
    path = zipfile.Path(self._zipfile, src + '/' + filename)
    with open(dest, 'wb') as f:
        f.write(path.read_bytes())

def run(self):
    if self.target is None:
        raise AsyncError('Process must have an associated target')
    if not self.input_files.exists():
        raise AsyncError('No input files to process')

    # Check file suffixes
    if self.allowed_suffixes:
        for prod in self.input_files.all():
            filename = prod.data.name or ''
            try:
                assert_valid_suffix(os.path.basename(filename),
                                    self.allowed_suffixes)
            except AssertionError as ex:
                raise AsyncError("Error running pipeline {}".format(ex))

    with tempfile.TemporaryDirectory() as tmpdir_name:
        tmpdir = Path(tmpdir_name)

        # Do the actual work
        flags = json.loads(self.flags_json) if self.flags_json else {}
        outputs = self.do_pipeline(tmpdir, **flags)

        # Save outputs
        new_dps = []
        for output in outputs:
            if not isinstance(output, PipelineOutput):
                output = PipelineOutput(*output)
            path, output_type, data_product_type, data = output
            if output_type == DataProduct:
                identifier = f'{self.identifier}_{path.name}'
                prod = DataProduct.objects.create(
                    product_id=identifier,
                    target=self.target,
                    data_product_type=data_product_type)
                prod.data.save(identifier, ContentFile(path.read_bytes()))
                new_dps.append(prod)
            elif output_type == ReducedDatum:
                identifier = f'{self.identifier}_{data[0]:.0f}'
                phot_data = {'magnitude': data[1], 'error': data[2]}
                t = Time(data[0], format='mjd', scale='utc')
                try:
                    dp = DataProduct.objects.get(data=data[3])
                except Exception:
                    dp = None
                rd, created = ReducedDatum.objects.get_or_create(
                    target=self.target,
                    data_product=dp,
                    data_type=data_product_type,
                    source_name=identifier,
                    timestamp=t.to_value('datetime'),
                )
                rd.value = json.dumps(phot_data)
                rd.save()
            else:
                raise AsyncError(f"Invalid output type '{output_type}'")

    # Create a group to collect DataProduct outputs into
    if new_dps:
        self.group = DataProductGroup.objects.create(
            name=f'{self.identifier}_outputs')
        for prod in new_dps:
            prod.group.add(self.group)
            prod.save()

    self.status = ASYNC_STATUS_CREATED
    self.save()

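# run() unpacks every pipeline output into four fields, so PipelineOutput is
# presumably a four-field tuple along these lines (a sketch inferred from the
# unpacking above, not the project's own definition):
from collections import namedtuple

PipelineOutput = namedtuple(
    'PipelineOutput', ['path', 'output_type', 'data_product_type', 'data'])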