def test_normal_ops(self):
    """Writes are buffered in a temp file and only land on close()."""
    target = pjoin(self.dir, "target")
    self.write_file(target, "w", "me")
    writer = self.kls(target)
    writer.write("dar")
    # Before close(), the original contents must be untouched.
    self.assertEqual(fileutils.readfile_ascii(target), "me")
    writer.close()
    # close() replaces the target with the newly written data.
    self.assertEqual(fileutils.readfile_ascii(target), "dar")
def test_del(self):
    """Dropping the writer without close() must discard pending data."""
    target = pjoin(self.dir, "target")
    self.write_file(target, "w", "me")
    self.assertEqual(fileutils.readfile_ascii(target), "me")
    writer = self.kls(target)
    writer.write("dar")
    del writer
    # Original contents survive, and the temp file was cleaned up:
    # only the target itself remains in the directory.
    self.assertEqual(fileutils.readfile_ascii(target), "me")
    self.assertEqual(len(os.listdir(self.dir)), 1)
def test_del(self):
    """Dropping the writer without close() must discard pending data."""
    target = pjoin(self.dir, "target")
    write_file(target, "w", "me")
    assert fileutils.readfile_ascii(target) == "me"
    writer = self.kls(target)
    writer.write("dar")
    del writer
    # Original contents survive, and the temp file was cleaned up:
    # only the target itself remains in the directory.
    assert fileutils.readfile_ascii(target) == "me"
    assert len(os.listdir(self.dir)) == 1
def test_discard(self):
    """discard() drops pending writes and removes the temp file."""
    target = pjoin(self.dir, "target")
    self.write_file(target, "w", "me")
    self.assertEqual(fileutils.readfile_ascii(target), "me")
    writer = self.kls(target)
    writer.write("dar")
    writer.discard()
    # The backing temp file must be gone once discarded.
    self.assertFalse(os.path.exists(writer._temp_fp))
    writer.close()
    # close() after discard leaves the original contents intact.
    self.assertEqual(fileutils.readfile_ascii(target), "me")
    # finally validate that it handles multiple discards properly.
    writer = self.kls(target)
    writer.write("dar")
    writer.discard()
    writer.discard()
    writer.close()
def test_data_source_check(self):
    """Checksumming a data_source agrees with checksumming the file."""
    self.assertEqual(self.chf(local_source(self.fn)), self.expected_long)
    contents = fileutils.readfile_ascii(self.fn)
    self.assertEqual(self.chf(data_source(contents)), self.expected_long)
def _sync(self, verbosity, output_fd, force=False, **kwargs):
    """Fetch the repo file over HTTP(S), skipping the download when unchanged.

    Uses cached ETag / Last-Modified values (stored beside the repo in
    ``.etag`` / ``.modified``) to issue a conditional GET unless *force*
    is true.

    :param verbosity: verbosity level (unused here, part of the sync API).
    :param output_fd: output fd (unused here, part of the sync API).
    :param force: skip conditional-request headers and always re-download.
    :return: True on success (including "already up to date").
    :raises base.SyncError: on fetch failure or repo dir creation failure.
    :raises base.PathError: when the download target can't be opened.
    """
    dest = self._pre_download()

    if self.uri.lower().startswith('https://'):
        # Client-side connection verifying the *server's* certificate:
        # SERVER_AUTH is the correct purpose (and the default).
        # CLIENT_AUTH would build a server-side context instead.
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
    else:
        context = None

    headers = {}
    etag_path = pjoin(self.basedir, '.etag')
    modified_path = pjoin(self.basedir, '.modified')
    # Bind explicitly so the post-response comparison below is well-defined
    # regardless of the force flag.
    previous_etag = previous_modified = None

    if not force:
        # use cached ETag to check if updates exist
        previous_etag = readfile_ascii(etag_path, none_on_missing=True)
        if previous_etag:
            headers['If-None-Match'] = previous_etag

        # use cached modification timestamp to check if updates exist
        previous_modified = readfile_ascii(modified_path, none_on_missing=True)
        if previous_modified:
            headers['If-Modified-Since'] = previous_modified

    req = urllib.request.Request(self.uri, headers=headers, method='GET')

    # TODO: add customizable timeout
    try:
        resp = urllib.request.urlopen(req, context=context)
    except urllib.error.URLError as e:
        # Only HTTPError (a URLError subclass) carries an HTTP status; a
        # plain URLError (e.g. connection refused) has no status code, and
        # calling getcode() on it would raise AttributeError.
        if isinstance(e, urllib.error.HTTPError) and e.code == 304:
            # Not Modified
            logger.debug("content is unchanged")
            return True
        raise base.SyncError(
            f'failed fetching {self.uri!r}: {e.reason}') from e

    # Manually check cached values ourselves since some servers appear to
    # ignore If-None-Match or If-Modified-Since headers.
    convert = lambda x: x.strip() if x else None
    etag = resp.getheader('ETag')
    modified = resp.getheader('Last-Modified')
    if not force:
        if etag is not None and convert(etag) == convert(previous_etag):
            logger.debug(f"etag {etag} is equal, no update available")
            return True
        if modified is not None and convert(modified) == convert(previous_modified):
            logger.debug(f"header mtime is unmodified: {modified}")
            return True

    try:
        os.makedirs(self.basedir, exist_ok=True)
    except OSError as e:
        raise base.SyncError(
            f'failed creating repo dir {self.basedir!r}: {e.strerror}') from e

    length = resp.getheader('content-length')
    if length:
        length = int(length)
        # aim for ~100 progress updates, but never tiny reads
        blocksize = max(4096, length // 100)
    else:
        blocksize = 1000000

    try:
        self._download = AtomicWriteFile(dest, binary=True, perms=0o644)
    except OSError as e:
        raise base.PathError(self.basedir, e.strerror) from e

    # retrieve the file while providing simple progress output
    size = 0
    while True:
        buf = resp.read(blocksize)
        if not buf:
            if length:
                sys.stdout.write('\n')
            break
        self._download.write(buf)
        size += len(buf)
        if length:
            sys.stdout.write('\r')
            progress = '=' * int(size / length * 50)
            percent = int(size / length * 100)
            sys.stdout.write("[%-50s] %d%%" % (progress, percent))

    self._post_download(dest)

    # TODO: store this in pkgcore cache dir instead?
    # update cached ETag/Last-Modified values
    if etag:
        with open(etag_path, 'w') as f:
            f.write(etag)
    if modified:
        with open(modified_path, 'w') as f:
            f.write(modified)

    return True
def test_data_source_check(self):
    """Checksumming a data_source agrees with checksumming the file."""
    assert self.chf(local_source(self.fn)) == self.expected_long
    contents = fileutils.readfile_ascii(self.fn)
    assert self.chf(data_source(contents)) == self.expected_long