def _download_data(self, response, file_path):
    """Consume *response* either into a returned ``bytes`` object (when
    *file_path* is falsy) or onto disk at *file_path*.

    :param response: a requests-like response object (``headers``,
        ``content``, ``iter_content``, ``close``).
    :param file_path: destination path on disk, or None/"" to return bytes.
    :return: ``bytes`` with the payload when no *file_path* was given,
        otherwise ``None`` (data already written to *file_path*).
    :raises ConanException: if fewer bytes than ``content-length`` arrived
        (unless the transfer was gzip-encoded, where lengths may not match).
    """
    ret = bytearray()
    total_length = response.headers.get('content-length')
    if total_length is None:  # no content length header
        # Without a content-length we cannot stream with progress; read the
        # whole body in one go.
        if not file_path:
            ret += response.content
        else:
            if self.output:
                total_length = len(response.content)
                progress = human_readable_progress(total_length, total_length)
                # 50 units == a full progress bar (100%) in one shot
                print_progress(self.output, 50, progress)
            save_append(file_path, response.content)
    else:
        total_length = int(total_length)
        encoding = response.headers.get('content-encoding')
        gzip = (encoding == "gzip")
        # chunked can be a problem:
        # https://www.greenbytes.de/tech/webdav/rfc2616.html#rfc.section.4.4
        # It will not send content-length or should be ignored

        def download_chunks(file_handler=None, ret_buffer=None):
            """Write to a buffer or to a file handler"""
            # Larger chunks when writing to disk; small ones for in-memory
            chunk_size = 1024 if not file_path else 1024 * 100
            download_size = 0
            last_progress = None
            for data in response.iter_content(chunk_size):
                download_size += len(data)
                if ret_buffer is not None:
                    ret_buffer.extend(data)
                if file_handler is not None:
                    file_handler.write(to_file_bytes(data))
                if self.output:
                    units = progress_units(download_size, total_length)
                    progress = human_readable_progress(download_size,
                                                       total_length)
                    if last_progress != units:  # Avoid screen refresh if nothing has change
                        print_progress(self.output, units, progress)
                        last_progress = units
            return download_size

        if file_path:
            mkdir(os.path.dirname(file_path))
            with open(file_path, 'wb') as handle:
                dl_size = download_chunks(file_handler=handle)
        else:
            dl_size = download_chunks(ret_buffer=ret)

        response.close()

        # NOTE(review): a gzip-encoded body legitimately differs in size from
        # the declared content-length, hence the exemption.
        if dl_size != total_length and not gzip:
            raise ConanException("Transfer interrupted before "
                                 "complete: %s < %s" % (dl_size, total_length))

    if not file_path:
        return bytes(ret)
    else:
        return
def test_init_overwrite():
    """ config init --force MUST override current content """
    client = TestClient()
    client.run('config init')
    dummy_content = 'DUMMY CONTENT. SHOULD BE REMOVED!'
    # Pollute every generated config file with marker content
    save_append(client.cache.conan_conf_path, dummy_content)
    save_append(client.cache.remotes_path, dummy_content)
    save_append(client.cache.settings_path, dummy_content)
    save_append(client.cache.default_profile_path, dummy_content)

    # --force must regenerate the files from scratch, dropping the marker
    client.run('config init --force')
    # Fix: the original asserted conan_conf_path twice; check each file once
    assert dummy_content not in load(client.cache.conan_conf_path)
    assert dummy_content not in load(client.cache.settings_path)
    assert dummy_content not in load(client.cache.remotes_path)
    assert dummy_content not in load(client.cache.default_profile_path)
def test_init_overwrite(self):
    """``config init --force`` must overwrite existing config file content."""
    # create and add dummy content to the config files
    self.client.run('config init')
    dummy_content = 'DUMMY CONTENT. SHOULD BE REMOVED!'
    save_append(self.client.cache.conan_conf_path, dummy_content)
    save_append(self.client.cache.remotes_path, dummy_content)
    save_append(self.client.cache.settings_path, dummy_content)
    save_append(self.client.cache.default_profile_path, dummy_content)

    # overwrite files; every file must be regenerated without the marker.
    # Fix: the original checked conan_conf_path twice; each file is now
    # asserted exactly once.
    self.client.run('config init --force')
    self.assertNotIn(dummy_content, load(self.client.cache.conan_conf_path))
    self.assertNotIn(dummy_content, load(self.client.cache.remotes_path))
    self.assertNotIn(dummy_content, load(self.client.cache.settings_path))
    self.assertNotIn(dummy_content, load(self.client.cache.default_profile_path))
def save(path, content, append=False):
    """Persist *content* at *path*.

    :param path: destination file path.
    :param content: text/bytes to write.
    :param append: when True, append to the file; otherwise overwrite it.
    """
    # TODO: All this three functions: save, save_append and this one should be merged into one.
    if not append:
        files_save(path=path, content=content, only_if_modified=False)
        return
    save_append(path=path, content=content)
def download(self, url, file_path=None, auth=None, retry=1, retry_wait=0,
             overwrite=False, headers=None):
    """Download *url*, returning the payload as ``bytes`` or writing it to
    *file_path* when one is given.

    :param url: URL to fetch.
    :param file_path: optional destination path; made absolute if relative.
    :param auth: authentication object forwarded to ``_download_file``.
    :param retry: number of retry attempts on connection failure.
    :param retry_wait: seconds to wait between retries.
    :param overwrite: allow overwriting an existing *file_path*.
    :param headers: extra HTTP headers forwarded to the request.
    :return: ``bytes`` when no *file_path*, otherwise ``None``.
    :raises NotFoundException: on HTTP 404.
    :raises ConanException: on other HTTP errors, pre-existing destination
        file without *overwrite*, or a short (interrupted) transfer.
    :raises ConanConnectionError: on any failure while reading the body.
    """
    if file_path and not os.path.isabs(file_path):
        file_path = os.path.abspath(file_path)

    if file_path and os.path.exists(file_path):
        if overwrite:
            if self.output:
                self.output.warn("file '%s' already exists, overwriting" % file_path)
        else:
            # Should not happen, better to raise, probably we had to remove
            # the dest folder before
            raise ConanException("Error, the file to download already exists: '%s'" % file_path)

    t1 = time.time()
    ret = bytearray()
    response = call_with_retry(self.output, retry, retry_wait, self._download_file,
                               url, auth, headers)
    if not response.ok:  # Do not retry if not found or whatever controlled error
        if response.status_code == 404:
            raise NotFoundException("Not found: %s" % url)
        raise ConanException("Error %d downloading file %s" % (response.status_code, url))

    try:
        total_length = response.headers.get('content-length')
        if total_length is None:  # no content length header
            # Cannot stream with progress without a length; read it all
            if not file_path:
                ret += response.content
            else:
                total_length = len(response.content)
                progress = human_readable_progress(total_length, total_length)
                # 50 units == a complete progress bar in a single print
                print_progress(self.output, 50, progress)
                save_append(file_path, response.content)
        else:
            total_length = int(total_length)
            encoding = response.headers.get('content-encoding')
            gzip = (encoding == "gzip")
            # chunked can be a problem:
            # https://www.greenbytes.de/tech/webdav/rfc2616.html#rfc.section.4.4
            # It will not send content-length or should be ignored

            def download_chunks(file_handler=None, ret_buffer=None):
                """Write to a buffer or to a file handler"""
                # Bigger chunks when writing to disk than when buffering
                chunk_size = 1024 if not file_path else 1024 * 100
                download_size = 0
                last_progress = None
                for data in response.iter_content(chunk_size=chunk_size):
                    download_size += len(data)
                    if ret_buffer is not None:
                        ret_buffer.extend(data)
                    if file_handler is not None:
                        file_handler.write(to_file_bytes(data))
                    units = progress_units(download_size, total_length)
                    progress = human_readable_progress(download_size,
                                                       total_length)
                    if last_progress != units:  # Avoid screen refresh if nothing has change
                        if self.output:
                            print_progress(self.output, units, progress)
                        last_progress = units
                return download_size

            if file_path:
                mkdir(os.path.dirname(file_path))
                with open(file_path, 'wb') as handle:
                    dl_size = download_chunks(file_handler=handle)
            else:
                dl_size = download_chunks(ret_buffer=ret)

            # gzip bodies may be shorter/longer than content-length, so the
            # completeness check is skipped for them
            if dl_size != total_length and not gzip:
                raise ConanException("Transfer interrupted before "
                                     "complete: %s < %s" % (dl_size, total_length))

        duration = time.time() - t1
        log_download(url, duration)

        if not file_path:
            return bytes(ret)
        else:
            return
    except Exception as e:
        logger.debug(e.__class__)
        logger.debug(traceback.format_exc())
        # If this part failed, it means problems with the connection to server
        raise ConanConnectionError("Download failed, check server, possibly try again\n%s"
                                   % str(e))
def _download_data(self, response, file_path):
    """Consume *response* into memory (return ``bytes``) or onto disk at
    *file_path*, showing a tqdm progress bar on terminals and emitting a
    heartbeat character on non-terminal outputs to keep CI logs alive.

    :param response: requests-like response (``headers``, ``content``,
        ``iter_content``, ``close``).
    :param file_path: destination path, or None/"" to return the payload.
    :return: ``bytes`` when no *file_path* was given, otherwise ``None``.
    :raises ConanException: if fewer bytes than ``content-length`` arrived
        (unless the body was gzip-encoded, where sizes may differ).
    """
    ret = bytearray()
    total_length = response.headers.get('content-length')

    # Only draw a real progress bar on interactive terminals
    progress_bar = None
    if self.output and self.output.is_terminal:
        progress_bar = tqdm(unit='B', unit_scale=True, unit_divisor=1024,
                            dynamic_ncols=False, leave=True, ascii=True,
                            file=self.output)

    if total_length is None:  # no content length header
        # No length -> cannot stream with progress; read the whole body
        if not file_path:
            ret += response.content
        else:
            if self.output:
                total_length = len(response.content)
                if progress_bar is not None:
                    progress_bar.desc = "Downloading {}".format(
                        os.path.basename(file_path))
                    progress_bar.total = total_length
                    # Jump the bar straight to 100%
                    progress_bar.update(total_length)
            save_append(file_path, response.content)
    else:
        total_length = int(total_length)
        encoding = response.headers.get('content-encoding')
        gzip = (encoding == "gzip")
        # chunked can be a problem:
        # https://www.greenbytes.de/tech/webdav/rfc2616.html#rfc.section.4.4
        # It will not send content-length or should be ignored
        if progress_bar is not None:
            progress_bar.total = total_length

        def download_chunks(file_handler=None, ret_buffer=None):
            """Write to a buffer or to a file handler"""
            # Larger chunks when writing to disk than when buffering
            chunk_size = 1024 if not file_path else 1024 * 100
            download_size = 0
            last_time = 0
            if progress_bar is not None:
                progress_bar.desc = "Downloading {}".format(
                    os.path.basename(file_path))
            for data in response.iter_content(chunk_size):
                download_size += len(data)
                if ret_buffer is not None:
                    ret_buffer.extend(data)
                if file_handler is not None:
                    file_handler.write(to_file_bytes(data))
                if progress_bar is not None:
                    progress_bar.update(len(data))
                elif self.output and time.time() - last_time > TIMEOUT_BEAT_SECONDS:
                    # Non-terminal output: print a heartbeat so remote
                    # logs/CI don't time out during long downloads
                    last_time = time.time()
                    self.output.write(TIMEOUT_BEAT_CHARACTER)
            return download_size

        if file_path:
            mkdir(os.path.dirname(file_path))
            with open(file_path, 'wb') as handle:
                dl_size = download_chunks(file_handler=handle)
        else:
            dl_size = download_chunks(ret_buffer=ret)

        response.close()

        # gzip bodies legitimately differ from content-length, so skip
        # the completeness check for them
        if dl_size != total_length and not gzip:
            raise ConanException("Transfer interrupted before "
                                 "complete: %s < %s" % (dl_size, total_length))

    if progress_bar is not None:
        progress_bar.close()
    elif self.output:
        # Terminate the heartbeat line
        self.output.writeln(TIMEOUT_BEAT_CHARACTER)

    if not file_path:
        return bytes(ret)
    else:
        return