def do_test(dirname):
    """rename_over_existing() should fall back to the backup-and-rename dance and clean up the backup."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'

    backup = {}
    from os import rename as real_rename

    def fake_rename(from_path, to_path):
        # Record where the backup went so we can verify it got cleaned up,
        # and simulate a platform whose rename refuses to clobber.
        if '.bak' in to_path:
            backup['path'] = to_path
        if os.path.exists(to_path):
            _raise_file_exists(to_path)
        else:
            real_rename(from_path, to_path)

    monkeypatch.setattr('os.rename', fake_rename)

    rename_over_existing(src, dst)

    assert not os.path.exists(src)
    assert os.path.exists(dst)
    assert open(dst).read() == 'stuff-foo'
    # the backup copy must not be left behind
    assert not os.path.exists(backup['path'])
def do_test(dirname):
    """If the rename to the backup name fails, both files must be left untouched."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'

    from os import rename as real_rename

    def failing_rename(from_path, to_path):
        # Refuse to clobber existing targets, and additionally fail the
        # rename of the target to its backup name.
        if os.path.exists(to_path):
            _raise_file_exists(to_path)
        elif '.bak' in to_path:
            raise OSError("Failing rename to backup")
        else:
            real_rename(from_path, to_path)

    monkeypatch.setattr('os.rename', failing_rename)

    with pytest.raises(OSError) as excinfo:
        rename_over_existing(src, dst)
    assert 'Failing rename to backup' in repr(excinfo.value)

    # nothing should have been moved or lost
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'
def do_test(dirname):
    """If the rename into place fails after the backup was made, the backup must be restored and removed."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'

    backup = {}
    from os import rename as real_rename

    def failing_rename(from_path, to_path):
        if '.bak' in to_path:
            backup['path'] = to_path
        if os.path.exists(to_path):
            _raise_file_exists(to_path)
        elif 'path' in backup and os.path.exists(backup['path']) and from_path != backup['path']:
            # The target has been moved aside to the backup; fail the
            # rename of src into place so the rollback path runs.
            assert not os.path.exists(dst)
            assert os.path.exists(backup['path'])
            raise OSError("Failed to copy after backup")
        else:
            real_rename(from_path, to_path)

    monkeypatch.setattr('os.rename', failing_rename)

    with pytest.raises(OSError) as excinfo:
        rename_over_existing(src, dst)
    assert 'Failed to copy after backup' in repr(excinfo.value)

    # everything rolled back, and the backup cleaned up
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'
    assert not os.path.exists(backup['path'])
def do_test(dirname):
    """When the target does not exist, rename_over_existing() behaves like a plain rename."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert not os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'

    rename_over_existing(src, dst)

    assert not os.path.exists(src)
    assert os.path.exists(dst)
    assert open(dst).read() == 'stuff-foo'
def _atomic_replace(path, contents, encoding='utf-8'):
    """Replace the file at ``path`` with ``contents`` via a write-to-temp-then-rename.

    The temp file lives next to ``path`` so the final rename stays on one
    filesystem; readers therefore never observe a partially-written file.
    """
    tmp = path + ".tmp-" + str(uuid.uuid4())
    try:
        with codecs.open(tmp, 'w', encoding) as f:
            f.write(contents)
            # flush and close explicitly so the temp file is fully on disk
            # before we rename it over the destination
            f.flush()
            f.close()
        rename_over_existing(tmp, path)
    finally:
        # best-effort cleanup: on success the temp file is already gone and
        # remove() fails harmlessly
        try:
            os.remove(tmp)
        except (IOError, OSError):
            pass
def do_test(dirname):
    """An arbitrary IOError from os.rename should propagate out of rename_over_existing()."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'

    def broken_rename(from_path, to_path):
        raise IOError("it all went wrong")

    monkeypatch.setattr('os.rename', broken_rename)

    with pytest.raises(IOError) as excinfo:
        rename_over_existing(src, dst)
    assert 'it all went wrong' in str(excinfo.value)
def unpack_zip(zip_path, target_path, errors):
    """Extract ``zip_path`` into ``target_path``, staging through a temp directory.

    Appends human-readable messages to ``errors`` and returns False on
    failure; returns True on success.
    """
    try:
        with zipfile.ZipFile(zip_path, mode='r') as archive:
            parent_dir, leaf_name = os.path.split(target_path)
            # stage the extraction next to the target so the final rename
            # stays on the same filesystem
            staging = tempfile.mkdtemp(prefix=(target_path + "_tmp"), dir=parent_dir)
            try:
                archive.extractall(staging)
                members = os.listdir(staging)
                if len(members) == 0:
                    errors.append("Zip archive was empty.")
                    return False

                if len(members) == 1 and members[0] == leaf_name:
                    # don't keep a pointless directory level, if
                    # the zip just contains a single directory or
                    # file with the same name as the target
                    source = os.path.join(staging, members[0])
                else:
                    source = staging

                source_is_dir = os.path.isdir(source)
                target_is_dir = os.path.isdir(target_path)
                if os.path.exists(target_path) and (source_is_dir != target_is_dir):
                    # refuse to replace a file with a directory or vice versa
                    if source_is_dir:
                        errors.append(
                            "%s exists and isn't a directory, not unzipping a directory over it." % target_path)
                    else:
                        errors.append(
                            "%s exists and is a directory, not unzipping a plain file over it." % target_path)
                    return False
                else:
                    rename.rename_over_existing(source, target_path)
            finally:
                # the staging dir may have been renamed away already
                if os.path.isdir(staging):
                    shutil.rmtree(path=staging)
        return True
    except Exception as e:
        errors.append("Failed to unzip %s: %s" % (zip_path, str(e)))
        return False
def do_test(dirname):
    """rename_over_existing() should still succeed when removing the backup file fails."""
    src = os.path.join(dirname, "foo")
    dst = os.path.join(dirname, "bar")
    assert os.path.exists(src)
    assert os.path.exists(dst)
    assert open(src).read() == 'stuff-foo'
    assert open(dst).read() == 'stuff-bar'

    backup = {}
    from os import rename as real_rename

    def fake_rename(from_path, to_path):
        if '.bak' in to_path:
            backup['path'] = to_path
        if os.path.exists(to_path):
            _raise_file_exists(to_path)
        else:
            real_rename(from_path, to_path)

    monkeypatch.setattr('os.rename', fake_rename)

    def failing_remove(filename):
        raise OSError("not removing")

    monkeypatch.setattr('os.remove', failing_remove)

    # we shouldn't throw if we can't remove the backup
    rename_over_existing(src, dst)

    assert not os.path.exists(src)
    assert os.path.exists(dst)
    assert open(dst).read() == 'stuff-foo'
    # backup file gets left around
    assert os.path.exists(backup['path'])
    # otherwise the os.remove monkeypatch affects cleaning up the tmp
    # directory - but only on python 2.
    monkeypatch.undo()
def _archive_project(project, filename):
    """Make an archive of the non-ignored files in the project.

    Errors and progress are reported through the project's frontend; the
    archive format is chosen from the filename extension (.zip, .tar.gz,
    .tar.bz2, .tar).

    Args:
        project (``Project``): the project
        filename (str): name for the new zip or tar.gz archive file

    Returns:
        a ``Status``, if failed has ``errors``
    """
    failed = project.problems_status()
    if failed is not None:
        # surface the project's problems on the frontend before bailing
        for error in failed.errors:
            project.frontend.error(error)
        return failed

    # wrap the frontend so we can collect the errors we emit below
    frontend = _new_error_recorder(project.frontend)

    if not os.path.exists(project.project_file.filename):
        frontend.error("%s does not exist." % project.project_file.basename)
        return SimpleStatus(success=False, description="Can't create an archive.", errors=frontend.pop_errors())

    # this would most likely happen in a GUI editor, if it reloaded
    # the project from memory but hadn't saved yet.
    if project.project_file.has_unsaved_changes:
        frontend.error("%s has been modified but not saved." % project.project_file.basename)
        return SimpleStatus(success=False, description="Can't create an archive.", errors=frontend.pop_errors())

    infos = _enumerate_archive_files(
        project.directory_path, frontend, requirements=project.union_of_requirements_for_all_envs)
    if infos is None:
        return SimpleStatus(success=False,
                            description="Failed to list files in the project.",
                            errors=frontend.pop_errors())

    # don't put the destination zip into itself, since it's fairly natural to
    # create a archive right in the project directory
    relative_dest_file = subdirectory_relative_to_directory(filename, project.directory_path)
    if not os.path.isabs(relative_dest_file):
        infos = [info for info in infos if info.relative_path != relative_dest_file]

    # write to a temp file, then rename into place so a half-written
    # archive is never left at the final name
    tmp_filename = filename + ".tmp-" + str(uuid.uuid4())
    try:
        # dispatch on the requested extension
        if filename.lower().endswith(".zip"):
            _write_zip(project.name, infos, tmp_filename, frontend)
        elif filename.lower().endswith(".tar.gz"):
            _write_tar(project.name, infos, tmp_filename, compression="gz", frontend=frontend)
        elif filename.lower().endswith(".tar.bz2"):
            _write_tar(project.name, infos, tmp_filename, compression="bz2", frontend=frontend)
        elif filename.lower().endswith(".tar"):
            _write_tar(project.name, infos, tmp_filename, compression=None, frontend=frontend)
        else:
            frontend.error("Unsupported archive filename %s." % (filename))
            return SimpleStatus(
                success=False,
                description="Project archive filename must be a .zip, .tar.gz, or .tar.bz2.",
                errors=frontend.pop_errors())
        rename_over_existing(tmp_filename, filename)
    except IOError as e:
        frontend.error(str(e))
        return SimpleStatus(success=False,
                            description=("Failed to write project archive %s." % (filename)),
                            errors=frontend.pop_errors())
    finally:
        # best-effort cleanup; on success the temp file was renamed away
        try:
            os.remove(tmp_filename)
        except (IOError, OSError):
            pass

    # warn about env specs without lock sets, since the archive may not
    # reproduce the same environments elsewhere
    unlocked = []
    for env_spec in project.env_specs.values():
        if env_spec.lock_set.disabled:
            unlocked.append(env_spec.name)
    if len(unlocked) > 0:
        frontend.info("Warning: env specs are not locked, which means they may not "
                      "work consistently for others or when deployed.")
        frontend.info(" Consider using the 'anaconda-project lock' command to lock the project.")
        # only list names when some (not all) env specs are unlocked
        if len(unlocked) != len(project.env_specs):
            frontend.info(" Unlocked env specs are: " + (", ".join(sorted(unlocked))))

    return SimpleStatus(success=True, description=("Created project archive %s" % filename))
def _archive_project(project, filename):
    """Make an archive of the non-ignored files in the project.

    Errors are returned inside the ``Status``; the archive format is
    chosen from the filename extension (.zip, .tar.gz, .tar.bz2, .tar).

    Args:
        project (``Project``): the project
        filename (str): name for the new zip or tar.gz archive file

    Returns:
        a ``Status``, if failed has ``errors``
    """
    failed = project.problems_status()
    if failed is not None:
        return failed

    if not os.path.exists(project.project_file.filename):
        return SimpleStatus(success=False,
                            description="Can't create an archive.",
                            errors=[("%s does not exist." % project.project_file.basename)])

    # this would most likely happen in a GUI editor, if it reloaded
    # the project from memory but hadn't saved yet.
    if project.project_file.has_unsaved_changes:
        return SimpleStatus(success=False,
                            description="Can't create an archive.",
                            errors=[("%s has been modified but not saved." % project.project_file.basename)])

    errors = []
    infos = _enumerate_archive_files(project.directory_path, errors, requirements=project.requirements)
    if infos is None:
        return SimpleStatus(success=False, description="Failed to list files in the project.", errors=errors)

    # don't put the destination zip into itself, since it's fairly natural to
    # create a archive right in the project directory
    relative_dest_file = subdirectory_relative_to_directory(filename, project.directory_path)
    if not os.path.isabs(relative_dest_file):
        infos = [info for info in infos if info.relative_path != relative_dest_file]

    logs = []
    # write to a temp file, then rename into place so a half-written
    # archive is never left at the final name
    tmp_filename = filename + ".tmp-" + str(uuid.uuid4())
    try:
        # dispatch on the requested extension
        if filename.lower().endswith(".zip"):
            _write_zip(project.name, infos, tmp_filename, logs)
        elif filename.lower().endswith(".tar.gz"):
            _write_tar(project.name, infos, tmp_filename, compression="gz", logs=logs)
        elif filename.lower().endswith(".tar.bz2"):
            _write_tar(project.name, infos, tmp_filename, compression="bz2", logs=logs)
        elif filename.lower().endswith(".tar"):
            _write_tar(project.name, infos, tmp_filename, compression=None, logs=logs)
        else:
            return SimpleStatus(
                success=False,
                description="Project archive filename must be a .zip, .tar.gz, or .tar.bz2.",
                errors=["Unsupported archive filename %s." % (filename)])
        rename_over_existing(tmp_filename, filename)
    except IOError as e:
        return SimpleStatus(success=False,
                            description=("Failed to write project archive %s." % (filename)),
                            errors=[str(e)])
    finally:
        # best-effort cleanup; on success the temp file was renamed away
        try:
            os.remove(tmp_filename)
        except (IOError, OSError):
            pass

    return SimpleStatus(success=True, description=("Created project archive %s" % filename), logs=logs)
def run(self, io_loop):
    """Run the download on the given io_loop.

    Streams the response body into ``self._filename + ".part"``, optionally
    hashing it with ``self._hash_algorithm``, and renames the temp file into
    place on success. Failures are appended to ``self._errors``.

    Args:
        io_loop: Tornado IOLoop to attach the HTTP client to
        (presumably a ``tornado.ioloop.IOLoop`` — TODO confirm against callers)

    Returns (via ``gen.Return``):
        the HTTP response on success, None on failure
    """
    assert self._client is None
    dirname = os.path.dirname(self._filename)
    try:
        makedirs.makedirs_ok_if_exists(dirname)
    except Exception as e:
        self._errors.append("Could not create directory '%s': %s" % (dirname, e))
        raise gen.Return(None)
    if self._hash_algorithm is not None:
        hasher = getattr(hashlib, self._hash_algorithm)()
    self._client = httpclient.AsyncHTTPClient(
        io_loop=io_loop,
        max_clients=1,
        # without this we buffer a huge amount
        # of stuff and then call the streaming_callback
        # once.
        max_buffer_size=1024 * 1024,
        # without this we 599 on large downloads
        max_body_size=100 * 1024 * 1024 * 1024,
        force_instance=True)

    # download to a .part file, renamed into place only when complete
    tmp_filename = self._filename + ".part"
    try:
        _file = open(tmp_filename, 'wb')
    except EnvironmentError as e:
        self._errors.append("Failed to open %s: %s" % (tmp_filename, e))
        raise gen.Return(None)

    def cleanup_tmp():
        # close and delete the partial download (no-op if already renamed)
        try:
            _file.close()
            # future: we could save it in order to try
            # resuming a failed download midstream, but
            # pointless until the download code above
            # knows how to resume.
            os.remove(tmp_filename)
        except EnvironmentError:
            pass

    def writer(chunk):
        # streaming_callback: hash and append each chunk as it arrives
        if len(self._errors) > 0:
            return
        if self._hash_algorithm is not None:
            hasher.update(chunk)
        try:
            _file.write(chunk)
        except EnvironmentError as e:
            # we can't actually throw this error or Tornado freaks out, so instead
            # we ignore all future chunks once we have an error, which does mean
            # we continue to download bytes that we don't use. yuck.
            self._errors.append("Failed to write to %s: %s" % (tmp_filename, e))

    try:
        timeout_in_seconds = 60 * 10  # pretty long because we could be dealing with huge files
        request = httpclient.HTTPRequest(
            url=self._url, streaming_callback=writer, request_timeout=timeout_in_seconds)
        try:
            response = yield self._client.fetch(request)
        except Exception as e:
            self._errors.append("Failed download to %s: %s" % (self._filename, str(e)))
            raise gen.Return(None)
        # assert fetch() was supposed to throw the error, not leave it here unthrown
        assert response.error is None
        if len(self._errors) == 0:
            try:
                _file.close()  # be sure tmp_filename is flushed
                rename.rename_over_existing(tmp_filename, self._filename)
            except EnvironmentError as e:
                self._errors.append("Failed to rename %s to %s: %s" % (tmp_filename, self._filename, str(e)))
        if len(self._errors) == 0 and self._hash_algorithm is not None:
            self._hash = hasher.hexdigest()
        raise gen.Return(response)
    finally:
        cleanup_tmp()