def test_download_http_url__no_directory_traversal(mock_raise_for_status: Mock, tmpdir: Path) -> None:
    """
    A ".." component in the Content-Disposition filename must not let the
    downloaded file escape the target download directory.
    """
    url = "http://www.example.com/whatever.tgz"
    payload = b"downloaded"
    link = Link(url)

    session = Mock()
    response = MockResponse(payload)
    response.url = url
    response.headers = {
        # An unguessable content-type stops mimetypes.guess_extension
        # from appending an extension to the saved filename.
        "content-type": "random",
        "content-disposition": 'attachment;filename="../out_dir_file"',
    }
    session.get.return_value = response

    download = Downloader(session, progress_bar="on")
    target_dir = os.fspath(tmpdir.joinpath("download"))
    os.mkdir(target_dir)
    file_path, content_type = download(link, target_dir)

    # The ".." must have been stripped: the file lands inside target_dir
    # under its sanitised basename only.
    assert os.listdir(target_dir) == ["out_dir_file"]
    mock_raise_for_status.assert_called_once_with(response)
def make_requirement_preparer(
    temp_build_dir,  # type: TempDirectory
    options,  # type: Values
    req_tracker,  # type: RequirementTracker
    session,  # type: PipSession
    finder,  # type: PackageFinder
    use_user_site,  # type: bool
    download_dir=None,  # type: str
    wheel_download_dir=None,  # type: str
):
    # type: (...) -> RequirementPreparer
    """
    Create a RequirementPreparer wired up from the parsed command-line
    options and the given session/finder collaborators.
    """
    build_dir = temp_build_dir.path
    # The temp build dir must already have been created by the caller.
    assert build_dir is not None
    return RequirementPreparer(
        build_dir=build_dir,
        src_dir=options.src_dir,
        download_dir=download_dir,
        wheel_download_dir=wheel_download_dir,
        build_isolation=options.build_isolation,
        req_tracker=req_tracker,
        downloader=Downloader(session, progress_bar=options.progress_bar),
        finder=finder,
        require_hashes=options.require_hashes,
        use_user_site=use_user_site,
    )
def _basic_resolver(self, finder, require_hashes=False):
    """
    Generator fixture: yield a minimally configured legacy Resolver whose
    build/src dirs live under this test case's temporary directory.  The
    requirement tracker stays open for the lifetime of the yielded object.
    """
    make_install_req = partial(
        install_req_from_req_string,
        isolated=False,
        use_pep517=None,
    )
    session = PipSession()

    with get_requirement_tracker() as tracker:
        preparer = RequirementPreparer(
            build_dir=os.path.join(self.tempdir, 'build'),
            src_dir=os.path.join(self.tempdir, 'src'),
            download_dir=None,
            wheel_download_dir=None,
            build_isolation=True,
            req_tracker=tracker,
            session=session,
            downloader=Downloader(session, progress_bar="on"),
            finder=finder,
            require_hashes=require_hashes,
            use_user_site=False,
            lazy_wheel=False,
        )
        yield Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=None,
            make_install_req=make_install_req,
            use_user_site=False,
            upgrade_strategy="to-satisfy-only",
            ignore_dependencies=False,
            ignore_installed=False,
            ignore_requires_python=False,
            force_reinstall=False,
        )
def test_unpack_http_url_with_urllib_response_without_content_type(data):
    """
    It should download and unpack files even if no Content-Type header exists
    """
    _real_session = PipSession()

    def _get_without_content_type(*args, **kwargs):
        # Delegate to a real session, then drop the header under test.
        resp = _real_session.get(*args, **kwargs)
        del resp.headers["Content-Type"]
        return resp

    session = Mock()
    session.get = _get_without_content_type
    downloader = Downloader(session, progress_bar="on")

    uri = path_to_url(data.packages.joinpath("simple-1.0.tar.gz"))
    link = Link(uri)
    temp_dir = mkdtemp()
    try:
        unpack_http_url(
            link,
            temp_dir,
            downloader=downloader,
            download_dir=None,
        )
        # The sdist unpacked despite the missing Content-Type.
        assert set(os.listdir(temp_dir)) == {
            'PKG-INFO', 'setup.cfg', 'setup.py', 'simple', 'simple.egg-info'
        }
    finally:
        rmtree(temp_dir)
def downloadAll(self, session):
    """Fetch the ArduPy core and the platform's ARM GCC toolchain.

    Both archives are unpacked under the user data directory; each step is
    skipped when its target already exists.

    :param session: pip-style session used by the Downloader.
    """
    link = Link("http://files.seeedstudio.com/ardupy/ardupy-core.zip")
    downloader = Downloader(session, progress_bar="on")
    ardupycoredir = user_data_dir + "/ardupycore"

    if not os.path.exists(ardupycoredir + "/ArduPy"):
        try:
            os.makedirs(ardupycoredir)
        except OSError as error:
            # The directory can be left over from an earlier partial run;
            # that is harmless, so report it and carry on.
            print("Directory '%s' already exists" % ardupycoredir)
            print(error)
        unpack_url(
            link,
            ardupycoredir,
            downloader=downloader,
            download_dir=None,
        )

    if not os.path.exists(ardupycoredir + "/Seeeduino/tools/arm-none-eabi-gcc"):
        # Pick the toolchain archive for the host platform.
        # NOTE(review): on an unrecognised platform `link` keeps pointing at
        # the core archive above and the wrong file would be unpacked here —
        # confirm whether an explicit error is wanted instead.
        if sys.platform == "linux":
            link = Link(
                "http://files.seeedstudio.com/arduino/tools/x86_64-pc-linux-gnu/gcc-arm-none-eabi-4.8.3-2014q1-linux64.tar.gz")
        elif sys.platform == "win32":
            link = Link(
                "http://files.seeedstudio.com/arduino/tools/i686-mingw32/gcc-arm-none-eabi-4.8.3-2014q1-windows.tar.gz")
        elif sys.platform == "darwin":
            link = Link(
                "http://files.seeedstudio.com/arduino/tools/x86_64-apple-darwin/gcc-arm-none-eabi-4.8.3-2014q1-mac.tar.gz")
        unpack_url(
            link,
            ardupycoredir + "/Seeeduino/tools/arm-none-eabi-gcc",
            downloader=downloader,
            download_dir=None,
        )
def test_download_http_url__no_directory_traversal(tmpdir):
    """
    A ".." component in the Content-Disposition filename must not let the
    downloaded file escape the target download directory.
    """
    mock_url = 'http://www.example.com/whatever.tgz'
    body = b'downloaded'
    link = Link(mock_url)

    session = Mock()
    resp = MockResponse(body)
    resp.url = mock_url
    resp.headers = {
        # An unguessable content-type keeps mimetypes.guess_extension
        # from tacking an extension onto the saved name.
        'content-type': 'random',
        'content-disposition': 'attachment;filename="../out_dir_file"'
    }
    session.get.return_value = resp
    downloader = Downloader(session, progress_bar="on")

    download_dir = tmpdir.joinpath('download')
    os.mkdir(download_dir)
    file_path, content_type = _download_http_url(
        link,
        downloader,
        download_dir,
        hashes=None,
    )

    # Only the sanitised basename may appear inside download_dir.
    assert os.listdir(download_dir) == ['out_dir_file']
def __init__(
    self,
    build_dir,  # type: str
    download_dir,  # type: Optional[str]
    src_dir,  # type: str
    wheel_download_dir,  # type: Optional[str]
    build_isolation,  # type: bool
    req_tracker,  # type: RequirementTracker
    session,  # type: PipSession
    progress_bar,  # type: str
    finder,  # type: PackageFinder
    require_hashes,  # type: bool
    use_user_site,  # type: bool
    lazy_wheel,  # type: bool
):
    # type: (...) -> None
    """Record preparation settings and build the download helpers.

    Pure state initialisation: no network or filesystem work happens here.
    """
    super(RequirementPreparer, self).__init__()

    self.src_dir = src_dir
    self.build_dir = build_dir
    self.req_tracker = req_tracker
    self._session = session
    # One helper for single-link downloads, one for batched downloads.
    self._download = Downloader(session, progress_bar)
    self._batch_download = BatchDownloader(session, progress_bar)
    self.finder = finder

    # Where still-packed archives should be written to. If None, they are
    # not saved, and are deleted immediately after unpacking.
    self.download_dir = download_dir

    # Where still-packed .whl files should be written to. If None, they are
    # written to the download_dir parameter. Separate to download_dir to
    # permit only keeping wheel archives for pip wheel.
    self.wheel_download_dir = wheel_download_dir

    # NOTE
    # download_dir and wheel_download_dir overlap semantically and may
    # be combined if we're willing to have non-wheel archives present in
    # the wheelhouse output by 'pip wheel'.

    # Is build isolation allowed?
    self.build_isolation = build_isolation

    # Should hash-checking be required?
    self.require_hashes = require_hashes

    # Should install in user site-packages?
    self.use_user_site = use_user_site

    # Should wheels be downloaded lazily?
    self.use_lazy_wheel = lazy_wheel

    # Memoized downloaded files, as mapping of url: (path, mime type)
    self._downloaded = {}  # type: Dict[str, Tuple[str, str]]

    # Previous "header" printed for a link-based InstallRequirement
    self._previous_requirement_header = ("", "")
def __init__(
    self,
    build_dir: str,
    download_dir: Optional[str],
    src_dir: str,
    build_isolation: bool,
    check_build_deps: bool,
    build_tracker: BuildTracker,
    session: PipSession,
    progress_bar: str,
    finder: PackageFinder,
    require_hashes: bool,
    use_user_site: bool,
    lazy_wheel: bool,
    verbosity: int,
) -> None:
    """Record preparation settings and build the download helpers.

    Pure state initialisation: no network or filesystem work happens here.
    """
    super().__init__()

    self.src_dir = src_dir
    self.build_dir = build_dir
    self.build_tracker = build_tracker
    self._session = session
    # One helper for single-link downloads, one for batched downloads.
    self._download = Downloader(session, progress_bar)
    self._batch_download = BatchDownloader(session, progress_bar)
    self.finder = finder

    # Where still-packed archives should be written to. If None, they are
    # not saved, and are deleted immediately after unpacking.
    self.download_dir = download_dir

    # Is build isolation allowed?
    self.build_isolation = build_isolation

    # Should check build dependencies?
    self.check_build_deps = check_build_deps

    # Should hash-checking be required?
    self.require_hashes = require_hashes

    # Should install in user site-packages?
    self.use_user_site = use_user_site

    # Should wheels be downloaded lazily?
    self.use_lazy_wheel = lazy_wheel

    # How verbose should underlying tooling be?
    self.verbosity = verbosity

    # Memoized downloaded files, as mapping of url: path.
    self._downloaded: Dict[str, str] = {}

    # Previous "header" printed for a link-based InstallRequirement
    self._previous_requirement_header = ("", "")
def __init__(
    self,
    build_dir,  # type: str
    download_dir,  # type: Optional[str]
    src_dir,  # type: str
    build_isolation,  # type: bool
    req_tracker,  # type: RequirementTracker
    session,  # type: PipSession
    progress_bar,  # type: str
    finder,  # type: PackageFinder
    require_hashes,  # type: bool
    use_user_site,  # type: bool
    lazy_wheel,  # type: bool
    in_tree_build,  # type: bool
):
    # type: (...) -> None
    """Record preparation settings and build the download helpers.

    Pure state initialisation: no network or filesystem work happens here.
    """
    super().__init__()

    self.src_dir = src_dir
    self.build_dir = build_dir
    self.req_tracker = req_tracker
    self._session = session
    # One helper for single-link downloads, one for batched downloads.
    self._download = Downloader(session, progress_bar)
    self._batch_download = BatchDownloader(session, progress_bar)
    self.finder = finder

    # Where still-packed archives should be written to. If None, they are
    # not saved, and are deleted immediately after unpacking.
    self.download_dir = download_dir

    # Is build isolation allowed?
    self.build_isolation = build_isolation

    # Should hash-checking be required?
    self.require_hashes = require_hashes

    # Should install in user site-packages?
    self.use_user_site = use_user_site

    # Should wheels be downloaded lazily?
    self.use_lazy_wheel = lazy_wheel

    # Should in-tree builds be used for local paths?
    self.in_tree_build = in_tree_build

    # Memoized downloaded files, as mapping of url: (path, mime type)
    self._downloaded = {}  # type: Dict[str, Tuple[str, str]]

    # Previous "header" printed for a link-based InstallRequirement
    self._previous_requirement_header = ("", "")
def make_requirement_preparer(
    temp_build_dir,  # type: TempDirectory
    options,  # type: Values
    req_tracker,  # type: RequirementTracker
    session,  # type: PipSession
    finder,  # type: PackageFinder
    use_user_site,  # type: bool
    download_dir=None,  # type: str
    wheel_download_dir=None,  # type: str
):
    # type: (...) -> RequirementPreparer
    """
    Create a RequirementPreparer instance for the given parameters.
    """
    downloader = Downloader(session, progress_bar=options.progress_bar)
    temp_build_dir_path = temp_build_dir.path
    assert temp_build_dir_path is not None

    # fast-deps (lazy wheels) is only honoured under the 2020 resolver.
    lazy_wheel = (
        '2020-resolver' in options.features_enabled
        and 'fast-deps' in options.features_enabled
    )
    if lazy_wheel:
        logger.warning(
            'pip is using lazily downloaded wheels using HTTP '
            'range requests to obtain dependency information. '
            'This experimental feature is enabled through '
            '--use-feature=fast-deps and it is not ready for '
            'production.'
        )

    return RequirementPreparer(
        build_dir=temp_build_dir_path,
        src_dir=options.src_dir,
        download_dir=download_dir,
        wheel_download_dir=wheel_download_dir,
        build_isolation=options.build_isolation,
        req_tracker=req_tracker,
        session=session,
        downloader=downloader,
        finder=finder,
        require_hashes=options.require_hashes,
        use_user_site=use_user_site,
        lazy_wheel=lazy_wheel,
    )
def downloadAll(self, session):
    """Download and unpack the board's core archive and tool dependencies.

    Uses the board descriptor (``self.board_id``) to resolve the core
    archive URL/directory and each tool's URL/versioned directory; steps
    whose target directory already exists are skipped.  Any download
    failure logs the error, removes the partial target, and exits.

    :param session: pip-style session used by the Downloader.
    """
    archiveFile = parser.get_archiveFile_by_id(self.board_id)
    downloader = Downloader(session, progress_bar="on")
    ardupycoredir = parser.get_core_dir_by_id(self.board_id)

    if not os.path.exists(ardupycoredir):
        log.info('Downloading ' + archiveFile['archiveFileName'] + '...')
        # The original code carried a second, identical `except Exception`
        # clause here; it was unreachable dead code and has been removed.
        try:
            unpack_url(
                Link(archiveFile['url']),
                ardupycoredir,
                downloader,
                download_dir=None,
            )
        except Exception as e:
            # NOTE(review): os.remove cannot delete a directory; a partial
            # unpack likely needs shutil.rmtree — confirm intent.
            log.error(e)
            os.remove(ardupycoredir)
            sys.exit(1)

    toolsDependencies = parser.get_toolsDependencies_url_by_id(
        self.board_id)
    toolsdir = parser.get_tool_dir_by_id(self.board_id)
    for tool in toolsDependencies:
        tooldir = str(Path(toolsdir, tool['name'], tool['version']))
        if not os.path.exists(tooldir):
            log.info('Downloading ' + tool['name'] + '@' +
                     tool['version'] + '...')
            try:
                unpack_url(
                    Link(tool['url']),
                    tooldir,
                    downloader,
                    download_dir=None,
                )
            except Exception as e:
                log.error(e)
                os.remove(tooldir)
                sys.exit(1)
def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b'downloaded'
    digest = hashlib.new('sha1', contents)
    link = Link(base_url + '#sha1=' + digest.hexdigest())

    session = Mock()
    session.get = Mock()
    response = session.get.return_value = MockResponse(contents)
    response.headers = {'content-type': 'application/x-tar'}
    response.url = base_url
    downloader = Downloader(session, progress_bar="on")

    download_dir = mkdtemp()
    try:
        # Seed the download dir with a stale file whose hash won't match.
        stale_file = os.path.join(download_dir, 'somepackage.tgz')
        create_file(stale_file, 'some contents')

        unpack_http_url(
            link,
            'location',
            downloader=downloader,
            download_dir=download_dir,
            hashes=Hashes({'sha1': [digest.hexdigest()]}),
        )

        # The stale file's hash mismatched, so a fresh GET happened...
        session.get.assert_called_once_with(
            'http://www.example.com/somepackage.tgz',
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        # ...and the cached file was replaced by the new payload.
        with open(stale_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(download_dir)
def pip_download_link(resconfig, url: str, destdir: str):
    """Download *url* with pip's network stack into *destdir*.

    Builds a PipSession trusting the configured index host, authenticates
    with the configured credentials, and copies the fetched file out of
    pip's global temp dir before it is destroyed.  All stdout chatter from
    pip internals is redirected to stderr.
    """
    with redirect_stdout(sys.stderr):
        repository = resconfig['source']['repository']
        netloc = urlsplit(repository['index_url'])[1]
        hostname = netloc.split(':')[0]
        with PipSession(retries=RETRIES, trusted_hosts=[
                hostname,
        ]) as session:
            session.timeout = TIMEOUT
            session.auth.prompting = False
            session.auth.passwords[netloc] = (
                repository.get('username', None),
                repository.get('password', None))
            # pip internals hardcode global tempdir manager.
            # need to copy to destdir before tempdir gets blown away.
            with global_tempdir_manager():
                file = unpack_url(
                    Link(url),
                    destdir,
                    Downloader(session, "pretty"),
                )
                shutil.copy(file.path, destdir)
def test_unpack_url_with_urllib_response_without_content_type(
        data: TestData) -> None:
    """
    It should download and unpack files even if no Content-Type header exists
    """
    _real_session = PipSession()

    def _get_without_content_type(*args: Any, **kwargs: Any) -> Dict[str, str]:
        # Delegate to a real session, then drop the header under test.
        resp = _real_session.get(*args, **kwargs)
        del resp.headers["Content-Type"]
        return resp

    session = Mock()
    session.get = _get_without_content_type
    download = Downloader(session, progress_bar="on")

    uri = data.packages.joinpath("simple-1.0.tar.gz").as_uri()
    link = Link(uri)
    temp_dir = mkdtemp()
    try:
        unpack_url(
            link,
            temp_dir,
            download=download,
            download_dir=None,
            verbosity=0,
        )
        # The sdist unpacked despite the missing Content-Type.
        assert set(os.listdir(temp_dir)) == {
            "PKG-INFO",
            "setup.cfg",
            "setup.py",
            "simple",
            "simple.egg-info",
        }
    finally:
        rmtree(temp_dir)
logger.warning( 'pip is using lazily downloaded wheels using HTTP ' 'range requests to obtain dependency information. ' 'This experimental feature is enabled through ' '--use-feature=fast-deps and it is not ready for ' 'production.' ) else: lazy_wheel = False if 'fast-deps' in options.features_enabled: logger.warning( 'fast-deps has no effect when used with the legacy resolver.' ) ======= downloader = Downloader(session, progress_bar=options.progress_bar) temp_build_dir_path = temp_build_dir.path assert temp_build_dir_path is not None >>>>>>> 74c061954d5e927be4caafbd793e96a50563c265 return RequirementPreparer( build_dir=temp_build_dir_path, src_dir=options.src_dir, download_dir=download_dir, <<<<<<< HEAD build_isolation=options.build_isolation, req_tracker=req_tracker, session=session, progress_bar=options.progress_bar, finder=finder,
def super_run(self, options, args):
    """Copy of relevant parts from InstallCommand's run().

    Re-implements the resolve step of ``pip install`` across many pip
    versions: each ``try``/``except TypeError`` (or ``AttributeError``)
    ladder probes one historical API shape and falls back to the next.
    The statement order is deliberate — do not reorder.

    NOTE(review): which pip versions each branch targets is inferred from
    the probed signatures — confirm against pip's changelog.
    """
    upgrade_strategy = "eager"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)
        # Probe RequirementSet's signature: older pips took require_hashes,
        # and pip 20 moved the tracker dir into RequirementTracker().
        try:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )
            req_tracker_path = False
        except TypeError:
            # got an unexpected keyword argument 'require_hashes'
            requirement_set = RequirementSet()
            req_tracker_path = True  # pip 20
        try:
            with TempDirectory(
                    options.build_dir, delete=True,
                    kind="install") as directory, RequirementTracker(
                        *([directory.path] if req_tracker_path else []
                          )) as req_tracker:
                # Fill the requirement set; the helper's signature changed
                # across versions, and it was removed entirely later on.
                try:
                    self.populate_requirement_set(requirement_set, args,
                                                  options, finder, session,
                                                  self.name, wheel_cache)
                except TypeError:
                    self.populate_requirement_set(requirement_set, args,
                                                  options, finder, session,
                                                  wheel_cache)
                except AttributeError:
                    # populate_requirement_set() is gone in newer pips.
                    requirement_set = self.get_requirements(
                        args, options, finder, session, wheel_cache)
                # Build the preparer; newer pips want a Downloader plus
                # several extra keyword arguments.
                try:
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )
                except TypeError:
                    from pip._internal.network.download import Downloader
                    downloader = Downloader(
                        session, progress_bar=options.progress_bar)
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        download_dir=None,
                        src_dir=options.src_dir,
                        wheel_download_dir=None,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                        downloader=downloader,
                        finder=finder,
                        require_hashes=options.require_hashes,
                        use_user_site=options.use_user_site,
                    )
                # Build the resolver, probing signature variants in order.
                try:
                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                except TypeError:
                    from pip._internal.req.constructors import (
                        install_req_from_req_string,
                    )
                    make_install_req = partial(
                        install_req_from_req_string,
                        isolated=options.isolated_mode,
                        wheel_cache=wheel_cache,
                        use_pep517=options.use_pep517,
                    )
                    try:
                        resolver = Resolver(
                            preparer=preparer,
                            session=session,
                            finder=finder,
                            make_install_req=make_install_req,
                            use_user_site=options.use_user_site,
                            ignore_dependencies=options.ignore_dependencies,
                            ignore_installed=options.ignore_installed,
                            ignore_requires_python=options.ignore_requires_python,
                            force_reinstall=options.force_reinstall,
                            upgrade_strategy=upgrade_strategy,
                        )
                    except TypeError:
                        # Same call, minus the session keyword.
                        try:
                            resolver = Resolver(
                                preparer=preparer,
                                finder=finder,
                                make_install_req=make_install_req,
                                use_user_site=options.use_user_site,
                                ignore_dependencies=options.ignore_dependencies,
                                ignore_installed=options.ignore_installed,
                                ignore_requires_python=options.ignore_requires_python,
                                force_reinstall=options.force_reinstall,
                                upgrade_strategy=upgrade_strategy,
                            )
                        except TypeError:
                            # Newest shape: wheel_cache moved to Resolver,
                            # so rebuild make_install_req without it.
                            make_install_req = partial(
                                install_req_from_req_string,
                                isolated=options.isolated_mode,
                                use_pep517=options.use_pep517,
                            )
                            resolver = Resolver(
                                preparer=preparer,
                                finder=finder,
                                make_install_req=make_install_req,
                                use_user_site=options.use_user_site,
                                ignore_dependencies=options.ignore_dependencies,
                                ignore_installed=options.ignore_installed,
                                ignore_requires_python=options.ignore_requires_python,
                                force_reinstall=options.force_reinstall,
                                upgrade_strategy=upgrade_strategy,
                                wheel_cache=wheel_cache,
                            )
                # resolve() later began returning the set and taking
                # check_supported_wheels.
                try:
                    resolver.resolve(requirement_set)
                except TypeError:
                    requirement_set = resolver.resolve(
                        requirement_set, check_supported_wheels=True)
                finder.format_control.no_binary = set()  # allow binaries
                self.process_requirements(options, requirement_set, finder,
                                          resolver)
        finally:
            try:
                requirement_set.cleanup_files()
                wheel_cache.cleanup()
            except AttributeError:
                # Cleanup methods were removed; see
                # https://github.com/pypa/pip/commit/5cca8f10b304a5a7f3a96dfd66937615324cf826
                pass
    return requirement_set
def run(self, options, args):
    """Install one or more ArduPy libraries (from URL or local path).

    For each requested package: derive its install directory under the
    user "modules" dir, optionally wipe a previous copy (-F), copy or
    download-and-unpack the package, then fetch every dependency listed in
    the package's library.json.  Returns ERROR on the first failure
    (removing the partial install), SUCCESS otherwise.
    """
    if len(args) == 0:
        log.warning("Please enter the url of the library!")
        log.info(
            'Usage:\n\r aip install https://github.com/Seeed-Studio/seeed-ardupy-ultrasonic-sensor'
        )
        return ERROR
    moduledir = Path(parser.user_config_dir, "modules")
    session = self.get_default_session(options)
    downloader = Downloader(session, progress_bar="on")
    for package in args:
        if options.local:
            # Local path: directory name is the last path segment.
            package_location = package.split('/')[len(package.split('/')) - 1]
        else:
            # Remote: directory name is the repo name, i.e. the URL
            # segment just before '/archive'.
            package_url = self.get_archive_url(options, package)
            package_location = package_url[:package_url.find('/archive')]
            package_location = package_location.split('/')[
                len(package_location.split('/')) - 1]
        package_location = str(Path(moduledir, package_location))  # form location
        if options.Force:
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
        try:
            os.makedirs(package_location)
        except OSError as error:
            # An existing directory means a previous install; require -F.
            log.error(error)
            log.info("Use aip install -F Overwrite previous Library")
            return ERROR
        try:
            if options.local:
                log.info("Copying library......")
                shutil.copytree(package, package_location,
                                dirs_exist_ok=True)
            else:
                log.info("Downloading library......")
                unpack_url(
                    Link(package_url),
                    package_location,
                    downloader=downloader,
                    download_dir=None,
                )
        except Exception as error:
            log.error(error)
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
            return ERROR
        # downloading dependencies
        package_json_location = Path(package_location, 'library.json')
        try:
            # get library.json from package
            with open(package_json_location, 'r') as package_json:
                # get dependencies information from library.json
                package_json_dict = json.load(package_json)
                dependencies = package_json_dict["dependencies"]
                if len(dependencies) != 0:
                    log.info("Downloading dependencies......")
                    for dependency in dependencies:
                        dependency_url = self.get_archive_url(
                            options, dependency["url"])
                        dependency_location = package_location + '/' + dependency[
                            "name"]
                        try:
                            unpack_url(
                                Link(dependency_url),
                                dependency_location,
                                downloader=downloader,
                                download_dir=None,
                            )
                        except Exception as error:
                            log.error(error)
                            if os.path.exists(package_location):  # remove the old package
                                shutil.rmtree(package_location,
                                              onerror=readonly_handler)
                            return ERROR
        except Exception as error:
            log.error(error)
            log.error("Bad dependency format, please check library.json")
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
            return ERROR
    return SUCCESS
def run(self, options, args):
    """Install one or more ArduPy libraries from archive URLs.

    For each requested package: derive its install directory under the
    user "modules" dir, optionally wipe a previous copy (-F), download
    and unpack the archive, then fetch every dependency listed in the
    package's library.json.  Returns ERROR on the first failure
    (removing the partial install); falls through otherwise.
    """
    moduledir = Path(user_config_dir, "modules")
    session = self.get_default_session(options)
    downloader = Downloader(session, progress_bar="on")
    for package in args:
        # Directory name is the repo name, i.e. the URL segment just
        # before '/archive'.
        package_url = self.get_archive_url(options, package)
        package_location = package_url[:package_url.find('/archive')]
        package_location = package_location.split('/')[
            len(package_location.split('/')) - 1]
        package_location = str(Path(moduledir, package_location))  # form location
        if options.Force:
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
        try:
            os.makedirs(package_location)
        except OSError as error:
            # An existing directory means a previous install; require -F.
            log.error(error)
            log.info("Use aip install -F Overwrite previous Library")
            return ERROR
        log.info("Downloading library......")
        try:
            unpack_url(
                Link(package_url),
                package_location,
                downloader=downloader,
                download_dir=None,
            )
        except Exception as error:
            log.error(error)
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
            return ERROR
        # downloading dependencies
        package_json_location = Path(package_location, 'library.json')
        try:
            # get library.json from package
            with open(package_json_location, 'r') as package_json:
                # get dependencies information from library.json
                package_json_dict = json.load(package_json)
                dependencies = package_json_dict["dependencies"]
                if len(dependencies) != 0:
                    log.info("Downloading dependencies......")
                    for dependency in dependencies:
                        dependency_url = self.get_archive_url(
                            options, dependency["url"])
                        dependency_location = package_location + '/' + dependency[
                            "name"]
                        try:
                            unpack_url(
                                Link(dependency_url),
                                dependency_location,
                                downloader=downloader,
                                download_dir=None,
                            )
                        except Exception as error:
                            log.error(error)
                            if os.path.exists(package_location):  # remove the old package
                                shutil.rmtree(package_location,
                                              onerror=readonly_handler)
                            return ERROR
        except Exception as error:
            log.error(error)
            log.error("Bad dependency format, please check library.json")
            if os.path.exists(package_location):  # remove the old package
                shutil.rmtree(package_location, onerror=readonly_handler)
            return ERROR
def run(self, options, args):
    """Flash an ArduPy firmware image over serial using bossac.

    Downloads the platform's bossac binary on first use, kicks the board
    into its bootloader by opening the port at 1200 baud, then invokes
    bossac on the chosen firmware: an explicit path from ``args``, the
    latest published image (--origin), or the locally built
    deploy/Ardupy.bin.
    """
    self.port = options.port
    bossacdir = str(
        Path(user_config_dir + "/ardupycore/Seeeduino/tools/bossac"))
    if not os.path.exists(bossacdir):
        os.makedirs(bossacdir)
    session = self.get_default_session(options)
    # Pick the bossac archive for the host platform.
    if sys.platform == "linux":
        link = Link(
            "http://files.seeedstudio.com/arduino/tools/i686-linux-gnu/bossac-1.9.1-seeeduino-linux.tar.gz"
        )
    if sys.platform == "win32":
        link = Link(
            "http://files.seeedstudio.com/arduino/tools/i686-mingw32/bossac-1.9.1-seeeduino-windows.tar.bz2"
        )
    if sys.platform == "darwin":
        link = Link(
            "http://files.seeedstudio.com/arduino/tools/x86_64-apple-darwin/bossac-1.8-48-gb176eee-i386-apple-darwin16.1.0.tar.gz"
        )
    bossac = ""
    if platform.system() == "Windows":
        bossac = str(Path(bossacdir, "bossac.exe"))
    else:
        bossac = str(Path(bossacdir, "bossac"))
    if not os.path.exists(bossac):
        # First run: fetch and unpack the bossac tool.
        downloader = Downloader(session, progress_bar="on")
        unpack_url(
            link,
            bossacdir,
            downloader=downloader,
            download_dir=None,
        )
    # Try up to five times to get the board into bootloader mode via the
    # 1200-baud touch, polling the serial ports between attempts.
    try_count = 0
    do_bossac = True
    while True:
        stty = self.stty
        print(stty)
        if stty != "echo not support":
            os.system(stty % 1200)
        #os.system(str(bossac)+ " --help")
        port, desc, hwid, isbootloader = self.serial.getBootloaderBoard()
        print(port)
        time.sleep(1)
        if isbootloader == True:
            break
        try_count = try_count + 1
        if try_count == 5:
            do_bossac = False
            break
    if do_bossac == True:
        name, version, url = self.serial.getBoardByPort(port)
        ardupybin = ""
        if len(args) > 0:
            # Firmware path given explicitly on the command line.
            ardupybin = args[0]
            if not os.path.exists(ardupybin):
                log.warning('The path of firmware didn\'t exists!')
                return ERROR
        elif options.origin == True:
            # Use (and cache) the latest published firmware for this board.
            firmwaredir = str(
                Path(user_config_dir + "/deploy/firmware/" +
                     name.replace(' ', '_')))
            if not os.path.exists(firmwaredir):
                os.makedirs(firmwaredir)
            ardupybin = str(Path(firmwaredir, "ardupy_laster.bin"))
            if not os.path.exists(ardupybin):
                downloader = Downloader(session, progress_bar="on")
                _download_http_url(link=Link(url),
                                   downloader=downloader,
                                   temp_dir=firmwaredir,
                                   hashes=None)
        else:
            # Default to the locally built image.
            ardupybin = str(Path(user_config_dir + "/deploy/Ardupy.bin"))
        _flash_parm = flash_param[name.replace(' ', '_')]
        print((str(bossac) + _flash_parm) % (port, ardupybin))
        os.system((str(bossac) + _flash_parm) % (port, ardupybin))
    else:
        log.warning("Sorry, the device you should have is not plugged in.")
        return ERROR
    return SUCCESS