def install_pkg(env_root, bsp_root, pkg):
    """Install a single package into the BSP's ``packages`` directory.

    :param env_root: root of the env tool installation.
    :param bsp_root: root of the BSP; packages land in ``<bsp_root>/packages``.
    :param pkg: dict with at least ``'path'`` (location of the package's
        ``package.json`` under the env package index) and ``'ver'`` (the
        requested version string).
    :return: True on success, False on failure.

    Git-hosted packages (URL ending in ``.git``) are cloned and checked out
    at the SHA recorded for the requested version; other packages are
    downloaded as archives into the local cache and unpacked.
    """
    ret = True
    local_pkgs_path = os.path.join(env_root, 'local_pkgs')
    bsp_pkgs_path = os.path.join(bsp_root, 'packages')

    # get the .config file from env; it records options such as the
    # mirror-download accelerator macro checked below.
    # (os.path.join keeps the path portable — the original hard-coded a
    # backslash-separated Windows path.)
    env_kconfig_path = os.path.join(env_root, 'tools', 'scripts', 'cmds')
    env_config_file = os.path.join(env_kconfig_path, '.config')

    package = Package()

    pkg_path = pkg['path']
    # Index paths may carry a leading separator; strip it so the join
    # below does not discard env_root.
    if pkg_path[0] == '/' or pkg_path[0] == '\\':
        pkg_path = pkg_path[1:]
    pkg_path = os.path.join(env_root, 'packages', pkg_path, 'package.json')
    package.parse(pkg_path)

    url_from_json = package.get_url(pkg['ver'])
    package_url = package.get_url(pkg['ver'])
    pkgs_name_in_json = package.get_name()
    if package_url[-4:] == '.git':
        ver_sha = package.get_versha(pkg['ver'])

    get_package_url = None
    get_ver_sha = None
    upstream_change_flag = False

    # When download acceleration is enabled in .config, prefer the mirror
    # server's URL and SHA; fall back to the upstream values when the
    # mirror URL is missing or unreachable.
    if os.path.isfile(env_config_file) and find_macro_in_config(
            env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE'):
        get_package_url, get_ver_sha = get_url_from_mirror_server(
            pkgs_name_in_json, pkg['ver'])

        # determine whether the package url from the mirror is valid
        if get_package_url != None and determine_url_valid(get_package_url):
            package_url = get_package_url
            if get_ver_sha != None:
                ver_sha = get_ver_sha
            upstream_change_flag = True

    if package_url[-4:] == '.git':
        repo_path = os.path.join(bsp_pkgs_path, pkgs_name_in_json)
        repo_path = repo_path + '-' + pkg['ver']
        cmd = 'git clone ' + package_url + ' ' + repo_path
        execute_command(cmd, cwd=bsp_pkgs_path)
        cmd = 'git checkout -q ' + ver_sha
        execute_command(cmd, cwd=repo_path)

        # The clone may have come from the mirror; point origin back at
        # the upstream URL recorded in package.json.
        if upstream_change_flag:
            cmd = 'git remote set-url origin ' + url_from_json
            execute_command(cmd, cwd=repo_path)

        # If there is a .gitmodules file in the package, prepare to update
        # the submodules.
        submod_path = os.path.join(repo_path, '.gitmodules')
        if os.path.isfile(submod_path):
            print("Start to update submodule")
            if os.path.isfile(env_config_file) and find_macro_in_config(
                    env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE'):
                # Rewrite submodule URLs to the mirror before fetching.
                replace_list = modify_submod_file_to_mirror(
                    submod_path)  # Modify .gitmodules file

            cmd = 'git submodule update --init --recursive'
            execute_command(cmd, cwd=repo_path)

            if os.path.isfile(env_config_file) and find_macro_in_config(
                    env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE'):
                # Point each fetched submodule's origin back at its
                # upstream URL (item[0]); item[2] is the submodule's
                # directory relative to the repository.
                if len(replace_list):
                    for item in replace_list:
                        submod_dir_path = os.path.join(repo_path, item[2])
                        if os.path.isdir(submod_dir_path):
                            cmd = 'git remote set-url origin ' + item[0]
                            execute_command(cmd, cwd=submod_dir_path)

        if os.path.isfile(env_config_file) and find_macro_in_config(
                env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE'):
            # Restore the .gitmodules file rewritten for the mirror.
            if os.path.isfile(submod_path):
                cmd = 'git checkout .gitmodules'
                execute_command(cmd, cwd=repo_path)
    else:
        # Download a package of compressed package type.
        if not package.download(pkg['ver'], local_pkgs_path, package_url):
            return False

        pkg_dir = package.get_filename(pkg['ver'])
        pkg_dir = os.path.splitext(pkg_dir)[0]
        pkg_fullpath = os.path.join(local_pkgs_path,
                                    package.get_filename(pkg['ver']))

        if not archive.packtest(pkg_fullpath):
            return False

        # unpack package
        if not os.path.exists(pkg_dir):
            try:
                if not package.unpack(pkg_fullpath, bsp_pkgs_path, pkg,
                                      pkgs_name_in_json):
                    ret = False
            except Exception as e:
                # Legacy code used `except Exception, e` and the removed
                # `e.message` attribute; report the exception itself.
                os.remove(pkg_fullpath)
                ret = False
                print('error message: %s\t' % e)

    # BUGFIX: the original fell off the end without returning, so callers
    # always saw None even when unpack failed; return the computed status
    # as the sibling 4-argument install_pkg does.
    return ret
def install_pkg(env_root, pkgs_root, bsp_root, pkg):
    """Install a single package into the BSP's ``packages`` directory.

    :param env_root: root of the env tool installation.
    :param pkgs_root: root of the package index (location of package.json
        descriptors).
    :param bsp_root: root of the BSP; packages land in ``<bsp_root>/packages``.
    :param pkg: dict with at least ``'path'`` and ``'ver'``.
    :return: True on success, False on failure.

    Git-hosted packages (URL ending in ``.git``) are cloned and checked out
    at the SHA recorded for the requested version; other packages are
    downloaded as archives into the local cache and unpacked.
    """
    ret = True
    local_pkgs_path = os.path.join(env_root, 'local_pkgs')
    bsp_pkgs_path = os.path.join(bsp_root, 'packages')

    # get the .config file from env
    # (os.path.join keeps the path portable — the original hard-coded a
    # backslash-separated Windows path.)
    env_kconfig_path = os.path.join(env_root, 'tools', 'scripts', 'cmds')
    env_config_file = os.path.join(env_kconfig_path, '.config')

    package = Package()

    pkg_path = pkg['path']
    # Index paths may carry a leading separator; strip it so the join
    # below does not discard pkgs_root.
    if pkg_path[0] == '/' or pkg_path[0] == '\\':
        pkg_path = pkg_path[1:]
    pkg_path = os.path.join(pkgs_root, pkg_path, 'package.json')
    package.parse(pkg_path)

    url_from_json = package.get_url(pkg['ver'])
    package_url = package.get_url(pkg['ver'])
    pkgs_name_in_json = package.get_name()
    if package_url[-4:] == '.git':
        ver_sha = package.get_versha(pkg['ver'])

    get_package_url = None
    get_ver_sha = None
    upstream_change_flag = False

    try:
        # Acceleration is assumed on when no .config exists yet, or when
        # the accelerator macro is enabled in .config.
        if (not os.path.isfile(env_config_file)) or (
                os.path.isfile(env_config_file) and find_macro_in_config(
                    env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')):
            get_package_url, get_ver_sha = get_url_from_mirror_server(
                pkgs_name_in_json, pkg['ver'])

            # determine whether the package url from the mirror is valid
            if get_package_url != None and determine_url_valid(
                    get_package_url):
                package_url = get_package_url
                if get_ver_sha != None:
                    ver_sha = get_ver_sha
                upstream_change_flag = True
    except Exception as e:
        # Best-effort: a mirror failure must not abort the install.
        print('error message:%s\t' % e)
        print(
            "Failed to connect to the mirror server, package will be downloaded from non-mirror server.\n"
        )

    if package_url[-4:] == '.git':
        try:
            repo_path = os.path.join(bsp_pkgs_path, pkgs_name_in_json)
            repo_path = repo_path + '-' + pkg['ver']
            # Quote the destination: repo_path may contain spaces.
            repo_path_full = '"' + repo_path + '"'
            clone_cmd = 'git clone ' + package_url + ' ' + repo_path_full
            execute_command(clone_cmd, cwd=bsp_pkgs_path)
            git_check_cmd = 'git checkout -q ' + ver_sha
            execute_command(git_check_cmd, cwd=repo_path)
        except Exception as e:
            print(
                "\nFailed to download software package with git. Please check the network connection."
            )
            # BUGFIX: the original called os.chdir(before) here, but
            # 'before' is never defined in this function, so every clone
            # failure raised NameError and masked the real error.
            return False

        # The clone may have come from the mirror; point origin back at
        # the upstream URL recorded in package.json.
        if upstream_change_flag:
            cmd = 'git remote set-url origin ' + url_from_json
            execute_command(cmd, cwd=repo_path)

        # If there is a .gitmodules file in the package, prepare to update submodule.
        submod_path = os.path.join(repo_path, '.gitmodules')
        if os.path.isfile(submod_path):
            print("Start to update submodule")

            if (not os.path.isfile(env_config_file)) or (
                    os.path.isfile(env_config_file) and find_macro_in_config(
                        env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')):
                # Rewrite submodule URLs to the mirror before fetching.
                replace_list = modify_submod_file_to_mirror(
                    submod_path)  # Modify .gitmodules file

            cmd = 'git submodule update --init --recursive'
            execute_command(cmd, cwd=repo_path)

            if (not os.path.isfile(env_config_file)) or (
                    os.path.isfile(env_config_file) and find_macro_in_config(
                        env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')):
                # Point each fetched submodule's origin back at its
                # upstream URL (item[0]); item[2] is the submodule's
                # directory relative to the repository.
                if len(replace_list):
                    for item in replace_list:
                        submod_dir_path = os.path.join(repo_path, item[2])
                        if os.path.isdir(submod_dir_path):
                            cmd = 'git remote set-url origin ' + item[0]
                            execute_command(cmd, cwd=submod_dir_path)

        if (not os.path.isfile(env_config_file)) or (
                os.path.isfile(env_config_file) and find_macro_in_config(
                    env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')):
            # Restore the .gitmodules file rewritten for the mirror.
            if os.path.isfile(submod_path):
                cmd = 'git checkout .gitmodules'
                execute_command(cmd, cwd=repo_path)
    else:
        # Download a package of compressed package type.
        if not package.download(pkg['ver'], local_pkgs_path, package_url):
            return False

        pkg_dir = package.get_filename(pkg['ver'])
        pkg_dir = os.path.splitext(pkg_dir)[0]
        pkg_fullpath = os.path.join(local_pkgs_path,
                                    package.get_filename(pkg['ver']))

        if not archive.packtest(pkg_fullpath):
            print("package : %s is invalid" % pkg_fullpath.encode("utf-8"))
            return False

        # unpack package
        if not os.path.exists(pkg_dir):
            try:
                if not package.unpack(pkg_fullpath, bsp_pkgs_path, pkg,
                                      pkgs_name_in_json):
                    ret = False
            except Exception as e:
                os.remove(pkg_fullpath)
                ret = False
                print('error message: %s\t' % e)
        else:
            # NOTE(review): this message prints when pkg_dir DOES exist,
            # which reads backwards — confirm the intended wording before
            # changing it; kept byte-for-byte to preserve behavior.
            print("The file does not exist.")

    return ret
def download(self, ver, path, url_from_srv):
    """Download the archive for version *ver* into directory *path*.

    :param ver: version string to fetch.
    :param path: destination directory; the file name is taken from the
        package's site entry, falling back to the basename of the URL.
    :param url_from_srv: actual URL to fetch from (possibly a mirror).
    :return: True when a valid archive is present, False otherwise.

    Retries up to 5 times, verifying the archive after each attempt and
    removing corrupt downloads.
    """
    ret = True
    url = self.get_url(ver)
    site = self.get_site(ver)

    if site and 'filename' in site:
        filename = site['filename']
        path = os.path.join(path, filename)
    else:
        # BUGFIX: the original bound only `basename` here, so the
        # progress message below raised NameError on `filename` whenever
        # the site entry had no 'filename' key.
        filename = os.path.basename(url)
        path = os.path.join(path, filename)

    if os.path.isfile(path):
        if not os.path.getsize(path):
            # Zero-byte leftover from an aborted download: redo it.
            os.remove(path)
        else:
            if archive.packtest(path):
                # Already downloaded and valid; nothing to do.
                return True
            else:
                os.remove(path)

    retryCount = 0

    headers = {
        'Connection': 'keep-alive',
        'Accept-Encoding': 'gzip, deflate',
        'Accept': '*/*',
        'User-Agent': 'curl/7.54.0'
    }

    print('Start to download package : %s ' % filename.encode("utf-8"))

    while True:
        try:
            r = requests.get(url_from_srv, stream=True, headers=headers)
            flush_count = 0

            # NOTE(review): the .encode("gbk") calls below look like a
            # Windows code-page workaround carried over from the original;
            # confirm they are still needed before removing them.
            with open(path.encode("gbk"), 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
                        f.flush()
                        flush_count += 1
                        sys.stdout.write("\rDownloding %d KB" % flush_count)
                        sys.stdout.flush()

            retryCount = retryCount + 1

            if archive.packtest(
                    path.encode("gbk")):  # make sure the file is right
                ret = True
                print("\rDownloded %d KB " % flush_count)
                print('Start to unpack. Please wait...')
                break
            else:
                if os.path.isfile(path.encode("gbk")):
                    os.remove(path.encode("gbk"))
                if retryCount > 5:
                    print(
                        "error: Have tried downloading 5 times.\nstop Downloading file :%s"
                        % path)
                    if os.path.isfile(path.encode("gbk")):
                        os.remove(path.encode("gbk"))
                    ret = False
                    break
        except Exception as e:
            # Network error: count it as a failed attempt and retry.
            retryCount = retryCount + 1
            if retryCount > 5:
                print('%s download fail!\n' %
                      path.decode("gbk").encode("utf-8"))
                if os.path.isfile(path):
                    os.remove(path)
                return False

    # BUGFIX: the original ended after the loop without returning, so a
    # successful download yielded None; callers test the result as a bool.
    return ret
def download(self, ver, path):
    """Download the archive for version *ver* into directory *path*.

    :param ver: version string to fetch.
    :param path: destination directory; the file name is taken from the
        package's site entry, falling back to the basename of the URL.
    :return: True when a valid archive is present, False otherwise.

    Retries up to 5 times, verifying the archive after each attempt and
    removing corrupt downloads.
    """
    ret = True
    import requests
    from clint.textui import progress

    url = self.get_url(ver)
    site = self.get_site(ver)
    if site and 'filename' in site:
        filename = site['filename']
        path = os.path.join(path, filename)
    else:
        basename = os.path.basename(url)
        path = os.path.join(path, basename)

    if os.path.isfile(path):
        if not os.path.getsize(path):
            # Zero-byte leftover from an aborted download: redo it.
            os.remove(path)
        else:
            if archive.packtest(path):
                # Already downloaded and valid; nothing to do.
                return True
            else:
                os.remove(path)

    retryCount = 0
    while True:
        try:
            r = requests.get(url, stream=True)
            with open(path, 'wb') as f:
                # BUGFIX: the content-length header may be absent, and
                # int(None) raises TypeError; default to 0 so the
                # progress bar still runs.
                total_length = int(r.headers.get('content-length') or 0)
                for chunk in progress.bar(
                        r.iter_content(chunk_size=1024),
                        width=50,
                        expected_size=(total_length / 1024) + 1):
                    if chunk:
                        f.write(chunk)
                        f.flush()

            retryCount = retryCount + 1

            if archive.packtest(path):  # make sure the file is right
                ret = True
                break
            else:
                if os.path.isfile(path):
                    os.remove(path)
                if retryCount > 5:
                    print(
                        "error: Have tried downloading 5 times.\nstop Downloading file %s"
                        % path)
                    if os.path.isfile(path):
                        os.remove(path)
                    ret = False
                    break
        except Exception as e:
            # Network error: count it as a failed attempt and retry.
            print(url)
            retryCount = retryCount + 1
            if retryCount > 5:
                print('%s download fail!' % path)
                if os.path.isfile(path):
                    os.remove(path)
                return False

    # BUGFIX: the original ended after the loop without returning, so a
    # successful download yielded None; callers test the result as a bool.
    return ret