def down(self, outdir):
    """Download every product whose cloud cover is within the allowed limit.

    Iterates the products reported by ``getCloudCover()`` and fetches each
    one (via the URL table from ``getDownloadUrl()``) into
    ``<outdir>/<product>/<product>.zip``.

    :param outdir: target directory for the downloaded archives; created
        (including missing parents) when it does not already exist.
    """
    cloudC_dict = self.getCloudCover()
    if cloudC_dict is None:
        return
    # Hoisted out of the loop: the directory check and the URL table are
    # loop-invariant (the original re-ran both on every iteration).
    if not os.path.isdir(outdir):
        print("目标目录不存在,创建")
        # makedirs (vs. mkdir) also creates missing parent directories.
        os.makedirs(outdir)
    url_dict = self.getDownloadUrl()
    for prod in cloudC_dict:
        if float(cloudC_dict[prod]) > self.max_cloud_cover:
            # NOTE(review): the original code aborts ALL remaining
            # downloads on the first too-cloudy product; that behavior is
            # preserved here — confirm whether a per-product skip
            # (continue) was intended instead.
            print("云量大于{} \n".format(self.max_cloud_cover))
            return
        url = url_dict[prod]
        print(" download url: {} \n".format(url))
        # Prompt for credentials lazily, the first time they are needed.
        if Downloader_peps.username is None \
                or Downloader_peps.password is None:
            self.set_username_and_pass()
        request = self.construct_request(url,
                                         Downloader_peps.username,
                                         Downloader_peps.password)
        # presumably download_one_by_urllib_basic creates the per-product
        # subdirectory under outdir if needed — TODO confirm
        download_one_by_urllib_basic(
            request, os.path.join(outdir, prod, prod + '.zip'))
def query_by_name(self):
    """Query Sentinel-2 products on SciHub by product name.

    One query is issued per product returned by ``getDownloadUrl()``;
    each result is stored as an XML file in the system temp directory.

    :return: list of paths to the XML result files, or ``None`` when no
        download URLs are available.
    """
    peps_down_url_dict = self.getDownloadUrl()
    if peps_down_url_dict is None:
        return None
    queryfiles = []
    for idx, prod in enumerate(peps_down_url_dict):
        # Equivalent command line:
        # wget --no-check-certificate --user=scihub_user
        # --password=scihub_pass
        # --output-document=query_results.txt query_url
        query_url = self.scihub_query_baseUrl + "filename:{}*".format(prod)
        query_request = self.construct_request(
            query_url, self.scihub_user, self.scihub_pass)
        out_xml = os.path.join(
            tempfile.gettempdir(), 'query' + str(idx) + '.xml')
        download_one_by_urllib_basic(query_request, out_xml)
        queryfiles.append(out_xml)
    return queryfiles
def down(self, outDir):
    """Download every CODE-DE product URL into *outDir*.

    :param outDir: directory the archives are written to; each file keeps
        the last path component of its URL as its name.
    """
    dlist = self.getDownloadUrls_code_de()
    if dlist is None:
        return
    for url in dlist:
        print("download url: {} \n".format(url))
        req = self.construct_request(
            url, self.code_de_user, self.code_de_pass)
        target = os.path.join(outDir, url.split('/')[-1])
        download_one_by_urllib_basic(req, target)
def queryOneTile(tile, date, search_result_json):
    """Query product metadata for one tile/date; save the JSON response.

    :param tile: tile identifier forwarded to ``get_query_url_S2ST``.
    :param date: acquisition date forwarded to ``get_query_url_S2ST``.
    :param search_result_json: path the query result is written to.
    :return: ``search_result_json``, for convenient chaining.
    """
    # Fixed browser User-Agent so the endpoint serves the request normally.
    ua = ("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/"
          "537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari"
          "/537.36")
    request_headers = {"User-Agent": "{0}".format(ua)}
    url = Downloader_peps.get_query_url_S2ST(tile, date)
    print("query url: {}\n".format(url))
    req = construct_request(url, headers=request_headers)
    download_one_by_urllib_basic(req, search_result_json)
    return search_result_json