Example #1
0
 def __download_nowcast(self):
     """Fetch nowcast ISO files and file each one into the matching
     2D or 3D GLCFS nowcast catalog folder.
     """
     for fname in self.__download(self._html + self._nowcast):
         if fname.find("2D") > 0:
             folder = "Models/GLCFS/Nowcast-2D"
         else:
             folder = "Models/GLCFS/Nowcast-3D"
         move_iso(fname, folder)
Example #2
0
File: aoc.py — Project: CowanSM/glos_catalog
    def download_isos(self):
        """Download all Agg ISO files linked from the catalog page and
        move each one into the configured GLOS catalog folder.
        """
        resp = download(self._html)
        tree = etree.HTML(resp)

        # collect every dataset id referenced from an <a href=...> attribute
        datasets = []
        for elm in tree.findall(".//a[@href]"):
            for val in elm.attrib.values():
                # only attribute values embedding a "dataset" query parameter
                if val.find("dataset") > 0:
                    datasets.append(val[val.find("=") + 1:])

        # download one ISO per dataset id
        files = {}
        for ds in datasets:
            url = self._iso + ds
            files[ds] = download_iso(url, catalog=self._html, dataset=ds)

        # move the downloaded ISOs into the catalog's ISO directory
        for fname in files.values():
            move_iso(fname, self._iso_dir)
Example #3
0
    def __download_forecast(self):
        """Fetch forecast ISO files, insert a "Forecast" tag after the
        first "_-_" separator in each name, and file them into the matching
        2D or 3D GLCFS forecast catalog folder.
        """
        for name in self.__download(self._html + self._forecast):
            head, sep, tail = name.partition("_-_")
            tagged = head + sep + "Forecast_-_" + tail
            if tagged.find("2D") > 0:
                folder = "Models/GLCFS/Forecast-2D"
            else:
                folder = "Models/GLCFS/Forecast-3D"
            move_iso(name, folder, tagged)
Example #4
0
	def download_isos(self):
		"""Download the ISO for every dataset linked on the catalog page
		and move each one into the catalog's ISO directory.
		"""
		page = download(self._html)
		doc = etree.HTML(page)

		for anchor in doc.findall('.//a[@href]'):
			href = anchor.attrib.get('href')
			# the remote page misspells "Latest"; normalize before parsing
			href = href.replace('Lastest', 'Latest')
			if href.find('dataset') <= 0:
				continue
			ds = href[href.find('=') + 1:]
			# aggregate datasets are served from a different base url
			if ds.find('Agg') > 0:
				url = self._agg + ds + '.nc'
			else:
				url = self._latest + ds + '.nc'
			fname = download_iso(url, catalog=self._html, dataset=ds)
			move_iso(fname, self._iso)
Example #5
0
	def download_isos(self):
		"""Download ISO files for each known source on the sources page.

		Non-STORET ISOs are moved straight into their catalog directory.
		STORET ISOs are staged in the zip directory, archived into a single
		storet.zip, and the archive is moved into the ISOs directory.
		"""
		resp = download(self._html + self._sources)
		tree = etree.HTML(resp)

		for source in tree.findall(".//li"):
			if source.text not in self._iso_dirs:
				continue
			# get list of files in source
			dirlist = download(self._html + source.text + '/list.html')
			dirtree = etree.HTML(dirlist)
			for iso in dirtree.findall('.//li'):
				sname = download_iso(self._html + source.text + '/' + iso.text.strip())
				if source.text != 'STORET':
					move_iso(sname, self._iso_dirs[source.text])
				elif sname is not None:
					# storet needs to be compressed before being moved as a single archive
					if path.exists(self._storet_zip_dir + sname):
						remove(self._storet_zip_dir + sname)
					move('./pyiso/iso_tmp/' + sname, self._storet_zip_dir)

			if source.text == 'STORET':
				# archive the storet directory in iso_tmp and move it to the ISOs
				# directory. ZipFile opens the archive itself (binary mode) — the
				# old code used the py2-only file() builtin in text mode and never
				# closed the handle.
				with ZipFile(path.abspath('./pyiso/iso_tmp/storet.zip'), 'w') as zip_file:
					for root, _, fnames in walk(self._storet_zip_dir):
						for fname in fnames:
							if fname.endswith('.xml'):
								# NOTE(review): paths are built from _storet_zip_dir,
								# not `root`, so only top-level files resolve — kept
								# as-is to preserve existing behavior.
								zip_file.write(self._storet_zip_dir + fname, arcname=fname)

				# move the zip file, replacing any previous archive at the destination
				dest = '../ISOs/' + self._iso_dirs[source.text] + '/storet.zip'
				if path.exists(dest):
					remove(dest)

				move('./pyiso/iso_tmp/storet.zip', dest)
Example #6
0
 def __download_forcing(self):
     """Fetch every ISO listed at the forcing url and file it under the
     GLCFS forcing catalog folder.
     """
     destination = "Models/GLCFS/Forcing/"
     for iso_name in self.__download(self._html + self._forcing):
         move_iso(iso_name, destination)