Example #1
    def get_request(self,
                    requests,
                    path=None,
                    overwrite=False,
                    progress=True,
                    downloader=None,
                    wait=True,
                    max_conn=default_max_conn,
                    **kwargs):
        """
        Query JSOC to check whether the request(s) are ready for download.

        If they are, the corresponding files are then downloaded.

        Parameters
        ----------
        requests : `~drms.client.ExportRequest`, `str`, `list`
            `~drms.client.ExportRequest` objects or `str` request IDs or lists
            returned by `~sunpy.net.jsoc.jsoc.JSOCClient.request_data`.
        path : `str`
            Path to save the data to; defaults to the sunpy download directory.
        progress : `bool`, optional
            If `True` show a progress bar showing how many of the total files
            have been downloaded. If `False`, no progress bar will be shown.
        overwrite : `bool` or `str`, optional
            Determine how to handle downloading if a file already exists with the
            same name. If `False` the file download will be skipped and the path
            returned to the existing file, if `True` the file will be downloaded
            and the existing file will be overwritten, if ``'unique'`` the filename
            will be modified to be unique.
        downloader : `parfive.Downloader`, optional
            The download manager to use.
        wait : `bool`, optional
            If `False` ``downloader.download()`` will not be called. Only has
            any effect if ``downloader`` is not `None`.

        Returns
        -------
        res: `parfive.Results`
            A `parfive.Results` instance, or `None` if there are no URLs to download.
        """
        c = drms.Client()

        # Private communication from JSOC says we should not use more than one connection.
        if kwargs.get('max_splits'):
            log.info("The 'max_splits' keyword was passed; it is overridden and set to 1 for JSOC downloads.")
        kwargs['max_splits'] = 1

        # Convert Responses to a list if not already
        if isinstance(requests, str) or not isiterable(requests):
            requests = [requests]

        # Ensure all the requests are drms ExportRequest objects
        for i, request in enumerate(requests):
            if isinstance(request, str):
                r = c.export_from_id(request)
                requests[i] = r

        # We only download if all are finished
        if not all(r.has_succeeded() for r in requests):
            raise NotExportedError("Cannot download because not all of the requests "
                                   "have been exported for download yet.")

        # Ensure path has a {file} in it
        if path is None:
            default_dir = config.get("downloads", "download_dir")
            path = os.path.join(default_dir, '{file}')
        elif isinstance(path, Path):
            path = str(path)

        if isinstance(path, str) and '{file}' not in path:
            path = os.path.join(path, '{file}')

        paths = []
        for request in requests:
            if request.method == 'url-tar':
                fname = path.format(file=Path(request.tarfile).name)
                paths.append(os.path.expanduser(fname))
            else:
                for filename in request.data['filename']:
                    # Ensure we don't duplicate the file extension
                    ext = os.path.splitext(filename)[1]
                    if ext and path.endswith(ext):
                        # Remove the trailing extension; str.strip() removes a set
                        # of characters, not a suffix, and would mangle the path.
                        fname = path[:-len(ext)]
                    else:
                        fname = path
                    fname = fname.format(file=filename)
                    fname = os.path.expanduser(fname)
                    paths.append(fname)

        dl_set = True
        if not downloader:
            dl_set = False
            # Private communication from JSOC says we should not use more than one connection.
            if max_conn != self.default_max_conn:
                log.info("Setting the maximum number of parallel downloads to 1 for the JSOC client.")
            downloader = Downloader(progress=progress,
                                    overwrite=overwrite,
                                    max_conn=1)

        urls = []
        for request in requests:
            if request.status == 0:
                if request.protocol == 'as-is' or request.method == 'url-tar':
                    urls.extend(list(request.urls.url))
                else:
                    for index, data in request.data.iterrows():
                        url_dir = request.request_url + '/'
                        urls.append(
                            urllib.parse.urljoin(url_dir, data['filename']))

        if urls:
            if progress:
                print_message = "{0} URLs found for download. Full request totalling {1}MB"
                print(print_message.format(len(urls), request._d['size']))
            for aurl, fname in zip(urls, paths):
                downloader.enqueue_file(aurl, filename=fname, **kwargs)

        if dl_set and not wait:
            return Results()

        results = downloader.download()
        return results
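
A minimal usage sketch for ``get_request``, assuming the usual JSOC export workflow; the series name, notify address and path below are illustrative placeholders, not values taken from the code above:

from sunpy.net import attrs as a
from sunpy.net.jsoc import JSOCClient

client = JSOCClient()
# Placeholder query: JSOC exports require a registered notify email address.
response = client.search(a.Time('2014-01-01T00:00:00', '2014-01-01T00:10:00'),
                         a.jsoc.Series('hmi.m_45s'),
                         a.jsoc.Notify('user@example.com'))
requests = client.request_data(response)        # drms ExportRequest object(s)
# Checks that every export has succeeded (raises NotExportedError otherwise),
# then downloads the files using a single connection.
files = client.get_request(requests, path='/tmp/jsoc')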
Example #2
    def fetch(self, *query_results, path=None, max_conn=5, progress=True,
              overwrite=False, downloader=None, **kwargs):
        """
        Download the records represented by `~sunpy.net.base_client.QueryResponseTable` or
        `~sunpy.net.fido_factory.UnifiedResponse` objects.

        Parameters
        ----------
        query_results : `sunpy.net.fido_factory.UnifiedResponse` or `~sunpy.net.base_client.QueryResponseTable`
            One or more containers returned by a query method.
        path : `str`
            The directory to retrieve the files into. Can refer to any fields
            in `~sunpy.net.base_client.BaseQueryResponse.response_block_properties`
            via string formatting; additionally, the filename of the downloaded file
            can be referred to as ``file``, e.g. ``"{source}/{instrument}/{time.start}/{file}"``.
        max_conn : `int`, optional
            The number of parallel download slots.
        progress : `bool`, optional
            If `True` show a progress bar showing how many of the total files
            have been downloaded. If `False`, no progress bars will be shown at all.
        overwrite : `bool` or `str`, optional
            Determine how to handle downloading if a file already exists with the
            same name. If `False` the file download will be skipped and the path
            returned to the existing file, if `True` the file will be downloaded
            and the existing file will be overwritten, if ``'unique'`` the filename
            will be modified to be unique.
        downloader : `parfive.Downloader`, optional
            The download manager to use. If specified the ``max_conn``,
            ``progress`` and ``overwrite`` arguments are ignored.

        Returns
        -------
        `parfive.Results`

        Examples
        --------
        >>> from sunpy.net.attrs import Time, Instrument
        >>> unifresp = Fido.search(Time('2012/3/4','2012/3/5'), Instrument('EIT'))  # doctest: +REMOTE_DATA
        >>> filepaths = Fido.fetch(unifresp)  # doctest: +SKIP

        If any downloads fail, they can be retried by passing the `parfive.Results` object back into ``fetch``.

        >>> filepaths = Fido.fetch(filepaths)  # doctest: +SKIP

        """
        if path is None:
            path = Path(config.get('downloads', 'download_dir')) / '{file}'
        elif isinstance(path, (str, os.PathLike)) and '{file}' not in str(path):
            path = Path(path) / '{file}'
        else:
            path = Path(path)
        path = path.expanduser()

        # Ensure we have write permissions to the path
        exists = list(filter(lambda p: p.exists(), Path(path).resolve().parents))
        if not os.access(exists[0], os.W_OK):
            raise PermissionError('You do not have permission to write'
                                  f' to the directory {exists[0]}.')

        if "wait" in kwargs:
            raise ValueError("wait is not a valid keyword argument to Fido.fetch.")

        # TODO: Remove when parfive allows us to special-case URLs.
        # Avoid more than one connection for JSOC-only requests.
        from sunpy.net.jsoc import JSOCClient

        is_jsoc_only = False
        for query_result in query_results:
            if isinstance(query_result, UnifiedResponse):
                is_jsoc_only = all([isinstance(result.client, JSOCClient) for result in query_result])
            elif isinstance(query_result, QueryResponseTable):
                is_jsoc_only = all([isinstance(result.table.client, JSOCClient) for result in query_result])
        if downloader is None:
            if is_jsoc_only:
                max_conn = 1
                kwargs['max_splits'] = 1
            downloader = Downloader(max_conn=max_conn, progress=progress, overwrite=overwrite)
        elif not isinstance(downloader, parfive.Downloader):
            raise TypeError("The downloader argument must be a parfive.Downloader instance.")

        # Handle retrying failed downloads
        retries = [isinstance(arg, Results) for arg in query_results]
        if all(retries):
            results = Results()
            for retry in query_results:
                dr = downloader.retry(retry)
                results.data += dr.data
                results._errors += dr._errors
            return results
        elif any(retries):
            raise TypeError("If any arguments to fetch are `parfive.Results` objects, all arguments must be.")

        reslist = []
        for query_result in query_results:
            if isinstance(query_result, QueryResponseRow):
                responses = [query_result.as_table()]
            elif isinstance(query_result, QueryResponseTable):
                responses = [query_result]
            elif isinstance(query_result, UnifiedResponse):
                responses = query_result
            else:
                raise ValueError(f"Query result has an unrecognized type: {type(query_result)} "
                                 "Allowed types are QueryResponseRow, QueryResponseTable or UnifiedResponse.")
            for block in responses:
                result = block.client.fetch(block, path=path,
                                            downloader=downloader,
                                            wait=False, **kwargs)
                if result not in (NotImplemented, None):
                    reslist.append(result)

        results = downloader.download()
        # Combine the results objects from all the clients into one Results
        # object.
        for result in reslist:
            if not isinstance(result, Results):
                raise TypeError(
                    "If wait is False a client must return a parfive.Downloader and either None"
                    " or a parfive.Results object.")
            results.data += result.data
            results._errors += result.errors

        return results
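
A short usage sketch of the path template and the retry behaviour described in the docstring above; the query itself is only illustrative:

from sunpy.net import Fido, attrs as a

results = Fido.search(a.Time('2012/3/4', '2012/3/5'), a.Instrument('EIT'))
# '{instrument}' and '{file}' are filled in per response block and per downloaded file.
files = Fido.fetch(results, path='./data/{instrument}/{file}', max_conn=2)
# Failed downloads can be retried by passing the Results object back in.
if files.errors:
    files = Fido.fetch(files)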
Example #3
    def fetch(self, query_response, path=None, methods=None, site=None,
              progress=True, overwrite=False, downloader=None, wait=True):
        """
        Download data specified in the query_response.

        Parameters
        ----------
        query_response : sunpy.net.vso.VSOQueryResponseTable
            QueryResponse containing the items to be downloaded.
        path : str
            Specify where the data is to be downloaded. Can refer to arbitrary
            fields of the QueryResponseItem (instrument, source, time, ...) via
            string formatting; additionally, the filename of the downloaded file
            can be referred to as ``file``, e.g.
            ``"{source}/{instrument}/{time.start}/{file}"``.
        methods : `list` of `str`
            Download methods, defaults to URL-FILE_Rice then URL-FILE.
            Methods are a concatenation of one PREFIX followed by any number of
            SUFFIXES i.e. ``PREFIX-SUFFIX_SUFFIX2_SUFFIX3``.
            The full list of
            `PREFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_PREFIX>`_
            and `SUFFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_SUFFIX>`_
            are listed on the VSO site.
        site : str
            There are a number of caching mirrors for SDO and other
            instruments; some of the available ones are listed below.

            =============== ========================================================
            NSO             National Solar Observatory, Tucson (US)
            SAO  (aka CFA)  Smithsonian Astrophysical Observatory, Harvard U. (US)
            SDAC (aka GSFC) Solar Data Analysis Center, NASA/GSFC (US)
            ROB             Royal Observatory of Belgium (Belgium)
            MPS             Max Planck Institute for Solar System Research (Germany)
            UCLan           University of Central Lancashire (UK)
            IAS             Institut d'Astrophysique Spatiale (France)
            KIS             Kiepenheuer-Institut für Sonnenphysik (Germany)
            NMSU            New Mexico State University (US)
            =============== ========================================================

        progress : `bool`, optional
            If `True` show a progress bar showing how many of the total files
            have been downloaded. If `False`, no progress bars will be shown at all.
        overwrite : `bool` or `str`, optional
            Determine how to handle downloading if a file already exists with the
            same name. If `False` the file download will be skipped and the path
            returned to the existing file, if `True` the file will be downloaded
            and the existing file will be overwritten, if ``'unique'`` the filename
            will be modified to be unique.
        downloader : `parfive.Downloader`, optional
            The download manager to use.
        wait : `bool`, optional
            If `False` ``downloader.download()`` will not be called. Only has
            any effect if ``downloader`` is not `None`.

        Returns
        -------
        out : `parfive.Results`
            Object that supplies a list of filenames and any errors.

        Examples
        --------
        >>> files = client.fetch(qr)  # doctest: +SKIP
        """
        if path is None:
            path = Path(config.get('downloads', 'download_dir')) / '{file}'
        elif isinstance(path, (str, os.PathLike)) and '{file}' not in str(path):
            path = Path(path) / '{file}'
        else:
            path = Path(path)
        path = path.expanduser()

        dl_set = True
        if not downloader:
            dl_set = False
            downloader = Downloader(progress=progress, overwrite=overwrite)

        if isinstance(query_response, (QueryResponse, list)):
            query_response = VSOQueryResponseTable.from_zeep_response(query_response,
                                                                      client=self,
                                                                      _sort=False)
        if isinstance(query_response, QueryResponseRow):
            query_response = query_response.as_table()

        if not len(query_response):
            return downloader.download() if wait else Results()

        # Adding the site parameter to the info
        info = {}
        if site is not None:
            info['site'] = site

        VSOGetDataResponse = self.api.get_type("VSO:VSOGetDataResponse")

        data_request = self.make_getdatarequest(query_response, methods, info)
        data_response = VSOGetDataResponse(self.api.service.GetData(data_request))

        err_results = self.download_all(data_response,
                                        methods,
                                        downloader,
                                        str(path),
                                        self.by_fileid(query_response))

        if dl_set and not wait:
            return err_results

        results = downloader.download()
        results += err_results
        results._errors += err_results.errors
        return results
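
A usage sketch for the VSO ``fetch`` above; the time range, ``site`` and ``path`` values are illustrative:

from sunpy.net import attrs as a
from sunpy.net.vso import VSOClient

client = VSOClient()
qr = client.search(a.Time('2011-06-07 06:33', '2011-06-07 06:34'),
                   a.Instrument('AIA'))
# 'site' selects a preferred mirror from the table in the docstring;
# 'methods' could be passed here as well.
files = client.fetch(qr, path='./vso/{instrument}/{file}', site='SDAC')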
Example #4
    def download_all(self, response, methods, downloader, path, qr, info=None):
        results = Results()
        GET_VERSION = [
            ('0.8', (5, 8)),
            ('0.7', (1, 4)),
            ('0.6', (0, 3)),
        ]

        for dresponse in response.getdataresponseitem:
            for version, (from_, to) in GET_VERSION:
                if getattr(dresponse, version, '0.6') >= version:
                    break
            else:
                results.add_error('', '', UnknownVersion(dresponse))
                continue

            # If from_ and to are uninitialized, the else block of the loop
            # continues the outer loop and thus this code is never reached.
            code = (
                dresponse.status[from_:to]
                if getattr(dresponse, 'status', None) else '200'
            )
            if code == '200':
                for dataitem in dresponse.getdataitem.dataitem:

                    try:
                        self.download(
                            dresponse.method.methodtype[0],
                            dataitem.url,
                            downloader,
                            path,
                            qr[dataitem.fileiditem.fileid[0]]
                        )
                    except NoData:
                        results.add_error('', '', DownloadFailed(dresponse))
                        continue

            elif code in ('300', '412', '405'):
                if code == '300':
                    try:
                        methods = self.multiple_choices(
                            dresponse.method.methodtype, dresponse
                        )
                    except NoData:
                        results.add_error('', '', MultipleChoices(dresponse))
                        continue
                elif code == '412':
                    try:
                        info = self.missing_information(
                            info, dresponse.info
                        )
                    except NoData:
                        results.add_error('', '', MissingInformation(dresponse))
                        continue
                elif code == '405':
                    try:
                        methods = self.unknown_method(dresponse)
                    except NoData:
                        results.add_error('', '', UnknownMethod(dresponse))
                        continue

                files = []
                for dataitem in dresponse.getdataitem.dataitem:
                    files.extend(dataitem.fileiditem.fileid)

                request = self.create_getdatarequest(
                    {dresponse.provider: files}, methods, info
                )

                self.download_all(
                    self.api.service.GetData(request), methods, downloader, path,
                    qr, info
                )
            else:
                results.add_error('', '', UnknownStatus(dresponse))

        return results
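
The version selection in ``download_all`` relies on Python's ``for``/``else``: the ``else`` branch runs only when the loop completes without hitting ``break``. A standalone sketch of that pattern using the same ``GET_VERSION`` table (the input string is made up for illustration):

GET_VERSION = [
    ('0.8', (5, 8)),
    ('0.7', (1, 4)),
    ('0.6', (0, 3)),
]

def status_slice(reported_version):
    """Return the (from, to) status slice for the first matching version threshold."""
    for version, (from_, to) in GET_VERSION:
        if reported_version >= version:   # lexicographic string comparison, as above
            break
    else:
        # Reached only when the loop ran to completion without a break.
        raise ValueError(f"Unknown version: {reported_version}")
    return from_, to

print(status_slice('0.7'))   # -> (1, 4)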
Example #5
    def fetch(self,
              *query_results,
              path=None,
              max_conn=5,
              progress=True,
              overwrite=False,
              downloader=None,
              **kwargs):
        """
        Download the records represented by
        `~sunpy.net.fido_factory.UnifiedResponse` objects.

        Parameters
        ----------
        query_results : `sunpy.net.fido_factory.UnifiedResponse`
            One or more containers returned by a query method.
        path : `str`
            The directory to retrieve the files into. Can refer to any fields
            in `UnifiedResponse.response_block_properties` via string formatting;
            additionally, the filename of the downloaded file can be referred to
            as ``file``, e.g. ``"{source}/{instrument}/{time.start}/{file}"``.
        max_conn : `int`, optional
            The number of parallel download slots.
        progress : `bool`, optional
            If `True` show a progress bar showing how many of the total files
            have been downloaded. If `False`, no progress bars will be shown at all.
        overwrite : `bool` or `str`, optional
            Determine how to handle downloading if a file already exists with the
            same name. If `False` the file download will be skipped and the path
            returned to the existing file, if `True` the file will be downloaded
            and the existing file will be overwritten, if ``'unique'`` the filename
            will be modified to be unique.
        downloader : `parfive.Downloader`, optional
            The download manager to use. If specified the ``max_conn``,
            ``progress`` and ``overwrite`` arguments are ignored.

        Returns
        -------
        `parfive.Results`

        Examples
        --------
        >>> from sunpy.net.attrs import Time, Instrument
        >>> unifresp = Fido.search(Time('2012/3/4','2012/3/5'), Instrument('EIT'))  # doctest: +REMOTE_DATA
        >>> filepaths = Fido.fetch(unifresp)  # doctest: +SKIP

        If any downloads fail, they can be retried by passing the `parfive.Results` object back into ``fetch``.

        >>> filepaths = Fido.fetch(filepaths)  # doctest: +SKIP

        """
        if path is not None:
            exists = list(
                filter(lambda p: p.exists(),
                       Path(path).resolve().parents))

            if not os.access(exists[0], os.W_OK):
                raise PermissionError('You do not have permission to write'
                                      f' to the directory {exists[0]}.')

        if "wait" in kwargs:
            raise ValueError(
                "wait is not a valid keyword argument to Fido.fetch.")

        if downloader is None:
            downloader = Downloader(max_conn=max_conn,
                                    progress=progress,
                                    overwrite=overwrite)
        elif not isinstance(downloader, parfive.Downloader):
            raise TypeError(
                "The downloader argument must be a parfive.Downloader object.")

        # Handle retrying failed downloads
        retries = [isinstance(arg, Results) for arg in query_results]
        if all(retries):
            results = Results()
            for retry in query_results:
                dr = downloader.retry(retry)
                results.data += dr.data
                results._errors += dr._errors
            return results
        elif any(retries):
            raise TypeError(
                "If any arguments to fetch are "
                "`parfive.Results` objects, all arguments must be.")

        reslist = []
        for query_result in query_results:
            for block in query_result.responses:
                reslist.append(
                    block.client.fetch(block,
                                       path=path,
                                       downloader=downloader,
                                       wait=False,
                                       **kwargs))

        results = downloader.download()
        # Combine the results objects from all the clients into one Results
        # object.
        for result in reslist:
            if result is None:
                continue
            if not isinstance(result, Results):
                raise TypeError(
                    "If wait is False a client must return a parfive.Downloader and either None"
                    " or a parfive.Results object.")
            results.data += result.data
            results._errors += result.errors

        return results
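
A sketch of supplying a pre-configured ``parfive.Downloader``, in which case the ``max_conn``, ``progress`` and ``overwrite`` arguments above are ignored; the settings shown are illustrative:

import parfive
from sunpy.net import Fido, attrs as a

# The downloader's own settings govern the transfer; fetch only enqueues the files.
dl = parfive.Downloader(max_conn=2, progress=False, overwrite='unique')
results = Fido.search(a.Time('2012/3/4', '2012/3/5'), a.Instrument('EIT'))
files = Fido.fetch(results, downloader=dl, path='./data/{file}')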