Example #1
def upload_index(options: argparse.Namespace,
                 cloud_indices: typing.List[data_types.FitsFileIndex]) -> None:
    # NamedTemporaryFile().name only reserves a fresh path here; the handle
    # is discarded immediately and the file is re-created by open() below.
    index_filepath: str = tempfile.NamedTemporaryFile().name
    configuration: typing.Dict[str, typing.Any] = {
        'version': '0.1.0',
        'aws-default-region': AWS_REGION,
        'indicies': [],
        'index-bucket-name': options.index_bucket_name,
        'data-bucket-path': options.data_bucket_path,
    }
    for cloud_index in cloud_indices:
        configuration['indicies'].append(cloud_index.index)

    logger.info(f'Writing Index to Filepath[{index_filepath}]')
    with open(index_filepath, 'w', encoding=ENCODING) as stream:
        stream.write(yaml.dump(configuration, indent=4, canonical=False))

    logger.info(
        f'Updating Cloud Index in AWS Bucket[{options.index_bucket_name}]')
    url: str = f'https://s3.{AWS_REGION}.amazonaws.com/{options.index_bucket_name}/{INDEX_KEY}'
    with open(index_filepath, 'r', encoding=ENCODING) as stream:
        response = requests.put(url,
                                data=stream.read(),
                                auth=aws_auth.AWSAuth())
        if response.status_code != 200:
            raise NotImplementedError
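
A minimal invocation sketch for the uploader above, assuming the module-level names it relies on (AWS_REGION, INDEX_KEY, ENCODING, logger, aws_auth) are in scope. The flag names are inferred from the options attributes read inside upload_index and are otherwise hypothetical:

parser = argparse.ArgumentParser()
# Flag names are assumptions derived from options.index_bucket_name and
# options.data_bucket_path above; argparse maps dashes to underscores.
parser.add_argument('--index-bucket-name', required=True)
parser.add_argument('--data-bucket-path', required=True)
options = parser.parse_args()
upload_index(options, cloud_indices=[])  # empty index list, for illustration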
Example #2
def _load_byte_range(process_count, start: int, stop: int, child_conn,
                     url: str) -> None:
    # Fetch an inclusive byte range, retrying up to three times before
    # sending a b'noop' sentinel back over the pipe.
    retries = 0
    while retries < 3:
        try:
            response = requests.get(url,
                                    headers={
                                        'Range': f'bytes={start}-{stop}',
                                        'Accept': 'application/octet-stream'
                                    },
                                    auth=aws_auth.AWSAuth(True),
                                    stream=False)
        except Exception:
            time.sleep(.1)

        else:
            if response.status_code == 206:
                child_conn.send([
                    json.dumps([
                        process_count,
                        base64.b64encode(response.content).decode('ascii')
                    ])
                ])
                return None

        retries += 1
        if retries >= 3:
            child_conn.send([
                json.dumps(
                    [process_count,
                     base64.b64encode(b'noop').decode('ascii')])
            ])
            return None
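
_load_byte_range is written to run in a child process and report back over a multiprocessing pipe. A hedged usage sketch, assuming the module's json and base64 imports are in scope; the URL is a placeholder:

import multiprocessing

# Guard with `if __name__ == '__main__':` when running as a script.
parent_conn, child_conn = multiprocessing.Pipe()
proc = multiprocessing.Process(
    target=_load_byte_range,
    args=(0, 0, 1023, child_conn,
          'https://s3.us-east-1.amazonaws.com/example-bucket/example-key'))
proc.start()
# The child sends a one-element list holding a JSON-encoded
# [process_count, base64-payload] pair.
process_count, encoded = json.loads(parent_conn.recv()[0])
chunk: bytes = base64.b64decode(encoded)
proc.join()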
Example #3
def download_index(bucket_name: str) -> data_types.FitsCloudIndex:
    logger.info(f'Downloading Cloud Index from AWS Bucket[{bucket_name}]')
    url: str = f'https://s3.{AWS_REGION}.amazonaws.com/{bucket_name}/{INDEX_KEY}'
    response = requests.get(url, auth=aws_auth.AWSAuth())
    if response.status_code != 200:
        raise NotImplementedError

    # safe_load avoids arbitrary object construction and works on
    # PyYAML >= 5.1, where load() without a Loader is deprecated.
    return data_types.FitsCloudIndex(
        yaml.safe_load(response.content.decode(ENCODING)))
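
Usage is a single call; the bucket name below is a placeholder, and FitsCloudIndex comes from the project's data_types module. Any non-200 response surfaces as the NotImplementedError raised above.

cloud_index = download_index('example-index-bucket')  # placeholder bucket name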
Example #4
    def _slice_bintable(self: PWN,
                        nViews: typing.List[slice]) -> Astropy_Table:
        def __validate_bintable_fits_format(header: fits.Header) -> None:
            # https://github.com/astropy/astropy/blob/master/astropy/io/fits/hdu/table.py#L548
            # Implements the validators that align with the FITS Spec
            # http://articles.adsabs.harvard.edu/pdf/1995A%26AS..113..159C
            assert header['NAXIS'] == 2
            assert header['BITPIX'] == 8
            # TFIELDS is capped below 1000, so the column index never
            # exceeds three digits.
            assert header['TFIELDS'] > 0 and header['TFIELDS'] < 1000
            for idx in range(1, header['TFIELDS'] + 1):
                t_form: str = header.get(f'TFORM{idx}', None)
                assert t_form is not None
                t_type: str = header.get(f'TTYPE{idx}', None)
                assert t_type is not None

        def __validate_bintable_python_inputs(
                header: fits.Header, nViews: typing.List[slice]) -> None:
            assert len(nViews) == 1

        __validate_bintable_fits_format(self.fits)
        __validate_bintable_python_inputs(self.fits, nViews)
        # Drop the leading "s3://" scheme from the configured bucket path.
        path: str = f'{self._context.data_bucket_path[5:].strip("/")}/{self._cloudpath}'
        url: str = f'https://s3.{self._context.region}.amazonaws.com/{path}'

        # NAXIS1 = number of bytes per row
        # NAXIS2 = number of rows in the table
        start: int = nViews[0].start * self.fits['NAXIS1'] + self.data_offset
        # The HTTP Range header is inclusive, so stop one byte before the
        # first row outside the slice.
        stop: int = nViews[0].stop * self.fits['NAXIS1'] + self.data_offset - 1
        response = requests.get(url,
                                headers={
                                    'Range': f'bytes={start}-{stop}',
                                },
                                auth=aws_auth.AWSAuth(True),
                                stream=True)
        cutout_name: str = tempfile.NamedTemporaryFile().name
        with open(cutout_name, 'wb') as stream:
            # Write the original primary HDU bytes verbatim, then a patched
            # bintable header sized to the requested row range.
            stream.write(self._primary_header['header']['whole'])
            # Copy the header so mutating NAXIS2 does not alter self.fits.
            new_header: fits.Header = self.fits.copy()
            new_header['NAXIS2'] = nViews[0].stop - nViews[0].start
            stream.write(new_header.tostring().encode('ascii'))
            for chunk in response.iter_content(1024):
                stream.write(chunk)

        return Astropy_Table(fits.open(cutout_name)[1].data)
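
A hedged usage sketch for the slicer: hdu below is a stand-in for an instance of the class this method belongs to, and the slice bounds are arbitrary. Per the input validator, exactly one slice is accepted.

table = hdu._slice_bintable([slice(100, 200)])  # hdu is a hypothetical instance
print(len(table))  # 100 rows, matching the NAXIS2 arithmetic above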
Example #5
import requests

from cloud_fits.auth import aws

# PUT a small payload to S3 with a signed request.
url: str = 'https://s3.us-east-1.amazonaws.com/yoko-videos/awesome.txt'
response = requests.put(url, data=b'awesome', auth=aws.AWSAuth())
assert response.status_code == 200, response.content