Example #1
    def define_s3bucket(self):
        """
        Get the s3instance and define the name of the
        S3 bucket using salobj.

        To access the S3 server, the environment variables are set via:

        export S3_ENDPOINT_URL=http://lsst-nfs.ncsa.illinois.edu:9000
        export AWS_ACCESS_KEY_ID={access_key}
        export AWS_SECRET_ACCESS_KEY={secret_key}
        """

        # 1. Get the bucket name we want
        s3instance = self.get_s3instance()
        self.s3bucket_name = salobj.AsyncS3Bucket.make_bucket_name(s3instance=s3instance)
        self.log.info(f"Will use Bucket name: {self.s3bucket_name}")

        # 2. Use AsyncS3Bucket to make bucket + S3 connection
        self.s3bucket = salobj.AsyncS3Bucket(name=self.s3bucket_name, domock=False)
        self.log.info(f"Connection established to: {self.s3bucket_name}")
        # We will re-use the connection made by salobj
        self.s3conn = self.s3bucket.service_resource
        self.log.info(f"Will use s3 endpoint_url: {self.s3conn.meta.client.meta.endpoint_url}")

        # 3. Make sure the bucket exists in the list of bucket names:
        bucket_names = [b.name for b in self.s3conn.buckets.all()]
        if self.s3bucket_name not in bucket_names:
            self.s3conn.create_bucket(Bucket=self.s3bucket_name)
            self.log.info(f"Created Bucket: {self.s3bucket_name}")
        else:
            self.log.info(f"Bucket Name: {self.s3bucket_name} already exists")
Example #2
    async def handle_summary_state(self):
        # disabled: connect and send telemetry, but no commands allowed.
        if self.summary_state in (salobj.State.ENABLED, salobj.State.DISABLED):
            if self.s3bucket is None:
                domock = self.simulation_mode & constants.SimulationMode.S3Server != 0
                self.s3bucket = salobj.AsyncS3Bucket(
                    name=self.s3bucket_name, domock=domock, create=domock
                )
            if self.device is None:
                try:
                    self.device = AvsFiberSpectrograph(
                        serial_number=self.serial_number, log=self.log
                    )
                except Exception as e:
                    msg = "Failed to connect to fiber spectrograph."
                    await self.fault(code=1, report=f"{msg}: {repr(e)}")
                    raise salobj.ExpectedError(msg)

            if self.telemetry_loop_task.done():
                self.telemetry_loop_task = asyncio.create_task(self.telemetry_loop())
            status = self.device.get_status()
            await self.evt_deviceInfo.set_write(
                npixels=status.n_pixels,
                fpgaVersion=status.fpga_version,
                firmwareVersion=status.firmware_version,
                libraryVersion=status.library_version,
            )
        else:
            self.telemetry_loop_task.cancel()
            if self.device is not None:
                self.device.disconnect()
            self.device = None
            if self.s3bucket is not None:
                self.s3bucket.stop_mock()
            self.s3bucket = None
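Once the bucket exists, the CSC can write large file objects to it. The following is only a sketch: the salname, generator, suffix and the data/date arguments are illustrative assumptions (not part of the example above), and io is assumed to be imported.

    async def save_spectrum(self, data: bytes, date: str) -> str:
        # Hypothetical helper (not part of the CSC above): build a key with
        # the standard salobj layout and upload raw bytes to the bucket
        # created in handle_summary_state.
        key = self.s3bucket.make_key(
            salname="FiberSpectrograph",
            salindexname=None,
            generator="rawSpectrum",
            other=None,
            date=date,
            suffix=".fits",
        )
        await self.s3bucket.upload(fileobj=io.BytesIO(data), key=key)
        return key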
Example #3
    def setUp(self) -> None:
        self.bucket_name = "async_bucket_test"
        self.file_data = b"Data for the test case"
        self.key = "test_file"
        self.bucket = salobj.AsyncS3Bucket(
            self.bucket_name, create=True, domock=True
        )
        self.fileobj = io.BytesIO(self.file_data)
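A companion test (a sketch, not part of the original test case) can then exercise the mocked bucket end to end: upload the in-memory file object, download it back, and compare the contents, stopping the moto mock in tearDown:

    async def test_upload_and_download(self) -> None:
        # Upload the in-memory file object under self.key ...
        await self.bucket.upload(fileobj=self.fileobj, key=self.key)
        # ... then read it back and check that the data round-trips.
        roundtrip = await self.bucket.download(key=self.key)
        assert roundtrip.getvalue() == self.file_data

    def tearDown(self) -> None:
        # Shut down the moto mock started by domock=True.
        self.bucket.stop_mock()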
Example #4
import time

from lsst.ts import salobj


async def main(ID):

    filename = f"{ID}.header"
    s3instance = "NTS"

    # 1. Get the bucket name we want
    s3bucket_name = salobj.AsyncS3Bucket.make_bucket_name(
        s3instance=s3instance)
    print(f"--- Will use Bucket name:{s3bucket_name}")

    # 2. Use AsyncS3Bucket to make bucket + S3 connection
    s3bucket = salobj.AsyncS3Bucket(name=s3bucket_name, domock=False)
    # We will re-use the connection made by salobj
    s3conn = s3bucket.service_resource

    # In case we want to delete contents and bucket:
    # s3bucket.bucket.objects.all().delete()
    # s3bucket.bucket.delete()

    # 3. Make sure the bucket exists in the list of bucket names:
    bucket_names = [b.name for b in s3conn.buckets.all()]
    if s3bucket_name not in bucket_names:
        s3conn.create_bucket(Bucket=s3bucket_name)
        print(f"Created Bucket: {s3bucket_name}")
    else:
        print(f"Bucket: {s3bucket_name} already exists")

    # 4. Uploading the file, using filename/key/url combination
    # key should be:
    # CCHeaderService/header/2020/05/21/CCHeaderService_header_CC_O_20200521_000008.yaml

    key = s3bucket.make_key(salname='CCHeaderService',
                            salindexname=None,
                            generator='header',
                            other=ID,
                            date='2020-05-21T21:56:18.280',
                            suffix='.yaml')
    url = f"s3://{s3bucket.name}/{key}"
    print(f"New key:{key}")
    print(f"URL:{url}")

    # Test using boto3 upload_file
    for k in range(10):
        t1 = time.time()
        s3conn.meta.client.upload_file(filename, s3bucket_name, key)
        t2 = time.time()
        print(f"Total time file: {t2-t1}")

    # Test using salobj AsyncS3Bucket.upload with a file object
    for k in range(10):
        t1 = time.time()
        with open(filename, "rb") as f:
            await s3bucket.upload(fileobj=f, key=key)
            t2 = time.time()
            print(f"Total time fileobj: {t2-t1}")

    # Now we print the contents
    for bucket in s3conn.buckets.all():
        print(f" + {bucket}")
        for file in bucket.objects.all():
            print(f"   - {file.key}")
Example #5
    async def test_specified_s3_endpoint_url(self) -> None:
        endpoint_url = "http://foo.bar.edu:9000"
        with utils.modify_environ(S3_ENDPOINT_URL=endpoint_url):
            bucket = salobj.AsyncS3Bucket(self.bucket_name)
            assert bucket.service_resource.meta.client.meta.endpoint_url == endpoint_url
Example #6
    async def test_no_s3_endpoint_url(self) -> None:
        # Clear "S3_ENDPOINT_URL" if it exists.
        with utils.modify_environ(S3_ENDPOINT_URL=None):
            bucket = salobj.AsyncS3Bucket(self.bucket_name)
            assert "amazon" in bucket.service_resource.meta.client.meta.endpoint_url
Example #7
    async def test_blank_s3_endpoint_url(self) -> None:
        with utils.modify_environ(S3_ENDPOINT_URL=""):
            bucket = salobj.AsyncS3Bucket(self.bucket_name)
            assert "amazon" in bucket.service_resource.meta.client.meta.endpoint_url
Example #8
"""
This is an S3 HeaderService example to delete a bucket using salobj.

To access the S3 server, the environment variables are set via:

export S3_ENDPOINT_URL=http://lsst-nfs.ncsa.illinois.edu:9000
export AWS_ACCESS_KEY_ID={access_key}
export AWS_SECRET_ACCESS_KEY={secret_key}
"""

from lsst.ts import salobj

# 1. Get the bucket name we want -- change accordingly
s3instance = "dummy"
s3bucket_name = salobj.AsyncS3Bucket.make_bucket_name(s3instance=s3instance)
print(f" --- Will use Bucket name:{s3bucket_name}")

# 2. Use AsyncS3Bucket to make bucket + S3 connection
s3bucket = salobj.AsyncS3Bucket(name=s3bucket_name, domock=False)
# We will re-use the connection made by salobj
s3conn = s3bucket.service_resource

# 3. Delete the bucket contents and then the bucket itself, if it exists
bucket_names = [b.name for b in s3conn.buckets.all()]
if s3bucket_name in bucket_names:
    print(f"Deleting: {s3bucket_name}")
    s3bucket.bucket.objects.all().delete()
    s3bucket.bucket.delete()
else:
    print(f" --- Cannot delete bucket: {s3bucket_name} it does not exists")