def ListInstances(
    self,
    region: Optional[str] = None,
    filters: Optional[List[Dict[str, Any]]] = None,
    show_terminated: bool = False) -> Dict[str, ec2.AWSInstance]:
  """List instances of an AWS account.

  Example usage:
    ListInstances(region='us-east-1',
                  filters=[{'Name': 'instance-id',
                            'Values': ['some-instance-id']}])

  Args:
    region (str): Optional. The region from which to list instances. If none
        is provided, the default_region associated with the AWSAccount object
        will be used.
    filters (List[Dict]): Optional. Filters for the query. Filters are given
        as a list of dictionaries, e.g.: {'Name': 'someFilter',
        'Values': ['value1', 'value2']}.
    show_terminated (bool): Optional. Include terminated instances in the
        list.

  Returns:
    Dict[str, AWSInstance]: Dictionary mapping instance IDs (str) to their
        respective AWSInstance object.

  Raises:
    RuntimeError: If instances can't be listed.
  """
  if not filters:
    filters = []

  instances = {}
  client = self.ClientApi(common.EC2_SERVICE, region=region)
  responses = common.ExecuteRequest(
      client, 'describe_instances', {'Filters': filters})

  for response in responses:
    for reservation in response['Reservations']:
      for instance in reservation['Instances']:
        # If reservation['Instances'] contains any entry, then the instance's
        # state is expected to be present in the API's response.
        if instance['State']['Name'] == 'terminated' and not show_terminated:
          continue

        zone = instance['Placement']['AvailabilityZone']
        instance_id = instance['InstanceId']
        aws_instance = ec2.AWSInstance(self, instance_id, zone[:-1], zone)

        for tag in instance.get('Tags', []):
          if tag.get('Key') == 'Name':
            aws_instance.name = tag.get('Value')
            break

        instances[instance_id] = aws_instance
  return instances
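# -----------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the library). It assumes valid
# AWS credentials are configured locally and that ListInstances is exposed on
# an AWSAccount object, as the tests below suggest; the exact attribute path
# may differ between library versions.
def _list_instances_example() -> None:
  """Sketch: print the ID and name tag of every running instance."""
  # Imported here so the sketch is self-contained.
  from libcloudforensics.providers.aws.internal import account

  aws_account = account.AWSAccount(default_availability_zone='us-east-1a')
  instances = aws_account.ListInstances(
      region='us-east-1',
      filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
  for instance_id, aws_instance in instances.items():
    # AWSInstance exposes the instance's Name tag (if any) as .name.
    print(instance_id, aws_instance.name)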
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for aws module."""

import typing
import unittest

import mock

from libcloudforensics.providers.aws.internal import account, common, ebs, ec2
from libcloudforensics.providers.aws.internal import log as aws_log
from libcloudforensics.providers.aws import forensics

FAKE_AWS_ACCOUNT = account.AWSAccount(
    default_availability_zone='fake-zone-2b')

FAKE_INSTANCE = ec2.AWSInstance(
    FAKE_AWS_ACCOUNT,
    'fake-instance-id',
    'fake-zone-2',
    'fake-zone-2b')

FAKE_INSTANCE_WITH_NAME = ec2.AWSInstance(
    FAKE_AWS_ACCOUNT,
    'fake-instance-with-name-id',
    'fake-zone-2',
    'fake-zone-2b',
    name='fake-instance')

FAKE_VOLUME = ebs.AWSVolume(
    'fake-volume-id',
    FAKE_AWS_ACCOUNT,
    'fake-zone-2',
    'fake-zone-2b',
    False)

FAKE_BOOT_VOLUME = ebs.AWSVolume(
    'fake-boot-volume-id',
    FAKE_AWS_ACCOUNT,
    'fake-zone-2',
    'fake-zone-2b',
    False,
    name='fake-boot-volume',
    device_name='/dev/spf')

FAKE_SNAPSHOT = ebs.AWSSnapshot(
    'fake-snapshot-id',
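# -----------------------------------------------------------------------------
# Test sketch (illustrative only): one way the fake objects above can be used
# to exercise ListInstances without hitting AWS. The patch target and the
# ListInstances call path are assumptions based on the implementation shown
# earlier and may differ between library versions.
class ExampleListInstancesTest(unittest.TestCase):
  """Illustrative test for ListInstances built on the fakes above."""

  @typing.no_type_check
  @mock.patch(
      'libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
  def testListInstances(self, mock_ec2_api):
    """Tests that a running instance shows up in the returned dictionary."""
    describe_instances = mock_ec2_api.return_value.describe_instances
    describe_instances.return_value = {
        'Reservations': [{
            'Instances': [{
                'InstanceId': 'fake-instance-id',
                'State': {'Name': 'running'},
                'Placement': {'AvailabilityZone': 'fake-zone-2b'}
            }]
        }]
    }
    instances = FAKE_AWS_ACCOUNT.ListInstances()
    self.assertIn('fake-instance-id', instances)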
def GetOrCreateAnalysisVm(
    self,
    vm_name: str,
    boot_volume_size: int,
    ami: str,
    cpu_cores: int,
    packages: Optional[List[str]] = None,
    ssh_key_name: Optional[str] = None) -> Tuple[ec2.AWSInstance, bool]:
  """Get or create a new virtual machine for analysis purposes.

  Args:
    vm_name (str): The instance name tag of the virtual machine.
    boot_volume_size (int): The size of the analysis VM boot volume (in GB).
    ami (str): The Amazon Machine Image ID to use to create the VM.
    cpu_cores (int): Number of CPU cores for the analysis VM.
    packages (List[str]): Optional. List of packages to install in the VM.
    ssh_key_name (str): Optional. An SSH key pair name linked to the AWS
        account to associate with the VM. If none is provided, the VM can
        only be accessed through in-browser SSH from the AWS management
        console with the EC2 client connection package
        (ec2-instance-connect). Note that if this package fails to install
        on the target VM, then the VM will not be accessible. It is
        therefore recommended to fill in this parameter.

  Returns:
    Tuple[AWSInstance, bool]: A tuple with an AWSInstance object and a
        boolean indicating if the virtual machine was created (True) or
        reused (False).

  Raises:
    RuntimeError: If the virtual machine cannot be found or created.
  """
  # Re-use the instance if it already exists, or create a new one.
  try:
    instances = self.GetInstancesByName(vm_name)
    if instances:
      created = False
      return instances[0], created
  except RuntimeError:
    pass

  instance_type = common.GetInstanceTypeByCPU(cpu_cores)
  startup_script = utils.ReadStartupScript()
  if packages:
    startup_script = startup_script.replace(
        '${packages[@]}', ' '.join(packages))
  # Install ec2-instance-connect to allow SSH connections from the browser.
  startup_script = startup_script.replace(
      '(exit ${exit_code})',
      'apt -y install ec2-instance-connect && (exit ${exit_code})')

  client = self.ClientApi(common.EC2_SERVICE)
  vm_args = {
      'BlockDeviceMappings':
          [self._GetBootVolumeConfigByAmi(ami, boot_volume_size)],
      'ImageId': ami,
      'MinCount': 1,
      'MaxCount': 1,
      'InstanceType': instance_type,
      'TagSpecifications':
          [common.GetTagForResourceType('instance', vm_name)],
      'UserData': startup_script,
      'Placement': {
          'AvailabilityZone': self.default_availability_zone
      }
  }
  if ssh_key_name:
    vm_args['KeyName'] = ssh_key_name
  # Create the instance in AWS.
  try:
    instance = client.run_instances(**vm_args)
    # If the call to run_instances was successful, then the API response
    # contains the instance ID for the new instance.
    instance_id = instance['Instances'][0]['InstanceId']
    # Wait for the instance to be running.
    client.get_waiter('instance_running').wait(InstanceIds=[instance_id])
    # Wait for the status checks to pass.
    client.get_waiter('instance_status_ok').wait(InstanceIds=[instance_id])
  except (client.exceptions.ClientError,
          botocore.exceptions.WaiterError) as exception:
    raise RuntimeError('Could not create instance {0:s}: {1:s}'.format(
        vm_name, str(exception)))

  instance = ec2.AWSInstance(
      self,
      instance_id,
      self.default_region,
      self.default_availability_zone,
      name=vm_name)
  created = True
  return instance, created
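# -----------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the library). The AMI ID, SSH
# key pair name, and package list below are placeholders, and the method is
# assumed to be exposed on an AWSAccount object as above.
def _get_or_create_analysis_vm_example() -> None:
  """Sketch: start (or reuse) a 4-core analysis VM with a 50 GB boot volume."""
  # Imported here so the sketch is self-contained.
  from libcloudforensics.providers.aws.internal import account

  aws_account = account.AWSAccount(default_availability_zone='us-east-1a')
  analysis_vm, created = aws_account.GetOrCreateAnalysisVm(
      vm_name='forensics-analysis-vm',
      boot_volume_size=50,           # GB
      ami='ami-00000000000000000',   # placeholder AMI ID
      cpu_cores=4,
      packages=['sleuthkit'],        # placeholder package list
      ssh_key_name='my-ssh-key')     # placeholder key pair name
  print('{0:s} was {1:s}'.format(
      analysis_vm.name, 'created' if created else 'reused'))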
import mock

from libcloudforensics.providers.aws.internal import account as aws_account
from libcloudforensics.providers.aws.internal import ebs, ec2

from dftimewolf import config
from dftimewolf.lib import state
from dftimewolf.lib.collectors import aws
from dftimewolf.lib.containers import containers

with mock.patch('boto3.session.Session._setup_loader') as mock_session:
  mock_session.return_value = None
  FAKE_AWS_ACCOUNT = aws_account.AWSAccount(
      default_availability_zone='fake-zone-2b')
  FAKE_ANALYSIS_VM = ec2.AWSInstance(
      FAKE_AWS_ACCOUNT,
      'fake-analysis-vm-id',
      'fake-zone-2',
      'fake-zone-2b',
      name='fake-analysis-vm')
  FAKE_INSTANCE = ec2.AWSInstance(
      FAKE_AWS_ACCOUNT,
      'my-owned-instance-id',
      'fake-zone-2',
      'fake-zone-2b')
  FAKE_VOLUME = ebs.AWSVolume(
      'fake-volume-id',
      FAKE_AWS_ACCOUNT,
      'fake-zone-2',
      'fake-zone-2b',
      False)
  FAKE_BOOT_VOLUME = ebs.AWSVolume(
      'fake-boot-volume-id',
      FAKE_AWS_ACCOUNT,
      'fake-zone-2',
      'fake-zone-2b',
      False,
      name='fake-boot-volume',
      device_name='/dev/spf')
  FAKE_VOLUME_COPY = ebs.AWSVolume(
      'fake-volume-id-copy',
      FAKE_AWS_ACCOUNT,
      'fake-zone-2',
      'fake-zone-2b',
      False)