def test_process_workspace(monkeypatch):
    workspacesHelper = WorkspacesHelper({
        'region': 'us-west-2',
        'hourlyLimits': {
            'VALUE': 5,
            'STANDARD': 5,
            'PERFORMANCE': 5,
            'GRAPHICS': 5
        },
        'testEndOfMonth': False,
        'isDryRun': True,
        'startTime': '2018-02-01T00:00:00Z',
        'endTime': '2018-02-01T12:00:00Z'
    })

    # Stub out the helper methods that would otherwise call AWS APIs
    def mock_get_bundle_type(BundleIds='wsb-xxxxxxxxx'):
        return 'VALUE'

    monkeypatch.setattr(workspacesHelper, 'get_bundle_type', mock_get_bundle_type)

    def mock_check_for_skip_tag(workspaceID):
        return False

    monkeypatch.setattr(workspacesHelper, 'check_for_skip_tag', mock_check_for_skip_tag)

    def mock_get_billable_time(workspaceID, workspaceRunningMode, startTime, endTime):
        return 10

    monkeypatch.setattr(workspacesHelper.metricsHelper, 'get_billable_time', mock_get_billable_time)

    def mock_modify_workspace_properties(workspaceID, newRunningMode, isDryRun):
        return '-M-'

    monkeypatch.setattr(workspacesHelper, 'modify_workspace_properties', mock_modify_workspace_properties)

    result = workspacesHelper.process_workspace({
        "UserName": "******",
        "DirectoryId": "d-xxxxxxxxx",
        "WorkspaceProperties": {
            "UserVolumeSizeGib": 50,
            "RunningModeAutoStopTimeoutInMinutes": 60,
            "RunningMode": "AUTO_STOP",
            "RootVolumeSizeGib": 80,
            "ComputeTypeName": "STANDARD"
        },
        "ModificationStates": [],
        "State": "STOPPED",
        "WorkspaceId": "ws-xxxxxxxxx",
        "BundleId": "wsb-xxxxxxxxx"
    })

    assert result['workspaceID'] == 'ws-xxxxxxxxx'
    assert result['optimizationResult'] == '-M-'
    assert result['billableTime'] == 10
    assert result['hourlyThreshold'] == 5
    assert result['bundleType'] == 'VALUE'
def test_process_workspace_performance(mocker):
    workspace = {
        'WorkspaceId': 'ws-68h123hty',
        'DirectoryId': 'd-901230bb84',
        'UserName': '******',
        'IpAddress': '111.16.1.233',
        'State': 'AVAILABLE',
        'BundleId': 'wsb-cl123qzj1',
        'SubnetId': 'subnet-05d421387eaa7cf86',
        'ComputerName': 'A-APPW123KP4NP',
        'WorkspaceProperties': {
            'RunningMode': 'ALWAYS_ON',
            'RootVolumeSizeGib': 80,
            'UserVolumeSizeGib': 50,
            'ComputeTypeName': 'PERFORMANCE'
        },
        'ModificationStates': []
    }
    settings = {
        'region': 'us-east-1',
        'hourlyLimits': 10,
        'testEndOfMonth': 'yes',
        'isDryRun': 'yes',
        'startTime': 1,
        'endTime': 2
    }
    workspace_helper = WorkspacesHelper(settings)

    mocker.patch.object(workspace_helper.metricsHelper, 'get_billable_time')
    workspace_helper.metricsHelper.get_billable_time.return_value = 100
    mocker.patch.object(workspace_helper, 'check_for_skip_tag')
    workspace_helper.check_for_skip_tag.return_value = False
    mocker.patch.object(workspace_helper, 'get_hourly_threshold')
    workspace_helper.get_hourly_threshold.return_value = 5
    mocker.patch.object(workspace_helper, 'compare_usage_metrics')
    workspace_helper.compare_usage_metrics.return_value = {
        'resultCode': '-N-',
        'newMode': 'ALWAYS_ON'
    }

    result = workspace_helper.process_workspace(workspace)
    assert result['bundleType'] == 'PERFORMANCE'
    assert result['billableTime'] == 100
def test_get_tags(monkeypatch):
    helper = WorkspacesHelper({
        'region': 'us-west-2',
        'hourlyLimits': {
            'VALUE': 5,
            'STANDARD': 5,
            'PERFORMANCE': 5,
            'GRAPHICS': 5
        },
        'testEndOfMonth': False,
        'isDryRun': True,
        'startTime': '2018-02-01T00:00:00Z',
        'endTime': '2018-02-01T12:00:00Z'
    })

    def mock_describe_tags(ResourceId='ws-xxxxxxxxx'):
        return {
            'ResponseMetadata': {
                'RetryAttempts': 0,
                'HTTPStatusCode': 200,
                'RequestId': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
                'HTTPHeaders': {
                    'x-amzn-requestid': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
                    'date': 'Thu, 07 Feb 1988 11:00:00 GMT',
                    'content-length': '42',
                    'content-type': 'application/x-amz-json-1.1'
                }
            },
            'TagList': [
                {
                    'Value': 'LIVE',
                    'Key': 'FAI'
                }
            ]
        }

    monkeypatch.setattr(helper.client, 'describe_tags', mock_describe_tags)

    result = helper.get_tags('ws-xxxxxxxxx')
    assert result[0]['Key'] == 'FAI'
    assert result[0]['Value'] == 'LIVE'
def read_directory(self, region, stackParams, directoryParams):
    botoConfig = botocore.config.Config(max_pool_connections=100)
    maxRetries = 3
    workspaceCount = 0
    testEndOfMonth = False
    sendAnonymousData = False
    isDryRun = True

    endTime = directoryParams['EndTime']
    startTime = directoryParams['StartTime']
    lastDay = directoryParams['LastDay']
    runUUID = directoryParams['RunUUID']

    # Provides a point to clean up parameter names in the future.
    if stackParams['DryRun'] == 'No':
        isDryRun = False

    # CloudFormation overrides the end-of-month testing
    if stackParams['TestEndOfMonth'] == 'Yes':
        testEndOfMonth = True
        log.info('Setting testEndOfMonth to %s', testEndOfMonth)

    # Should we send Solutions Team metrics?
    if stackParams['SendAnonymousData'] == 'true':
        sendAnonymousData = True
    log.debug('sendAnonymousData: %s', sendAnonymousData)

    awsS3Bucket = stackParams['BucketName']
    log.info('Output Bucket: %s', awsS3Bucket)

    # Capture the directoryId passed to the function
    if 'DirectoryId' in directoryParams:
        directoryID = directoryParams['DirectoryId']
    else:
        log.error('Failed to find directoryId in directoryParams')
        return 0

    # Continue an existing CSV if one was passed in, otherwise start a new one
    if 'CSV' in directoryParams:
        wsCsv = directoryParams['CSV']
    else:
        wsCsv = 'WorkspaceID,Billable Hours,Usage Threshold,Change Reported,Bundle Type,Initial Mode,New Mode\n'

    if 'NextToken' in directoryParams:
        nextToken = directoryParams['NextToken']
    else:
        nextToken = 'None'

    # List of bundles with specific hourly limits
    workspacesHelper = WorkspacesHelper({
        'region': region,
        'hourlyLimits': {
            'VALUE': stackParams['ValueLimit'],
            'STANDARD': stackParams['StandardLimit'],
            'PERFORMANCE': stackParams['PerformanceLimit'],
            'POWER': stackParams['PowerLimit'],
            'POWERPRO': stackParams['PowerProLimit'],
            'GRAPHICS': stackParams['GraphicsLimit'],
            'GRAPHICSPRO': stackParams['GraphicsProLimit']
        },
        'testEndOfMonth': testEndOfMonth,
        'isDryRun': isDryRun,
        'startTime': startTime,
        'endTime': endTime
    })

    morePages = True
    while morePages:  # loop through all pages of the directory, 25 workspaces at a time
        workspacesPage = workspacesHelper.get_workspaces_page(directoryID, nextToken)

        if 'NextToken' in workspacesPage:
            nextToken = workspacesPage['NextToken']
        else:
            nextToken = 'None'

        # Loop through the list of workspaces in the current page of the directory
        for workspace in workspacesPage['Workspaces']:
            result = workspacesHelper.process_workspace(workspace)
            workspaceCount = workspaceCount + 1
            log.info('Workspace %d -> %s', workspaceCount, result)

            # Append result data to the CSV
            log.info('Appending CSV file')
            wsCsv = workspacesHelper.append_entry(wsCsv, result)

            # Send metrics about this solution to the Solutions Team tracker
            if sendAnonymousData:
                postDict = {}
                postDict['Data'] = {
                    'runUUID': runUUID,
                    'result': result['optimizationResult'],
                    'bundleType': result['bundleType'],
                    'previousMode': result['initialMode']
                }
                postDict['TimeStamp'] = str(datetime.datetime.utcnow().isoformat())
                postDict['Solution'] = stackParams['SolutionID']
                postDict['UUID'] = stackParams['UUID']
                url = directoryParams['AnonymousDataEndpoint']
                data = urllib.parse.urlencode(postDict).encode("utf-8")
                headers = {'content-type': 'application/json'}
                log.debug('%s', data)
                log.info('Sending solution tracking metrics to %s', url)
                # Unverified context added to work around SSL certificate issues
                context = ssl._create_unverified_context()
                req = urllib.request.Request(url, data=data, headers=headers)
                rsp = urllib.request.urlopen(req, timeout=5, context=context)
                content = rsp.read()
                rspcode = rsp.getcode()
                log.debug('Response Code: {}'.format(rspcode))
                log.debug('Response Content: {}'.format(content))

        if nextToken == 'None':
            morePages = False
            log.info('Last page, finished %d workspaces, putting csv file in S3', workspaceCount)
            pEndTime = time.strptime(endTime, '%Y-%m-%dT%H:%M:%SZ')
            s3Client = boto3.client('s3', config=botoConfig)
            logBody = workspacesHelper.expand_csv(wsCsv)
            logKey = time.strftime('%Y/%m/%d/', pEndTime) + region + '_' + directoryID
            if testEndOfMonth:
                logKey += '_end-of-month'
            else:
                logKey += '_daily'
            if isDryRun:
                logKey += '_dry-run'
            logKey += '.csv'

            for i in range(0, maxRetries):
                log.debug('Try #%s to put files into S3', i + 1)
                try:
                    s3DailyPutResult = s3Client.put_object(
                        Bucket=awsS3Bucket,
                        Body=logBody,
                        Key=logKey
                    )
                    log.info('Successfully uploaded csv file to %s', logKey)
                    return workspaceCount  # kill the loop
                except botocore.exceptions.ClientError as e:
                    log.error(e)
                    if i >= maxRetries - 1:
                        log.error('ExceededMaxRetries')
                    else:
                        time.sleep(i / 10)
        else:
            # Loop back to the top of the while loop and process another page of
            # workspaces for this directory (every 25 workspaces)
            log.info('Calling read_directory again for next page with nextToken -> %s', nextToken)

    return workspaceCount
def lambda_handler(event, context):
    wcoHelper = WCOHelper()
    lastDay = calendar.monthrange(int(time.strftime('%Y')), int(time.strftime('%m')))[1]
    log.info("Current date = %s", time.strftime('%Y/%m/%d'))
    log.info("Last day of month = %s", lastDay)
    region = event['Region']
    stackOutputs = {}
    testEndOfMonth = False
    sendAnonymousData = False
    isDryRun = True

    # Get cached StackOutputs
    if 'StackOutputs' in event:
        stackOutputs = event['StackOutputs']
    # Get StackOutputs from CloudFormation
    else:
        stackName = context.invoked_function_arn.split(':')[6].rsplit('-', 2)[0]
        cfClient = boto3.client('cloudformation')
        response = cfClient.describe_stacks(StackName=stackName)
        for e in response['Stacks'][0]['Outputs']:
            stackOutputs[e['OutputKey']] = e['OutputValue']

    # Set log level
    log.setLevel(stackOutputs['LogLevel'])
    log.debug(stackOutputs)

    # Provides a point to clean up parameter names in the future.
    if stackOutputs['DryRun'] == 'No':
        isDryRun = False

    # Determine whether the child function should run the last-day-of-month routine.
    if int(time.strftime('%d')) == lastDay:
        testEndOfMonth = True
        log.info('Last day of month, setting testEndOfMonth to %s', testEndOfMonth)

    # CloudFormation overrides the end-of-month testing
    if stackOutputs['TestEndOfMonth'] == 'Yes':
        testEndOfMonth = True
        log.info('Setting testEndOfMonth to %s due to CloudFormation stack parameters', testEndOfMonth)

    if stackOutputs['SendAnonymousData'] == 'true':
        log.debug('SendAnonymousData')
        sendAnonymousData = True
    log.debug('sendAnonymousData: %s', sendAnonymousData)

    childFunctionArn = stackOutputs['ChildFunctionArn']
    awsS3Bucket = stackOutputs['BucketName']
    maxRetries = 20

    # Capture any payload information passed to the function
    if 'DirectoryId' in event:
        directoryID = event['DirectoryId']
    else:
        return 'Error: No DirectoryID specified'

    endTime = event['EndTime']
    startTime = event['StartTime']
    lastDay = event['LastDay']
    runUUID = event['RunUUID']

    # Continue an existing CSV if one was passed in, otherwise start a new one
    if 'CSV' in event:
        wsCsv = event['CSV']
    else:
        wsCsv = 'WorkspaceID,Billable Hours,Usage Threshold,Change Reported,Bundle Type,Initial Mode,New Mode\n'

    if 'NextToken' in event:
        nextToken = event['NextToken']
    else:
        nextToken = 'None'

    workspacesHelper = WorkspacesHelper({
        'region': region,
        'hourlyLimits': {
            'VALUE': stackOutputs['ValueLimit'],
            'STANDARD': stackOutputs['StandardLimit'],
            'PERFORMANCE': stackOutputs['PerformanceLimit'],
            'GRAPHICS': stackOutputs['GraphicsLimit'],
            'POWER': stackOutputs['PowerLimit']
        },
        'testEndOfMonth': testEndOfMonth,
        'isDryRun': isDryRun,
        'startTime': startTime,
        'endTime': endTime
    })

    workspacesPage = workspacesHelper.get_workspaces_page(directoryID, nextToken)

    if 'NextToken' in workspacesPage:
        nextToken = workspacesPage['NextToken']
    else:
        nextToken = 'None'

    for workspace in workspacesPage['Workspaces']:
        result = workspacesHelper.process_workspace(workspace)

        # Append result data to the CSV
        wsCsv = wcoHelper.append_entry(wsCsv, result)

        # Send metrics about this solution to the Solutions Team tracker
        if sendAnonymousData:
            postDict = {}
            postDict['Data'] = {
                'runUUID': runUUID,
                'result': result['optimizationResult'],
                'bundleType': result['bundleType'],
                'previousMode': result['initialMode']
            }
            postDict['TimeStamp'] = str(datetime.datetime.utcnow().isoformat())
            postDict['Solution'] = 'SO0018'
            postDict['UUID'] = stackOutputs['UUID']
            url = event['AnonymousDataEndpoint']
            data = json.dumps(postDict).encode('utf-8')  # urllib requires a bytes body
            log.debug('Sending anonymous data to endpoint %s', event['AnonymousDataEndpoint'])
            log.debug('%s', data)
            headers = {'content-type': 'application/json'}
            req = Request(url, data=data, headers=headers)
            rsp = urlopen(req)
            content = rsp.read()
            rspcode = rsp.getcode()
            log.debug('Response Code: {}'.format(rspcode))
            log.debug('Response Content: {}'.format(content))

    if nextToken == 'None':
        log.debug('Last page, putting files in S3')
        pEndTime = time.strptime(endTime, '%Y-%m-%dT%H:%M:%SZ')
        s3Client = boto3.client('s3', config=botoConfig)
        logBody = wcoHelper.expand_csv(wsCsv)
        logKey = time.strftime('%Y/%m/%d/', pEndTime) + region + '_' + directoryID
        if testEndOfMonth:
            logKey += '_end-of-month'
        else:
            logKey += '_daily'
        if isDryRun:
            logKey += '_dry-run'
        logKey += '.csv'

        for i in range(0, maxRetries):
            log.debug('Try #%s to put files into S3', i)
            try:
                s3DailyPutResult = s3Client.put_object(
                    Bucket=awsS3Bucket,
                    Body=logBody,
                    Key=logKey
                )
                return 'Successfully uploaded log file to {!s}/{!s}'.format(awsS3Bucket, logKey)
            except botocore.exceptions.ClientError as e:
                log.error(e)
                if i >= maxRetries - 1:
                    log.error('ExceededMaxRetries')
                else:
                    time.sleep(i / 10)
    else:
        # Invoke a child function to process paginated API results.
        lambdaHelper = LambdaHelper(childFunctionArn)
        lambdaHelper.invokeChildFunction(region, directoryID, startTime, endTime, lastDay,
                                         stackOutputs, runUUID,
                                         event['AnonymousDataEndpoint'], wsCsv, nextToken)
        return 'Another page of results was found, invoking the child function again.'
#
#  or in the "license" file accompanying this file. This file is distributed #
#  on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,        #
#  express or implied. See the License for the specific language governing   #
#  permissions and limitations under the License.                            #
##############################################################################

from lib.workspaces_helper import WorkspacesHelper

helper = WorkspacesHelper({
    'region': 'us-west-2',
    'hourlyLimits': {
        'VALUE': 5,
        'STANDARD': '10',
        'PERFORMANCE': 5,
        'GRAPHICS': 5
    },
    'testEndOfMonth': False,
    'isDryRun': True,
    'startTime': '2018-02-01T00:00:00Z',
    'endTime': '2018-02-01T12:00:00Z'
})


# AUTO_STOP (paid hourly)
def test_get_hourly_threshold_int():
    result = helper.get_hourly_threshold('VALUE')
    assert type(result) is int
    assert result == 5
def test_process_workspace_skip(monkeypatch):
    workspacesHelper = WorkspacesHelper({
        'region': 'us-west-2',
        'hourlyLimits': {
            'VALUE': 5,
            'STANDARD': 5,
            'PERFORMANCE': 5,
            'GRAPHICS': 5
        },
        'testEndOfMonth': False,
        'isDryRun': True,
        'startTime': '2018-02-01T00:00:00Z',
        'endTime': '2018-02-01T12:00:00Z'
    })

    # Return a Skip_Convert tag so process_workspace skips this workspace
    def mock_describe_tags(ResourceId='ws-xxxxxxxxx'):
        return {
            'ResponseMetadata': {
                'RetryAttempts': 0,
                'HTTPStatusCode': 200,
                'RequestId': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
                'HTTPHeaders': {
                    'x-amzn-requestid': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
                    'date': 'Thu, 07 Feb 1988 11:00:00 GMT',
                    'content-length': '42',
                    'content-type': 'application/x-amz-json-1.1'
                }
            },
            'TagList': [
                {
                    'Value': 'True',
                    'Key': 'Skip_Convert'
                }
            ]
        }

    monkeypatch.setattr(workspacesHelper.client, 'describe_tags', mock_describe_tags)

    def mock_get_bundle_type(BundleIds='wsb-xxxxxxxxx'):
        return 'VALUE'

    monkeypatch.setattr(workspacesHelper, 'get_bundle_type', mock_get_bundle_type)

    def mock_get_billable_time(workspaceID, workspaceRunningMode, startTime, endTime):
        return 0

    monkeypatch.setattr(workspacesHelper.metricsHelper, 'get_billable_time', mock_get_billable_time)

    result = workspacesHelper.process_workspace({
        "UserName": "******",
        "DirectoryId": "d-xxxxxxxxx",
        "WorkspaceProperties": {
            "UserVolumeSizeGib": 50,
            "RunningModeAutoStopTimeoutInMinutes": 60,
            "RunningMode": "AUTO_STOP",
            "RootVolumeSizeGib": 80,
            "ComputeTypeName": "STANDARD"
        },
        "ModificationStates": [],
        "State": "STOPPED",
        "WorkspaceId": "ws-xxxxxxxxx",
        "BundleId": "wsb-xxxxxxxxx"
    })

    assert result['workspaceID'] == 'ws-xxxxxxxxx'
    assert result['optimizationResult'] == '-S-'
    assert result['billableTime'] == 0
    assert result['hourlyThreshold'] == 'n/a'
    assert result['bundleType'] == 'VALUE'