def main():
    """Download this app's build artifacts from Artifactory and install them on the deployed app."""
    sandbox = helpers.get_reservation_context_details()
    app = helpers.get_resource_context_details()
    app_attributes = app.attributes
    connectivity = helpers.get_connectivity_context_details()
    api = helpers.get_api_session()

    def report(text):
        # Echo progress into the sandbox output console.
        api.WriteMessageToReservationOutput(sandbox.id, text)

    resource = helpers.get_resource_context_details_dict()
    deployed = resource['deployedAppData']
    # Flatten the attribute list into a name -> value mapping, then decrypt the password in place.
    deployed['attributes'] = {item['name']: item['value'] for item in deployed['attributes']}
    deployed['attributes']['Password'] = api.DecryptPassword(deployed['attributes']['Password']).Value

    # Resolve the Artifactory repository resource referenced by the app.
    repo = api.GetResourceDetails(app_attributes["Repository Name"])
    repo_attributes = {item.Name: item.Value for item in repo.ResourceAttributes}
    repo_attributes["Host"] = repo.Address
    repo_attributes["Password"] = api.DecryptPassword(repo_attributes["Password"]).Value

    artifactory = Artifactory.from_dict(repo_attributes)
    build = ArtifactBuild.from_dict(repo_attributes)
    report('- Looking for app dependencies')
    if not build.is_populated():
        build = populate_build_from_sandbox(connectivity, sandbox, report)
    report('- Found dependencies on Artifactory, associated with build {0} {1}'.format(build.name, build.number))

    file_location, file_name = artifactory.download(build.name, build.number)
    local_path = os.path.join(file_location, file_name)
    report('- Downloaded app dependencies to Execution Server at ' + local_path)
    target_path = app_attributes['Target Directory'] + '/' + file_name
    install_package(deployed['address'],
                    deployed['attributes']['User'],
                    deployed['attributes']['Password'],
                    local_path,
                    target_path)
    report('- Copied binaries to app server at ' + target_path)
def __init__(self, reservation_id, logger):
    """
    Load blueprint details for the sandbox's reservation.

    Looks up the active topology whose file name matches this reservation's
    Blueprint (environment) name and caches its details on the instance.

    :param str reservation_id: reservation id
    :param logging.Logger logger: logger used for error reporting
    """
    try:
        self._logger = logger
        self.api_session = helpers.get_api_session()
        self.id = reservation_id
        self.Blueprint_name = helpers.get_reservation_context_details().environment_name
        full_path = None
        tp = self.api_session.GetActiveTopologyNames()
        for value in tp.Topologies:
            # Topology names are full paths; match on the file name only.
            if basename(value) == self.Blueprint_name:
                full_path = value
                break
        if full_path:
            self.blueprint_details = self.api_session.GetTopologyDetails(full_path)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; the error reporting itself is unchanged.
        err = "Failed to initialize the Sandbox. Unexpected error:" + \
              str(sys.exc_info()[0])
        self.report_error(error_message=err)
def first_module_flow():
    """
    Functions passed into orchestration flow MUST have (sandbox, components) signature

    Reads the sandbox and resource contexts and echoes the resource name, IP
    and a custom input parameter to the sandbox output.

    :param Sandbox sandbox:
    :param components:
    :return:
    """
    # Script helpers pull in sandbox details, resource details, and an API session.
    reservation_ctx = script_help.get_reservation_context_details()
    resource_ctx = script_help.get_resource_context_details()
    api = script_help.get_api_session()

    res_id = reservation_ctx.id
    ip = resource_ctx.address
    resource_name = resource_ctx.name

    # Environment variables are not available during dev, so mock the input.
    if DEBUG_MODE:
        warn_print(api, res_id, "=== DEBUG_MODE Boolean is on ===")
        custom_param = "my debug param value"
    else:
        custom_param = os.environ[INPUT_COMMAND_PARAMETER]

    sb_print(api, res_id, "resource name is {}".format(resource_name))
    sb_print(api, res_id, "resource ip is {}".format(ip))
    sb_print(api, res_id, "custom param value: {}".format(custom_param))
def run():
    """Run 'show interfaces' on this switch for every port-channel resource in the sandbox."""
    session = sh.get_api_session()
    # Hoisted: the reservation id was previously fetched twice per call.
    reservation_id = sh.get_reservation_context_details().id
    resources = session.GetReservationDetails(reservation_id).ReservationDescription.Resources
    switch = sh.get_resource_context_details().name
    for res in resources:
        # Idiomatic membership test instead of calling __contains__ directly;
        # also dropped the unused `pcs` accumulator.
        if 'GenericPortChannel' in res.ResourceModelName:
            # Port-channel resource names look like '<parent>/.../<po-name>'.
            command = 'show interfaces {}'.format(res.Name.split('/')[-1])
            session.ExecuteCommand(
                reservationId=reservation_id,
                targetType='Resource',
                targetName=switch,
                commandName='run_custom_command',
                commandInputs=[api.InputNameValue('custom_command', command)],
                printOutput=True)
def main():
    """Execute the user-selected command on every resource in the current reservation."""
    api = helpers.get_api_session()
    command_name = helpers.get_user_param("Command Name")
    try_execute_command_on_resources(
        api,
        reservation_id=helpers.get_reservation_context_details().id,
        command_name=command_name)
def __init__(self):
    """Initialize setup state and a logger scoped to the current reservation."""
    # Filled in later by the setup flow.
    self.reservation_description = None
    self.resource = None
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(
        log_file_prefix="CloudShell Sandbox Setup",
        log_group=self.reservation_id,
        log_category='Setup')
def test_test(self): dev_helpers.attach_to_cloudshell_as("admin", "admin", "Global", "8d36098c-6dd0-4d47-8ad8-b159191e3f63") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_by_uuid(si, '4222941e-a02d-dc78-80f6-44b88e0cb24f') network = py_vmomi_service.get_network_by_mac_address(vm, '00:50:56:a2:06:87') print network.name
def test_test(self): dev_helpers.attach_to_cloudshell_as( "admin", "admin", "Global", "8d36098c-6dd0-4d47-8ad8-b159191e3f63") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_by_uuid( si, '4222941e-a02d-dc78-80f6-44b88e0cb24f') network = py_vmomi_service.get_network_by_mac_address( vm, '00:50:56:a2:06:87') print network.name
def inner(func):
    """
    Decorator body: when the 'quali_profiling' global input is set, profile
    `func` with cProfile and dump cumulative-time stats to a file in that
    directory; otherwise call `func` untouched.
    """
    from cloudshell.helpers.scripts import cloudshell_scripts_helpers as helpers
    profiling = helpers.get_global_inputs().get('quali_profiling')
    reservation_context = helpers.get_reservation_context_details()
    reservation_id = reservation_context.id
    environment_name = reservation_context.environment_name

    def wrapper(*args, **kwargs):
        if not profiling:
            return func(*args, **kwargs)
        prof = cProfile.Profile()
        retval = prof.runcall(func, *args, **kwargs)
        stats_path = os.path.join(
            profiling,
            scriptName + "_" + environment_name + "_" + reservation_id + ".text")
        # `with` guarantees the stats file is closed (the original leaked the handle).
        with open(stats_path, 'w') as stream:
            stats = pstats.Stats(prof, stream=stream)
            stats.strip_dirs().sort_stats('cumtime').print_stats()
        return retval
    return wrapper
def integration_test_update_vnics(self): dev_helpers.attach_to_cloudshell_as("admin", "admin", "Global", "1205e711-edf7-4b12-8a5e-e0ff53768ce7") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_vm_by_name(si, 'QualiSB/Alex', 'Ubuntu_a287f573') nics = [x for x in vm.config.hardware.device if isinstance(x, vim.vm.device.VirtualEthernetCard)] for nic in nics: network_name = nic.backing.network.name mac_address = nic.macAddress print network_name + mac_address
def integration_test_update_vnics(self): dev_helpers.attach_to_cloudshell_as("admin", "admin", "Global", "90738b16-cd33-4b24-ae43-7d76ad1e0e1e") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_by_uuid(si, '4222dd8d-0f01-29ca-0dce-f41561de1be9') nics = [x for x in vm.config.hardware.device if isinstance(x, vim.vm.device.VirtualEthernetCard) and hasattr(x, 'backing') and hasattr(x.backing, 'network')] for nic in nics: network_name = nic.backing.network.name mac_address = nic.macAddress print network_name + mac_address
def inner(func):
    """
    Decorator body: when the 'quali_profiling' global input is set, profile
    `func` with cProfile and dump cumulative-time stats to a file in that
    directory; otherwise call `func` untouched.
    """
    from cloudshell.helpers.scripts import cloudshell_scripts_helpers as helpers
    profiling = helpers.get_global_inputs().get('quali_profiling')
    reservation_context = helpers.get_reservation_context_details()
    reservation_id = reservation_context.id
    environment_name = reservation_context.environment_name

    def wrapper(*args, **kwargs):
        if not profiling:
            return func(*args, **kwargs)
        prof = cProfile.Profile()
        retval = prof.runcall(func, *args, **kwargs)
        stats_path = os.path.join(
            profiling,
            scriptName + "_" + environment_name + "_" + reservation_id + ".text")
        # `with` guarantees the stats file is closed (the original leaked the handle).
        with open(stats_path, 'w') as stream:
            stats = pstats.Stats(prof, stream=stream)
            stats.strip_dirs().sort_stats('cumtime').print_stats()
        return retval
    return wrapper
def integration_test_update_vnics(self): dev_helpers.attach_to_cloudshell_as( "admin", "admin", "Global", "1205e711-edf7-4b12-8a5e-e0ff53768ce7") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_vm_by_name(si, 'QualiSB/Alex', 'Ubuntu_a287f573') nics = [ x for x in vm.config.hardware.device if isinstance(x, vim.vm.device.VirtualEthernetCard) ] for nic in nics: network_name = nic.backing.network.name mac_address = nic.macAddress print network_name + mac_address
def integration_test_update_vnics(self): dev_helpers.attach_to_cloudshell_as( "admin", "admin", "Global", "90738b16-cd33-4b24-ae43-7d76ad1e0e1e") details = helpers.get_reservation_context_details() py_vmomi_service = pyVmomiService(SmartConnect, Disconnect) cred = TestCredentials() si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port) vm = py_vmomi_service.find_by_uuid( si, '4222dd8d-0f01-29ca-0dce-f41561de1be9') nics = [ x for x in vm.config.hardware.device if isinstance(x, vim.vm.device.VirtualEthernetCard) and hasattr(x, 'backing') and hasattr(x.backing, 'network') ] for nic in nics: network_name = nic.backing.network.name mac_address = nic.macAddress print network_name + mac_address
def __init__(self):
    """Capture the reservation id and create a setup logger scoped to it."""
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(
        log_file_prefix="CloudShell Sandbox Setup",
        log_group=self.reservation_id,
        log_category='Setup')
def __init__(self):
    """Capture the reservation id and create a teardown logger scoped to it."""
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(
        log_file_prefix="CloudShell Sandbox Teardown",
        log_group=self.reservation_id,
        log_category='Teardown')
def __init__(self):
    """Capture the reservation id and create a connect-all logger scoped to it."""
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(
        log_file_prefix='Connect_All',
        log_group=self.reservation_id,
        log_category="Connect All")
from cloudshell.api.cloudshell_api import CloudShellAPISession from cloudshell.api.common_cloudshell_api import CloudShellAPIError from cloudshell.helpers.scripts import cloudshell_scripts_helpers as helpers from cloudshell.helpers.scripts import cloudshell_dev_helpers as dev_helpers dev_helpers.attach_to_cloudshell_as("admin","admin","Global","55aec269-792f-4ca9-8156-9967788d3a4f","10.211.55.4") reservation_id = helpers.get_reservation_context_details().id try: helpers.get_api_session().ExecuteCommand(reservation_id, "DriverDeepDive", "Resource", "failed_command") except CloudShellAPIError as err: print err.message raise
def main():
    """Kick off app configuration for the current sandbox, echoing output."""
    api = script_helpers.get_api_session()
    sandbox_id = script_helpers.get_reservation_context_details().id
    api.ConfigureApps(reservationId=sandbox_id, printOutput=True)
def execute(self):
    """Orchestrate sandbox setup: deploy apps, connect routes, power on /
    refresh IP, autoload, and optionally push GigaVue firmware when the
    'GigaVue Version' global input is present."""
    api = helpers.get_api_session()
    resource_details_cache = {}
    api.WriteMessageToReservationOutput(
        reservationId=self.reservation_id,
        message='Beginning reservation setup')
    reservation_details = api.GetReservationDetails(self.reservation_id)
    deploy_result = self._deploy_apps_in_reservation(
        api=api, reservation_details=reservation_details)
    # refresh reservation_details after app deployment if any deployed apps
    if deploy_result and deploy_result.ResultItems:
        reservation_details = api.GetReservationDetails(self.reservation_id)
    self._connect_all_routes_in_reservation(
        api=api, reservation_details=reservation_details)
    self._run_async_power_on_refresh_ip_install(
        api=api,
        reservation_details=reservation_details,
        deploy_results=deploy_result,
        resource_details_cache=resource_details_cache)
    # NOTE: method name keeps the original's 'exeucte' spelling — it is
    # defined elsewhere in this class and callers/definition must agree.
    self._try_exeucte_autoload(
        api=api,
        reservation_details=reservation_details,
        deploy_result=deploy_result,
        resource_details_cache=resource_details_cache)
    remote_host, user, password = self._get_ftp(api, self.reservation_id)
    global_inputs = helpers.get_reservation_context_details().parameters.global_inputs
    api.WriteMessageToReservationOutput(reservationId=self.reservation_id,
                                        message=str(global_inputs))
    # Firmware load for IntlTAC environments: only when the version selector
    # global input is present.
    if 'GigaVue Version' in global_inputs:
        self.logger.info(
            "Executing load_firmware for relevant devices: version " +
            global_inputs['GigaVue Version'])
        api.WriteMessageToReservationOutput(
            reservationId=self.reservation_id,
            message='Beginning load_firmware')
        version = global_inputs['GigaVue Version']
        self._apply_software_image(
            api=api,
            reservation_details=reservation_details,
            deploy_result=deploy_result,
            resource_details_cache=resource_details_cache,
            version=version,
            remote_host=remote_host)
    self.logger.info("Setup for reservation {0} completed".format(
        self.reservation_id))
    api.WriteMessageToReservationOutput(
        reservationId=self.reservation_id,
        message='Reservation setup finished successfully')
sys.stdout = devnull try: yield finally: sys.stdout = old_stdout def attach_report_to_reservation(reservation_id, filename, api): for attachment in api.GetReservationAttachmentsDetails(reservation_id): api.DeleteFileFromReservation(reservation_id, attachment) api.AttachFileToReservation(reservation_id, filename, filename, True) if __name__ == '__main__': reservation_id = helper.get_reservation_context_details().id session = helper.get_api_session() IxVM_address = '' reservation_details = session.GetReservationDetails(reservation_id) for resource in reservation_details.ReservationDescription.Resources: resource_name = resource.Name if "/" in resource_name: continue if "IxVM" in resource_name: IxVM = resource_name IxVM_address = resource.FullAddress for resource in reservation_details.ReservationDescription.Apps: resource_name = resource.Name
def __init__(self):
    """Cache the API session, resource name, sandbox id and the Azure DNS suffix."""
    self.session = script_help.get_api_session()
    self.name = script_help.get_resource_context_details().name
    self.id = script_help.get_reservation_context_details().id
    # Region-specific public DNS suffix for Azure-hosted VMs.
    self.suffix = 'westeurope.cloudapp.azure.com'
from cloudshell.helpers.scripts.cloudshell_scripts_helpers import (
    get_api_session, get_reservation_context_details, get_resource_context_details)

# Report this resource's identity into the sandbox output.
api = get_api_session()
sb_details = get_reservation_context_details()
sb_id = sb_details.id
resource_details = get_resource_context_details()
name = resource_details.name
ip = resource_details.address

api.WriteMessageToReservationOutput(reservationId=sb_id,
                                    message=f"Resource name: {name}, IP: {ip}")

# printing to std_out will be the return value of resource scripts
print(f"resource script completed for '{name}'")
def __init__(self):
    """Capture the reservation id and create a connect-all logger for it."""
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(name="Connect All",
                                          reservation_id=self.reservation_id)
def execute(): api = helpers.get_api_session() inputs = helpers.get_reservation_context_details().parameters.global_inputs res_id = helpers.get_reservation_context_details().id connectivity = helpers.get_connectivity_context_details() tempdir = tempfile.gettempdir() # install mock shells try: with zipfile.ZipFile(os.path.dirname(__file__), "r") as z: z.extractall(tempdir) shells = [ tempdir + "\\Trafficshell.zip", tempdir + "\\Putshell.zip", tempdir + "\\L2Mockswitch.zip" ] success = install_shells(connectivity, shells) api.WriteMessageToReservationOutput( reservationId=res_id, message='Shells installation results:\n' + success) except Exception as e: print e.message # get user/admin counts to create # admins_count = 0 # if 'Number of Sys Admins' in inputs: # admins_count = int(inputs['Number of Sys Admins']) users_count = 0 if 'Number of Users' in inputs: users_count = int(inputs['Number of Users']) domain_admins_count = 0 if 'Number of Domain Admins' in inputs: domain_admins_count = int(inputs['Number of Domain Admins']) # create domains and assign users group to them # first create users group try: api.AddNewGroup(groupName='Users Group', groupRole='Regular') except CloudShellAPIError as ex: pass # probably group exists already # then create domain admins group try: api.AddNewGroup(groupName='Domain Admins', groupRole='DomainAdmin') except CloudShellAPIError as ex: pass # probably group exists already # now create domains and assign the group to it domains_created = [] for domain in ['Test Team NY', 'Test Team Calif', 'Consulting Phili']: try: api.AddNewDomain(domainName=domain) api.AddGroupsToDomain(domainName=domain, groupNames=['Users Group']) domains_created.append(domain) # assign networking service category to the new domains #import_package(connectivity, domain, tempdir + "\\Networking Service Category.zip") api.AddGroupsToDomain(domainName=domain, groupNames=['Domain Admins']) #if domain == 'Test Team NY': # import_package(connectivity, domain, tempdir + 
"\\Apps for testing service category.zip") except CloudShellAPIError as ex: pass # probably domain exists already api.WriteMessageToReservationOutput( res_id, 'Domains created: ' + ','.join(domains_created)) # import the put blueprint try: api.WriteMessageToReservationOutput( res_id, 'Importing "PUT Traffic Test Blueprint"') import_package(connectivity, 'Test Team NY', tempdir + "\\PUT Traffic Test Blueprint.zip") api.WriteMessageToReservationOutput( res_id, 'Importing "PUT Traffic Test Blueprint" complete') except Exception as ex: api.WriteMessageToReservationOutput( res_id, 'Importing "PUT Traffic Test Blueprint" failed') pass # create users/admins groups = None # if admins_count > 0: # groups = api.GetGroupsDetails() # sysadmin_group = [g for g in groups.Groups if g.Name == "System Administrators"][0] # a = len(sysadmin_group.Users) + 1 # added_count = 0 # admins_created = [] # while added_count < admins_count: # try: # api.AddNewUser('admin' + str(a), 'admin' + str(a), '', isActive=True, isAdmin=True) # added_count += 1 # admins_created.append('admin' + str(a)) # except: # pass # a += 1 # api.WriteMessageToReservationOutput(res_id, 'Admins created: ' + ','.join(admins_created)) if domain_admins_count > 0: if groups is None: groups = api.GetGroupsDetails() dom_admin_group = [ g for g in groups.Groups if g.Name == "Domain Admins" ][0] a = len(dom_admin_group.Users) + 1 added_count = 0 dom_admins_created = [] while added_count < domain_admins_count: try: api.AddNewUser('dadmin' + str(a), 'dadmin' + str(a), '', isActive=True, isAdmin=False) api.AddUsersToGroup(['dadmin' + str(a)], 'Domain Admins') added_count += 1 dom_admins_created.append('dadmin' + str(a)) except: pass a += 1 api.WriteMessageToReservationOutput( res_id, 'Domain Admins created: ' + ','.join(dom_admins_created)) if users_count > 0: api.WriteMessageToReservationOutput( res_id, 'Creating users and resources, this might take a while...') if groups is None: groups = api.GetGroupsDetails() users_group = 
[g for g in groups.Groups if g.Name == "Users Group"][0] a = len(users_group.Users) + 1 added_count = 0 users_created = [] l2_resource_created = False while added_count < users_count: try: api.AddNewUser('user' + str(a), 'user' + str(a), '', isActive=True, isAdmin=False) api.AddUsersToGroup(['user' + str(a)], 'Users Group') added_count += 1 users_created.append('user' + str(a)) except: pass a += 1 # create resources for this user (PUT+Traffic) try: api.CreateFolder('PUTs') #postfix_rn = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6)) postfix_rn = str(a - 1) resource_name = 'Product Under Test - ' + postfix_rn api.CreateResource('CS_GenericResource', 'Putshell', resource_name, '10.10.10.' + str(a), 'PUTs', '', 'A fake resource for training') api.UpdateResourceDriver(resource_name, 'Putshell') api.AutoLoad(resource_name) api.AddResourcesToDomain('Test Team NY', [resource_name], True) api.CreateFolder('Traffic Generators') #postfix_tg = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6)) postfix_tg = str(a - 1) traffic_name = 'Traffic Generator 2.0 - ' + postfix_tg api.CreateResource('CS_GenericResource', 'Trafficshell', traffic_name, '10.10.11.' 
+ str(a), 'Traffic Generators', '', 'A fake resource for training') api.UpdateResourceDriver(traffic_name, 'Trafficshell') api.AutoLoad(traffic_name) api.AddResourcesToDomain('Test Team NY', [traffic_name], True) # connect devices to the patch panel patch_name = 'Patch Panel - Training' api.CreateResources([ ResourceInfoDto('Panel Jack', 'Generic Jack', 'Port 1 - PUT' + postfix_rn, '1', '', patch_name, ''), ResourceInfoDto('Panel Jack', 'Generic Jack', 'Port 2 - PUT' + postfix_rn, '2', '', patch_name, ''), ResourceInfoDto('Panel Jack', 'Generic Jack', 'Port 1 - TG' + postfix_tg, '1', '', patch_name, ''), ResourceInfoDto('Panel Jack', 'Generic Jack', 'Port 2 - TG' + postfix_tg, '2', '', patch_name, '') ]) api.AddResourcesToDomain('Test Team NY', [patch_name], True) api.UpdatePhysicalConnections([ PhysicalConnectionUpdateRequest( resource_name + '/Port 1', patch_name + '/Port 1 - PUT' + postfix_rn, '10'), PhysicalConnectionUpdateRequest( resource_name + '/Port 2', patch_name + '/Port 2 - PUT' + postfix_rn, '10'), PhysicalConnectionUpdateRequest( traffic_name + '/Port 1', patch_name + '/Port 1 - TG' + postfix_tg, '10'), PhysicalConnectionUpdateRequest( traffic_name + '/Port 2', patch_name + '/Port 2 - TG' + postfix_tg, '10') ]) # create L2 mock switch if needed l2_training = 'L2 Mock Switch - Training' try: if l2_resource_created == False: try: l2 = api.GetResourceDetails(l2_training) l2_resource_created = True except: pass if l2_resource_created == False: api.CreateResource( resourceFamily='CS_Switch', resourceModel='L2Mockswitch', resourceName=l2_training, resourceAddress='1.2.3.4', folderFullPath='', parentResourceFullPath='', resourceDescription='A fake resource for training') api.UpdateResourceDriver(l2_training, 'L2Mockswitch') api.CreateResource( resourceFamily='CS_Chassis', resourceModel='L2Mockswitch.GenericChassis', resourceName='Chassis1', resourceAddress='1', folderFullPath='', parentResourceFullPath=l2_training, resourceDescription='') except Exception as 
ex: api.WriteMessageToReservationOutput(res_id, ex.message) pass # resource probably exists already # add L2 ports and connect to PUT chassis_name = l2_training + '/Chassis1' api.CreateResources([ ResourceInfoDto('CS_Port', 'L2Mockswitch.GenericPort', 'Port 3 - PUT' + postfix_rn, '3', '', chassis_name, ''), ResourceInfoDto('CS_Port', 'L2Mockswitch.GenericPort', 'Port 4 - PUT' + postfix_rn, '4', '', chassis_name, ''), ResourceInfoDto('CS_Port', 'L2Mockswitch.GenericPort', 'Port 3 - TG' + postfix_tg, '3', '', chassis_name, ''), ResourceInfoDto('CS_Port', 'L2Mockswitch.GenericPort', 'Port 4 - TG' + postfix_tg, '4', '', chassis_name, '') ]) api.AddResourcesToDomain('Test Team NY', [l2_training], True) api.UpdatePhysicalConnections([ PhysicalConnectionUpdateRequest( resource_name + '/Port 3', chassis_name + '/Port 3 - PUT' + postfix_rn, '10'), PhysicalConnectionUpdateRequest( resource_name + '/Port 4', chassis_name + '/Port 4 - PUT' + postfix_rn, '10'), PhysicalConnectionUpdateRequest( traffic_name + '/Port 3', chassis_name + '/Port 3 - TG' + postfix_tg, '10'), PhysicalConnectionUpdateRequest( traffic_name + '/Port 4', chassis_name + '/Port 4 - TG' + postfix_tg, '10') ]) except Exception as ex: api.WriteMessageToReservationOutput(res_id, ex.message) pass # try: # api.AddTopologiesToDomain('Test Team NY', ['PUT Traffic Test']) # except: # pass api.WriteMessageToReservationOutput( res_id, 'Users created: ' + ','.join(users_created)) print 'Process complete'
domain = 'Global'
server = 'localhost'
resId = '325e5105-47c9-4add-8e6c-8fd6a6f1fc8e'

# Attach to a live sandbox so the script helpers resolve outside CloudShell.
attach_to_cloudshell_as(user=username,
                        password=password,
                        domain=domain,
                        server_address=server,
                        reservation_id=resId,
                        service_name='UptimeEnforcer')
get_debug_session()

session = sh.get_api_session()
token = session.token_id

# Enrich the helper contexts with the fields driver code expects.
reservation_context = sh.get_reservation_context_details()
reservation_context.reservation_id = reservation_context.id
connectivity_context = sh.get_connectivity_context_details()
connectivity_context.admin_auth_token = token
resource_context = sh.get_resource_context_details()

reservation_description = session.GetReservationDetails(
    reservation_context.id).ReservationDescription
services = reservation_description.Services
connectors = reservation_description.Connectors
# Connectors touching this service, whether as source or target.
context_connectors = [conn for conn in connectors
                      if resource_context.name in [conn.Source, conn.Target]]
# Attach to a live sandbox so the script helpers resolve outside CloudShell.
dh.attach_to_cloudshell_as(
    user='******',
    password='******',
    domain='Global',
    reservation_id='e03d8f4b-a233-490f-b3d4-5088b3aaf9be',
    server_address='localhost',
    resource_name='Centos VM_3432-f9be')


def _decrypt(session, password):
    """Return the cleartext of a CloudShell-encrypted password."""
    return session.DecryptPassword(password).Value


session = script_help.get_api_session()
resid = script_help.get_reservation_context_details().id
address = script_help.get_resource_context_details().address
username = script_help.get_resource_context_details().attributes.get('User')
enc_cs_password = script_help.get_resource_context_details().attributes.get(
    'Password')
cleartext_password = session.DecryptPassword(enc_cs_password).Value
# cryptRDP5.exe re-encrypts the cleartext into the RDP file password format.
pass_enc = os.popen("cryptRDP5.exe {}".format(cleartext_password)).read()
rdp_text = rdp_populator(username=username, password=pass_enc, ip=address)
FILEPATH = r'c:\temp\{}.rdp'.format(
    script_help.get_resource_context_details().name)
with open(FILEPATH, 'w') as upload_file:
    upload_file.write(rdp_text)
"type": "actionTarget" }, "customActionAttributes": [], "type": "setVlan" }] } } ''' import cloudshell.helpers.scripts.cloudshell_scripts_helpers as qs_helper import cloudshell.helpers.scripts.cloudshell_dev_helpers as dev_help from cloudshell.shell.core.driver_context import * res_id = 'f7505eb3-b637-4795-9740-19a56244a33d' dev_help.attach_to_cloudshell_as('admin', 'admin', 'Global', res_id, 'localhost', '8029',resource_name='brocy') resource = qs_helper.get_resource_context_details() reservation = qs_helper.get_reservation_context_details() connectivity = qs_helper.get_connectivity_context_details() my_context = ResourceCommandContext(connectivity,resource,reservation,[]) my_context.reservation.reservation_id = '5695cf87-a4f3-4447-a08a-1a99a936010e' # cont = create_context() driv = BrocadeNOSDriver() driv.ApplyConnectivityChanges(my_context, request) # driv.get_inventory(cont) # driv.command(context=cont, command='conf t') # driv.command(context=cont, command='int fa 0/1') # driv.command(context=cont, command='speed 100')
def __init__(self):
    """Capture the reservation id and create a teardown logger for it."""
    self.reservation_id = helpers.get_reservation_context_details().id
    self.logger = qs_logger.get_qs_logger(
        name="CloudShell Sandbox Teardown",
        reservation_id=self.reservation_id)
:return: """ resv_det = api.GetReservationDetails(reservation.id) server = None user = None password = None for resource in resv_det.ReservationDescription.Resources: if resource.ResourceModelName.lower() == 'generic tftp server': server = resource.FullAddress res_det = api.GetResourceDetails(resource.Name) for attribute in res_det.ResourceAttributes: if attribute.Name == 'Storage username': user = attribute.Value if attribute.Name == 'Storage password': password = attribute.Value return server, user, password ses = helpers.get_api_session() reservation = helpers.get_reservation_context_details() resource = helpers.get_resource_context_details() filename = os.environ['FileName'] filename_input = InputNameValue('file_path', filename) ftp, user, password = get_ftp(ses, reservation) remote_host_input = InputNameValue('remote_host', ftp) ses.EnqueueCommand(reservation.id, resource.name, 'Resource', 'load_firmware', [filename_input, remote_host_input])
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as script_helpers import debug import cloudshell_cli_handler def decrypt(password): decypted = session.DecryptPassword(password).Value return decypted debug.get_debug_session() res_id = script_helpers.get_reservation_context_details().id session = script_helpers.get_api_session() resources = session.GetReservationDetails( res_id).ReservationDescription.Resources password = script_helpers.get_resource_context_details().attributes.get( '{0}.Admin Password'.format( script_helpers.get_resource_context_details().model)) i = 0 while i < 5: try: password = decrypt(password) except: i = 1000 i = i + 1 CS_Cli = cloudshell_cli_handler.CreateSession( host=script_helpers.get_resource_context_details().address, username=script_helpers.get_resource_context_details().attributes.get(
import json
import os

import cloudshell.api.cloudshell_api as api
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as scripthelpers

reservation_details = scripthelpers.get_reservation_context_details()
connectivity_details = scripthelpers.get_connectivity_context_details_dict()


class AnsibleExecutioner():
    """Finds sandbox resources that expose an 'Ansible Playbook URL' attribute."""

    def __init__(self):
        pass

    def executePlaybookScript(self):
        """Collect every reservation resource carrying an 'Ansible Playbook URL'
        attribute; raise when none is found."""
        session = scripthelpers.get_api_session()
        Ansible_resources = []
        all_resources = [
            resource for resource in session.GetReservationDetails(
                reservation_details.id).ReservationDescription.Resources
        ]
        for res in all_resources:
            attrs = session.GetResourceDetails(res.Name).ResourceAttributes
            playbook_urls = [attr.Value for attr in attrs
                             if attr.Name == 'Ansible Playbook URL']
            # Exactly one URL attribute marks an Ansible-capable resource.
            if len(playbook_urls) == 1:
                Ansible_resources.append(res)
        if not Ansible_resources:
            raise Exception('no resources with Ansible capabilities found!')
from cloudshell.api.cloudshell_api import InputNameValue from cloudshell.helpers.scripts import cloudshell_scripts_helpers as helper reservation_id = helper.get_reservation_context_details().id reservation_details = helper.get_api_session().GetReservationDetails( reservation_id).ReservationDescription for service in reservation_details.Services: print service.ServiceName if service.ServiceName == 'Ixia IxNetwork Contro ller Shell 2G': helper.get_api_session().ExecuteCommand( helper.get_reservation_context_details().id, service.Alias, 'Service', 'run_quicktest', [ InputNameValue('test_name', 'rfc2544_frameloss'), InputNameValue('config_file_name', 'rfc_2544_frameloss.ixncfg') ])
# NOTE(review): collapsed multi-line method (newlines lost in extraction);
# left byte-identical because the nested try/except structure is too
# order-dependent to reformat safely. Visible behavior: export the current
# blueprint as a zip via QualiAPIClient, unzip it, strip files not in the
# configured "GitPakageContentList", re-zip, connect to GitHub (public URL
# first, then the enterprise "<GitHub_Link>/api/v3" base on failure), download
# the previous committed package if one exists, diff the unzipped trees with
# self.are_dir_trees_equal(), and commit/upload via self.build_list_and_commit()
# only when there is a diff (or when it is the first commit, detected by a 404
# on download). Temp files are cleaned up and a download link is written to the
# reservation output on success.
# Review notes (do not fix blind — confirm against the full file):
# - `e.status == 404` assumes the caught exception is a PyGithub
#   GithubException; a plain Exception has no .status and would raise here.
# - `Prev_file_contents.content.decode('base64')` is Python 2 only; Python 3
#   needs base64.b64decode.
# - local name `zip` shadows the builtin; file handles are opened without
#   `with`; message typos ("Compering", "extport") are user-visible strings
#   and were deliberately left untouched.
def ExportBlueprint_and_commit(self): """ Export Blueprint on local machin and compare with old commit on Github before commiting and uploading the new exported package """ self.logger.info("Start Exporting Blueprint") self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Start Exporting blueprint") try: self.commit_comment = os.environ['comment'] except Exception as e: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Missing comment for commit") self.logger.info("Missing comment for commit") return self.commit_message = self.commit_comment + " commited by owner: " + self.sandbox.reservationContextDetails.owner_user ip = helpers.get_connectivity_context_details().server_address domain = helpers.get_reservation_context_details().domain contentlist = self.configs["GitPakageContentList"] temp_zip_path = self.configs['temp_zip_file'] if not os.path.isdir(temp_zip_path): os.makedirs(temp_zip_path) self.zip_package_name = self.sandbox.reservationContextDetails.environment_name + '.zip' self.fullZipfilePath = temp_zip_path + '\\' + self.zip_package_name GitHub_Token = self.configs["GitHub Token"] #repo_url = self.configs["repo_url"] try: self.logger.info("Export the package") qac = QualiAPIClient( ip, '9000', self.sandbox.reservationContextDetails.owner_user, self.sandbox.reservationContextDetails.owner_password, domain) qac.download_environment_zip( self.sandbox.reservationContextDetails.environment_name, self.fullZipfilePath) UnzipFolderName = temp_zip_path + '\\' + self.sandbox.reservationContextDetails.environment_name ## Unzip the pakage if not os.path.isdir(UnzipFolderName): os.makedirs(UnzipFolderName) self.logger.info("Unzip the package") zip = zipfile.ZipFile(self.fullZipfilePath) zip.extractall(UnzipFolderName) zip.close() ## Delete unrelevant files from package self.logger.info("Delete the unrelevant files from the package") for item in os.listdir(UnzipFolderName): if item not in contentlist: path = 
UnzipFolderName + '\\' + item shutil.rmtree(path) ## Zip the new package - overite the package self.logger.info("Zip the new blueprint package") os.chdir(os.path.dirname(UnzipFolderName)) shutil.make_archive(UnzipFolderName, 'zip', UnzipFolderName) is_uploaded = False try: #First try to connect to regular Github url : https://github.com self.logger.info("Try to connect to the organization") git = Github(login_or_token=GitHub_Token) org = git.get_organization(self.configs["organization_name"]) repo = org.get_repo(self.configs["Repository_name"]) except Exception as e: #For enterprise github URL which is different than "https://github.com" try: base_GitHubUrl = self.configs["GitHub_Link"] + '/api/v3' git = Github(login_or_token=GitHub_Token, base_url=base_GitHubUrl) org = git.get_organization( self.configs["organization_name"]) repo = org.get_repo(self.configs["Repository_name"]) except Exception as e: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Error getting git: {0}".format(str(e))) self.logger.error("Error getting git {0}".format(str(e))) raise e try: ##Download the previouse one self.logger.info( "Try to download the prev commit of this blueprint") Prev_file_contents = repo.get_file_contents( self.zip_package_name) self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "This is not the first commit for this blueprint") self.logger.info( "This is not the first commit for this blueprint") prev_zip_path = temp_zip_path + '\\' + self.configs[ "Prev_Package_name"] + '.zip' fh = open(prev_zip_path, "wb") fh.write(Prev_file_contents.content.decode('base64')) fh.close() ## unzip the prev one self.logger.info("Unzip the prev blueprint package") prev_unzip = temp_zip_path + '\\' + self.configs[ "Prev_Package_name"] if not os.path.isdir(prev_unzip): os.makedirs(prev_unzip) zip = zipfile.ZipFile(prev_zip_path) zip.extractall(prev_unzip) zip.close() if os.listdir(UnzipFolderName) and os.listdir(prev_unzip): 
self.logger.info("Compering with prev commit for diff") if self.are_dir_trees_equal(prev_unzip, UnzipFolderName): self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "There is no diff from the prev commit") self.logger.info( "There is no diff from the prev commit - Not uploading the new commit!" ) else: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "There is a diff from the prev commit") self.logger.info( "There is a diff from the prev commit") self.build_list_and_commit(repo, is_new_blueprint=False) is_uploaded = True self.logger.info("delete prev files") if os.path.isdir(prev_unzip): shutil.rmtree(prev_unzip) if os.path.isfile(prev_zip_path): os.remove(prev_zip_path) except Exception as e: self.logger.info("fail to download {0}".format(str(e))) if e.status == 404: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "This is the first commit for this blueprint") self.logger.info( "This is the first commit for this blueprint {0}". 
format(str(e))) try: if os.listdir(UnzipFolderName) and repo: self.build_list_and_commit(repo, is_new_blueprint=True) is_uploaded = True except Exception as e: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Error trying to build list and commit blueprint") self.logger.error( "Error trying to build list and commit blueprint {0}" .format(str(e))) DownloadLink = self.configs['GitHub_Link'] + '/'+ self.configs["organization_name"] + '/' +\
self.configs["Repository_name"] + "/blob/master/"+ self.zip_package_name try: self.logger.info("Export and commit to Github completed") self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Export and commit to Github completed ") if is_uploaded: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, DownloadLink) self.logger.info("delete tmp files") shutil.rmtree(UnzipFolderName) if os.path.isfile(self.fullZipfilePath): os.remove(self.fullZipfilePath) except Exception as e: self.logger.error("Error delete tmp files {0}".format(str(e))) except Exception as e: self.sandbox.automation_api.WriteMessageToReservationOutput( self.sandbox.id, "Failed to export and commit {0}".format(str(e))) self.logger.error("Error - fail to extport and commit {0}".format( str(e))) raise e
def main():
    """Run the user-selected command on every resource in the sandbox.

    Reads the "Command Name" user parameter and delegates execution to
    try_execute_command_on_resources for the current reservation.
    """
    api = helpers.get_api_session()
    command_name = helpers.get_user_param("Command Name")
    try_execute_command_on_resources(
        api,
        reservation_id=helpers.get_reservation_context_details().id,
        command_name=command_name,
    )
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as cs_helper
from cloudshell.api.common_cloudshell_api import CloudShellAPIError

# Disconnect every route defined in the current reservation's topologies.
route_list = []
res_id = cs_helper.get_reservation_context_details().id
route_details = cs_helper.get_api_session().GetReservationDetails(
    res_id).ReservationDescription.TopologiesRouteInfo
w2output = cs_helper.get_api_session().WriteMessageToReservationOutput

# Build the endpoint list. The Route API takes endpoints paired in a flat
# list: ['source1', 'target1', 'source2', 'target2', ... 'sourceN', 'targetN'].
for route in route_details:
    for r in route.Routes:
        route_list.append(r.Source)
        route_list.append(r.Target)

# Execute the disconnect only when there is at least one route.
if route_list:
    try:
        # FIX(review): use floor division so the route count renders as an
        # integer ("2 routes", not "2.0 routes") under Python 3; route_list
        # always holds an even number of endpoints, so the value is unchanged.
        w2output(reservationId=res_id,
                 message='Queuing {} routes for disconnection'.format(
                     len(route_list) // 2))
        cs_helper.get_api_session().DisconnectRoutesInReservation(
            reservationId=res_id, endpoints=route_list)
    except CloudShellAPIError as err:
        # Surface the API failure in the sandbox output instead of crashing.
        w2output(reservationId=res_id, message=err.message)
def __init__(self):
    """Bind this command to the active reservation and create its logger."""
    rid = helpers.get_reservation_context_details().id
    self.reservation_id = rid
    # One log group per sandbox so snapshot logs are grouped by reservation.
    self.logger = get_qs_logger(log_file_prefix="SaveSnapshot",
                                log_group=rid,
                                log_category='EnvironmentCommands')
def __init__(self):
    """Capture the sandbox API session and the reservation's requested routes."""
    rid = helpers.get_reservation_context_details().id
    api = helpers.get_api_session()
    self.reservation_id = rid
    self.session = api
    # Requested (not necessarily connected) routes of this reservation.
    details = api.GetReservationDetails(rid)
    self.routes = details.ReservationDescription.RequestedRoutesInfo
from cloudshell.shell.core.driver_context import InitCommandContext, AutoLoadCommandContext, ResourceCommandContext, \
    AutoLoadAttribute, AutoLoadDetails, CancellationContext, ResourceRemoteCommandContext
from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface

# Debug script: attach to a live CloudShell sandbox and exercise the Huawei
# Cloud driver's Deploy command with a canned JSON request.
# NOTE(review): dev_help / script_help / driver / json are referenced below but
# not imported in this visible chunk — presumably imported above; verify.
# with open(r'c:\temp\HWC_Debug_creds', 'r') as debug_file:
#     my_debug_file = debug_file.read()
dev_help.attach_to_cloudshell_as(
    user='******',
    password='******',
    domain='Global',
    reservation_id='c3f09270-58a6-4188-a11d-19239d73a0d8',
    server_address='192.168.85.15',
    resource_name='HWC')

# Kept from the original: binds the interface CLASS itself (not an instance);
# it is never used afterwards.
driver_interface = ResourceDriverInterface

# Build a command context from the attached session's details.
context = ResourceCommandContext(
    connectivity=script_help.get_connectivity_context_details(),
    reservation=script_help.get_reservation_context_details(),
    resource=script_help.get_resource_context_details(),
    connectors=None)

with open('request.json', 'r') as file:
    my_request = json.load(file)

# Drive the deploy flow exactly as CloudShell would.
my_hwc_driver = driver.HuaweicloudDriver()
my_hwc_driver.initialize(context)
my_hwc_driver.Deploy(context, request=my_request)
pass