def test_03_delete(self):
    """Delete the test vm once it has left 'pending' and check the output."""
    HEADING()
    # wait for the boot started in an earlier test to finish
    list_command = "cms vm list test_boot_02 --refresh"
    while 'pending' in Shell.execute(list_command, shell=True):
        time.sleep(1)
    Benchmark.Start()
    result = Shell.execute("cms vm delete test_boot_02", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert "test_boot_02" in result
def test_01_start(self):
    """Dry-run `cms vm start` and verify the echoed options."""
    HEADING()
    command = f"cms vm start test_boot_02 --cloud={CLOUD} --dryrun"
    Benchmark.Start()
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    for expected in ("start nodes ['test_boot_02']",
                     "option - iter",
                     "processors - None"):
        assert expected in result
def test_02_delete(self):
    """Dry-run a parallel `cms vm delete` and verify the echoed options."""
    HEADING()
    command = "cms vm delete test_boot_02 --parallel --processors=3 --dryrun"
    Benchmark.Start()
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    for expected in ("delete nodes ['test_boot_02']",
                     "option - pool",
                     "processors - 3"):
        assert expected in result
def test_add_rules_to_security_group(self):
    """Add the upload rules to the upload security group; expect None back."""
    HEADING()
    # BUG: this seems wrong: cloudmesh_upload
    Benchmark.Start()
    outcome = self.p.add_rules_to_secgroup(
        secgroupname=SECGROUP_UPLOAD,
        newrules='resource_name_security_rule_upload')
    Benchmark.Stop()
    VERBOSE(outcome, label='Add Rules to Security Group')
    assert outcome is None
def test_not_VERBOSE(self):
    """With verbosity set to 0, VERBOSE must print nothing at all."""
    HEADING()
    variables["verbose"] = 0
    # renamed from `help`: avoid shadowing the builtin of the same name
    message = "hallo"
    with io.StringIO() as buf, redirect_stdout(buf):
        VERBOSE(message)
        output = buf.getvalue()
    print(output)
    variables["verbose"] = 10
    # nothing of the message, its name, or the VERBOSE banner may appear
    assert "help" not in output
    assert "hallo" not in output
    assert "#" not in output
def test_02_stop(self):
    """Dry-run a parallel `cms vm stop` and verify the echoed options."""
    HEADING()
    command = (f"cms vm stop test_boot_02 --cloud={CLOUD} "
               f"--parallel --processors=3 --dryrun")
    Benchmark.Start()
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    for expected in ("stop nodes ['test_boot_02']",
                     "option - pool",
                     "processors - 3"):
        assert expected in result
def listdatasets(self):
    """Print the id of every dataset in the client's project.

    Uses self.client.list_datasets() and prints one dataset id per
    line, or a notice when the project holds no datasets. Returns None.
    """
    print("Listing dataset")
    VERBOSE("in list dataset")
    # dead code removed: `results` was assigned twice and never used,
    # and a commented-out client construction line was dropped
    datasets = list(self.client.list_datasets())
    project = self.client.project
    if datasets:
        print("Datasets in project {}:".format(project))
        for dataset in datasets:
            print("\t{}".format(dataset.dataset_id))
    else:
        print("{} project does not contain any datasets.".format(project))
def test_create_with_options(self):
    """Create a multipass vm with explicit cpu/size/image/mem options."""
    HEADING()
    vm_name = f"{self.vm_name_prefix}3"
    command = (f"cms multipass create {vm_name} "
               f"--cpus=2 --size=3G --image=bionic --mem=1G")
    Benchmark.Start()
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert f'Launched: {vm_name}' in result, "Error creating instance"
    Benchmark.Status(True)
def test_provider_create(self):
    """Create a vm via the Provider API and check it reports Running."""
    HEADING()
    vm_name = f"{self.vm_name_prefix}2"
    vm_provider = Provider(vm_name)
    Benchmark.Start()
    result = vm_provider.create(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result['status'], "Error creating instance"
    Benchmark.Status(True)
def test_provider_images(self):
    """List provider images and check an 18.04 image is among them."""
    HEADING()
    self.provider = Provider()
    Benchmark.Start()
    images = self.provider.images()
    Benchmark.Stop()
    VERBOSE(images)
    assert "18.04" in str(images)
    Benchmark.Status(True)
def test_provider_get(self):
    """Call provider.get() and check the (placeholder) 'missing' marker."""
    HEADING()
    self.provider = Provider()
    Benchmark.Start()
    outcome = self.provider.get()
    Benchmark.Stop()
    VERBOSE(outcome)
    assert "missing" in str(outcome)
    Benchmark.Status(True)
def cat_bibfiles(directory, output):
    """Concatenate every .bib file found under *directory* into *output*.

    Each file's content is followed by a `% <path>` marker comment.

    :param directory: root of the tree to search (expanded via path_expand)
    :param output: path of the combined bib file to write
    :return: list of Path objects of the merged bib files
    """
    d = path_expand(directory)
    VERBOSE(d)
    bibs = list(Path(d).glob("**/*.bib"))
    pprint(bibs)
    # collect parts and join once instead of repeated string
    # concatenation, which is quadratic in the total size
    parts = []
    for bib in bibs:
        bib = str(bib)
        parts.append(readfile(bib) + "\n\n% " + bib + "\n\n")
    writefile(output, "".join(parts))
    return list(bibs)
def test_reboot(self):
    """Reboot the two vms created by the earlier test_create* methods."""
    HEADING()
    self.provider = Provider()
    # the two vms created by the test_create* methods
    vm_names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    command = f"cms multipass reboot {vm_names}"
    Benchmark.Start()
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result, "Error rebooting instance"
def list(self, bucket_name):
    """List the objects in an Amazon S3 bucket

    :param bucket_name: string
    :return: List of bucket objects. If error, return None.
    """
    # Retrieve the list of bucket objects
    try:
        response = self.s3_client.list_objects_v2(Bucket=bucket_name)
    except ClientError as e:
        # AllAccessDisabled error == bucket not found
        VERBOSE(e)
        return None
    # list_objects_v2 omits 'Contents' entirely when the bucket is
    # empty; the original response['Contents'] raised KeyError there
    return response.get('Contents', [])
def test_provider_run_buffer(self):
    """Run `uname -a` through the buffer executor and inspect the output."""
    HEADING()
    self.provider = Provider()
    Benchmark.Start()
    outcome = self.provider.run(command="uname -a", executor="buffer")
    Benchmark.Stop()
    VERBOSE(outcome)
    # find a good assertion
    assert "18.04" in str(outcome)
    Benchmark.Status(True)
def execute(self, arguments):
    """Dispatch the parsed pi-cluster command-line *arguments*.

    Handles three cases, checked in order: printing NAMES when a
    service and names are given with --list, listing the scripts of a
    named service (short or detailed form), and listing every service
    package found next to cloudmesh.pi.cluster.
    """
    def Service():
        # dynamically locate and instantiate the service class, e.g.
        # SERVICE == "spark" -> cloudmesh.pi.cluster.spark.spark.Spark
        mod = arguments.SERVICE
        cls = mod.capitalize()
        imp = f'cloudmesh.pi.cluster.{mod}.{mod}.{cls}'
        _Service = locate(imp)
        service = _Service()
        return service

    if arguments.list and arguments.SERVICE and arguments.NAMES:
        print(arguments.NAMES)

    if arguments.list and arguments.SERVICE:
        VERBOSE(arguments)
        service = Service()
        print()
        print(f"Scripts for {arguments.SERVICE}")
        print()
        if not arguments.details:
            # short form: only the script names
            for script in service.script:
                print(" *", script)
            print()
        else:
            # detailed form: banner followed by each script's body
            for name in service.script:
                banner(name)
                print(service.script[name].strip())
            print()
            print("details")
    elif arguments.list:
        # no service named: list the service package directories,
        # skipping private ones such as __pycache__
        print()
        print(f"Deployment Services")
        print()
        directory = os.path.dirname(cloudmesh.pi.cluster.__file__)
        entries = glob.glob(f"{directory}/*")
        for entry in entries:
            if os.path.isdir(entry):
                entry = os.path.basename(entry)
                if not entry.startswith("_"):
                    print(" *", entry)
        print()
def start_flow(self):
    """Materialize the flow into '<workflow_name>-flow-active'.

    Projects the bookkeeping fields of every document, forces status
    to 'pending', writes the result with $out, then switches over.
    """
    VERBOSE("START")
    started_collection = f"{self.workflow_name}-flow-active"
    project_stage = {
        "$project": {
            "dependencies": 1,
            "cm": 1,
            "kind": 1,
            "cloud": 1,
            "name": 1,
            "status": "pending",
        }
    }
    out_stage = {"$out": started_collection}
    self.collection.aggregate([project_stage, out_stage])
    self.switch_to_active_flow()
def list_datasets(self):
    """List datasets via self.client and report through update_status.

    Returns the result of self.update_status(...) on both the success
    path and the DatasetNotFound error path.
    """
    VERBOSE("in list dataset")
    results = {}
    try:
        # NOTE(review): this placeholder is immediately clobbered by the
        # reassignment below on success; it only reaches update_status
        # when list_datasets() raises — confirm this is intended.
        results['dataset'] = "Error executing command"
        results = self.client.list_datasets()
        # NOTE(review): assumes the client response is a mapping with a
        # 'dataset' key — verify against the client's API, else this
        # print raises.
        print(results['dataset'])
        return self.update_status(results=results, name='all',
                                  status="listingdataset")
    except ClientError as e:
        if e.response['Error']['Code'] == 'DatasetNotFound':
            results['dataset'] = "No dataset exist"
        return self.update_status(results=results, name="all",
                                  status="listingdataset")
def test_provider_destroy(self):
    """Destroy the vm created earlier and check its reported status."""
    HEADING()
    self.provider = Provider()
    vm_name = f"{self.vm_name_prefix}2"
    vm_provider = Provider(vm_name)
    Benchmark.Start()
    result = vm_provider.destroy(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'destroyed' in result['status'], "Error destroying instance"
    Benchmark.Status(True)
def __init__(self, name=None, spec=None, directory=None,
             host="127.0.0.1", server="flask", port=8080, debug=True):
    """Set up the service server parameters.

    :param name: service name; derived from the spec when None
    :param spec: path to the service specification file (required)
    :param directory: working directory; defaults to the spec's directory
    :param host: interface to bind to
    :param server: backend, "flask" or "tornado"
    :param port: port to listen on
    :param debug: enable debug mode (not supported by tornado)
    :raises FileNotFoundError: when no spec file is given
    """
    if spec is None:
        Console.error("No service specification file defined")
        raise FileNotFoundError
    self.spec = path_expand(spec)
    self.name = Server.get_name(name, self.spec)
    if directory is None:
        self.directory = os.path.dirname(self.spec)
    else:
        self.directory = directory
    # fall back to the documented defaults when falsy values are passed
    self.host = host or "127.0.0.1"
    self.port = port or 8080
    self.debug = debug or False
    self.server = server or "flask"
    self.server_command = ""

    data = dict(self.__dict__)
    VERBOSE(data, label="Server parameters")

    if server == "tornado":
        try:
            import tornado  # noqa: F401 -- availability check only
        except Exception as e:
            print(e)
            # fixed message typo: "not install" -> "not installed"
            Console.error(
                "tornado not installed. Please use `pip install tornado`")
            sys.exit(1)
        if self.debug:
            Console.error("Tornado does not support --verbose")
            sys.exit(1)

    Console.ok(self.directory)
def test_info(self):
    """Read config["info"] and compare it with the expected sample values."""
    HEADING()
    config = Config()
    Benchmark.Start()
    info = config["info"]
    VERBOSE(info)
    expected = {
        'filename': 'None',
        'name': 'sample',
        'version': '0.1',
        'kind': 'launcher',
        'service': 'local',
    }
    assert info == expected
    Benchmark.Stop()
def modify_cluster(self, cluster_id, new_pass):
    """Change the master user password of a cluster.

    :param cluster_id: the ClusterIdentifier to modify
    :param new_pass: the new MasterUserPassword
    :return: result of self.update_status with the client response or
        an error description
    """
    VERBOSE("in modify")
    results = {}
    try:
        results['Clusters'] = "Error modifying cluster password"
        results = self.client.modify_cluster(ClusterIdentifier=cluster_id,
                                             MasterUserPassword=new_pass)
        return self.update_status(results=results, name=cluster_id,
                                  status="Modifying password")
    except ClientError as e:
        # e.response is a dict; concatenating it to a str raised a
        # TypeError in the original — stringify it first
        results['Clusters'] = results['Clusters'] + ' : ' + str(e.response)
        return self.update_status(results=results, name=cluster_id,
                                  status="Error changing password")
def test_provider_vm_create(self):
    """Create a vm and verify the provider reports a valid boot status."""
    HEADING()
    name_generator.incr()
    Benchmark.Start()
    data = provider.create(key=key)
    Benchmark.Stop()
    VERBOSE(data)
    name = str(Name())
    status = provider.status(name=name)[0]
    print(f'status: {str(status)}')
    # clouds report different state vocabularies
    if cloud == 'oracle':
        allowed = ['STARTING', 'RUNNING', 'STOPPING', 'STOPPED']
    else:
        allowed = ['ACTIVE', 'BOOTING', 'TERMINATED', 'STOPPED']
    assert status["cm.status"] in allowed
def test_cpu(self):
    """Query the local openapi cpu endpoint and expect HTTP 200."""
    HEADING()
    url = "http://127.0.0.1:8080/cloudmesh/cpu"
    Benchmark.Start()
    # fixed: the original called https://api.github.com/user (which
    # returns 401 without credentials) and never used `url`
    result = requests.get(url)
    assert result.status_code == 200
    Benchmark.Stop()
    VERBOSE(result)
def test_provider_vm_stop(self):
    """Stop the vm and poll (up to 360s) until it reports stopped."""
    HEADING()
    name = str(Name())
    Benchmark.Start()
    data = provider.stop(name=name)
    Benchmark.Stop()
    stop_timeout = 360
    elapsed = 0  # renamed from `time` to avoid shadowing the module name
    while elapsed <= stop_timeout:
        sleep(5)
        elapsed += 5
        status = provider.status(name=name)[0]
        if status["cm.status"] in ['STOPPED', 'SHUTOFF']:
            break
    VERBOSE(data)
    print(status)
    assert status["cm.status"] in ['STOPPED', 'SHUTOFF']
def test_01_script(self):
    """Dry-run `cms vm script` and verify the echoed run message."""
    HEADING()
    Benchmark.Start()
    #
    # TODO: the location is a bug as we cannot assume the test is run
    # in '.'; also the sh command has been removed and should be
    # created in this test
    #
    command = ("cms vm script --name=test_boot_01 --username=ubuntu "
               "./test_cms_aws.sh --dryrun")
    result = Shell.execute(command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert "run script ./test_cms_aws.sh on vms: ['test_boot_01']" in result
def test_status(self):
    """Wait for the vm to finish booting, then check its status string."""
    HEADING(
        "please patiently wait for vm to boot and proceed with other tests")
    # poll until the vm leaves 'pending' so later tests can rely on it
    list_command = "cms vm list test_boot_01 --refresh"
    while 'pending' in Shell.execute(list_command, shell=True):
        time.sleep(1)
    Benchmark.Start()
    status_command = f"cms vm status test_boot_01 --cloud={CLOUD}"
    result = Shell.execute(status_command, shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert "{'test_boot_01': 'running'}" in result
def test_provider_vm_start(self):
    """Start the vm and poll (up to 360s) until it reports running."""
    HEADING()
    name = str(Name())
    Benchmark.Start()
    data = provider.start(name=name)
    Benchmark.Stop()
    start_timeout = 360
    elapsed = 0  # renamed from `time` to avoid shadowing the module name
    while elapsed <= start_timeout:
        sleep(5)
        elapsed += 5
        status = provider.status(name=name)[0]
        if status["cm.status"] in ['ACTIVE', 'BOOTING', 'RUNNING']:
            break
    VERBOSE(data)
    print(status)
    assert status["cm.status"] in ['ACTIVE', 'BOOTING', 'RUNNING']
def destroy(self, groupName=None, vmName=None):
    """Destroy an Azure virtual machine and return the group info.

    :param groupName: resource group name; defaults to self.GROUP_NAME
    :param vmName: virtual machine name; defaults to self.VM_NAME
    :return: the info dict of the resource group after deletion
    """
    if groupName is None:
        groupName = self.GROUP_NAME
    if vmName is None:
        vmName = self.VM_NAME
    # Delete VM
    # fixed: " ".join('Deleteing ...') on a plain string space-separated
    # every single character of the message; also fixed the typo
    VERBOSE("Deleting Azure Virtual Machine")
    async_vm_delete = self.compute_client.virtual_machines.delete(
        groupName, vmName)
    async_vm_delete.wait()
    return self.info(groupName)
def do_scriptrunner(self, args, arguments):
    """
    ::

      Usage:
            scriptrunner --file=FILE --bucket=BUCKET --upload=UPLOAD
            scriptrunner --bucket=BUCKET list

      This command does some useful things.

      Arguments:
          FILE   a file name
          BUCKET a bucket name
          UPLOAD TRUE

      Options:
          -f      specify the file
          -b      specify the s3 bucket name
    """
    # NOTE(review): this docstring is the command spec parsed at runtime
    # by the cloudmesh/docopt command framework — do not edit casually.
    map_parameters(arguments, 'upload')
    arguments.FILE = arguments['--file'] or None
    arguments.BUCKET = arguments['--bucket'] or None
    arguments.UPLOAD = arguments['--upload'] or None
    VERBOSE(arguments)
    m = Manager()  # NOTE(review): created but unused below — confirm
    if arguments.UPLOAD:
        print("option upload")
        gr = GlueRunner.GlueRunner(arguments.FILE, arguments.BUCKET)
        gr.upload()
    # if arguments.FILE:
    #    print("option a")
    #    m.list(path_expand(arguments.FILE))
    # elif arguments.list:
    #    print("option b")
    #    m.list("just calling list without parameter")
    return ""