def do_create(args, client, sl_storage, configuration):
    """Create a new cluster from a score file.

    Reads the score YAML, merges in command-line ``key=value`` parameters,
    validates that all required parameters were supplied, records state in
    sl_storage, and provisions the cluster.  On provisioning failure the
    partially-created state container is cleaned up when possible.

    :param args: docopt-style argument dict ('-v', '<clustername>',
        '<score.yaml>', '<key=value>')
    :param client: API client handed through to provision()
    :param sl_storage: state storage backend
    :param configuration: configuration handed through to provision()
    """
    if args['-v']:
        DebugLevel.set_level('verbose')
    else:
        DebugLevel.set_level('progress')
    containername = args['<clustername>']
    if args['<clustername>'] in clusters(sl_storage):
        error('cluster {} already exists'.format(args['<clustername>']))
    # Context manager closes the score file promptly (the original leaked
    # the handle until GC).
    with open(args['<score.yaml>'], 'r') as scorefile:
        scoretext = scorefile.read()
    # safe_load: a score file should never need arbitrary-object YAML tags,
    # and plain yaml.load on user-supplied files can execute code.
    score = yaml.safe_load(scoretext)
    score['clustername'] = args['<clustername>']
    dirname = os.path.dirname(args['<score.yaml>'])
    if dirname == "":
        dirname = "."
    score['path'] = dirname + "/"
    # setup environment for scripts in score to run properly. Change to
    # the score directory and add . to the path
    os.chdir(score['path'])
    os.environ['PATH'] = ':'.join([os.environ['PATH'], './'])
    if 'parameters' in score:
        parmvalues = score['parameters']
    else:
        parmvalues = {}
    parameters = args['<key=value>']
    for param in parameters:
        # split on the first '=' only so values may themselves contain '='
        splits = param.split('=', 1)
        if len(splits) != 2:
            raise Exception("{} is not a key=value pair".format(param))
        parmvalues[splits[0]] = splits[1]
    score['parameters'] = parmvalues
    scoretext = yaml.dump(score, indent=4)
    msg = validate_provision_parms_passed(scoretext, parmvalues)
    debug(msg)
    if msg:
        error(msg)
    state_container_create(sl_storage, containername)
    try:
        # save score for later operations
        save_state(sl_storage, containername, 'score', scoretext)
        provision(args['<clustername>'], containername, score, configuration,
                  client, sl_storage)
    except Exception as e:
        # "except Exception as e" is valid on Python 2.6+ and Python 3,
        # unlike the old comma form.
        debug(traceback.format_exc())
        resources = get_resources(sl_storage, containername)
        del resources['score']
        if deletable(resources):
            state_container_clean(sl_storage, containername)
        # str(e) replaces the Python-2-only e.message attribute.
        error(str(e))
def do_create(args, client, sl_storage, configuration):
    """Create a new cluster from a score file.

    Reads the score YAML, merges in command-line ``key=value`` parameters,
    validates that all required parameters were supplied, records state in
    sl_storage, and provisions the cluster.  On provisioning failure the
    partially-created state container is cleaned up when possible.

    :param args: docopt-style argument dict ('-v', '<clustername>',
        '<score.yaml>', '<key=value>')
    :param client: API client handed through to provision()
    :param sl_storage: state storage backend
    :param configuration: configuration handed through to provision()
    """
    if args['-v']:
        DebugLevel.set_level('verbose')
    else:
        DebugLevel.set_level('progress')
    containername = args['<clustername>']
    if args['<clustername>'] in clusters(sl_storage):
        error('cluster {} already exists'.format(args['<clustername>']))
    # Context manager closes the score file promptly (the original leaked
    # the handle until GC).
    with open(args['<score.yaml>'], 'r') as scorefile:
        scoretext = scorefile.read()
    # safe_load: a score file should never need arbitrary-object YAML tags,
    # and plain yaml.load on user-supplied files can execute code.
    score = yaml.safe_load(scoretext)
    score['clustername'] = args['<clustername>']
    dirname = os.path.dirname(args['<score.yaml>'])
    if dirname == "":
        dirname = "."
    score['path'] = dirname + "/"
    # setup environment for scripts in score to run properly. Change to
    # the score directory and add . to the path
    os.chdir(score['path'])
    os.environ['PATH'] = ':'.join([os.environ['PATH'], './'])
    if 'parameters' in score:
        parmvalues = score['parameters']
    else:
        parmvalues = {}
    parameters = args['<key=value>']
    for param in parameters:
        # split on the first '=' only so values may themselves contain '='
        splits = param.split('=', 1)
        if len(splits) != 2:
            raise Exception("{} is not a key=value pair".format(param))
        parmvalues[splits[0]] = splits[1]
    score['parameters'] = parmvalues
    scoretext = yaml.dump(score, indent=4)
    msg = validate_provision_parms_passed(scoretext, parmvalues)
    debug(msg)
    if msg:
        error(msg)
    state_container_create(sl_storage, containername)
    try:
        # save score for later operations
        save_state(sl_storage, containername, 'score', scoretext)
        provision(args['<clustername>'], containername, score, configuration,
                  client, sl_storage)
    except Exception as e:
        # "except Exception as e" is valid on Python 2.6+ and Python 3,
        # unlike the old comma form.
        debug(traceback.format_exc())
        resources = get_resources(sl_storage, containername)
        del resources['score']
        if deletable(resources):
            state_container_clean(sl_storage, containername)
        # str(e) replaces the Python-2-only e.message attribute.
        error(str(e))
def vm_provision(self, tasks=None):
    """Run this VM's 'provision' configurations.

    When ``tasks`` is given, configurations are tried in order and the
    first one that provisions without raising wins (best effort).
    Without ``tasks``, every configuration is applied.

    :param tasks: optional task selection passed through to
        provision.provision()
    """
    confs = self._get_provision_confs('provision')
    # TODO: specified tasks with more provisions
    for conf in confs:
        if tasks:
            try:
                provision.provision(self, conf, tasks)
                break
            except Exception:
                # Best effort: a failing configuration is skipped and the
                # next one is tried.  (The unused "as e" binding of the
                # original is dropped.)
                pass
        else:
            provision.provision(self, conf, tasks)
def vm_initial_setup(self, tasks=None):
    """Run initial-setup ('init') provisioning for this VM.

    On failure the VM is marked VMINIT_FAIL, its state is persisted, and
    the original exception is re-raised with its traceback intact.

    :param tasks: optional task selection passed through to
        provision.provision()
    """
    confs = self._get_provision_confs('init')
    if not confs:
        log.info("No initial setup for %s." % self.name_pp)
        return
    log.info("Running initial setup for %s:" % self.name_pp)
    for conf in confs:
        try:
            provision.provision(self, conf, tasks, user='******')
        except Exception:
            self.vm_init = const.VMINIT_FAIL
            self._save_data()
            # Bare raise preserves the original traceback ("raise e" reset
            # it), and avoids the Python-2-only "except Exception, e" form.
            raise
def run_sim(dat, route_step, n_hop, max_con, avg_con, n_sat, perimiter_only,
            length, year):
    """Propagate satellite orbits from TLEs, provision links at every time
    step, and dump per-routing-step link/distance tables to a results dir.

    :param dat: name of this run; used as the results sub-directory
    :param route_step: routing time-step interval passed to provision()
    :param n_hop: neighbourhood hop count
    :param max_con: maximum connections per node
    :param avg_con: average connections per node
    :param n_sat: number of satellites (selects the TLE file)
    :param perimiter_only: unused here; kept for interface compatibility
    :param length: simulation length (provision gets length + 1)
    :param year: epoch year passed to propagate()
    """
    current_file_path = Path(__file__).resolve()
    code_source_path = str(current_file_path.parents[0])
    TLE_path = code_source_path + f'/TLEs/{n_sat}.txt'
    # Create directory for sim results.  exist_ok replaces the former bare
    # "except: pass", which also hid real errors (permissions, missing
    # parents); makedirs additionally creates missing parents.
    results_path = f'{code_source_path}/results/{dat}'
    os.makedirs(results_path, exist_ok=True)
    print('________ '
          + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
          + ' ________')
    # NOTE(review): data_name is not defined in this function — presumably a
    # module-level global; confirm (perhaps `dat` was intended).
    print(data_name)
    # Propagate orbits
    pos_table = propagate(TLE_path, length, year)
    # Provision at every time step. Returns results from routing time steps
    (distable_linkdicts, all_xyz_rsteps) = provision(
        pos_table, route_step, nbr_hop=n_hop, max_conn=max_con,
        avg_conn=avg_con, length=length + 1)
    # print links and distance table for each routing time step
    for i in range(len(distable_linkdicts)):
        print_txt(distable_linkdicts, all_xyz_rsteps, i, results_path)
    return
def run_sim(dat, route_step, n_hop, max_con, avg_con, n_sat, perimiter_only,
            length, year):
    """Propagate satellite orbits from TLEs, provision links at every time
    step, and dump the final routing step's link/distance table.

    Routing and CSV export were present in an earlier revision but are
    currently disabled (see version history).

    :param dat: name of this run; used as the results sub-directory
    :param route_step: routing time-step interval passed to provision()
    :param n_hop: neighbourhood hop count
    :param max_con: maximum connections per node
    :param avg_con: average connections per node
    :param n_sat: number of satellites (selects the TLE file)
    :param perimiter_only: unused while routing is disabled
    :param length: simulation length (provision gets length + 1)
    :param year: epoch year passed to propagate()
    """
    current_file_path = Path(__file__).resolve()
    code_source_path = str(current_file_path.parents[0])
    TLE_path = code_source_path + f'/TLE/{n_sat}.txt'
    # Create directory for sim results.  exist_ok replaces the former bare
    # "except: pass", which also hid real errors; makedirs creates parents.
    results_path = f'{code_source_path}/dial_results/{dat}'
    os.makedirs(results_path, exist_ok=True)
    print('________ '
          + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
          + ' ________')
    # NOTE(review): data_name is not defined in this function — presumably a
    # module-level global; confirm (perhaps `dat` was intended).
    print(data_name)
    # Propagate orbits
    pos_table = propagate(TLE_path, length, year)
    # Provision at every time step. Returns results from routing time steps
    (distable_linkdicts, all_xyz_rsteps) = provision(
        pos_table, route_step, nbr_hop=n_hop, max_conn=max_con,
        avg_conn=avg_con, length=length + 1)
    # print links and distance table of last time step
    print_txt(distable_linkdicts, all_xyz_rsteps, -1, results_path)
    return
def run_sim(dat, n_hop, max_con, avg_con, n_sat, perimiter_only):
    """Run a single-snapshot simulation on randomly placed nodes: provision
    links, dump link/distance tables, route, and write routing stats to CSV.

    :param dat: name of this run; used as the results sub-directory
    :param n_hop: neighbourhood hop count
    :param max_con: maximum connections per node
    :param avg_con: average connections per node
    :param n_sat: number of nodes to place
    :param perimiter_only: passed to route() as its ``border`` argument
    """
    current_file_path = Path(__file__).resolve()
    code_source_path = str(current_file_path.parents[0])
    # Create directory for sim results.  exist_ok replaces the former bare
    # "except: pass", which also hid real errors; makedirs creates parents.
    results_path = f'{code_source_path}/results/{dat}'
    data_path = f'{results_path}/{dat}_data.csv'
    os.makedirs(results_path, exist_ok=True)
    print('________ '
          + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
          + ' ________')
    # NOTE(review): data_name is not defined in this function — presumably a
    # module-level global; confirm (perhaps `dat` was intended).
    print(data_name)
    # initialize node positions uniformly in the [-50, 50] square
    pos_table = [[random.uniform(-1, 1) * 50, random.uniform(-1, 1) * 50]
                 for _ in range(n_sat)]
    # Provision at every time step. Returns results from routing time steps
    distable_linkdicts = provision(pos_table, nbr_hop=n_hop,
                                   max_conn=max_con, avg_conn=avg_con)
    # print links and distance table
    print_txt(distable_linkdicts, pos_table, results_path)
    # Perform routing
    route_data = route(n_node=n_sat, nbrhood_hop=n_hop,
                       border=perimiter_only,
                       linkdict=distable_linkdicts[1],
                       distable=distable_linkdicts[0],
                       results_path=results_path)
    # Write routing results to csv
    np_data = pd.DataFrame(route_data).T  # initialize pd dataframe
    np_data.columns = ['n_node', 'n_hop', 'total', 'bad', 'loop', 'no_path']
    np_data.to_csv(data_path, index=False)
    return
def upgrade_provision(samba3, setup_dir, message, credentials, session_info,
                      smbconf, targetdir):
    """Upgrade a Samba3 installation to a new provision.

    Derives domain/realm/role settings from the old smb.conf and secrets
    database, runs provision(), then imports WINS data (group/user/registry
    import remains FIXME).

    :param samba3: Samba3 state object (conf, secrets, WINS, SAM, ...)
    :param setup_dir: provision setup directory
    :param message: progress-message callback
    :param credentials: credentials for provision()
    :param session_info: session info for provision()
    :param smbconf: path to the target smb.conf
    :param targetdir: target directory for the new provision
    """
    oldconf = samba3.get_conf()
    # Derive the server role from the old configuration.
    if oldconf.get("domain logons") == "True":
        serverrole = "domain controller"
    elif oldconf.get("security") == "user":
        serverrole = "standalone"
    else:
        serverrole = "member server"
    domainname = oldconf.get("workgroup")
    if domainname:
        domainname = str(domainname)
    realm = oldconf.get("realm")
    netbiosname = oldconf.get("netbios name")
    secrets_db = samba3.get_secrets_db()
    # Fall back to the secrets database when smb.conf lacks a workgroup.
    if domainname is None:
        domainname = secrets_db.domains()[0]
        message("No domain specified in smb.conf file, assuming '%s'"
                % domainname)
    if realm is None:
        realm = domainname.lower()
        message("No realm specified in smb.conf file, assuming '%s'\n"
                % realm)
    domainguid = secrets_db.get_domain_guid(domainname)
    domainsid = secrets_db.get_sid(domainname)
    if domainsid is None:
        message("Can't find domain secrets for '%s'; using random SID\n"
                % domainname)
    if netbiosname is not None:
        machinepass = secrets_db.get_machine_password(netbiosname)
    else:
        machinepass = None
    result = provision(setup_dir=setup_dir, message=message,
                       samdb_fill=FILL_DRS, smbconf=smbconf,
                       session_info=session_info, credentials=credentials,
                       realm=realm, domain=domainname, domainsid=domainsid,
                       domainguid=domainguid, machinepass=machinepass,
                       serverrole=serverrole, targetdir=targetdir)
    import_wins(Ldb(result.paths.winsdb), samba3.get_wins_db())
    # FIXME: import_registry(registry.Registry(), samba3.get_registry())
    # FIXME: import_idmap(samdb,samba3.get_idmap_db(),domaindn)
    groupdb = samba3.get_groupmapping_db()
    for sid in groupdb.groupsids():
        (gid, sid_name_use, nt_name, comment) = groupdb.get_group(sid)
        # FIXME: import_sam_group(samdb, sid, gid, sid_name_use, nt_name,
        #        comment, domaindn)
    # FIXME: Aliases
    passdb = samba3.get_sam_db()
    for name in passdb:
        user = passdb[name]
        # FIXME: import_sam_account(result.samdb, user, domaindn, domainsid)
    if hasattr(passdb, 'ldap_url'):
        # NOTE(review): the original literal was broken across source lines;
        # reconstructed with a trailing \n to match the other messages.
        message("Enabling Samba3 LDAP mappings for SAM database\n")
        enable_samba3sam(result.samdb, passdb.ldap_url)
import provision
from subprocess import call
import time
import boto3
import sys

ec2 = boto3.client('ec2')

# Provision id from the command line, zero-padded to 10 digits.
provisionId = sys.argv[1].zfill(10)
print('provisionId: ', provisionId)

# Provision exactly once: the original called provision.provision() twice
# back-to-back, which risks requesting the instance twice (harmless only if
# provision() is get-or-create — confirm before relying on that).
instance = provision.provision(provisionId, 'us-east-1a')
print(instance)
print("public_dns_name: ", instance['public_dns_name'])

# print("Wait until running.")
# ec2.wait_until_running(Filters=[{'Name': 'instance-id', 'Values': instance['instance']['InstanceId']}])
# # print("Running!")

# Record the instance's public DNS name for downstream tooling.
with open('instance.dns', 'w') as f:
    f.write(str(instance['public_dns_name']))
def test_provision_success(
    template_bucket,
    permission_table,
    permission_table_name,
    state_table_empty,
    state_table_name,
    regional_table,
    regional_table_name,
    cloudformation,
):
    """Happy path: provision() returns a stackset_id whose stack set is
    ACTIVE and has exactly one instance in the requested account/region.

    All parameters are pytest fixtures supplying mocked AWS resources.
    """
    # The handler reads its configuration from environment variables.
    os.environ["project_name"] = "test_quail"
    os.environ["dynamodb_regional_metadata_table_name"] = regional_table_name
    os.environ["dynamodb_permissions_table_name"] = permission_table_name
    os.environ["dynamodb_state_table_name"] = state_table_name
    os.environ["cfn_data_bucket"] = template_bucket.bucket_name
    # tag_config: "$email" is substituted from the request; the second tag is
    # a constant.  NOTE(review): "fixed-vale" looks like a typo for
    # "fixed-value" — confirm nothing downstream asserts on it before fixing.
    os.environ["tag_config"] = json.dumps([{
        "tag-name": "variable-tag",
        "tag-value": "$email"
    }, {
        "tag-name": "fixed-tag",
        "tag-value": "fixed-vale"
    }])
    # Minimal API-Gateway-shaped event: empty JWT authorizer plus the
    # provisioning request payload under "Input".
    event = {
        "requestContext": {
            "authorizer": {
                "jwt": {}
            }
        },
        "Input": {
            "account": "fake_account",
            "region": "eu-west-1",
            "instance_name": "The Best Instance",
            "instance_type": "t3.micro",
            "operating_system": "AWS Linux 2",
            # Expiry one day in the future keeps the request valid.
            "expiry": (datetime.now(tz=timezone.utc) +
                       timedelta(days=1)).isoformat(),
            "email": "*****@*****.**",
            "group": "private",
            "username": "******",
            "user": {
                "email": "*****@*****.**",
                "profile": "private",
                "nickname": "alice",
                "custom:is_superuser": "******",
            },
        },
    }
    response = provision(event, context=AttrDict({"aws_request_id": "123456"}))
    # Verify provisioning results
    assert "stackset_id" in response
    stackset_id = response["stackset_id"]
    stackset = cloudformation.describe_stack_set(StackSetName=stackset_id)
    assert stackset["StackSet"]["Status"] == "ACTIVE"
    # Exactly one stack instance, in the account/region from the request.
    stack_instances = cloudformation.list_stack_instances(
        StackSetName=stackset_id)["Summaries"]
    assert len(stack_instances) == 1
    assert stack_instances[0]["Region"] == "eu-west-1"
    assert stack_instances[0]["Account"] == "fake_account"