def group_delete(cloud_name, group_name=None, group_id=None):
    if (group_id == None and group_name == None) or (group_id and group_name):
        util.message("Must specify group_id or group_name", "error")
        return
    return

def server_init():
    """Initialize all of the server state."""
    # if options.save_file:
    #     load_game(options.save_file)
    #     SS.map_rand.seed(SS.random_seed)
    #     SS.rand.seed(SS.random_seed)
    #
    # else:
    if True:
        # We need to generate a random seed using the default-seeded random
        # number generator, and then save that to seed the game's generators.
        # This will allow us to use the same seed to duplicate games.
        SS.random_seed = str(random.randrange(sys.maxint))
        SS.map_rand.seed(SS.random_seed)
        SS.rand.seed(SS.random_seed)

    SS.u = Player(0, 0, 'wizard', fov_radius=cfg.TORCH_RADIUS)
    SS.dlevel = 1
    SS.dlevel_dict['doom'] = []
    SS.map = Map(cfg.MAP_W, cfg.MAP_H, layout='connected_rooms')
    SS.dlevel_dict['doom'].append(SS.map)
    print SS.map.upstairs
    SS.u.move_to(SS.map.upstairs)
    SS.game_id = str(uuid.uuid1())

    SS.u.set_fov_map(SS.map.grid)
    SS.u.fov_map.do_fov(SS.u.x, SS.u.y, SS.u.fov_radius)

    requesthandler.attach_request_actions()

    message('Welcome, {0}!'.format("Whatever"), CLR['gold'])

    # Send the client any initial response data
    server_tick()

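# A small, self-contained illustration of the seed-duplication idea described
# in the comments above: generate a seed with the default generator, save it,
# and reseed two generators with it to get identical sequences, so a game can
# be replayed exactly. The randrange bound is an assumption for illustration;
# the original uses sys.maxint.
import random

saved_seed = str(random.randrange(2 ** 31))
rng_a = random.Random(saved_seed)
rng_b = random.Random(saved_seed)
assert [rng_a.random() for _ in range(3)] == [rng_b.random() for _ in range(3)]
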
def group_list_aws(region, cloud_keys, group_name=None):
    conn = cloud.get_aws_connection('ec2', region, cloud_keys)
    if conn == None:
        return ([])

    import boto3
    try:
        response = conn.describe_security_groups()
    except Exception as e:
        util.message(str(e), "error")
        return ([])

    gl = []
    for sg in response['SecurityGroups']:
        try:
            dict = {}
            if group_name:
                if sg['GroupName'] == group_name:
                    pass
                else:
                    continue
            dict['name'] = sg['GroupName']
            dict['id'] = sg['GroupId']
            for ipp in sg['IpPermissions']:
                for r in ipp['IpRanges']:
                    dict['cidr'] = str(r['CidrIp'])
                    break
                dict['port'] = str(ipp['FromPort']) + ":" + str(ipp['ToPort'])
                break
            gl.append(dict)
        except Exception as e:
            continue

    return (gl)

def shell_cmd(cloud_name, machine_id, cmd):
    from pssh.clients import ParallelSSHClient

    ##util.message("# " + str(cmd), "info")
    aa, bb, cc, dd, describe, ff, gg = read(cloud_name, machine_id)
    if describe == None:
        util.message("Node " + str(machine_id) + " not found", "error")
        return

    key_name = describe['key_name']
    host = describe['public_ip']
    hosts = host.split()

    username, pkey = key.read(key_name)
    if username == None:
        util.message("key file not found", "error")
        return

    ##util.message("host=" + str(hosts) + ", user="******", pkey=" + str(pkey), "info")

    client = ParallelSSHClient(hosts, user=username, pkey=pkey)
    output = client.run_command(cmd, use_pty=True, read_timeout=3600)
    for host_out in output:
        try:
            for line in host_out.stdout:
                print(line)
        except:
            time.sleep(3)
            continue

    return

def solve(self, ns=10, return_eigenvectors=True):
    """
    Solve for the low-energy eigenstates of a spin system with a given
    z component of spin.

    Parameters
    ----------
    ns : int
        Number of states.
    return_eigenvectors : boolean
        Return the eigenvectors along with the eigenvalues.
    """
    # What are we trying to deal with?
    systemsize = (util.choose(self.n, self.nu) *
                  util.choose(self.n, self.nd) * 80)

    # What resources do we have?
    svmem = psutil.virtual_memory()
    mcap = np.floor(svmem.free / systemsize)
    cap = 2 * mcap if self.humo.real else mcap

    # Construct the best solution to the problem
    functional = False
    if cap < 2:
        print('Error. Not enough free virtual memory for '
              'exact diagonalization of this system')
        sys.exit('Not enough memory')
    if cap < ns:
        # Solve
        util.message('cap<ns', verbal)
        ns = max(cap - 2, 1)  # builtin max; np.max would treat the 1 as an axis
        functional = True
    if cap < 40:
        util.message('cap40', verbal)
        functional = True

    self.lita, H = SzState.hamiltonian(self.humo, self.nu + self.nd,
                                       self.nu, functional=functional)

    if functional:
        # functional definition
        Nu, Nd = self.lita.Ns
        HTu, HTd, HU = H
        H = slinalg.LinearOperator(
            (Nu * Nd, Nu * Nd),
            dtype=HTu.dtype,
            matvec=lambda v: lanczos.Hv(HTu, HTd, HU, v))

    self.w, self.v = diagonalize(H,
                                 k=min(ns, np.prod(self.lita.Ns) - 1),
                                 return_eigenvectors=return_eigenvectors)
    return self.w, self.v

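# Minimal, self-contained sketch of the matrix-free pattern used above: wrap a
# matvec in scipy.sparse.linalg.LinearOperator and hand it to an iterative
# eigensolver. This stands in for SzState.hamiltonian/diagonalize, which are
# not shown here; the diagonal "Hamiltonian" is purely illustrative.
import numpy as np
from scipy.sparse.linalg import LinearOperator, eigsh

n = 200
diag = np.arange(n, dtype=float)                         # stand-in spectrum
H_op = LinearOperator((n, n), dtype=float, matvec=lambda v: diag * v)
w, v = eigsh(H_op, k=3, which='SA')                      # 3 lowest eigenpairs, matrix-free
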
async def delete(self, name):
    purge = self.get_query_argument('purge', False)
    if not isinstance(purge, bool) and purge.lower() not in ('true', 'false'):
        self.set_status(400)
        self.write(error(400, 'Unrecognized `purge` value: %s' % purge))
        return
    purge = purge and purge.lower() == 'true'
    self.logger.debug('Purge?: %s' % purge)

    status, msg, err = await self.__marathon.delete_container(name, 'cephfs')
    if status == 404:
        self.logger.info(
            'The MDS container for `%s` has already been deleted' % name)
    if status != 200 and status != 404:
        self.set_status(status)
        self.write(err)
        return

    msg = message(status, 'Filesystem `%s` has been deleted' % name)
    if purge:
        status, msg, err = await self.__ceph.clean_fs(name)
        while status != 200:
            status, msg, err = await self.__ceph.clean_fs(name)
            await sleep(1)
        msg = message(status, 'Filesystem `%s` has been purged' % name)

    self.set_status(status)
    self.write(msg if status == 200 else err)

def waitfor(cloud_name, machine_id, new_state, interval=5, max_tries=12):
    util.message(
        "waitfor '" + str(new_state) + "' up to " +
        str(interval * max_tries) + "s", "info")

    provider, xxx, region, default_ssh_key, cloud_keys = cloud.read(
        cloud_name, True)

    kount = 0
    while kount < max_tries:
        svr, name, size, state, location, private_ip, \
            public_ip, key_name, vcpus, volumes \
            = get_describe_data(provider, machine_id, region, cloud_keys)

        if (state == new_state) or (state == "active"):
            util.message("  " + new_state, "info")
            return (new_state)

        if state == "error":
            util.message(state, "error")
            return ("error")

        util.message("  " + state, "info")
        kount = kount + 1
        time.sleep(interval)

    util.message("max tries exceeded", "error")
    return ("error")

def update(name, username, pem_file):
    if not os.path.isfile(pem_file):
        util.message("WARNING: pem_file not found", "info")

    sql = "UPDATE keys SET username = %s, pem_file = %s, updated_utc = %s \n" + \
          "  WHERE name = %s"
    rc = cloud.exec_sql(sql, [username, pem_file, util.sysdate(), name])
    return (rc)

def cast_heal(item, x=None, y=None):
    """Cast the Heal spell."""
    if SS.u.hp == SS.u.max_hp:
        message('You are already at full health.', CLR['light_violet'])
        return 'cancelled'

    message('Your wounds start to feel better!', CLR['light_violet'])
    SS.u.heal(cfg.HEAL_AMOUNT)
    return 'success'

def get_provider(cloud_name):
    data = read(cloud_name, True)
    if data == None:
        util.message("not found", "error")
        return None

    provider = str(data[0])
    return (provider)

def destroy(cloud_name, machine_ids=None, machine_name=None):
    if machine_ids == None and machine_name == None:
        util.message("machine_ids or machine_name required", "error")
        return

    if machine_ids == None:
        machine_ids = get_machine_ids(cloud_name, machine_name)

    return (action(cloud_name, machine_ids, "destroy"))

def get_provider_constant(p_provider):
    pp = p_provider.upper()
    if pp in ('AWS', 'EC2'):
        return (Provider.EC2)
    elif pp in ('OPENRDS', 'OPENSTACK'):
        return (Provider.OPENSTACK)
    else:
        util.message("provider not supported", "error")
        return (None)

def exec_sql_list(sql):
    try:
        rs = dba.execute(sql)
        data = []
        for r in rs:
            data.append(r)
    except Exception as e:
        util.message(str(e), "error")
        return []

    return (data)

def cast_confuse(item, x=None, y=None):
    """Cast the Confuse spell."""
    if x == None and y == None:
        message('Left-click an enemy to confuse it, or right-click to '
                'cancel.', CLR['light_cyan'])
        SS.mode = cfg.ST_TARGETING
        SS.targeting_function.append(finish_confuse)
        SS.targeting_item = item
        return 'targeting'
    else:
        finish_confuse(item, x, y)

def solve(self, ns=10, cache=True, spin_extend=True):
    """
    Solve the system.

    Parameters
    ----------
    ns : int
        Number of states to return.
    cache : bool
        Reuse previously solved eigenstates when available.
    spin_extend : bool
        Whether to construct the spin-degenerate (spin-extended) states.
    """
    # remember the cache
    if cache and ns < len(self.eigenstates):
        if not spin_extend or self.eigenstates[0]['d']:
            ds = max([s['d'] for s in self.eigenstates])
            util.message((
                'Cached {0} - ne={1} charge state - lowest {2} states - ' +
                'Sz={3} to {4}').format(
                    self.humo.__class__.__name__, self.ne, ns,
                    (np.ceil(self.ne / 2) - np.floor(self.ne / 2)) / 2,
                    (np.ceil(self.ne / 2) - np.floor(self.ne / 2)) / 2 +
                    ds / 2), verbal)  # verbal
            return

    nu = int(np.ceil(self.ne / 2))
    nd = int(np.floor(self.ne / 2))

    sz0 = SzState(self.humo, nu, nd)  # lowest sz spin state
    w, v = sz0.solve(ns=ns)  # get the <ns> lowest states
    ns = len(w)  # update ns

    states = [{
        'E': w[i],
        'V': [v[:, i]],
        'd': False,
        'n': [(nu, nd)]
    } for i in range(ns)]

    ds = 0
    if spin_extend is True:
        # construct the spin-degenerate states
        (states, ds) = ChargeState.spin_extend(states, nu, nd, self.humo.n)

    self.eigenstates = states

    util.message(
        ('Solved {0} - ne={1} charge state - lowest {2} states - ' +
         'Sz={3} to {4}').format(self.humo.__class__.__name__, self.ne, ns,
                                 (nu - nd) / 2,
                                 (nu - nd) / 2 + ds), verbal)  # verbal

def use(self, item):
    """Use an item."""
    if item.use_function is None:
        message('The ' + item.name + ' cannot be used.')
    else:
        use_result = item.use_function(item)
        if use_result != 'cancelled' and use_result != 'targeting':
            # Destroy after use, but only if it was actually used.
            self.inventory.remove(item)
            del Object.obj_dict[item.oid]
            self.dirty = True
        return use_result

def insert(name, username, pem_file):
    now = util.sysdate()

    if not os.path.isfile(pem_file):
        util.message("WARNING: pem_file not found", "info")

    sql = "INSERT INTO keys (name, username, pem_file, \n" + \
          "  created_utc, updated_utc) VALUES (%s, %s, %s, %s, %s)"
    rc = cloud.exec_sql(sql, [name, username, pem_file, now, now])
    return (rc)

def list_sizes(cloud_name):
    driver = cloud.get_cloud_driver(cloud_name)
    if driver == None:
        return

    try:
        sizes = driver.list_sizes()
    except Exception as e:
        util.message(str(e), 'error')
        return

    headers = [
        'Family', 'Size', 'RAM (MB)', 'Disk (GB)', 'Bandwidth',
        'Price (USD/Mo)'
    ]
    keys = ['family', 'size', 'ram', 'disk', 'bandwidth', 'price']

    jsonList = []
    for size in sizes:
        if size.disk == 0:
            continue

        sz = size.name
        sz_split = sz.split(".")
        if len(sz_split) < 2:
            family = ""
            szz = size.name
        else:
            family = sz_split[0]
            szz = sz_split[1]

        sizeDict = {}
        sizeDict['family'] = family
        sizeDict['size'] = szz
        sizeDict['ram'] = str(size.ram)
        sizeDict['disk'] = str(size.disk)

        if size.price == 0.0:
            sizeDict['price'] = ""
        else:
            sizeDict['price'] = str(round(size.price * 720))

        if size.bandwidth == None:
            sizeDict['bandwidth'] = ""
        else:
            sizeDict['bandwidth'] = str(size.bandwidth)

        jsonList.append(sizeDict)

    util.print_list(headers, keys, jsonList)
    return

def take_turn(self):
    """
    Move in a random direction if still confused.
    If no longer confused, revert to the previous AI.
    """
    if self.num_turns > 0:
        self.owner.move_randomly()
        self.num_turns -= 1
    else:
        self.owner.ai = self.old_ai
        message('The ' + self.owner.name + ' is no longer confused!',
                CLR['red'])

def exec_sql(sql, in_vars, commit=True):
    try:
        rs = dba.execute(sql, in_vars)

        sql_type_list = sql.split()
        sql_type = sql_type_list[0].upper()

        if sql_type == "SELECT":
            for r in rs:
                return r
    except Exception as e:
        util.message(str(e), "error")
        return None

    return None

def get_openstack_connection(region, cloud_keys):
    import openstack

    try:
        load_dotenv(dotenv_path=cloud_keys)
        openstack.enable_logging(debug=False)
        conn = openstack.connect(load_envvars=True)
    except Exception as e:
        util.message(str(e), "error")
        return (None)

    return (conn)

def attack(self, target):
    """Attack target with wielded weapon."""
    damage = self.atk_power - target.defense
    if damage > 0:
        message(self.name.capitalize() + ' attacks ' + target.name +
                ' for ' + str(damage) + ' hit points.')
        target.take_damage(damage)
    else:
        message(self.name.capitalize() + ' attacks ' + target.name +
                ' but it has no effect!')
    self.dirty = True

def describe_openstack(machine_id, region, l_cloud_keys):
    conn = cloud.get_openstack_connection(region, l_cloud_keys)
    if conn == None:
        return ('', '', '', '', '', '', '', '', '', '')

    import openstack

    for s in conn.list_servers():
        if s.id == machine_id:
            ##print(json.dumps(s, indent=2))
            try:
                volume = s.volumes[0].id
            except:
                volume = ""

            ##print(str(s["addresses"]["public-net"][0]["addr"]))
            try:
                public_ip = s["addresses"]["public-net"][0]["addr"]
            except:
                public_ip = s.public_v4

            if s.vm_state == "active":
                vm_state = "running"
            elif s.vm_state == "building":
                vm_state = "pending"
            else:
                vm_state = s.vm_state

            svr = {}
            svr['name'] = s.name
            svr['state'] = vm_state
            svr['region'] = s.region
            svr['location'] = s.location.zone
            svr['private_v4'] = s.private_v4
            svr['public_v4'] = public_ip
            svr['key_name'] = s.key_name
            svr['created_at'] = s.created
            svr['launched_at'] = s.launched_at
            svr['locked'] = s.locked
            svr['size'] = s.flavor.original_name
            svr['vcpus'] = s.flavor.vcpus
            svr['ram'] = s.flavor.ram
            svr['disk'] = s.flavor.disk
            svr['volumes'] = volume

            return (svr, s.name, s.flavor.original_name, vm_state, s.region,
                    s.private_v4, public_ip, s.key_name, s.flavor.vcpus,
                    volume)

    util.message(
        "not found in machine.describe_openstack() for " + str(machine_id),
        "error")
    return ('', '', '', '', '', '', '', '', '', '')

def get_image(driver, cloud_name, platform='amd'):
    util.message("getting default image", "info")

    provider, xxx, region, default_ssh_key, cloud_keys = cloud.read(
        cloud_name, True)

    sql = "SELECT image_id, image_type FROM images \n" + \
          "  WHERE provider = %s AND region = %s AND platform = %s AND is_default = 1"
    data = cloud.exec_sql(sql, [provider, region, platform])
    if data == None or data == []:
        util.message("Image not known for (" + str(cloud_name) + ", " +
                     str(region) + ", " + str(platform) + ")", "error")
        return (None, None)

    image_id = str(data[0])
    image_type = str(data[1])

    if provider == 'aws':
        images = driver.list_images(ex_image_ids=image_id.split())
    else:
        images = driver.list_images()

    for i in images:
        if i.id == image_id:
            util.message("image_id - " + image_type + " : " + image_id, "info")
            return (i, image_type)

    util.message("Cannot locate image '" + str(image_id) + "'", "error")
    return (None, None)

def smrf(afa_file, pdb_file, options, pmrf_path):
    edge_file, mrf_file = options.edge_file, options.mrf_file

    pmrf_exec = '%s/pmrf' % pmrf_path
    if not os.path.exists(pmrf_exec):
        print 'Cannot find the PMRF executable in the directory %s.' % (pmrf_path)
        sys.exit(1)

    ## Determine graph structure
    edge_list = build_edge(afa_file, pdb_file)
    write_file('\n'.join(['%s\t%s' % (i, j) for i, j in edge_list]), edge_file)
    message('MRF edge is determined.')

    ## Build MRF model
    cmd = '%s build %s --edge %s -o %s' % (pmrf_exec, afa_file, edge_file, mrf_file)
    subprocess.check_call(cmd.split())
    message('MRF model is parameterized.')

    ## Estimate positional coevolution
    cmd = '%s stat %s --mode pos' % (pmrf_exec, mrf_file)
    fp = sys.stdout if options.score_file1 == "stdout" else open(options.score_file1, 'w')
    p = subprocess.Popen(shlex.split(cmd), stdout=fp)
    p.wait()
    message('Positional coevolution scores are estimated.')

    ## Estimate pairwise coevolution
    if options.score_file2:
        cmd = '%s stat %s --mode pair' % (pmrf_exec, mrf_file)
        p = subprocess.Popen(shlex.split(cmd), stdout=open(options.score_file2, 'w'))
        p.wait()
        message('Pairwise coevolution scores are estimated.')

def get_size(provider, flavor):
    sql = "SELECT size FROM flavors WHERE provider = %s AND flavor = %s"
    data = cloud.exec_sql(sql, [provider, flavor])

    size = flavor
    if data == None or data == []:
        pass
    else:
        size = str(data[0])
        util.message(
            "translating flavor " + str(flavor) + " to size " + str(size),
            "info")

    return (size)

def cast_fireball(item, x=None, y=None):
    """
    Begin the casting of a fireball spell. Ask the player to target a cell.
    """
    if x == None and y == None:
        message('Left-click a target for the fireball, or right-click to '
                'cancel.', CLR['light_cyan'])
        SS.mode = cfg.ST_TARGETING
        SS.targeting_function.append(finish_fireball)
        SS.targeting_item = item
        return 'targeting'
    else:
        finish_fireball(item, x, y)

def cast_lightning(item, x=None, y=None):
    """Cast the Lightning spell."""
    target = closest_monster(5)
    if target is None:
        target = SS.u
        message('A lightning bolt arcs out from you and then returns to '
                'strike you in the head!', CLR['light_blue'])
    else:
        message('A lightning bolt strikes the ' + target.name +
                ' with a loud thunder!', CLR['light_blue'])
    target.take_damage(cfg.LIGHTNING_DAMAGE)
    return 'success'

def describe_aws(machine_id, region, cloud_keys):
    conn = cloud.get_aws_connection('ec2', region, cloud_keys)
    if conn == None:
        return ('', '', '', '', '', '', '', '', '', '')

    try:
        import boto3
        response = conn.describe_instances(InstanceIds=machine_id.split())
    except Exception as e:
        util.message(
            "not found in machine.describe_aws() for " + str(machine_id),
            "error")
        return ('', '', '', '', '', '', '', '', '', '')

    ##util.message("jmespath.search()", "info")
    s = jmespath.search(
        "Reservations[].Instances[].[InstanceId, InstanceType, State.Name, \
        Placement.AvailabilityZone, PrivateIpAddress, PublicIpAddress, KeyName, \
        [Tags[?Key=='Name'].Value] [0][0], CpuOptions.CoreCount, \
        BlockDeviceMappings[].Ebs[].VolumeId[] ] | [0]", response)

    flavor = s[1]
    state = s[2]
    loct = s[3]
    priv_ip = s[4]
    pub_ip = s[5]
    key_nm = s[6]
    name = s[7]
    vcpus = s[8]
    volumes = s[9]

    svr = {}
    svr['name'] = s[7]
    svr['state'] = s[2]
    svr['region'] = "?"
    svr['location'] = s[3]
    svr['private_v4'] = s[4]
    svr['public_v4'] = s[5]
    svr['key_name'] = s[6]
    svr['created_at'] = "?"
    svr['launched_at'] = "?"
    svr['locked'] = "?"
    svr['size'] = "?"
    svr['vcpus'] = s[8]
    svr['ram'] = "0"
    svr['disk'] = "0"
    svr['volumes'] = s[9]

    return (svr, name, flavor, state, loct, priv_ip, pub_ip, key_nm, vcpus,
            volumes)

def create(provider, name=None, region=None, keys=None, default_ssh_key=None):
    if name == None:
        name = provider

    lc_provider = get_provider_constant(provider)
    if lc_provider == None:
        util.message("invalid provider", "error")
        return

    if region == None:
        util.message("region must be specified", "error")
        return

    if keys == None:
        util.message("env file must be specified as key", "error")
        return

    if not os.path.isfile(keys):
        util.message("invalid env file specified as key", "error")
        return

    sql = "INSERT INTO clouds (name, provider, region, keys, default_ssh_key, " + \
          "  created_utc, updated_utc) \n" + \
          "VALUES (%s, %s, %s, %s, %s, %s, %s)"
    now = util.sysdate()
    exec_sql(sql, [name, provider, region, keys, default_ssh_key, now, now])
    return

def pick_up(oids):
    """Try to pick up one or more items at the player's feet."""
    item_here = False
    for oid in oids:
        item = Object.obj_dict[oid]
        if item in SS.map.grid[SS.u.x][SS.u.y].items:
            SS.u.pick_up(item)
            item_here = True

    if item_here:
        SS.u_took_turn = True
    else:
        SS.u_took_turn = False
        message('Nothing to pick up!')

def post(self):
    username = request.json.get('username', None)
    password = request.json.get('password', None)

    if not username:
        return message("Missing username parameter"), 400
    if not password:
        return message("Missing password parameter"), 400

    user = authenticate(username, password)
    if user:
        accessToken = create_access_token(identity=username)
        return {'access_token': accessToken}, 200
    else:
        return message('Invalid Username or Password'), 401

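# Hedged client-side sketch for the login handler above. The base URL and the
# '/login' path are assumptions for illustration; the JSON field names match
# the handler (username, password, access_token).
import requests

def get_token(base_url, username, password):
    # POST credentials as JSON; return the JWT access token on success.
    resp = requests.post(base_url + '/login',
                         json={'username': username, 'password': password})
    if resp.status_code == 200:
        return resp.json()['access_token']
    return None

# token = get_token('http://localhost:5000', 'alice', 'secret')
# headers = {'Authorization': 'Bearer %s' % token}  # typical JWT usage
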
def finish_fireball(item, x=None, y=None):
    """
    Finish the casting of a fireball spell after a cell has been selected.
    Return whether or not the fireball was cast.
    """
    if x is None:
        return False

    message('The fireball explodes, burning everything within ' +
            str(cfg.FIREBALL_RADIUS) + ' spaces!', CLR['orange'])

    for mon in SS.map.monsters + [SS.u]:
        if mon.distance(x, y) <= cfg.FIREBALL_RADIUS:
            message('The ' + mon.name + ' gets burned for ' +
                    str(cfg.FIREBALL_DAMAGE) + ' hit points.', CLR['orange'])
            mon.take_damage(cfg.FIREBALL_DAMAGE)

    return True

def streamsearch(ofile, text, max_pages=10, results_per_page=100):
    """Stream the results of searching for 'text' to the 'ofile' output file

    Args:
      ofile             str, the name of a file where we will write any tweets
                        we find. Tweets are written in JSON format, with every
                        tweet being stored in a separate line as a Python dict.
      text              str, the text to search for in Twitter. This can be a
                        plain text string or a '#hashtag' to look for tweets of
                        this topic only.
      max_pages         int, maximum number of result 'pages' to obtain from
                        Twitter's backlog of archived tweets. When not
                        specified, default to 10 pages.
      results_per_page  int, maximum number of results per page to fetch from
                        Twitter's backlog of archived tweets. When not
                        specified, default to 100 tweets per page.

    Returns:
      None
    """
    # Load the id of already seen tweets, if there are any.
    ofilename = ofile or 'standard output'
    seen = ofile and preload_tweets(ofile) or set()
    if seen:
        message('%d tweets preloaded from %s', len(seen), ofilename)

    try:
        ostream = ofile and file(ofile, 'a+') or sys.stdout
        for matches in search(text, max_pages=max_pages,
                              results_per_page=results_per_page):
            newmatches = 0
            for tweet in matches:
                (tid, tuser, text) = (tweet['id'], tweet['from_user'],
                                      tweet['text'])
                if not tid in seen:
                    newmatches += 1
                    seen.add(tid)
                    print >> ostream, json.dumps(tweet)
            if newmatches > 0:
                message('%d new tweets logged at %s', newmatches, ofilename)
        ostream.close()
    except IOError, e:
        if ostream and ostream != sys.stdout:
            ostream.close()
        warning('Error writing at file "%s". %s', ofilename, e)

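# Self-contained sketch of the "one JSON record per line, skip already-seen
# ids" pattern that streamsearch() relies on (preload_tweets/search are not
# shown here). The file name and record fields are illustrative only.
import json

def append_new_records(path, records, seen_ids):
    # Append records whose 'id' has not been written before; return how many.
    written = 0
    with open(path, 'a') as out:
        for rec in records:
            if rec['id'] not in seen_ids:
                seen_ids.add(rec['id'])
                out.write(json.dumps(rec) + '\n')
                written += 1
    return written

# seen = set()
# append_new_records('tweets.jsonl', [{'id': 1, 'text': 'hi'}], seen)
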
def save_game():
    """Save a game to a file."""
    save_file = SS.game_id + '.save'
    with open(save_file, 'w') as f:
        for obj in SIMPLE_SAVE_OBJS:
            f.write('{0} = {1}\n'.format(obj, repr(eval(obj))))

        f.write('map = [')
        for x in xrange(SS.map.w):
            f.write('[')
            for y in xrange(SS.map.h):
                f.write("{{'n': {0}, 'e': {1}}}, ".format(
                    repr(SS.map.grid[x][y].name),
                    repr(SS.map.grid[x][y].explored)))
            f.write('],')
        f.write(']\n')

        # For references to Objects, just save the oid
        f.write('monsters = ' + repr([mon.oid for mon in SS.map.monsters]) +
                '\n')
        f.write('items = ' + repr([item.oid for item in SS.map.items]) + '\n')

        # Save all monsters in existence.
        f.write('monster_defs = [')
        for oid, obj in Object.obj_dict.iteritems():
            if oid != SS.u.oid and obj.__class__.__name__ == 'Monster':
                f.write(repr(obj.serialize()) + ',')
        f.write(']\n')

        # Save all items in existence.
        f.write('item_defs = [')
        for oid, obj in Object.obj_dict.iteritems():
            if obj.__class__.__name__ == 'Item':
                f.write(repr(obj.serialize()) + ',')
        f.write(']\n')

        # Save the player's state.
        f.write('u = ' + repr(SS.u.serialize()) + '\n')

    message('Saved game to {0}.'.format(save_file))

def finish_confuse(item, x, y):
    """Finish casting the Confuse spell."""
    # FIXME: should be able to target myself
    target = None
    for mon in SS.map.monsters:
        if (mon.x == x and mon.y == y and
                mon.distance(SS.u.x, SS.u.y) <= cfg.CONFUSE_RANGE):
            target = mon
            break

    if target is None:
        return False

    # Replace the monster's AI with a confused one
    old_ai = target.ai
    target.ai = ai.ConfusedAI(old_ai)
    # Tell the new component who owns it
    target.ai.owner = target
    message('The eyes of the ' + target.name +
            ' look vacant, as he starts to stumble around!',
            CLR['light_green'])
    return True

def action(showtitle=None):
    # Get the name
    if showtitle is not None:
        searchName = showtitle
    else:
        keyboard = xbmc.Keyboard("", "Search for...", False)
        keyboard.doModal()
        if not keyboard.isConfirmed():
            return
        searchName = keyboard.getText()

    # Get indexer to use
    indexers = ["All Indexers", "theTVDB", "TVRage"]
    dialog = xbmcgui.Dialog()
    indexerIndex = dialog.select("Indexer to Search", indexers)
    if indexerIndex == -1:
        return

    # Do the search
    matches = util.api.doSearch(searchName, indexerIndex)
    if not matches:
        util.message("Search Results", "No matching shows found.")
        return

    matchList = []
    for show in matches:
        if not indexerIndex == 0:
            if not show["indexer"] == indexerIndex:
                continue
            indexer = ""
        else:
            if "tvdbid" in show:
                indexer = "theTVDB"
            elif "tvrageid" in show:
                indexer = "TVRage"
            else:
                indexer = "Unknown"
        matchList.append(show["name"] + " - " +
                         util.formatDate(show["first_aired"]) + " " + indexer)

    dialog = xbmcgui.Dialog()
    matchIndex = dialog.select("Search Results", matchList)
    if matchIndex == -1:
        return
    show = matches[matchIndex]

    # Get the parent directory
    rootDirs = util.api.getRootDirs()
    if not rootDirs:
        util.message("Error", "There are no root directories defined!")
        return

    dirs = []
    for rootDir in rootDirs:
        dirs.append(("* " if rootDir["default"] == 1 else "") +
                    rootDir["location"])

    dialog = xbmcgui.Dialog()
    dirIndex = dialog.select("Parent Folder", dirs)
    if dirIndex == -1:
        return
    location = rootDirs[dirIndex]["location"]

    # Get show options
    defaults = util.api.getDefaults()

    # ... initial status
    statuses = ["wanted", "skipped", "archived", "ignored"]
    statusList = []
    for status in statuses:
        statusList.append(("* " if status == defaults["status"] else "") +
                          status.title())

    dialog = xbmcgui.Dialog()
    statusIndex = dialog.select("Initial Episode Status", statusList)
    if statusIndex == -1:
        return
    status = statuses[statusIndex]

    # ... flatten folders
    # no way for user to cancel this (looks like No/False)!
    flattenFolders = dialog.yesno(
        "Flatten Folders", "Flatten season folders?",
        "Default: " + "Yes" if defaults["flatten_folders"] else "No")

    # ... anime
    anime = dialog.yesno("Anime", "Is this show an Anime?")

    # ... scene numbering
    sceneNumbered = dialog.yesno("Scene Numbering",
                                 "Is this show scene numbered?")

    # ... quality
    qualities = [
        "sdtv|sddvd",
        "hdtv|hdwebdl|hdbluray",
        "fullhdtv|fullhdwebdl|fullhdbluray",
        "hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray",
        "sdtv|sddvd|hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray|unknown",
    ]
    qualityList = ["SD", "HD720p", "HD1080p", "HD", "Any"]

    defaultQuality = "|".join(defaults["initial"])
    try:
        qualityList[qualities.index(defaultQuality)] = \
            "* " + qualityList[qualities.index(defaultQuality)]
    except:
        pass

    dialog = xbmcgui.Dialog()
    qualityIndex = dialog.select("Quality", qualityList)
    if qualityIndex == -1:
        return
    quality = qualities[qualityIndex]

    # Add the show!
    if "tvdbid" in show:
        indexerid = show["tvdbid"]
        indexer = "tvdb"
    elif "tvrageid" in show:
        indexerid = show["tvrageid"]
        indexer = "tvrage"
    else:
        indexerid = None

    result = util.api.doAddNewShow(indexerid, indexer, location, status,
                                   flattenFolders, anime, sceneNumbered,
                                   quality)
    if result["result"] == "success":
        util.message("Add Show", "The show has been added.",
                     "It may take a moment before it appears in the list.")
        xbmc.executebuiltin("Container.Refresh")

def die_leave_corpse(mon):
    """Kill monster and transform it into a corpse."""
    message(mon.name.capitalize() + ' dies!', CLR['red'])
    mon.delete()
    corpse = Item(mon.x, mon.y, 'corpse', prev_monster=mon)
    corpse.place_on_map()

def pick_up(self, item):
    """Pick up an item."""
    self.inventory.append(item)
    item.delete()
    message('You picked up a ' + item.name + '.', CLR['green'])
    self.dirty = True

def use(self, item):
    """Use an item."""
    if item.use_function is None:
        message('The ' + item.name + ' cannot be used.')

def drop(self, item):
    """Drop an item."""
    item = Object.obj_dict[item]
    Monster.drop(self, item)
    message('You dropped the ' + item.name + '.')

def player_death(mon):
    """Take care of everything that happens when the player dies."""
    message(mon.name.capitalize() + ' dies!', CLR['red'])
    mon.delete()
    corpse = Item(mon.x, mon.y, 'corpse', prev_monster=mon)
    corpse.place_on_map()

def action():
    # Get the name
    keyboard = xbmc.Keyboard('', 'Search for...', False)
    keyboard.doModal()
    if not keyboard.isConfirmed():
        return
    searchName = keyboard.getText()

    # Get indexer to use
    indexers = ['All Indexers', 'theTVDB', 'TVRage']
    dialog = xbmcgui.Dialog()
    indexerIndex = dialog.select('Indexer to Search', indexers)
    if indexerIndex == -1:
        return

    # Do the search
    matches = util.api.doSearch(searchName, indexerIndex)
    if not matches:
        util.message('Search Results', 'No matching shows found.')
        return

    matchList = []
    for show in matches:
        if not indexerIndex == 0:
            if not show['indexer'] == indexerIndex:
                continue
            indexer = ''
        else:
            if 'tvdbid' in show:
                indexer = 'theTVDB'
            elif 'tvrageid' in show:
                indexer = 'TVRage'
            else:
                indexer = 'Unknown'
        matchList.append(show['name'] + ' - ' +
                         util.formatDate(show['first_aired']) + ' ' + indexer)

    dialog = xbmcgui.Dialog()
    matchIndex = dialog.select('Search Results', matchList)
    if matchIndex == -1:
        return
    show = matches[matchIndex]

    # Get the parent directory
    rootDirs = util.api.getRootDirs()
    if not rootDirs:
        util.message('Error', 'There are no root directories defined!')
        return

    dirs = []
    for rootDir in rootDirs:
        dirs.append(('* ' if rootDir['default'] == 1 else '') +
                    rootDir['location'])

    dialog = xbmcgui.Dialog()
    dirIndex = dialog.select('Parent Folder', dirs)
    if dirIndex == -1:
        return
    location = rootDirs[dirIndex]['location']

    # Get show options
    defaults = util.api.getDefaults()

    # ... initial status
    statuses = ['wanted', 'skipped', 'archived', 'ignored']
    statusList = []
    for status in statuses:
        statusList.append(('* ' if status == defaults['status'] else '') +
                          status.title())

    dialog = xbmcgui.Dialog()
    statusIndex = dialog.select('Initial Episode Status', statusList)
    if statusIndex == -1:
        return
    status = statuses[statusIndex]

    # ... flatten folders
    # no way for user to cancel this (looks like No/False)!
    flattenFolders = dialog.yesno(
        'Flatten Folders', 'Flatten season folders?',
        'Default: ' + 'Yes' if defaults['flatten_folders'] else 'No')

    # ... anime
    anime = dialog.yesno('Anime', 'Is this show an Anime?')

    # ... scene numbering
    sceneNumbered = dialog.yesno('Scene Numbering',
                                 'Is this show scene numbered?')

    # ... quality
    qualities = [
        'sdtv|sddvd',
        'hdtv|hdwebdl|hdbluray',
        'fullhdtv|fullhdwebdl|fullhdbluray',
        'hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray',
        'sdtv|sddvd|hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray|unknown'
    ]
    qualityList = ['SD', 'HD720p', 'HD1080p', 'HD', 'Any']

    defaultQuality = '|'.join(defaults['initial'])
    try:
        qualityList[qualities.index(defaultQuality)] = \
            '* ' + qualityList[qualities.index(defaultQuality)]
    except:
        pass

    dialog = xbmcgui.Dialog()
    qualityIndex = dialog.select('Quality', qualityList)
    if qualityIndex == -1:
        return
    quality = qualities[qualityIndex]

    # Add the show!
    if 'tvdbid' in show:
        indexerid = show['tvdbid']
        indexer = 'tvdb'
    elif 'tvrageid' in show:
        indexerid = show['tvrageid']
        indexer = 'tvrage'
    else:
        indexerid = None

    result = util.api.doAddNewShow(indexerid, indexer, location, status,
                                   flattenFolders, anime, sceneNumbered,
                                   quality)
    if result['result'] == 'success':
        util.message('Add Show', 'The show has been added.',
                     'It may take a moment before it appears in the list.')
        xbmc.executebuiltin('Container.Refresh')