Example 1
    def layout(self):
        self._rmtemp()

        itemlist = []
        for itemfile in file_list('items'):
            itemfile = os.path.basename(itemfile)
            itemlist.append(Item(load_yaml('items',itemfile)))
        inventory_rect = pygame.Rect(self.rect.x,self.rect.y, self.rect.w - 523, self.rect.h)
        pack = ContainerDialog(
            inventory_rect,
            self.frontend,
            'Pack:',
            layer=self._layer +1,
            items=[Item(i) for i in self.char.get('inventory/pack',[])],
            onclose=self.update_inventory,
            onselect=self.equip_item,
            onselect_parms=[],
            can_add=False,
            can_remove=False,
            addfrom=itemlist
            )
        self._addtemp('Inventory_pack_dialog', pack)
        image_x = self.rect.w - 522
        image_y = 10
        self.image.blit(self.frontend.imagecache['inventory_background.png'], (image_x, image_y))
        rects = load_yaml('images','gui_rects.yaml')
        debug(self.char())
        portrait = self.frontend.imagecache[self.char.get('personal/portrait')]
        portrait = pygame.transform.smoothscale(portrait, (256,256))
        prect = rects['inventory_portrait']
        self.image.blit(portrait,(image_x + prect['x'],image_y + prect['y']))
        image_x += self.rect.x
        image_y += self.rect.y
        for itemtuple in self.char.inventory_generator(['equiped']):
            debug(itemtuple)
            item, slot = itemtuple[1], itemtuple[2]
            irect = rects[slot]
            irect = pygame.Rect(image_x + irect['x'], image_y + irect['y'], irect['w'], irect['h'])
            debug(slot, irect)
            sprite = ButtonSprite(
                self.frontend.tilemaps,
                irect,
                self.frontend.eventstack,
                onclick=self.unequip_item,
                onclick_params=[slot],
                animations=item.getsubtree('animations'),
                layer=self._layer + 2,
                fps=5,
                mouseover=item.displayname(),
                frontend=self.frontend,
                sendself=True
                )
            sprite.setanimation('view')
            self._addtemp(make_hash(), sprite)
Example 2
 def load_formation(self, form, form_attrs):
     """Create the actors for a formation [form] with the offset (x, y)"""
     for item_dict in util.load_yaml("formations", form)["items"]:
         kind = item_dict.keys()[0]
         attrs = item_dict[kind]
         formation_maker = getattr(formations, kind)
         new_actors = formation_maker(self, form_attrs["x"], form_attrs["y"], attrs)
         if form_attrs.has_key("path"):
             for a in new_actors:
                 a.path = util.path.PathNetwork(util.load_yaml("paths", form_attrs["path"]), a.get_path_origin)
                 a.path_progress = a.path.starting_progress()
Example 3
    def set_environment(self, env):
        if env not in self.region_config[SUPPORTED_ENVS]:
            raise ChinaContextError("environment '"+env+"' is not supported in region '" + self.region + "', supported_envs in region config ("+self.region_config_file+"), must be one of: "+str(self.region_config[SUPPORTED_ENVS]))

        self.environment = env

        # Load the environment configuration, using the region configuration for variable substitution
        self.default_environment = util.load_yaml(self.blueprints_dir + "/envs/env.default.yml", self.region_config)
        env_yml = self.blueprints_dir + "/envs/env." + env + ".yml"

        self.specific_environment = {}
        if os.path.isfile(env_yml):
            self.specific_environment = util.load_yaml(env_yml, self.region_config)
        self.specific_environment['name'] = env
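The comments in this example describe util.load_yaml as taking a second mapping (here the region configuration, or os.environ in Example 9) that is used for variable substitution before parsing. The helper itself is not part of this listing; a minimal sketch of that two-argument form, assuming $-style placeholders and PyYAML, could look like the following (other examples call load_yaml with a category and file name instead, so this only illustrates the form used here):

import yaml
from string import Template

def load_yaml(path, variables=None):
    # Hypothetical sketch: read the file, substitute $-style placeholders
    # from the given mapping, then parse the result as YAML.
    with open(path) as handle:
        text = handle.read()
    if variables:
        text = Template(text).safe_substitute(variables)
    return yaml.safe_load(text)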
Example 4
File: unit.py Project: Tout/china
    def __init__(self, ctx, unit_name, role_name=None, num_instances=1):
        self.region_context = ctx
        self.unit_name = unit_name
        if role_name is None:
            self.role_name = unit_name
        else:
            self.role_name = role_name
        self.num_instances = int(num_instances)

        self.env_name = ctx.specific_environment['name']
        self.env_group_name = util.env_prefix(ctx) + self.env_name
        self.group_name = self.env_group_name + "-" + unit_name

        self.context = dict(ctx.region_config.items() + ctx.default_environment.items() + ctx.specific_environment.items())
        # print "raw context is "+pformat(self.context)
        self.context['unit_name'] = unit_name
        self.context['env_name'] = self.env_name
        self.context['env_group_name'] = self.env_group_name
        self.unit_yml_dir = ctx.blueprints_dir + "/units/" + unit_name
        self.yml = self.unit_yml_dir + "/unit.yml"
        self.config = util.load_yaml(self.yml, self.context)
        print "loaded "+unit_name+" yml: "+pformat(self.config)
        if 'override_region_context' in self.config:
            for key, value in self.config['override_region_context'].iteritems():
                self.region_context.region_config[key] = value
        print("config====")
        pprint(self.config)
Example 5
 def weapons(self):
     """
     >>> char = Character({})
     >>> char.weapons[0].displayname()
     'Fist'
     >>> halberd = char.acquire_item(Item(load_yaml('items','halberd.yaml')))
     >>> item = Item(char.get('inventory/pack')[0])
     >>> char.equip_item(item)
     (True, '[ ] has equiped Halberd')
     >>> char.weapons[0].displayname()
     'Halberd'
     >>> len(char.weapons)
     1
     """
     equipedweapons = self.equiped_by_type('weapon')
     if not equipedweapons:
         messages.warning('No weapons equipped - equipping fist')
         fist = Item(load_yaml('items', 'ff7b801f5dfa6ad84870ca1ce2d43c74685d9ddbfcdc488e2ad66caa.yaml'))
         fist = self.acquire_item(fist)
         self.equip_item(fist)
         equipedweapons = self.equiped_by_type('weapon')
     if equipedweapons and equipedweapons[0].get('slot', "") == 'twohand':
         return [equipedweapons[0]]
     debug('Equipped weapons', equipedweapons)
     return equipedweapons
Example 6
    def loadgame(self, slot):
        self.setsavedir(slot)
        self.player = Player(load_yaml('player','player.yaml'))
        debug(self.player.get_hash())

        self.journal = Journal(json.loads(open(file_path('journal', 'journal.yaml')).read()))
        self._rmtemp()
Example 7
def attack_roll(player, target, attack_modifiers, custom_tohit):
    custom_tohit = custom_tohit or 0
    if custom_tohit:
        frontend.campaign.message('Applying custom to-hit modifier of %s' % custom_tohit)
    if player.is_casting:
        player.interrupt_cast()
        frontend.campaign.message('%s is casting. Cast will be interrupted if you attack %s' % (player.displayname(), target.displayname()))
    attack_mods = load_yaml('adnd2e', 'attack_mods')
    total_modifier = custom_tohit
    for mod in attack_modifiers:
        total_modifier += int(attack_mods[mod])
        frontend.campaign.message('Applying modifier %s: %s' % (mod, attack_mods[mod]))
    range_modifier = 0
    if range_modifier:
        frontend.campaign.message('Applying range modifier %s' % range_modifier)
        total_modifier += range_modifier
    weaponmod = player.to_hit_mod()
    frontend.campaign.message('Applying weapon modifier %s' % weaponmod)
    total_modifier += weaponmod
    frontend.campaign.message('Total modifier: %s<br><br>' % total_modifier)
    frontend.campaign.message('%s has a defense modifier of %s and armor class %s' % (target.displayname(), target.def_mod(), target.armor_class()))
    target_roll = int(player.thac0 - target.armor_class() - target.def_mod())
    target_roll = target_roll - total_modifier
    if target_roll <= 0:
        frontend.campaign.error('%s is guaranteed to hit %s - no need to roll' % (player.displayname(), target.displayname()))
    else:
        frontend.campaign.error('%s needs to roll %s to hit %s' % (player.displayname(), target_roll, target.displayname()))
    return target_roll
Example 8
def get_registered_accounts_from_yml():
    """
    Parses yaml data and returns dictionary with registered users
    :return: dictionary with registered users
    """
    data = load_yaml('registered_users.yaml')
    return data
Example 9
    def set_region(self, region):
        if region not in VALID_REGIONS:
            raise ChinaContextError("invalid region: " + region + ", valid regions are: " + str(VALID_REGIONS.keys()))
        self.region = region
        self.region_fullname = VALID_REGIONS[region]

        # Load the region configuration, using the system environment for variable substitution
        self.default_region_config = util.load_yaml(self.blueprints_dir + "/regions/region.default.yml", os.environ)
        self.region_config_file = self.blueprints_dir + "/regions/region." + self.region_fullname + ".yml"
        self.specific_region_config = util.load_yaml(self.region_config_file, os.environ)

        # Merge the specific region config onto the defaults, then let os.environ override anything
        self.region_config = dict(self.default_region_config.items() + self.specific_region_config.items() + os.environ.items())

        # Now that we've loaded the region config, check if the environment is supported there
        if SUPPORTED_ENVS not in self.region_config:
            raise ChinaContextError("region '" + self.region + "' has no supported_envs!")
Example 10
 def xp_worth(self):
     xpkey = self.get('combat/level-hitdice', 1)
     xpvalues = load_yaml('adnd2e', 'creature_xp.objdata')
     if str(xpkey) in list(xpvalues.keys()):
         xp = xpvalues[str(xpkey)]
     elif int(xpkey) > 12:
         xp = 3000 + ((int(xpkey) - 13) * 1000)
     return int(xp)
Example 11
def main():
    args = parse_args()
    config = util.load_yaml(args.config)
    files = util.input_files(args.files)
    lang = language.get(args.lang, None)["TypeMap"]

    files.extend([f"{PRESTO_HOME}/{file}" for file in config.JavaClasses])

    classes = defaultdict(util.attrdict)
    depends = defaultdict(set)

    subclasses = {}
    for abstract_name, abstract_value in config.AbstractClasses.items():

        classes[abstract_name].class_name = abstract_name
        classes[abstract_name].field_name = member_name(abstract_name)
        classes[abstract_name].abstract = True
        classes[abstract_name].super_class = abstract_value.super
        if "comparable" in abstract_value:
            classes[abstract_name].comparable = True
        classes[abstract_name].subclasses = []

        for subclass in abstract_value.subclasses:
            subclasses[subclass.name] = util.attrdict(super=abstract_name,
                                                      key=subclass.key)

            classes[abstract_name].subclasses.append(
                util.attrdict(
                    type=subclass.name,
                    name=member_name(subclass.name),
                    key=subclass.key,
                ))
            classes[abstract_name].subclasses[-1]._N = len(
                classes[abstract_name].subclasses)

        classes[abstract_name].subclasses[-1]._last = True

        if "source" in abstract_value:
            file = abstract_value.source
            process_file(f"{PRESTO_HOME}/{file}", config, lang, subclasses,
                         classes, depends)
        else:
            classes[abstract_name].fields = []
            add_extra(abstract_name, abstract_name, config, lang, classes,
                      depends)

    for file in files:
        process_file(file, config, lang, subclasses, classes, depends)

    depends = list(topological({k: list(v) for k, v in depends.items()}))[::-1]

    comment = "// This file is generated DO NOT EDIT @" + "generated"
    result = [{"comment": comment}]
    result += [classes[name] for name in depends if name in classes]
    result += [classes[name] for name in config.AddToOutput]

    if args.json:
        print(util.to_json(result))
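This example relies on util.attrdict: the loaded config and the generated class records are read and written through attribute access (config.JavaClasses, classes[name].field_name = ...). The helper itself is not shown in the listing; a plausible minimal version, offered only as a sketch, would be:

class attrdict(dict):
    # Hypothetical sketch of util.attrdict: a dict whose keys can also be
    # read and assigned as attributes.
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value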
Example 12
def check_config(reporter, source_dir):
    """Check configuration file."""

    config_file = os.path.join(source_dir, '_config.yml')
    config = load_yaml(config_file)
    reporter.check_field(config_file, 'configuration', config, 'kind', 'lesson')
    reporter.check_field(config_file, 'configuration', config, 'carpentry', ('swc', 'dc'))
    reporter.check_field(config_file, 'configuration', config, 'title')
    reporter.check_field(config_file, 'configuration', config, 'email')
Example 14
 def thac0(self):
     if self.get('personal/race', '') == "creature":
         key = "creature"
     else:
         key = self.get('class/parent', '')
     thac0s = load_yaml("rules", "thac0.yaml")[key]
     for key2 in list(thac0s.keys()):
         if inrange(self.get('combat/level-hitdice', 1), key2):
             return int(thac0s[key2])
Example 15
 def ppd_mod(self):
     """
     >>> char = Character(load_yaml('characters', 'bardic_rogue.yaml'))
     >>> isinstance(char.ppd_mod(), int)
     True
     """
     ability_mods = FlattenedDict(load_yaml('rules', 'ability_scores.yaml'))
     con = self.get('attributes/con', 1)
     return int(ability_mods['con/%s/ppd' % con])
Example 16
 def def_mod(self):
     """
     >>> char = Character(load_yaml('characters', 'bardic_rogue.yaml'))
     >>> isinstance(char.def_mod(), int)
     True
     """
     ability_mods = load_yaml('rules', 'ability_scores.yaml')
     dex = self.get('attributes/dex', 0)
     return int(FlattenedDict(ability_mods).get('/dex/%s/defense' % dex, 0))
Example 17
 def dmg_mod(self):
     """
     >>> char = Character(load_yaml('characters', 'bardic_rogue.yaml'))
     >>> isinstance(char.dmg_mod(), int)
     True
     """
     ability_mods = FlattenedDict(load_yaml('rules', 'ability_scores.yaml'))
     strength = self.get('attributes/str', 0)
     return int(ability_mods.get('/str/%s/dmg' % strength, 0))
Example 18
 def level_up(self):
     """
     >>> char = Character(load_yaml('characters', 'bardic_rogue.yaml'))
     >>> level = char.get('combat/level-hitdice', 1)
     >>> hp = char.get('combat/hitpoints', 1)
     >>> max = char.get('combat/max_hp', 1)
     >>> debug(char.level_up())
     >>> char.get('combat/hitpoints', 1) > hp
     True
     >>> char.get('combat/max_hp', 1) > max
     True
     >>> char.get('combat/level-hitdice', 1) == level + 1
     True
     """
     level = int(self.get('combat/level-hitdice', 1))
     level += 1
     out = '%s has reached level %s !' % (self.displayname(), level)
     self.put('combat/level-hitdice', level)
     ability_scores = load_yaml('rules', 'ability_scores.yaml')
     con = self.get('attributes/con', 1)
     out += '<br>Character constitution: %s' % con
     con_bonus = int(FlattenedDict(ability_scores).get('/con/%s/hit' % con,0))
     out += '<br>Constitution Bonus: %s' % con_bonus
     xp_levels = FlattenedDict(load_yaml('rules', 'xp_levels.yaml'))
     pclass = self.get('class/parent', '')
     xp_levels = xp_levels.readall(pclass)
     hitdice = str(xp_levels.get('%s/%s/hit_dice' % (pclass, level), 1))
     debug("Read hitdice as ", hitdice)
     if '+' not in hitdice:
         hitdice = hitdice + '+0'
     hitdice, bonus = hitdice.split('+')
     dice = int(xp_levels.get('%s/dice' % pclass, 1))
     more_hp, roll = rolldice(numdice=int(hitdice), numsides=dice, modifier=con_bonus)
     out += '<br>%s' % roll
     more_hp += int(bonus)
     current_max = int(self.get('combat/max_hp', 1))
     new_max = current_max + more_hp
     out += '<br>Maximum hitpoints increased by %s. Maximum hitpoints now: %s' % (more_hp, new_max)
     self.put('combat/max_hp', new_max)
     new_hp = new_max
     out += '<br>Character hitpoints now %s' % new_hp
     self.put('combat/hitpoints', new_hp)
     self.__init__(self())
     return out
Example 19
def get_build_config(path: str,
                     image_name: str) -> typing.Optional[ImageBuildConfig]:
    '''
    Returns an ImageBuildConfig object from the default buildfile
    located in the image directory.
    :param path: The path of the images directory.
    :param image_name: Name of the image; must exist as a directory.
    '''
    buildfile_path = get_buildfile_path(path, image_name)
    return ImageBuildConfig.from_dict(util.load_yaml(buildfile_path))
Example 20
 def __init__(self, frontend):
     self.frontend = frontend
     Tempsprites.__init__(self)
     self.homedir = os.path.join(os.getenv('HOME'), '.mirthless')
     self.player = Player({})
     self.cleanuplist = []
     template = load_yaml('rules','template_character.yaml')
     self.portrait = template['personal/portrait']
     self.journal = Journal()
     characters = []
Example 21
    def createchar(self):
        def value(key):
            return self.frontend.sprites[key].value

        self.player.put('personal/name/first', value('firstname'))
        self.player.put('personal/name/last', value('lastname'))
        self.player.put('personal/portrait', self.portrait)
        self.player.put('personal/sex', value('sex'))
        playerclass = value('playerclass') or choice(PLAYER_CLASSES) 
        playerclass = playerclass.split(':')
        debug(playerclass)
        self.player.put('class/parent', playerclass[0])
        self.player.put('class/class', playerclass[1])
        template = load_yaml('rules','template_character.yaml')
        for key in template:
            k = None
            if key.startswith('conditional/class.parent=%s/' %playerclass[0]):
                k = key.replace('conditional/class.parent=%s/' %playerclass[0],'')
            elif key.startswith('conditional/class.class=%s/' %playerclass[1]):
                k = key.replace('conditional/class.class=%s/' %playerclass[1],'')
            if k is not None and k != 'class.class':
                self.player.put(k, template[key])
            if key.startswith('inventory'):
                self.player.put(key, template[key])
            if key.startswith('__Yinventory'):
                k = key.replace('__Y', '')
                self.player.put(k, template[key])
        slot = str(len(os.listdir(self.homedir)))
        self.setsavedir(slot)
        armor = Item(load_yaml('items', 'ab7ed2a7e93bae020aeaab893902702fc0727b0079ecd3a14aa4a57c.yaml'))
        armor = self.player.acquire_item(armor)
        self.player.equip_item(armor)
        debug(self.player())
        firstmap = GameMap(load_yaml('maps', 'f1440bb0f6826a470c385218d19515ad937c1d7ab4ad3e0c71206238'))
        self.player.moveto(firstmap, 18, 1)
        firstmap.savetoslot('maps')
        animations = load_yaml('rules', 'default_sprites')
        for k,v  in animations[playerclass[0]].items():
            self.player.put(k, v)
        self.player.savetoslot()
        self.journal.write('Prison')
        self._rmtemp()
Example 22
 def num_attacks(self):
     atr_objdata = FlattenedDict(load_yaml('rules', 'various.yaml'))
     atr = atr_objdata['various/attacks_per_round']
     parentclass = self.get('class/parent', '')
     if parentclass not in atr:
         myatr = 1
     else:
         for key in list(atr[parentclass].keys()):
             if inrange(self.get('combat/level-hitdice', 1), key):
                 myatr = int(atr[parentclass][key])
     return self.num_weapons() * int(myatr)
Example 23
 def __init__(self, setting_path):
     self.setting_path = setting_path
     yaml = load_yaml(setting_path)
     self.settings = self.Settings(
         yaml["project"]["name"],
         pathlib.Path(yaml["project"]["resource_path"]).resolve(),
         yaml["project"]["kind"], yaml["project"]["domain"],
         yaml["project"]["processes"])
     self.project = self.Project(self.settings)
     self.procedure = self.Procedure(self.settings,
                                     self._validate_processes)
Example 24
    def load_old_params(self):
        """ Load parameters from *self.files_spec['previous_QNAS_params']* and replace
            *self.train_spec*, *self.QNAS_spec*, and *self.fn_dict* with the file values.
        """

        previous_params_file = load_yaml(self.files_spec['previous_QNAS_params'])

        self.train_spec = dict(previous_params_file['train'])
        self.QNAS_spec = dict(previous_params_file['QNAS'])
        self.QNAS_spec['params_ranges'] = eval(self.QNAS_spec['params_ranges'])
        self.fn_dict = previous_params_file['fn_dict']
Example 25
 def load(filepath):
     Settings.__DATA = {'variables': {}}
     Settings.__DATA.update(util.load_yaml(filepath))
     Settings.__VARIABLES_EXTENDED = dict(Settings.__DATA['variables'])
     root = os.path.abspath('.').replace('\\', '/')
     Settings.__VARIABLES_EXTENDED.update({
         'rootpath': root,
         'rootdir': root,
         'cflags': Settings.get('compiler').get('cflags', ''),
         'lflags': Settings.get('compiler').get('lflags', '')
     })
Example 26
 def load(self):
     self._rmtemp()
     itemlist = []
     for itemfile in file_list(self.dirname):
         itemfile = os.path.basename(itemfile)
         if self.dirname == 'items':
             itemlist.append(Item(load_yaml(self.dirname,itemfile)))
         else:
             itemlist.append(NPC(load_yaml(self.dirname,itemfile)))
     c = ContainerDialog(self.rect,
         self.frontend,
         'Load %s' %self.dirname,
         7,
         items=itemlist,
         onselect=self.loaditem,
         onselect_parms=[],
         animation='view',
         can_add=False,
         can_remove=False
         )
     self._addtemp(make_hash(), c)
Example 27
 def spell_success(self):
     ability_scores = load_yaml('rules', 'ability_scores.yaml')
     wis = str(self.get('attributes/wis', 0))
     failrate = int(ability_scores["wis/%s/spell_failure" %(wis)].split('%')[0])
     out = "Spell failure rate: %s percent" % failrate
     roll = rolldice(numdice=1, numsides=100)
     out += '<br>%s' % roll[1]
     if roll[0] > failrate:
         out += '<br>Spell succeeds !'
         return (True, out)
     else:
         out += '<br>Spell fails !'
         return(False, out)
Example 28
def check_config(reporter, source_dir):
    """Check configuration file."""

    config_file = os.path.join(source_dir, '_config.yml')
    config = load_yaml(config_file)
    reporter.check_field(config_file, 'configuration', config, 'kind', 'lesson')
    reporter.check_field(config_file, 'configuration', config, 'carpentry', ('swc', 'dc', 'lc'))
    reporter.check_field(config_file, 'configuration', config, 'title')
    reporter.check_field(config_file, 'configuration', config, 'email')

    reporter.check({'values': {'root': '..'}} in config.get('defaults', []),
                   'configuration',
                   '"root" not set to ".." in configuration')
Example 29
def check_config(reporter, filename):
    """
    Check YAML configuration file.
    """

    config = load_yaml(filename)

    kind = config.get('kind', None)
    reporter.check(kind == 'workshop', filename,
                   'Missing or unknown kind of event: {0}', kind)

    carpentry = config.get('carpentry', None)
    reporter.check(carpentry in ('swc', 'dc', 'lc', 'cp'), filename,
                   'Missing or unknown carpentry: {0}', carpentry)
Example 30
 def load(self,objtype):
     if objtype == 'npc':
         data = self.get('npc', False)
         if not data:
             return None
         npc = NPC(load_yaml('characters',data))
         if not resaved('characters', data):
             npc.set_hash()
             npc.savetoslot('characters')
             self.add('npc', npc.get_hash())
         return npc
     current = self.get('items', [])
     result = []
     itemlist = []
     for item in current:
         i = Item(load_yaml('items',item))
         if not resaved('items', item):
             i.set_hash()
             i.savetoslot('items')
         result.append(i)
         itemlist.append(i.get_hash())
     self.put('items', itemlist)
     return result
Example 31
 def learn_spell(self, spellitem):
     spells = self.get('inventory/spells', [])
     if isinstance(spells, str):
         try:
             spells = simpleobjdata.loads(spells)
         except:
             spells = []
     if not isinstance(spells, list):
         self.put('inventory/spells', [])
     spelltype = spellitem.get('spell_type', 'wizard spells')
     parentclass = self.get('class/parent', '')
     childclass = self.get('class/class', '')
     canlearn = load_yaml('rules', 'various.yaml')["spell progression"]
     found = False
     for key in canlearn:
         if key == parentclass or key == childclass:
             debug(key)
             found = True
             break
     if not found:
         return "%s cannot learn spells" % self.displayname()
     oneline = list(canlearn[key].keys())[0]
     if spelltype not in canlearn[key][oneline]:
         return "%s cannot learn %s, failed to learn spell %s" % (self.displayname(), spelltype, spellitem.displayname())
     intelect = str(self.get('attributes/int', 1))
     chance = FlattenedDict(load_yaml('rules', 'ability_scores.yaml'))
     chance = chance['/int/%s/spell_learn' % intelect]
     out = "<strong>%s has a %s chance to learn a new spell</strong>" % (self.displayname(), chance)
     chance = int(chance.replace('%', ''))
     roll = rolldice(numdice=1, numsides=100, modifier=0)
     out += '<br>%s' % roll[1]
     if roll[0] > chance:
         return '%s<br><strong>%s has failed to learn %s!</strong>' % (out, self.displayname(), spellitem.displayname())
     spellitem.identify()
     self()['core']['inventory']['spells'].append(spellitem())
     self.autosave()
     return "%s<br><strong>%s has learned %s</strong>" % (out, self.displayname(), spellitem.displayname())
Example 32
 def saving_throws(self):
     """
     >>> char = Character(load_yaml('characters', 'bardic_rogue.yaml'))
     >>> isinstance(char.saving_throws, dict)
     True
     """
     key = self.get('class/parent', '')
     sts = FlattenedDict(load_yaml("rules", "saving_throws.yaml"))
     sts = FlattenedDict(sts.getsubtree(key))
     hitdice = self.get('combat/level-hitdice', 0)
     for key2 in sts.subkeys():
         if inrange(hitdice, key2):
             st = sts.getsubtree(key2)
             st['ppd'] = int(st['ppd']) + self.ppd_mod()
             return(st)
Example 33
def attack(player, target, attack_modifiers, custom_tohit, custom_dmg):
    custom_tohit = custom_tohit or 0
    if custom_tohit:
        frontend.campaign.message('Applying custom to-hit modifier of %s' % custom_tohit)

    frontend.campaign.message('%s is attacking %s' % (player.displayname(), target.displayname()))
    target_alive = True
    attack_number = 1
    attack_mods = load_yaml('adnd2e', 'attack_mods')
    num_attacks = player.num_attacks()
    debug("COMBAT: num_attacks:", num_attacks)
    while attack_number <= num_attacks and target_alive:
        frontend.campaign.message('<br><Br><strong>Attack %s of %s</strong><br>' % (attack_number, num_attacks))
        total_modifier = custom_tohit
        for mod in attack_modifiers:
            total_modifier += int(attack_mods[mod])
            frontend.campaign.message('Applying modifier %s: %s' % (mod, attack_mods[mod]))
        debug("Attack number", attack_number, "Out of", num_attacks, "Target alive", target_alive)
        weapon = player.current_weapon()
        weapon.onattack(player, target)
        frontend.campaign.message('Attacking with weapon %s' % weapon.displayname())
        range_modifier = range_mod(player, target, weapon)
        if range_modifier:
            frontend.campaign.message('Applying range modifier %s' % range_modifier)
            total_modifier += range_modifier
        frontend.campaign.message('Attack number %s out of %s' % (attack_number, num_attacks))
        attack_number += 1
        weaponmod = player.to_hit_mod()
        frontend.campaign.message('Applying weapon modifier %s' % weaponmod)
        total_modifier += weaponmod
        frontend.campaign.message('Total modifier: %s<br><br>' % total_modifier)
        attack_roll = player.attack_roll(target, total_modifier)
        frontend.campaign.message('Attack roll: %s %s %s' % (attack_roll[0], attack_roll[1], attack_roll[2]))
        if attack_roll[1] == 'Critical Hit !':
            frontend.campaign.message('Critical hit ! %s gains an extra attack.' % player.displayname())
            num_attacks += 1
        if attack_roll[1] == "Critical Miss !":
            frontend.campaign.message('Critical miss ! %s loses an attack.' % player.displayname())
            num_attacks -= 1
        if "hit" in attack_roll[1].lower():
            if target.is_casting:
                target.interrupt_cast()
                frontend.campaign.message('%s was casting but it was interrupted by a successful hit' % target.displayname())
            weapon.onstrike(player, target)
            damage_result = calc_damage(player, target, custom_dmg)
            target_alive = damage_result is True
            for char in [player, target]:
                char.autosave()
Example 34
 def next_level(self):
     parentclass = self.get('class/parent', '')
     childclass = self.get('class/class', '')
     if childclass == 'paladin':
         childclass = 'ranger'
     debug('Checking next level for %s' % self.displayname())
     nl = int(self.get('combat/level-hitdice', 1)) + 1
     if nl > 20:
         return -1
     xp_levels = FlattenedDict(load_yaml('rules', 'xp_levels.yaml')).readall('/%s/%s' % (parentclass, str(nl)))
     
     if '%s/%s/all' %(parentclass, str(nl)) in xp_levels:
         next_xp = int(xp_levels['%s/%s/all' %(parentclass, str(nl))])
     else:
         next_xp = int(xp_levels['%s/%s/%s' %(parentclass, str(nl),childclass)])
     return next_xp
Example 35
def load_params(exp_path, generation=None, individual=0):
    """ Load the parameters from *exp_path/log_params_evolution.txt* and the data from
        *exp_path/data_QNAS.txt*. The data loaded is the network encoded by individual
        *individual* of generation *generation*.

    Args:
        exp_path: (str) path to the directory containing evolution files.
        generation: (int) the generation number of the individual to be profiled.
            If *None*, the last generation will be used.
        individual: (int) the number of the individual in *generation* to be profiled.

    Returns:
        dict holding all the necessary parameters and data.
    """

    log_file_path = os.path.join(exp_path, 'log_params_evolution.txt')
    log_data_path = os.path.join(exp_path, 'data_QNAS.pkl')

    params = load_yaml(log_file_path)
    log_data = load_pkl(log_data_path)

    input_shape = (1, params['train_data_info']['height'],
                   params['train_data_info']['width'],
                   params['train_data_info']['num_channels'])

    # Load last generation, if it is not specified
    if generation is None:
        generation = max(log_data.keys())

    log_data = log_data[generation]
    nets = log_data['net_pop']

    net = QChromosomeNetwork(
        fn_list=params['QNAS']['fn_list'],
        max_num_nodes=params['QNAS']['max_num_nodes']).decode(nets[individual])
    loaded_params = {
        'individual_id_str':
        f"Generation {generation} - individual {individual}",
        'individual_id': (generation, individual),
        'net_list': net,
        'input_shape': input_shape,
        'num_classes': params['train_data_info']['num_classes'],
        'fn_dict': params['fn_dict'],
        'fn_list': params['QNAS']['fn_list']
    }

    return loaded_params
Example 36
    def _get_evolution_params(self):
        """ Get specific parameters for the evolution phase. """

        config_file = load_yaml(self.args['config_file'])

        self._check_vars(config_file)  # Checking if config file contains valid information.

        self.train_spec = dict(config_file['train'])
        self.QNAS_spec = dict(config_file['QNAS'])

        # Get the parameters lower and upper limits
        ranges = self._get_ranges(config_file)
        self.QNAS_spec['params_ranges'] = OrderedDict(sorted(ranges.items()))

        self._get_fn_spec()

        self.train_spec['experiment_path'] = self.args['experiment_path']
Example 37
    def _get_continue_params(self):
        """ Get parameters for the continue evolution phase. The evolution parameters are loaded
            from previous evolution configuration, except from the maximum number of generations
            (*max_generations*).
        """

        self.files_spec['continue_path'] = self.args['continue_path']
        self.files_spec['previous_QNAS_params'] = os.path.join(
            self.files_spec['continue_path'], 'log_params_evolution.txt')

        self.files_spec['previous_data_file'] = os.path.join(self.args['continue_path'],
                                                             'data_QNAS.pkl')
        self.load_old_params()
        self.QNAS_spec['max_generations'] = load_yaml(
                self.args['config_file'])['QNAS']['max_generations']

        self.train_spec['experiment_path'] = self.args['experiment_path']
Example 38
 def remove(self, obj, objtype):
     if objtype == 'npc':
         self.put('npc', '')
         return
     if objtype == 'player':
         self.put('player', False)
         return
     counter = 0
     todel = None
     current = self.get('items', [])
     for item in current:
         hash = Item(load_yaml('items',item)).get_hash()
         if obj.get_hash() == hash:
             todel = counter
         counter += 1
     if todel is not None:
         del current[todel]
         self.put('items', current)
Example 39
 def moveto(self, map, x, y):
     if not map:
         return
     if not isinstance(x, int) or not isinstance(y, int):
         try:
             x = int(x)
             y = int(y)
         except:
             return
     if not map.tile(x,y).canenter():
         return
     current = self.location()
     if current.get('map') and x and y:
         gamemap = GameMap(load_yaml('maps', current['map']))
         gamemap.removefromtile(current['x'], current['y'],self,'npc')
     map.addtotile(x, y, 'npc', self)
     if map.tile(x,y).revealed():
         messages.warning('%s moves to %sx%s' %(self.displayname(),x, y))
Example 40
def check_config(reporter, filename):
    """
    Check YAML configuration file.
    """

    config = load_yaml(filename)

    kind = config.get('kind', None)
    reporter.check(kind == 'workshop',
                   filename,
                   'Missing or unknown kind of event: {0}',
                   kind)

    carpentry = config.get('carpentry', None)
    reporter.check(carpentry in ('swc', 'dc'),
                   filename,
                   'Missing or unknown carpentry: {0}',
                   carpentry)
Example 41
def check_config(reporter, source_dir):
    """Check configuration file."""

    config_file = os.path.join(source_dir, '_config.yml')
    config = load_yaml(config_file)
    reporter.check_field(config_file, 'configuration',
                         config, 'kind', 'lesson')
    reporter.check_field(config_file, 'configuration',
                         config, 'carpentry', ('swc', 'dc', 'lc', 'cp', 'cms'))
    reporter.check_field(config_file, 'configuration', config, 'title')
    reporter.check_field(config_file, 'configuration', config, 'email')

    for defaults in [
            {'values': {'root': '.', 'layout': 'page'}},
            {'values': {'root': '..', 'layout': 'episode'}, 'scope': {'type': 'episodes', 'path': ''}},
            {'values': {'root': '..', 'layout': 'page'}, 'scope': {'type': 'extras', 'path': ''}}
            ]:
        reporter.check(defaults in config.get('defaults', []),
                   'configuration',
                   '"root" not set to "." in configuration')
Example 42
 def save(self, path):
     """
     Save the model as a Wrapper class (pyfunc).

     Parameters
     ----------
     path : str
         Path where the model object will be saved.

     Returns
     -------
     None
     """
     path_artifacts = path + '_artifacts.pkl'
     dump(self.artifacts, path_artifacts)
     content = load_json("config/arquivos.json")
     conda_env = load_yaml(content["path_yaml"])
     mlflow.pyfunc.save_model(path=path,
                              python_model=self,
                              artifacts={'model': path_artifacts},
                              conda_env=conda_env)
Example 43
    test_plan = os.environ.get('TEST_PLAN', 'release').lower()
    # 'Debug' only differs from 'Release' in NOT uploading results to QA FTP
    root_folder_name = test_plan
    debug = False
    if test_plan == 'debug':
        debug = True
        test_plan = 'release'

    assert options.res_dir is not None, "use --res_dir option to select resources for jobs"

    base_template = os.path.join(os.path.dirname(__file__), 'res',
                                 'template.yaml')
    print("*** Loading %s ***" % base_template)

    # first load 'base' template
    template = load_yaml(base_template)

    # then extend it with template specific to release type (ai/gg-ult-fab/...)
    res_template = os.path.abspath(
        os.path.join(options.res_dir, 'template.yaml'))
    print("*** Loading %s ***" % res_template)
    template.extend(load_yaml(res_template))

    tiden_job = None
    for k, v in enumerate(template):
        if 'job-template' in template[k]:
            if 'id' in template[k]['job-template']:
                if 'job-tiden' == template[k]['job-template']['id']:
                    tiden_job = deepcopy(template[k])
                    break
Example 44
import os
import sys

import scipy.stats
import torch
import torch.utils.data
from sklearn import metrics

try:
    from sklearn.externals import joblib
except ImportError:
    import joblib

# Local application/library specific imports.
import util
from pytorch_model import AutoEncoder

# Load configuration from YAML file.
CONFIG = util.load_yaml("./config.yaml")

# String constant: "cuda:0" or "cpu"
DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")


def load_model(config, machine_type):
    """
    Load model file
    """
    model_file = "{model}/model_{machine_type}.hdf5".format(
        model=CONFIG["model_directory"], machine_type=machine_type)
    if not os.path.exists(model_file):
        print("{} model not found ".format(machine_type))
        sys.exit(-1)
Example 45
                         category)

    # Check whether we have missing or too many categories
    seen_categories = set(header.keys())
    check_categories(reporter, REQUIRED, seen_categories,
                     'Missing categories')
    check_categories(reporter, seen_categories, REQUIRED.union(OPTIONAL),
                     'Superfluous categories')


def check_config(reporter, filename):
    """
    Check YAML configuration file.
    """

    config = load_yaml(filename)

    kind = config.get('kind', None)
    reporter.check(kind == 'workshop',
                   filename,
                   'Missing or unknown kind of event: {0}',
                   kind)

    carpentry = config.get('carpentry', None)
    reporter.check(carpentry in ('swc', 'dc'),
                   filename,
                   'Missing or unknown carpentry: {0}',
                   carpentry)


def main():
Example 46
def create_cluster(mem_count, ebs_count, func_count, sched_count, route_count,
                   bench_count, cfile, ssh_key, cluster_name, kops_bucket,
                   aws_key_id, aws_key):

    # create the cluster object with kops
    util.run_process(
        ['./create_cluster_object.sh', cluster_name, kops_bucket, ssh_key])

    client, apps_client = util.init_k8s()

    # create the kops pod
    print('Creating management pods...')
    kops_spec = util.load_yaml('yaml/pods/kops-pod.yml')
    env = kops_spec['spec']['containers'][0]['env']

    util.replace_yaml_val(env, 'AWS_ACCESS_KEY_ID', aws_key_id)
    util.replace_yaml_val(env, 'AWS_SECRET_ACCESS_KEY', aws_key)
    util.replace_yaml_val(env, 'KOPS_STATE_STORE', kops_bucket)
    util.replace_yaml_val(env, 'FLUENT_CLUSTER_NAME', cluster_name)

    client.create_namespaced_pod(namespace=util.NAMESPACE, body=kops_spec)

    # wait for the kops pod to start
    kops_ip = util.get_pod_ips(client, 'role=kops', is_running=True)[0]

    # copy kube config file to kops pod, so it can execute kubectl commands
    kops_podname = kops_spec['metadata']['name']
    kcname = kops_spec['spec']['containers'][0]['name']

    os.system('cp %s kvs-config.yml' % cfile)
    util.copy_file_to_pod(client, '/home/ubuntu/.kube/config', kops_podname,
                          '/root/.kube/', kcname)
    util.copy_file_to_pod(client, ssh_key, kops_podname, '/root/.ssh/', kcname)
    util.copy_file_to_pod(client, ssh_key + '.pub', kops_podname,
                          '/root/.ssh/', kcname)
    util.copy_file_to_pod(client, 'kvs-config.yml', kops_podname,
                          '/fluent/conf/', kcname)

    # start the monitoring pod
    mon_spec = util.load_yaml('yaml/pods/monitoring-pod.yml')
    util.replace_yaml_val(mon_spec['spec']['containers'][0]['env'], 'MGMT_IP',
                          kops_ip)
    client.create_namespaced_pod(namespace=util.NAMESPACE, body=mon_spec)

    util.get_pod_ips(client, 'role=monitoring')

    # copy config file into monitoring pod -- wait till we create routing pods,
    # so we're sure that the monitoring nodes are up and running
    util.copy_file_to_pod(client, 'kvs-config.yml',
                          mon_spec['metadata']['name'], '/fluent/conf/',
                          mon_spec['spec']['containers'][0]['name'])
    os.system('rm kvs-config.yml')

    print('Creating %d routing nodes...' % (route_count))
    add_nodes(client, apps_client, cfile, ['routing'], [route_count], True)
    util.get_pod_ips(client, 'role=routing')

    print('Creating %d memory, %d ebs node(s)...' % (mem_count, ebs_count))
    add_nodes(client, apps_client, cfile, ['memory', 'ebs'],
              [mem_count, ebs_count], True)

    print('Creating routing service...')
    service_spec = util.load_yaml('yaml/services/routing.yml')
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)

    print('Adding %d scheduler nodes...' % (sched_count))
    add_nodes(client, apps_client, cfile, ['scheduler'], [sched_count], True)
    util.get_pod_ips(client, 'role=scheduler')

    print('Adding %d function serving nodes...' % (func_count))
    add_nodes(client, apps_client, cfile, ['function'], [func_count], True)

    print('Creating function service...')
    service_spec = util.load_yaml('yaml/services/function.yml')
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)

    print('Adding %d benchmark nodes...' % (bench_count))
    add_nodes(client, apps_client, cfile, ['benchmark'], [bench_count], True)

    print('Finished creating all pods...')
    os.system('touch setup_complete')
    util.copy_file_to_pod(client, 'setup_complete', kops_podname, '/fluent',
                          kcname)
    os.system('rm setup_complete')

    sg_name = 'nodes.' + cluster_name
    sg = ec2_client.describe_security_groups(Filters=[{
        'Name': 'group-name',
        'Values': [sg_name]
    }])['SecurityGroups'][0]

    print('Authorizing ports for routing service...')

    permission = [{
        'FromPort': 6200,
        'IpProtocol': 'tcp',
        'ToPort': 6203,
        'IpRanges': [{
            'CidrIp': '0.0.0.0/0'
        }]
    }]
    ec2_client.authorize_security_group_ingress(GroupId=sg['GroupId'],
                                                IpPermissions=permission)

    routing_svc_addr = util.get_service_address(client, 'routing-service')
    function_svc_addr = util.get_service_address(client, 'function-service')
    print('The routing service can be accessed here: \n\t%s' %
          (routing_svc_addr))
    print('The function service can be accessed here: \n\t%s' %
          (function_svc_addr))
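The cluster scripts in this and the following examples pair util.load_yaml with util.replace_yaml_val to patch environment variables in the parsed pod and DaemonSet specs before submitting them. That helper is not included in the listing; assuming the env entries are the usual Kubernetes name/value mappings, a rough sketch might be:

def replace_yaml_val(env_list, name, value):
    # Hypothetical sketch: env_list is the parsed 'env' section of a container
    # spec, i.e. a list of {'name': ..., 'value': ...} dicts. Overwrite the
    # value of the first entry whose name matches.
    for entry in env_list:
        if entry['name'] == name:
            entry['value'] = value
            return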
Example 47
        ys: Dict[str, torch.Tensor] = {}
        for name, conv in self.convs.items():
            ys[name] = conv(mid).squeeze()

        return ys

    @property
    def module(self) -> nn.Module:
        return self


if __name__ == "__main__":
    from train import load_yaml

    configs = util.load_yaml("configs/debug.yaml")
    latent_vars = build_latent_variables(configs["latent_variables"])

    g = Generator(latent_vars)
    zs = g.sample_latent_vars(2)
    for k, v in zs.items():
        print(k, v.shape)
    x = g.infer(list(zs.values()))
    print("x:", x.shape)

    d = Discriminator(configs["models"]["dis"])
    d_head, q_head = DHead(), QHead(latent_vars)

    mid = d(x)
    y, c = d_head(mid), q_head(mid)
Example 48
def add_nodes(client, apps_client, cfile, kinds, counts, create=False):
    for i in range(len(kinds)):
        print('Adding %d %s server node(s) to cluster...' %
              (counts[i], kinds[i]))

        # get the previous number of nodes of type kind that are running
        prev_count = util.get_previous_count(client, kinds[i])

        # we only add new nodes if we didn't pass in a node IP
        util.run_process(['./modify_ig.sh', kinds[i],
                          str(counts[i] + prev_count)])

    util.run_process(['./validate_cluster.sh'])

    kops_ip = util.get_pod_ips(client, 'role=kops')[0]
    route_ips = util.get_pod_ips(client, 'role=routing')
    if len(route_ips) > 0:
        seed_ip = random.choice(route_ips)
    else:
        seed_ip = ''

    mon_str = ' '.join(util.get_pod_ips(client, 'role=monitoring'))
    route_str = ' '.join(route_ips)
    sched_str = ' '.join(util.get_pod_ips(client, 'role=scheduler'))

    route_addr = util.get_service_address(client, 'routing-service')
    function_addr = util.get_service_address(client, 'function-service')

    # create should only be true when the DaemonSet is being created for the
    # first time -- i.e., when this is called from create_cluster
    if create:
        for i in range(len(kinds)):
            kind = kinds[i]

            fname = 'yaml/ds/%s-ds.yml' % kind
            yml = util.load_yaml(fname)

            for container in yml['spec']['template']['spec']['containers']:
                env = container['env']

                util.replace_yaml_val(env, 'ROUTING_IPS', route_str)
                util.replace_yaml_val(env, 'ROUTE_ADDR', route_addr)
                util.replace_yaml_val(env, 'SCHED_IPS', sched_str)
                util.replace_yaml_val(env, 'FUNCTION_ADDR', function_addr)
                util.replace_yaml_val(env, 'MON_IPS', mon_str)
                util.replace_yaml_val(env, 'MGMT_IP', kops_ip)
                util.replace_yaml_val(env, 'SEED_IP', seed_ip)

            apps_client.create_namespaced_daemon_set(namespace=util.NAMESPACE,
                                                     body=yml)

            # wait until all pods of this kind are running
            res = []
            while len(res) != counts[i]:
                res = util.get_pod_ips(client, 'role='+kind, is_running=True)

            created_pods = []
            pods = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                              label_selector='role=' +
                                              kind).items
            for pod in pods:
                pname = pod.metadata.name
                for container in pod.spec.containers:
                    cname = container.name
                    created_pods.append((pname, cname))

            os.system('cp %s ./kvs-config.yml' % cfile)
            for pname, cname in created_pods:
                util.copy_file_to_pod(client, 'kvs-config.yml', pname,
                                      '/fluent/conf/', cname)

            os.system('rm ./kvs-config.yml')
Example 49
 def __init__(self, path=DEFAULT_CONFIG_PATH):
     self.globel_app_conf = util.load_yaml(path)
Example 50
import requests
from aiohttp import web
from jwt import JWT, jwk_from_dict
from util import load_yaml, rndstr, url_encode

AUTH_ENDPOINT = "https://id.twitch.tv/oauth2/authorize"
TOKEN_ENDPOINT = "https://id.twitch.tv/oauth2/token"

config = load_yaml("config.yaml")
jwt_instance = JWT()
jwt_key = jwk_from_dict(
    requests.get("https://id.twitch.tv/oauth2/keys").json()["keys"][0])

session = {}


async def login(request):
    session["state"] = rndstr()
    session["nonce"] = rndstr()
    args = {
        "client_id": config["client_id"],
        "response_type": "code",
        "scope": "openid",
        "nonce": session["nonce"],
        "redirect_uri": config["api_url"] + "/callback",
        "state": session["state"],
    }
    raise web.HTTPFound(AUTH_ENDPOINT + url_encode(args))


async def login_callback(request):
Example 51
 def __init__(self, db_path, sql_path):
     self.__conn = SqliteConnection(convert_path(db_path))
     self.__sqls = load_yaml(convert_path(sql_path))
Example 52
 def __init__(self, path):
     self.__subtitles = load_yaml(convert_path(path))
Example 53
def add_nodes(client,
              apps_client,
              cfile,
              kinds,
              counts,
              management_ip,
              aws_key_id=None,
              aws_key=None,
              create=False,
              prefix=None):
    for i in range(len(kinds)):
        print('Adding %d %s server node(s) to cluster...' %
              (counts[i], kinds[i]))

        prev_count = util.get_previous_count(client, kinds[i])
        util.run_process(
            ['./modify_ig.sh', kinds[i],
             str(counts[i] + prev_count)])

    util.run_process(['./validate_cluster.sh'])
    replica_str = ' '.join(util.get_node_ips(client, 'role=aft'))

    # Create should only be true when the DaemonSet is being created for the
    # first time -- i.e., when this is called from create_cluster. After that,
    # we can basically ignore this because the DaemonSet will take care of
    # adding pods to created nodes.
    if create:
        for i in range(len(kinds)):
            kind = kinds[i]

            fname = 'yaml/ds/%s-ds.yml' % kind
            yml = util.load_yaml(fname, prefix)

            for container in yml['spec']['template']['spec']['containers']:
                env = container['env']
                util.replace_yaml_val(env, 'REPLICA_IPS', replica_str)
                util.replace_yaml_val(env, 'MANAGER', management_ip)
                util.replace_yaml_val(env, 'AWS_ACCESS_KEY_ID', aws_key_id)
                util.replace_yaml_val(env, 'AWS_SECRET_ACCESS_KEY', aws_key)

            apps_client.create_namespaced_daemon_set(namespace=util.NAMESPACE,
                                                     body=yml)

            # Wait until all pods of this kind are running
            res = []
            while len(res) != counts[i]:
                res = util.get_pod_ips(client, 'role=' + kind, is_running=True)

            created_pods = []
            pods = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                              label_selector='role=' +
                                              kind).items

            # Generate list of all recently created pods.
            for pod in pods:
                pname = pod.metadata.name
                for container in pod.spec.containers:
                    cname = container.name
                    created_pods.append((pname, cname))
                    pod.metadata.labels['aftReady'] = 'isready'
                    client.patch_namespaced_pod(pod.metadata.name,
                                                util.NAMESPACE, pod)

            # Copy the KVS config into all recently created pods.
            os.system('cp %s ./aft-config.yml' % cfile)

            for pname, cname in created_pods:
                util.copy_file_to_pod(
                    client, 'aft-config.yml', pname,
                    '/go/src/github.com/Alchem-Lab/aft/config', cname)
            os.system('rm ./aft-config.yml')
Example 54
def create_cluster(txn_count, keynode_count, rtr_count, worker_count, lb_count,
                   benchmark_count, config_file, branch_name, ssh_key,
                   cluster_name, kops_bucket, aws_key_id, aws_key,
                   anna_config_file):
    prefix = './'
    util.run_process(['./create_cluster_object.sh', kops_bucket, ssh_key],
                     'kops')

    client, apps_client = util.init_k8s()

    print('Creating Monitor Node...')
    add_nodes(client, apps_client, config_file, "monitor", 1, aws_key_id,
              aws_key, True, prefix, branch_name)

    print('Creating %d Anna Routing Nodes...' % (rtr_count))
    add_nodes(client, apps_client, anna_config_file, "routing", rtr_count,
              aws_key_id, aws_key, True, prefix, branch_name)

    print('Creating routing service...')
    service_spec = util.load_yaml('yaml/services/routing.yml', prefix)
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)
    util.get_service_address(client, 'routing-service')

    print('Creating %d Key Nodes...' % (keynode_count))
    add_nodes(client, apps_client, config_file, "keynode", keynode_count,
              aws_key_id, aws_key, True, prefix, branch_name)

    print('Creating %d Worker Nodes...' % (worker_count))
    add_nodes(client, apps_client, config_file, "worker", worker_count,
              aws_key_id, aws_key, True, prefix, branch_name)

    print('Creating Worker Service...')
    service_spec = util.load_yaml('yaml/services/worker.yml', prefix)
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)
    util.get_service_address(client, 'worker-service')

    print('Creating %d TASC nodes...' % (txn_count))
    add_nodes(client, apps_client, config_file, 'tasc', txn_count, aws_key_id,
              aws_key, True, prefix, branch_name)

    print('Creating %d Load Balancers...' % (lb_count))
    add_nodes(client, apps_client, config_file, 'lb', lb_count, aws_key_id,
              aws_key, True, prefix, branch_name)

    print('Creating TASC Load Balancing service...')
    service_spec = util.load_yaml('yaml/services/tasc.yml', prefix)
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)

    print('Creating %d Benchmark nodes...' % (benchmark_count))
    add_nodes(client, apps_client, config_file, 'benchmark', benchmark_count,
              aws_key_id, aws_key, True, prefix, branch_name)

    benchmark_ips = util.get_node_ips(client, 'role=benchmark', 'ExternalIP')
    with open('../cmd/benchmark/benchmarks.txt', 'w+') as f:
        for ip in benchmark_ips:
            f.write(ip + '\n')

    print('Finished creating all pods...')

    sg_name = 'nodes.' + cluster_name
    sg = ec2_client.describe_security_groups(Filters=[{
        'Name': 'group-name',
        'Values': [sg_name]
    }])['SecurityGroups'][0]
    print("Authorizing Ports for TASC...")
    permission = [{
        'FromPort': 0,
        'IpProtocol': 'tcp',
        'ToPort': 65535,
        'IpRanges': [{
            'CidrIp': '0.0.0.0/0'
        }]
    }]

    ec2_client.authorize_security_group_ingress(GroupId=sg['GroupId'],
                                                IpPermissions=permission)

    print('Registering Key Nodes...')
    keynode_pod_ips = util.get_pod_ips(client, 'role=keynode', is_running=True)
    register(client, keynode_pod_ips)

    print("\nThe TASC ELB Endpoint: " +
          util.get_service_address(client, "tasc-service") + "\n")
    print('Finished!')
Example 55
def create_cluster(replica_count, gc_count, lb_count, bench_count, cfile,
                   ssh_key, cluster_name, kops_bucket, aws_key_id, aws_key):
    prefix = './'
    util.run_process(['./create_cluster_object.sh', kops_bucket, ssh_key])

    client, apps_client = util.init_k8s()

    print('Creating management pod')
    # management_spec = util.load_yaml('yaml/pods/management-pod.yml')
    # env = management_spec['spec']['containers'][0]['env']
    # util.replace_yaml_val(env, 'AWS_ACCESS_KEY_ID', aws_key_id)
    # util.replace_yaml_val(env, 'AWS_SECRET_ACCESS_KEY', aws_key)
    #
    # client.create_namespaced_pod(namespace=util.NAMESPACE,
    #                              body=management_spec)
    # management_ip = util.get_pod_ips(client, 'role=management',
    #                                 is_running=True)[0]

    management_ip = ""

    print('Creating standby replicas...')
    util.run_process(['./modify_ig.sh', 'standby', '1'])
    util.run_process(['./validate_cluster.sh'])
    print('Creating %d load balancer, %d GC replicas...' %
          (lb_count, gc_count))
    add_nodes(client, apps_client, cfile, ['lb', 'gc'], [lb_count, gc_count],
              management_ip, aws_key_id, aws_key, True, prefix)

    lb_pods = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                         label_selector="role=lb").items
    kubecfg = os.path.join(os.environ['HOME'], '.kube/config')
    for pod in lb_pods:
        util.copy_file_to_pod(client, kubecfg, pod.metadata.name,
                              '/root/.kube', 'lb-container')

    replica_ips = util.get_node_ips(client, 'role=gc', 'ExternalIP')
    with open('gcs.txt', 'w') as f:
        for ip in replica_ips:
            f.write(ip + '\n')

    # Create the Aft replicas and wait until their pods are running before
    # recording their external IP addresses.
    print('Creating %d Aft replicas...' % (replica_count))
    add_nodes(client, apps_client, cfile, ['aft'], [replica_count],
              management_ip, aws_key_id, aws_key, True, prefix)
    util.get_pod_ips(client, 'role=aft')

    replica_ips = util.get_node_ips(client, 'role=aft', 'ExternalIP')
    with open('replicas.txt', 'w') as f:
        for ip in replica_ips:
            f.write(ip + '\n')

    # The management pod creation above is disabled, so management_spec is
    # never defined; skip copying config files into the management pod as well.
    # os.system('cp %s aft-config.yml' % cfile)
    # management_pname = management_spec['metadata']['name']
    # management_cname = management_spec['spec']['containers'][0]['name']
    # util.copy_file_to_pod(client, 'aft-config.yml', management_pname,
    #                       '/go/src/github.com/tajshaik24/aft/config',
    #                       management_cname)
    # util.copy_file_to_pod(client, 'replicas.txt', management_pname,
    #                       '/go/src/github.com/tajshaik24/aft',
    #                       management_cname)
    # util.copy_file_to_pod(client, 'gcs.txt', management_pname,
    #                       '/go/src/github.com/tajshaik24/aft',
    #                       management_cname)
    # util.copy_file_to_pod(client, kubecfg, management_pname, '/root/.kube/',
    #                       management_cname)
    # os.system('rm aft-config.yml')
    os.system('rm gcs.txt')

    # Copy replicas.txt to all Aft pods.
    aft_pod_list = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                              label_selector="role=aft").items
    aft_pod_list = list(map(lambda pod: pod.metadata.name, aft_pod_list))
    for pname in aft_pod_list:
        util.copy_file_to_pod(client, 'replicas.txt', pname,
                              '/go/src/github.com/tajshaik24/aft',
                              'aft-container')

    gc_pod_list = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                             label_selector="role=gc").items
    gc_pod_list = list(map(lambda pod: pod.metadata.name, gc_pod_list))
    for pname in gc_pod_list:
        util.copy_file_to_pod(client, 'replicas.txt', pname,
                              '/go/src/github.com/tajshaik24/aft',
                              'gc-container')
    os.system('rm replicas.txt')

    print('Adding %d benchmark nodes...' % (bench_count))
    add_nodes(client, apps_client, cfile, ['benchmark'], [bench_count],
              management_ip, aws_key_id, aws_key, True, prefix)

    print('Finished creating all pods...')

    print('Creating Aft service...')
    service_spec = util.load_yaml('yaml/services/aft.yml', prefix)
    client.create_namespaced_service(namespace=util.NAMESPACE,
                                     body=service_spec)

    sg_name = 'nodes.' + cluster_name
    sg = ec2_client.describe_security_groups(Filters=[{
        'Name': 'group-name',
        'Values': [sg_name]
    }])['SecurityGroups'][0]

    print('Authorizing ports for Aft replicas...')
    permission = [{
        'FromPort': 7654,
        'IpProtocol': 'tcp',
        'ToPort': 7656,
        'IpRanges': [{
            'CidrIp': '0.0.0.0/0'
        }]
    }, {
        'FromPort': 7777,
        'IpProtocol': 'tcp',
        'ToPort': 7782,
        'IpRanges': [{
            'CidrIp': '0.0.0.0/0'
        }]
    }, {
        'FromPort': 8000,
        'IpProtocol': 'tcp',
        'ToPort': 8003,
        'IpRanges': [{
            'CidrIp': '0.0.0.0/0'
        }]
    }]

    ec2_client.authorize_security_group_ingress(GroupId=sg['GroupId'],
                                                IpPermissions=permission)
    print('Finished!')
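
# A minimal sketch of the `util.load_yaml` helper assumed by the call sites
# above (e.g. util.load_yaml('yaml/services/aft.yml', prefix)): it resolves
# the path against an optional prefix directory and parses the YAML file.
# The real helper may behave differently; this is only an illustration.
import os

import yaml


def load_yaml(filename, prefix=None):
    # Resolve the YAML file relative to the optional prefix directory.
    path = os.path.join(prefix, filename) if prefix else filename
    with open(path) as f:
        return yaml.safe_load(f)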
Esempio n. 56
0
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--config",
        "-c",
        default="configs/default.yaml",
        help="training configuration file",
    )
    args = parser.parse_args()
    configs = util.load_yaml(args.config)

    dataset_name = configs["dataset"]["name"]
    dataset_path = Path(configs["dataset"]["path"]) / dataset_name

    # prepare dataset
    if dataset_name == "mnist":
        dataset = new_mnist_dataset(dataset_path)
    elif dataset_name == "fashion-mnist":
        dataset = new_fashion_mnist_dataset(dataset_path)
    else:
        raise NotImplementedError

    dataloader = DataLoader(
        dataset,
        batch_size=configs["dataset"]["batchsize"],
        num_workers=configs["dataset"]["n_workers"],
        shuffle=True,
        drop_last=True,
        pin_memory=True,
        worker_init_fn=worker_init_fn,
    )

    # prepare models
    latent_vars = build_latent_variables(configs["latent_variables"])
    gen, dis = Generator(latent_vars), Discriminator(configs["models"]["dis"])
    dhead, qhead = DHead(), QHead(latent_vars)
    models = {"gen": gen, "dis": dis, "dhead": dhead, "qhead": qhead}

    # prepare optimizers
    opt_gen = create_optimizer([gen, qhead], **configs["optimizer"]["gen"])
    opt_dis = create_optimizer([dis, dhead], **configs["optimizer"]["dis"])
    opts = {"gen": opt_gen, "dis": opt_dis}

    # prepare directories
    log_path = Path(configs["log_path"])
    log_path.mkdir(parents=True, exist_ok=True)
    tb_path = Path(configs["tensorboard_path"])
    tb_path.mkdir(parents=True, exist_ok=True)

    # initialize logger
    logger = Logger(log_path, tb_path)

    # initialize losses
    losses = {
        "adv": loss.AdversarialLoss(),
        "info": loss.InfoGANLoss(latent_vars)
    }

    # start training
    trainer = Trainer(dataloader, latent_vars, models, opts, losses,
                      configs["training"], logger)
    trainer.train()
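
# `worker_init_fn` is passed to the DataLoader above but is not defined in
# this snippet. A common implementation (shown here only as an assumed sketch)
# seeds NumPy independently in each worker so random augmentations differ:
import numpy as np
import torch


def worker_init_fn(worker_id):
    # torch assigns each worker a distinct base seed; reuse it for NumPy.
    np.random.seed(torch.initial_seed() % 2 ** 32)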
Esempio n. 57
0
def main():
    args = parse_args()
    config = util.load_yaml(args.config)
    thrift = util.load_yaml(args.thrift)
    protocol = util.load_yaml(args.protocol)

    pmap = {}
    for item in protocol:
        if "class_name" in item:
            pmap[item.class_name] = item

    comment = "// This file is generated DO NOT EDIT @" + "generated"
    result = [{"comment": comment}]

    for thrift_item in thrift:
        config_item = None
        if "class_name" in thrift_item and thrift_item.class_name in pmap:
            protocol_item = pmap[thrift_item.class_name]

            special = False
            if "struct" in thrift_item:
                if thrift_item.class_name in config.StructMap:
                    config_item = config.StructMap[thrift_item.class_name]
                    thrift_item["proto_name"] = config_item.class_name
                    special = True

                for field in thrift_item.fields:
                    if (
                        config_item is not None
                        and field.field_name in config_item.fields
                    ):
                        field["proto_name"] = config_item.fields[
                            field.field_name
                        ].field_name
                    else:
                        field["proto_name"] = field.field_name

                if "struct" in protocol_item:
                    thrift_field_set = {t.proto_name for t in thrift_item.fields}
                    protocol_field_set = {p.field_name for p in protocol_item.fields}
                    valid_fields = thrift_field_set.intersection(protocol_field_set)

                    for field in thrift_item.fields:
                        if field.field_name in valid_fields:
                            field["convert"] = True

                    if len((thrift_field_set - protocol_field_set)) != 0:
                        eprint(
                            "Missing protocol fields: "
                            + thrift_item.class_name
                            + " "
                            + str(thrift_field_set - protocol_field_set)
                        )

                    if len((protocol_field_set - thrift_field_set)) != 0:
                        eprint(
                            "Missing thrift fields: "
                            + thrift_item.class_name
                            + " "
                            + str(protocol_field_set - thrift_field_set)
                        )
                else:
                    hfile = "./special/" + thrift_item.class_name + ".hpp.inc"
                    special = special_file(hfile, special, thrift_item, "hinc")

                    cfile = "./special/" + thrift_item.class_name + ".cpp.inc"
                    special = special_file(cfile, special, thrift_item, "cinc")

                    if not special:
                        eprint(
                            "Thrift struct missing from presto_protocol: "
                            + thrift_item.class_name
                        )
        else:
            eprint("Thrift item missing from presto_protocol: " + item.class_name)

    result.extend(thrift)
    print(util.to_json(result))
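
# `eprint` and `special_file` are used above but not shown. The sketches below
# are assumptions inferred only from the call sites: eprint writes diagnostics
# to stderr so stdout stays valid JSON, and special_file records a hand-written
# ".inc" override on the thrift item under the given key when such a file
# exists on disk. The real helpers may differ.
import os
import sys


def eprint(*args, **kwargs):
    # Diagnostics go to stderr; the generated JSON is printed on stdout.
    print(*args, file=sys.stderr, **kwargs)


def special_file(path, special, thrift_item, key):
    # If a special-case include file exists, attach its path and report the
    # item as specially handled; otherwise leave the flag unchanged.
    if os.path.isfile(path):
        thrift_item[key] = path
        return True
    return special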
Esempio n. 58
0
def add_nodes(client, apps_client, cfile, kind, count, aws_key_id=None,
              aws_key=None, create=False, prefix=None, branch="master"):
    print('Adding %d %s server node(s) to cluster...' % (count, kind))

    prev_count = util.get_previous_count(client, kind)
    util.run_process(['./modify_ig.sh', kind, str(count + prev_count)], 'kops')

    util.run_process(['./validate_cluster.sh'], 'kops')

    if create:
        fname = 'yaml/ds/%s-ds.yml' % kind
        yml = util.load_yaml(fname, prefix)

        for container in yml['spec']['template']['spec']['containers']:
            env = container['env']
            util.replace_yaml_val(env, 'BRANCH', branch)
            util.replace_yaml_val(env, 'AWS_ACCESS_KEY_ID', aws_key_id)
            util.replace_yaml_val(env, 'AWS_SECRET_ACCESS_KEY', aws_key)
            if kind == "tasc":
                routing_svc = util.get_service_address(client, 'routing-service')
                util.replace_yaml_val(env, 'ROUTING_ILB', routing_svc)
                monitor_ip = util.get_node_ips(client, 'role=monitor', 'ExternalIP')[0]
                util.replace_yaml_val(env, 'MONITOR', monitor_ip)
                worker_svc = util.get_service_address(client, 'worker-service')
                util.replace_yaml_val(env, 'WORKER_ILB', worker_svc)
            if kind == "keynode":
                monitor_ip = util.get_node_ips(client, 'role=monitor', 'ExternalIP')[0]
                util.replace_yaml_val(env, 'MONITOR', monitor_ip)
            if kind == 'worker':
                monitor_ip = util.get_node_ips(client, 'role=monitor', 'ExternalIP')[0]
                util.replace_yaml_val(env, 'MONITOR', monitor_ip)
                routing_svc = util.get_service_address(client, 'routing-service')
                util.replace_yaml_val(env, 'ROUTING_ILB', routing_svc)

        apps_client.create_namespaced_daemon_set(namespace=util.NAMESPACE,
                                                 body=yml)

        # Wait until all pods of this kind are running
        res = []
        while len(res) != count:
            res = util.get_pod_ips(client, 'role=' + kind, is_running=True)

        created_pods = []
        pods = client.list_namespaced_pod(namespace=util.NAMESPACE,
                                          label_selector='role=' +
                                                         kind).items

        # Send kube config to lb
        if kind == 'lb':
            kubecfg = os.path.join(os.environ['HOME'], '.kube/config')
            for pod in pods:
                cname = pod.spec.containers[0].name
                util.copy_file_to_pod(client, kubecfg, pod.metadata.name,
                                      '/root/.kube', cname)

        # Generate list of all recently created pods.
        created_pod_ips = []
        for pod in pods:
            created_pod_ips.append(pod.status.pod_ip)
            pname = pod.metadata.name
            for container in pod.spec.containers:
                cname = container.name
                created_pods.append((pname, cname))

        # Copy the TASC config (or the Anna KVS config, for routing nodes)
        # into all recently created pods.
        cfile_name = './tasc-config.yml' if kind != 'routing' else './anna-config.yml'
        cfile_dir = '/go/src/github.com/saurav-c/tasc/config' if kind != 'routing' else 'hydro/anna/conf'
        os.system('cp %s %s' % (cfile, cfile_name))

        for pname, cname in created_pods:
            util.copy_file_to_pod(client, cfile_name[2:], pname,
                                  cfile_dir, cname)
        os.system('rm ' + cfile_name)
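
# `util.replace_yaml_val` is assumed to fill in the value of a named entry in
# a Kubernetes container `env` list (a list of {'name': ..., 'value': ...}
# dicts) loaded from the DaemonSet YAML above. A minimal sketch of that helper:
def replace_yaml_val(env_list, name, value):
    # Overwrite the matching env var in place; names that are absent are
    # silently ignored.
    for entry in env_list:
        if entry['name'] == name:
            entry['value'] = value
            return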
Esempio n. 59
0
def using_remote_theme(source_dir):
    config_file = os.path.join(source_dir, '_config.yml')
    config = load_yaml(config_file)
    return 'remote_theme' in config
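
# A small, assumed usage example for the helper above; './my-site' is only an
# illustrative path to a Jekyll-style source directory containing _config.yml.
if using_remote_theme('./my-site'):
    print('_config.yml declares a remote_theme')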