Example #1
    def __getFunction__(self, yaml, modname, opMap):
        prim = "(" + yaml.get(modname)["primitive"] + ")"
        inputs = yaml.get(modname)["inputs"].keys()
        execStr = "def " + modname + "("
        execStr += reduce(lambda x,y: x+ "," + y, inputs)
        execStr += "):\n"

        execStr += " retStr = \"\"\n"

        for token in tokenize.generate_tokens(StringIO.StringIO(prim).readline):
            # token is a NAME ... this can be not/and/or OR a variable name
            if token[0] == 1:
                if token[1] in opMap:
                    execStr += " retStr += \" " + opMap[token[1]] + " \"\n"
                elif token[1] in inputs:
                    execStr += " retStr += " + token[1] + "\n"
                else:
                    raise Exception("Unknown token name: " + token[1])
            # token is an OP ... usually '(' or ')'
            elif token[0] == 51:
                execStr += " retStr += \"" + token[1] + "\"\n"
            # endmarker
            elif token[0] == 0:
                pass
            else:
                raise Exception("Unexpected token: " + str(token))

        execStr += " return retStr\n"

        return execStr
Example #2
 def __init__(self, id, yaml):
     self.id = id
     self.name = yaml.get('name', None) or self.id.replace('_', '-')
     self.description = yaml.get('description', None)
     self.tags = {}
     for tname, tvalue in yaml.get('tags', {}).items():
         self.tags[tname] = tvalue
     self.tests = []
Example #3
def mkosds(lists, yaml):
    i = 0
    for server in yaml.get('osd_servers', []):
        for j in xrange(0, yaml.get('osds_per_server', 0)):
            name = "osd.%d" % i
            lists[name] = []
            lists[name].append("        host = %s" % server)
            lists[name].append("        osd data = /srv/osd-device-%d-data" % j)
            lists[name].append("        osd journal = /dev/disk/by-partlabel/osd-device-%d-journal" % j)
            i += 1
Example #4
def mkosds(lists, yaml):
    i = 0
    for server in yaml.get('osd_servers', []):
        for j in xrange(0, yaml.get('osds_per_server', 0)):
            name = "osd.%d" % i
            lists[name] = []
            lists[name].append("        host = %s" % server)
            lists[name].append("        osd data = /srv/osd-device-%d-data" % j)
            lists[name].append("        osd journal = /srv/osd-device-%d-data/journal" % j)
#            lists[name].append("        osd journal = /dev/disk/by-partlabel/osd-device-%d-journal" % j)
            i += 1
Example #5
def station_from_yaml(yaml):
    """Produce Station from YAML."""
    station = Station()

    station.name = yaml.get("name", None)

    # TODO do something about that None.
    station.x = yaml.get("long", None)
    station.y = yaml.get("lat", None)

    return station
Example #6
 def count_data(self):
     """
     Count the number of test data entries in the YAML 'data' field.
     :return: the number of entries, or an error string if the case name is not found
     """
     yamlList = self.get_yaml()
     for yaml in yamlList:
         # If this case matches the current case name, return its data count
         if yaml.get('casename') == self.case_name:
             return len(yaml.get('data'))
     return "casename 不存在!"
Example #7
def mkosds(lists, yaml):
    i = 0
    for server in yaml.get('osd_servers', []):
        for j in xrange(0, yaml.get('osds_per_server', 0)):
#            for k in xrange(0, yaml.get('osds_per_device', 0)):
                name = "osd.%d" % i
                lists[name] = []
                lists[name].append("        host = %s" % server)
                lists[name].append("        osd data = /mnt/osd-device-%d-data" % j)
#                lists[name].append("        osd journal = /dev/mapper/%s-sata-l%d" % (server, j + yaml.get('osds_per_server', 0)))
                lists[name].append("        osd journal = /mnt/osd-device-%d-data/journal" % j)
                i += 1
Example #8
    def get_param(self, value):
        """
        Get a test case parameter from the YAML data.
        :param value: key of the parameter to look up
        :return: the parameter value, or an error string if the case name is not found
        """

        yamlList = self.get_yaml()
        for yaml in yamlList:
            # If this case matches the current case name, return the requested parameter
            if yaml.get('casename') == self.case_name:
                return yaml.get(value)
        return "casename 不存在!"
Example #9
 def __init__(self, id, yaml):
     self.id = id
     self.name = yaml.get('name', None)
     self.description = yaml.get('description', None)
     self.filters = {}
     for fname, fvalue in yaml.get('filter', {}).items():
         self.filters[fname] = fvalue
     self.tests = yaml.get('tests', [])
     logs = yaml.get('logs', {})
     self.files = [UjiNew.FileName(f) for f in logs.get('files', [])]
     self.commands = [
         UjiNew.Command(yaml) for yaml in logs.get('commands', [])
     ]
     self.actor = None
Example #10
def update(yaml, nestedKeys, value):
    if len(nestedKeys) > 1:
        key = nestedKeys.pop(0)
        # assign back so a dict created via the default {} is not discarded
        yaml[key] = update(yaml.get(key, {}), nestedKeys, value)
    else:
        yaml[nestedKeys[0]] = value
    return yaml
Example #11
def get_openstack_driver(yaml, project):
    """
    Create an openstack handle from config to manage RHOS-D resources
    """
    OpenStack = get_driver(Provider.OPENSTACK)
    glbs = yaml.get("globals")
    os_cred = glbs.get("openstack-credentials")
    username = os_cred["username"]
    password = os_cred["password"]
    auth_url = os_cred["auth-url"]
    auth_version = os_cred["auth-version"]
    tenant_name = project
    service_region = os_cred["service-region"]
    domain_name = os_cred["domain"]
    tenant_domain_id = os_cred["tenant-domain-id"]
    driver = OpenStack(
        username,
        password,
        ex_force_auth_url=auth_url,
        ex_force_auth_version=auth_version,
        ex_tenant_name=tenant_name,
        ex_force_service_region=service_region,
        ex_domain_name=domain_name,
        ex_tenant_domain_id=tenant_domain_id,
    )
    return driver
Example #12
def load_config(yaml: Yaml) -> Config:
    """Unpack a parsed YAML file into a `Config` object."""
    _schema_validate(yaml)

    try:
        yaml_state_machines = yaml['state_machines']
    except KeyError:  # pragma: no cover
        raise ConfigError(
            "No top-level state_machines key defined.",
        ) from None

    yaml_logging_plugins = yaml.get('plugins', {}).get('logging', [])

    return Config(
        state_machines={
            name: _load_state_machine(
                ['state_machines', name],
                name,
                yaml_state_machine,
            )
            for name, yaml_state_machine in yaml_state_machines.items()
        },
        database=load_database_config(),
        logging_plugins=_load_logging_plugins(yaml_logging_plugins),
    )
Example #13
def processBlock(yaml, makoVars):
    if makoVars["class"] in ["NToOneBlock"]:
        makoVars["factoryParams"] = ["nchans"] + makoVars["factoryParams"]
        makoVars["classParams"] = ["nchans"] + makoVars["classParams"]
    if makoVars["class"] in ["ForwardAndPostLabelBlock"]:
        label = "\"{0}\"".format(yaml["label"]) if "label" in yaml else "None"
        makoVars["classParams"] = [label] + makoVars["classParams"]
        makoVars["classParams"] = [yaml.get("findIndexFunc", "None")] + makoVars["classParams"]

    for key in ["blockType", "outputType"]:
        if key in yaml:
            makoVars["outputDTypeArgs"] = blockTypeToDictString(yaml[key])
            makoVars["outputDTypeChooser"] = blockTypeToDTypeChooser(yaml[key])
            makoVars["outputDTypeDefault"] = blockTypeToDTypeDefault(yaml[key])
            makoVars["classParams"] = ["outputDTypeArgs"] + makoVars["classParams"]
            makoVars["factoryVars"] += ["outputDTypeArgs"]
            break
    for key in ["blockType", "inputType"]:
        if key in yaml:
            makoVars["inputDTypeArgs"] = blockTypeToDictString(yaml[key])
            makoVars["inputDTypeChooser"] = blockTypeToDTypeChooser(yaml[key])
            makoVars["inputDTypeDefault"] = blockTypeToDTypeDefault(yaml[key])
            makoVars["classParams"] = ["inputDTypeArgs"] + makoVars["classParams"]
            makoVars["factoryVars"] += ["inputDTypeArgs"]
            break

    if "blockType" in yaml:
        makoVars["classParams"] = ["dtype, dtype"] + makoVars["classParams"]
        makoVars["factoryParams"] = ["dtype"] + makoVars["factoryParams"]
    else:
        makoVars["classParams"] = ["inputDType", "outputDType"] + makoVars["classParams"]
        makoVars["factoryParams"] = ["inputDType", "outputDType"] + makoVars["factoryParams"]
Example #14
  def from_yaml(cls, name, yaml):
    _yaml_check_map(yaml, ['type', 'values'], ['description'])
    _yaml_check_array(yaml['values'])

    obj = cls(name, yaml.get('description'))
    for value in yaml['values']:
      obj.add_value(EnumValue.from_yaml(value))

    return obj
Example #15
  def from_yaml(cls, yaml):
    _yaml_check_map_with_meta(yaml)
    name, null = _yaml_get_from_map_with_meta(yaml)

    if null is not None:
      raise SpecParseError(
          'Value for Enum ({}:{}) must be null. (leave empty or use "null")'.format(name, null))

    return cls(name, yaml.get('_description'))
Example #16
  def from_yaml(cls, name, yaml):
    _yaml_check_map(yaml, ['type', 'fields'], ['description'])
    _yaml_check_array(yaml['fields'])

    obj = cls(name, yaml.get('description'))
    for field in yaml['fields']:
      obj.add_field(StructField.from_yaml(field))

    return obj
Example #17
 def get_strata_paths(self, strata_path):
     yaml = self.load_yaml_from_file(strata_path)
     build_depends_paths = ['%s/%s' % (
                            self.morph_dir,
                            a['morph']) for a in yaml.get(
                                'build-depends', [])]
     bdds = [a for b in [self.get_strata_paths(x)
             for x in build_depends_paths] for a in b]
     x = list(set([strata_path]) | set(build_depends_paths) | set(bdds))
     return x
Example #18
    def updateYamlPathVals(self, yaml, yaml_key_list, rewrite_val):
        for key in yaml_key_list:
            if key == yaml_key_list[-1]:
                yaml[key] = rewrite_val
                break
            key = yaml_key_list.pop(0)
            yaml[key] = self.updateYamlPathVals(yaml.get(key, {}),
                                                yaml_key_list, rewrite_val)

        return yaml
Example #19
 def get_current_data(self):
     """
     Return all data for the current test case from the YAML file.
     :return: dict with the case data, or an error string if the case name is not found
     """
     yamlList = self.get_yaml()
     for yaml in yamlList:
         # If this case matches the current case name, return the whole entry
         if yaml.get('casename') == self.case_name:
             return yaml
     return "casename 不存在!"
Example #20
  def from_yaml(cls, yaml):
    _yaml_check_map_with_meta(yaml)
    name, bits = _yaml_get_from_map_with_meta(yaml)

    if not isinstance(bits, int):
      raise SpecParseError('Value ({}) of field ({}) must be a integer.'.format(bits, name))

    if bits < 1:
      raise SpecParseError('Bit number ({}) for field ({}) must be greater than zero.'.format(
          bits, name))

    return cls(name, bits, yaml.get('_description'))
Example #21
 def get_strata_paths(self, strata_path):
     yaml = self.load_yaml_from_file(strata_path)
     build_depends_paths = [
         '%s/%s' % (self.morph_dir, a['morph'])
         for a in yaml.get('build-depends', [])
     ]
     bdds = [
         a for b in [self.get_strata_paths(x) for x in build_depends_paths]
         for a in b
     ]
     x = list(set([strata_path]) | set(build_depends_paths) | set(bdds))
     return x
Example #22
def move_item(yaml,
              src_db,
              trg_db,
              p_trg_table,
              p_source_sql,
              src_yaml=None,
              Truncate=False,
              pk=None,
              work_table=None):
    # step 1: Truncate Table in target database connection
    # step 2 & 3: Dump the Data to CSV standard

    target_table_name = p_trg_table
    source_sql = p_source_sql
    logging_table = recurse_replace_yaml(yaml.get('logging_table', yaml),
                                         src_yaml)
    retain_data_file_path = yaml.get('retain_data_file_path', None)
    v_move_data = yaml.get('move_data', False)
    if v_move_data:
        if Truncate:
            trg_db.execute(
                'TRUNCATE TABLE {} CASCADE'.format(target_table_name))
        load_status = move_data(source_sql,
                                target_table_name,
                                src_db,
                                trg_db,
                                label=os.getpid(),
                                retain_data_file_path=retain_data_file_path,
                                logging_table=logging_table,
                                pk=pk,
                                work_table=work_table)

        trg_db.execute('COMMIT')
        # trg_db.execute('VACUUM analyze {}'.format(target_table_name))
    else:
        logging.debug(
            "Not Moving Data Set 'move_data' attribute to True in Yaml files")
        return 0
    return load_status
Example #23
 def __init__(self, yaml: dict):
     self.components = {}
     for component_yaml in yaml.get('components', []):
         if component_yaml['type'] == 'can':
             self.components[
                 component_yaml['name']] = CanInterfaceComponent(
                     self, component_yaml)
         if component_yaml['type'] == 'uart':
             self.components[component_yaml['name']] = SerialPortComponent(
                 self, component_yaml)
         if component_yaml['type'] == 'gpio':
             self.components[
                 'gpio' + str(component_yaml['num'])] = LinuxGpioComponent(
                     self, component_yaml)
Example #24
  def from_yaml(cls, yaml):
    _yaml_check_map_with_meta(yaml)
    name, field_type = _yaml_get_from_map_with_meta(yaml)

    if not isinstance(field_type, (str, list)):
      raise SpecParseError(
          ('Struct field value ({}:{}) must be string ' +
           'or two element list (for describing arrays).').format(name, field_type))

    if isinstance(field_type, list):
      type_object = Array.from_yaml(field_type)
    else:
      type_object = DataType.get_type(field_type)

    return cls(name, type_object, yaml.get('_description'))
Example #25
def get_openstack_driver(yaml):
    OpenStack = get_driver(Provider.OPENSTACK)
    glbs = yaml.get('globals')
    os_cred = glbs.get('openstack-credentials')
    username = os_cred['username']
    password = os_cred['password']
    auth_url = os_cred['auth-url']
    auth_version = os_cred['auth-version']
    tenant_name = os_cred['tenant-name']
    service_region = os_cred['service-region']
    driver = OpenStack(username,
                       password,
                       ex_force_auth_url=auth_url,
                       ex_force_auth_version=auth_version,
                       ex_tenant_name=tenant_name,
                       ex_force_service_region=service_region,
                       ex_domain_name='redhat.com')
    return driver
Example #26
 def __init__(self, yaml, db, parent = None):
   db.append(self)
   self.name = yaml["name"]
   self.parent = parent
   self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
   valid_until = None
   if "valid_until" in yaml:
     valid_until = rpki.sundial.datetime.fromdatetime(yaml.get("valid_until"))
   if valid_until is None and "valid_for" in yaml:
     valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
   self.base = rpki.resource_set.resource_bag(
     asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
     v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
     v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
     valid_until = valid_until)
   self.sia_base = yaml.get("sia_base")
   if "crl_interval" in yaml:
     self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
   if "regen_margin" in yaml:
     self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
   self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
   for r in self.roa_requests:
     if r.v4:
       self.base.v4 = self.base.v4.union(r.v4.to_resource_set())
     if r.v6:
       self.base.v6 = self.base.v6.union(r.v6.to_resource_set())
   self.hosted_by = yaml.get("hosted_by")
   self.hosts = []
   if not self.is_hosted:
     self.engine = self.allocate_engine()
     self.rpkid_port = self.allocate_port()
     self.irdbd_port = self.allocate_port()
   if self.runs_pubd:
     self.pubd_port  = self.allocate_port()
     self.rsync_port = self.allocate_port()
   if self.is_root:
     self.rootd_port = self.allocate_port()
Example #27
def stationset_from_yaml(yaml):
    """Produce StationSet from YAML."""
    station_set = StationSet()

    LOG.debug("Loading name for stationset from config YAML.")
    station_set.name = yaml.get("name", None)

    LOG.info("Loaded stationset %s from config.", station_set.name)

    # Get scale factors for later.
    long_shift = yaml.get("long_shift", 0)
    long_scale = yaml.get("long_scale", 1.0)

    lat_shift = yaml.get("lat_shift", 0)
    lat_scale = yaml.get("lat_scale", 1.0)

    station_set.real_stations = set()
    station_set.fantasy_stations = set()

    LOG.debug("Loading real stations from config YAML.")
    real_stations_yaml = yaml.get("real_stations", [])
    for station_yaml in real_stations_yaml:
        station_set.real_stations.add(station_from_yaml(station_yaml))

    LOG.debug("Transforming real station coords according to scale factors.")
    for s in station_set.real_stations:
        s.x = (s.x * long_scale) + long_shift
        s.y = (s.y * lat_scale) + lat_shift

    LOG.debug(f"Loaded real stations {station_set.real_stations}.")

    LOG.debug("Loading fantasy stations from config YAML.")
    fantasy_stations_yaml = yaml.get("fantasy_stations", [])
    for station_yaml in fantasy_stations_yaml:
        station_set.fantasy_stations.add(station_from_yaml(station_yaml))

    LOG.debug(
        "Transforming fantasy station coords according to scale factors.")
    for s in station_set.fantasy_stations:
        s.x = (s.x * long_scale) + long_shift
        s.y = (s.y * lat_scale) + lat_shift

    LOG.debug(f"Loaded fantasy stations {station_set.fantasy_stations}")

    return station_set
Example #28
def get_openstack_driver(yaml):
    OpenStack = get_driver(Provider.OPENSTACK)
    glbs = yaml.get("globals")
    os_cred = glbs.get("openstack-credentials")
    username = os_cred["username"]
    password = os_cred["password"]
    auth_url = os_cred["auth-url"]
    auth_version = os_cred["auth-version"]
    tenant_name = os_cred["tenant-name"]
    service_region = os_cred["service-region"]
    driver = OpenStack(
        username,
        password,
        ex_force_auth_url=auth_url,
        ex_force_auth_version=auth_version,
        ex_tenant_name=tenant_name,
        ex_force_service_region=service_region,
        ex_domain_name="redhat.com",
    )
    return driver
Example #29
 def parse(cls, yaml):
     return cls(yaml.get("asn"), yaml.get("router_id"))
Example #30
 def parse(cls, yaml):
     return cls(yaml.get("asn"), yaml.get("router_id"))
Example #31
    def __init__(self, yaml):
        sim = dict()
        ins = dict()
        logic = dict()
        python = dict()
        gen = dict()
        func = dict()
        py2cnf = dict()
        #aima = dict()

        eqntott = {"and": "&",
                   "or": "|",
                   "not": "!",
                   "False": "ZERO",
                   "True": "ONE"}

        #aimaC = {"and": "&",
        #         "or": "|",
        #         "not": "~",
        #         "False": "0",
        #         "True": "1"}

        for modname in yaml.keys():
            if "primitive" in yaml.get(modname):
                prim = yaml.get(modname)["primitive"]
                inputs = yaml.get(modname)["inputs"].keys()
                evalStr = "lambda "
                for i in range(0, len(inputs)-1):
                    evalStr += inputs[i] + ","
                evalStr += inputs[len(inputs)-1] + ": "
                simStr = evalStr + prim
                logicStr = evalStr + self.__primToLogic__(inputs, prim, eqntott)
                pyStr    = evalStr + self.__primToLogic__(inputs, prim)
                #aimaStr  = evalStr + self.__primToLogic__(inputs, prim, aimaC)

                genStr = self.__getGenerator__(yaml, modname, eqntott)
                
                d = {}
                exec genStr.strip() in d
                #setattr(self.__class__, modname, d[modname])
                gen[modname] = d[modname]

                funcStr = self.__getFunction__(yaml, modname, eqntott)

                f = {}
                exec funcStr.strip() in f
                func[modname] = f[modname]

                sim[modname] = eval(simStr)
                logic[modname] = eval(logicStr)
                python[modname] = eval(pyStr)
                py2cnf[modname] = Gate2CNF.Gate2CNF(prim)
                #aima[modname] = eval(aimaStr)
                ins[modname] = inputs
        self.__sim = sim
        self.__inputs = ins
        self.__logic = logic
        self.__python = python
        self.__gen = gen
        self.__func = func
        self.__py2cnf = py2cnf
Example #32
def generateMakoVars(func, yaml):
    # Generate variables for processing.
    makoVars = dict()
    makoVars["name"] = yaml["name"]
    makoVars["subclass"] = yaml.get("subclass", False)
    makoVars["class"] = yaml["class"]
    makoVars["category"] = " ".join(yaml["categories"])
    makoVars["func"] = func
    makoVars["pothosDocFunc"] = makoVars["func"].replace(
        "randint", "integers").replace("integers", NumPyRandomIntegersString)
    makoVars["keywords"] = yaml.get("keywords", [func])
    makoVars["prefix"] = yaml.get("prefix", "numpy")
    makoVars["pothosDocPrefix"] = makoVars["prefix"].replace(
        "Random.NumPyRandom",
        NumPyRandomString).replace("Random", NumPyRandomString)
    makoVars["factoryVars"] = []

    if "funcArgs" in yaml:
        for arg in yaml["funcArgs"]:
            arg["title"] = arg["name"][0].upper() + arg["name"][1:]
            arg["privateVar"] = "__{0}".format(arg["name"])
            if arg["dtype"] + "_" in ParamWidgets:
                arg["widget"] = ParamWidgets[arg["dtype"] + "_"]
                arg["widgetArgs"] = dict()
                if arg["widget"] == "ComboBox":
                    arg["widgetArgs"]["editable"] = False
                else:
                    if ">=" in arg:
                        arg[">="] = getMapEntryIfStr(arg[">="])
                        arg["widgetArgs"]["minimum"] = str(arg[">="])
                    elif ">" in arg:
                        arg[">"] = getMapEntryIfStr(arg[">"])
                        diff = 1 if (arg["widget"] == "SpinBox") else 0.01
                        arg["widgetArgs"]["minimum"] = str(arg[">"] + diff)
                    if "<=" in arg:
                        arg["<="] = getMapEntryIfStr(arg["<="])
                        arg["widgetArgs"]["maximum"] = str(arg["<="])
                    elif "<" in arg:
                        arg["<"] = getMapEntryIfStr(arg["<"])
                        diff = 1 if (arg["widget"] == "SpinBox") else 0.01
                        arg["widgetArgs"]["maximum"] = str(arg["<"] - diff)
        makoVars["funcArgsList"] = [
            "self.{0}".format(arg["privateVar"]) for arg in yaml["funcArgs"]
        ]

    # Some keys are just straight copies.
    for key in ["alias", "niceName", "funcArgs", "factoryPrefix", "nanFunc"]:
        if key in yaml:
            makoVars[key] = yaml[key]

    if "description" in yaml:
        makoVars["description"] = yaml["description"]

    makoVars["classParams"] = []
    makoVars["factoryParams"] = []

    if "args" in yaml:
        makoVars["factoryVars"] += ["args"]
        makoVars["args"] = "[{0}]".format(", ".join(yaml["args"]))

    if "kwargs" in yaml:
        makoVars["factoryVars"] += ["kwargs"]
        makoVars["kwargs"] = "dict({0})".format(", ".join(yaml["kwargs"]))

    if "funcArgs" in yaml:
        assert (type(yaml["funcArgs"]) is list)

        for arg in yaml["funcArgs"]:
            if arg.get("isPublic", True):
                makoVars["factoryParams"] += [arg["name"]]

    if "funcKWargs" in yaml:
        assert (type(yaml["funcKWargs"]) is list)
        funcKWargs = []

        for arg in yaml["funcKWargs"]:
            if arg.get("isPublic", True):
                makoVars["factoryParams"] = [arg["name"]
                                             ] + makoVars["factoryParams"]
                funcKWargs += ["{0}={0}".format(arg["name"])]
            else:
                funcKWargs += [
                    "{0}={1}".format(arg["name"], arg.get("value", "None"))
                ]

        makoVars["factoryVars"] += ["funcKWargs"]
        makoVars["funcKWargs"] = "dict({0})".format(", ".join(funcKWargs))

    if "blockType" in yaml:
        if "Block" in makoVars["class"]:
            processBlock(yaml, makoVars)
        elif "Source" in makoVars["class"]:
            processSource(yaml, makoVars)
        else:
            raise RuntimeError("Invalid block type.")
    elif "blockPattern" in yaml:
        if yaml["blockPattern"] == "ComplexToScalar":
            yaml["inputType"] = ["complex"]
            yaml["outputType"] = ["float"]
            processBlock(yaml, makoVars)
        else:
            raise RuntimeError("Invalid block pattern.")

    if "nanFunc" in makoVars:
        funcAsParam = "({0}.{1} if ignoreNaN else {0}.{2})".format(
            makoVars["prefix"], makoVars["nanFunc"], func)
        makoVars["factoryParams"] += ["ignoreNaN"]
    else:
        funcAsParam = "{0}.{1}".format(makoVars["prefix"], func)

    makoVars["classParams"] = [
        '"{0}/{1}"'.format(makoVars.get("factoryPrefix", "/numpy"), func)
    ] + [funcAsParam] + makoVars["classParams"]

    makoVars["classParams"] += ["list()"]
    makoVars["classParams"] += [
        "funcKWargs" if "funcKWargs" in makoVars else "dict()"
    ]
    if "args" in makoVars:
        makoVars["classParams"] += ["*args"]
    if "kwargs" in makoVars:
        makoVars["classParams"] += ["**kwargs"]

    return makoVars
Example #33
 def parse(cls, yaml):
   """
   Parse a ROA request from YAML format.
   """
   return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6"))
Example #34
def i_enter_santanders_website(driver):
    util.validate_text(driver, yaml.get("text_validation"))
    util.screenshot(driver, "Santander")
Example #35
 def __init__(self, yaml, db, parent=None):
     db.append(self)
     self.name = yaml["name"]
     self.parent = parent
     self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
     valid_until = None
     if "valid_until" in yaml:
         valid_until = rpki.sundial.datetime.from_datetime(
             yaml.get("valid_until"))
     if valid_until is None and "valid_for" in yaml:
         valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(
             yaml["valid_for"])
     self.base = rpki.resource_set.resource_bag(
         asn=rpki.resource_set.resource_set_as(yaml.get("asn")),
         v4=rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
         v6=rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
         valid_until=valid_until)
     if "crl_interval" in yaml:
         self.crl_interval = rpki.sundial.timedelta.parse(
             yaml["crl_interval"]).convert_to_seconds()
     if "regen_margin" in yaml:
         self.regen_margin = rpki.sundial.timedelta.parse(
             yaml["regen_margin"]).convert_to_seconds()
     self.roa_requests = [
         roa_request.parse(y)
         for y in yaml.get("roa_request", yaml.get("route_origin", ()))
     ]
     self.router_certs = [
         router_cert.parse(y) for y in yaml.get("router_cert", ())
     ]
     if "ghostbusters" in yaml:
         self.ghostbusters = yaml.get("ghostbusters")
     elif "ghostbuster" in yaml:
         self.ghostbusters = [yaml.get("ghostbuster")]
     else:
         self.ghostbusters = []
     for r in self.roa_requests:
         if r.v4:
             self.base.v4 |= r.v4.to_resource_set()
         if r.v6:
             self.base.v6 |= r.v6.to_resource_set()
     for r in self.router_certs:
         self.base.asn |= r.asn
     self.hosted_by = yaml.get("hosted_by")
     self.hosts = []
     if not self.is_hosted:
         self.engine = self.allocate_engine()
         self.rpkid_port = self.allocate_port()
         self.irdbd_port = self.allocate_port()
     if self.runs_pubd:
         self.pubd_port = self.allocate_port()
         self.rsync_port = self.allocate_port()
     if self.is_root:
         self.rootd_port = self.allocate_port()
Example #36
def yaml_to_sphinx(yaml: dict):
    """Convert a Jupyter Book style config structure into a Sphinx config dict.

    :returns: (recursive_updates, override_updates)
    """
    sphinx_config = {}

    # top-level, string type
    YAML_TRANSLATIONS = {
        "title": "html_title",
        "author": "author",
        "copyright": "copyright",
        "logo": "html_logo",
        "project": "project",
    }
    for key, newkey in YAML_TRANSLATIONS.items():
        if key in yaml:
            val = yaml.get(key)
            if val is None:
                val = ""
            sphinx_config[newkey] = val

    # exclude patterns
    if "exclude_patterns" in yaml:
        # we always include these excludes, so as not to break back-compatibility
        defaults = {"_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"}
        defaults.update(yaml["exclude_patterns"])
        sphinx_config["exclude_patterns"] = list(sorted(defaults))

    # Theme
    sphinx_config["html_theme_options"] = theme_options = {}
    if "launch_buttons" in yaml:
        theme_options["launch_buttons"] = yaml["launch_buttons"]

    repository_config = yaml.get("repository", {})
    for spx_key, yml_key in [
        ("path_to_docs", "path_to_book"),
        ("repository_url", "url"),
        ("repository_branch", "branch"),
    ]:
        if yml_key in repository_config:
            theme_options[spx_key] = repository_config[yml_key]

    # HTML
    html = yaml.get("html")
    if html:

        for spx_key, yml_key in [
            ("html_favicon", "favicon"),
            ("html_baseurl", "baseurl"),
            ("comments_config", "comments"),
        ]:
            if yml_key in html:
                sphinx_config[spx_key] = html[yml_key]

        for spx_key, yml_key in [
            ("google_analytics_id", "google_analytics_id"),
            ("navbar_footer_text", "navbar_footer_text"),
            ("extra_navbar", "extra_navbar"),
            # Deprecate navbar_footer_text after a release cycle
            ("extra_footer", "extra_footer"),
            ("home_page_in_toc", "home_page_in_navbar"),
        ]:
            if yml_key in html:
                theme_options[spx_key] = html[yml_key]

        # Pass through the buttons
        btns = [
            "use_repository_button", "use_edit_page_button",
            "use_issues_button"
        ]
        use_buttons = {btn: html.get(btn) for btn in btns if btn in html}
        if any(use_buttons.values()):
            if not repository_config.get("url"):
                raise ValueError(
                    "To use 'repository' buttons, you must specify the repository URL"
                )
        # Update our config
        theme_options.update(use_buttons)

    # Parse and Rendering
    parse = yaml.get("parse")
    if parse:
        if parse.get("myst_extended_syntax") is True:
            sphinx_config["myst_dmath_enable"] = True
            sphinx_config["myst_amsmath_enable"] = True
            sphinx_config["myst_deflist_enable"] = True
            sphinx_config["myst_admonition_enable"] = True
            sphinx_config["myst_html_img_enable"] = True
            sphinx_config["myst_figure_enable"] = True
        if "myst_url_schemes" in parse:
            sphinx_config["myst_url_schemes"] = parse.get("myst_url_schemes")

    # Execution
    execute = yaml.get("execute")
    if execute:
        for spx_key, yml_key in [
            ("execution_allow_errors", "allow_errors"),
            ("execution_in_temp", "run_in_temp"),
            ("nb_output_stderr", "stderr_output"),
            ("execution_timeout", "timeout"),
            ("jupyter_cache", "cache"),
            ("jupyter_execute_notebooks", "execute_notebooks"),
            ("execution_excludepatterns", "exclude_patterns"),
        ]:
            if yml_key in execute:
                sphinx_config[spx_key] = execute[yml_key]

        if sphinx_config.get("jupyter_execute_notebooks") is False:
            # Special case because YAML treats `off` as "False".
            sphinx_config["jupyter_execute_notebooks"] = "off"

    # LaTeX
    latex = yaml.get("latex")
    if latex:
        for spx_key, yml_key in [
            ("latex_engine", "latex_engine"),
        ]:
            if yml_key in latex:
                sphinx_config[spx_key] = latex[yml_key]

    sphinx_config["latex_doc_overrides"] = {}
    if "title" in yaml:
        sphinx_config["latex_doc_overrides"]["title"] = yaml["title"]
    for key, val in yaml.get("latex", {}).get("latex_documents", {}).items():
        sphinx_config["latex_doc_overrides"][key] = val

    # Sphinx Configuration
    extra_extensions = yaml.get("sphinx", {}).get("extra_extensions")
    if extra_extensions:
        sphinx_config["extensions"] = get_default_sphinx_config()["extensions"]
        if not isinstance(extra_extensions, list):
            extra_extensions = [extra_extensions]
        for extension in extra_extensions:
            if extension not in sphinx_config["extensions"]:
                sphinx_config["extensions"].append(extension)

    local_extensions = yaml.get("sphinx", {}).get("local_extensions")
    if local_extensions:
        if "extensions" not in sphinx_config:
            sphinx_config["extensions"] = get_default_sphinx_config(
            )["extensions"]
        for extension, path in local_extensions.items():
            if extension not in sphinx_config["extensions"]:
                sphinx_config["extensions"].append(extension)
            if path not in sys.path:
                sys.path.append(os.path.abspath(path))

    # items in sphinx.config will override defaults,
    # rather than recursively updating them
    return sphinx_config, yaml.get("sphinx", {}).get("config") or {}
Example #37
def create_csv(database, table_name, yaml={}):
    friendly_table_name = table_name
    table_yaml = yaml.get('tables', {}).get(table_name, {})
    if args.verbose and not table_yaml:
        print(f"YAML WARNING: Table {table_name} is not defined in the YAML.")
    else:
        friendly_table_name = yaml.get('tables',
                                       {}).get(table_name,
                                               {}).get("name", table_name)
        ignore_table = table_yaml.get("ignore", "no") == "yes"
        if ignore_table:
            print(
                f"Table {friendly_table_name} ignored because of YAML setting."
            )
            return
    fcount = 1
    dest_file = pathlib.Path(args.outpath) / (friendly_table_name + ".csv")
    while dest_file.exists():
        dest_file = pathlib.Path.cwd() / (friendly_table_name +
                                          f"({fcount}).csv")
        fcount += 1
    with dest_file.open("w", newline="") as csv_file:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        print(f"Processing {friendly_table_name}")
        ese_table = database.get_table_by_name(table_name)
        header_row = []
        for each_field in ese_table.columns:
            name = each_field.name
            if table_yaml:
                field_yaml = table_yaml.get("fields",
                                            {}).get(each_field.name, {})
                if args.verbose and not field_yaml:
                    print(
                        f"YAML WARNING: Format specifier missing in yaml for field {name} in {table_name}"
                    )
                name = field_yaml.get('name', each_field.name)
                ignore_field = field_yaml.get('ignore', 'no') == "yes"
                if ignore_field:
                    continue
            header_row.append(name)
        if yaml_config and hasattr(plugin, "plugin_modify_header"):
            header_row = plugin.plugin_modify_header(header_row, table_name)
        csv_writer.writerow(header_row)
        for rec_entry_num in range(ese_table.number_of_records):
            data_row = []
            for each_field in range(ese_table.number_of_columns):
                field_name = ese_table.columns[each_field].name
                data = smart_retrieve(ese_table, rec_entry_num, each_field)
                if table_yaml:
                    ignore_field = table_yaml.get("fields", {}).get(
                        field_name, {}).get('ignore', 'no') == "yes"
                    if ignore_field:
                        continue
                    data_format = yaml.get('tables',
                                           {}).get(table_name,
                                                   {}).get("fields", {}).get(
                                                       field_name,
                                                       {}).get('format', "")
                    if data_format.startswith("function:"):
                        func_call = data_format.split(":")[1]
                        data = plugin.__dict__.get(
                            func_call,
                            lambda x: f"{data_format} not found")(data)
                    elif data_format.startswith("lookup:"):
                        data = lookup(data_format.split(":")[1], data)
                data_row.append(data)
            if yaml_config and hasattr(plugin, "plugin_modify_row"):
                data_row = plugin.plugin_modify_row(data_row, table_name)
            csv_writer.writerow(data_row)
        if yaml_config and hasattr(plugin, "plugin_end_of_file"):
            plugin.plugin_end_of_file(csv_writer, table_name)
Example #38
def yaml_to_sphinx(yaml: dict):
    """Convert a Jupyter Book style config structure into a Sphinx config dict.

    :returns: (recursive_updates, override_updates, add_paths)

    add_paths collects paths that are specified in the _config.yml (such as those
    provided in local_extensions) and returns them for adding to sys.path in
    a context where the _config.yml path is known
    """
    sphinx_config = {}

    # top-level, string type
    YAML_TRANSLATIONS = {
        "title": "html_title",
        "author": "author",
        "copyright": "copyright",
        "logo": "html_logo",
        "project": "project",
    }
    for key, newkey in YAML_TRANSLATIONS.items():
        if key in yaml:
            val = yaml.get(key)
            if val is None:
                val = ""
            sphinx_config[newkey] = val

    # exclude patterns
    if "exclude_patterns" in yaml:
        # we always include these excludes, so as not to break back-compatibility
        defaults = {"_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"}
        defaults.update(yaml["exclude_patterns"])
        sphinx_config["exclude_patterns"] = list(sorted(defaults))

    # Theme
    sphinx_config["html_theme_options"] = theme_options = {}
    if "launch_buttons" in yaml:
        theme_options["launch_buttons"] = yaml["launch_buttons"]

    repository_config = yaml.get("repository", {})
    for spx_key, yml_key in [
        ("path_to_docs", "path_to_book"),
        ("repository_url", "url"),
        ("repository_branch", "branch"),
    ]:
        if yml_key in repository_config:
            theme_options[spx_key] = repository_config[yml_key]

    # HTML
    html = yaml.get("html")
    if html:

        for spx_key, yml_key in [
            ("html_favicon", "favicon"),
            ("html_baseurl", "baseurl"),
            ("comments_config", "comments"),
        ]:
            if yml_key in html:
                sphinx_config[spx_key] = html[yml_key]

        for spx_key, yml_key in [
            ("google_analytics_id", "google_analytics_id"),
            ("navbar_footer_text", "navbar_footer_text"),
            ("extra_navbar", "extra_navbar"),
            # Deprecate navbar_footer_text after a release cycle
            ("extra_footer", "extra_footer"),
            ("home_page_in_toc", "home_page_in_navbar"),
        ]:
            if yml_key in html:
                theme_options[spx_key] = html[yml_key]

        # Pass through the buttons
        btns = ["use_repository_button", "use_edit_page_button", "use_issues_button"]
        use_buttons = {btn: html.get(btn) for btn in btns if btn in html}
        if any(use_buttons.values()):
            if not repository_config.get("url"):
                raise ValueError(
                    "To use 'repository' buttons, you must specify the repository URL"
                )
        # Update our config
        theme_options.update(use_buttons)

    # Parse and Rendering
    parse = yaml.get("parse")
    if parse:
        # Enable extra extensions
        extensions = sphinx_config.get("myst_enable_extensions", [])
        # TODO: deprecate this in v0.11.0
        if parse.get("myst_extended_syntax") is True:
            # use extend so the extension names stay a flat list of strings
            extensions.extend(
                [
                    "colon_fence",
                    "dollarmath",
                    "amsmath",
                    "deflist",
                    "html_image",
                ]
            )
            _message_box(
                (
                    "myst_extended_syntax is deprecated, instead specify extensions "
                    "you wish to be enabled. See https://myst-parser.readthedocs.io/en/latest/using/syntax-optional.html"  # noqa: E501
                ),
                color="orange",
                print_func=print,
            )
        for ext in parse.get("myst_enable_extensions", []):
            if ext not in extensions:
                extensions.append(ext)
        if extensions:
            sphinx_config["myst_enable_extensions"] = extensions

        # Configuration values we'll just pass-through
        for ikey in ["myst_substitutions", "myst_url_schemes"]:
            if ikey in parse:
                sphinx_config[ikey] = parse.get(ikey)

    # Execution
    execute = yaml.get("execute")
    if execute:
        for spx_key, yml_key in [
            ("execution_allow_errors", "allow_errors"),
            ("execution_in_temp", "run_in_temp"),
            ("nb_output_stderr", "stderr_output"),
            ("execution_timeout", "timeout"),
            ("jupyter_cache", "cache"),
            ("jupyter_execute_notebooks", "execute_notebooks"),
            ("execution_excludepatterns", "exclude_patterns"),
        ]:
            if yml_key in execute:
                sphinx_config[spx_key] = execute[yml_key]

        if sphinx_config.get("jupyter_execute_notebooks") is False:
            # Special case because YAML treats `off` as "False".
            sphinx_config["jupyter_execute_notebooks"] = "off"

    # LaTeX
    latex = yaml.get("latex")
    if latex:
        for spx_key, yml_key in [
            ("latex_engine", "latex_engine"),
            ("use_jupyterbook_latex", "use_jupyterbook_latex"),
        ]:
            if yml_key in latex:
                sphinx_config[spx_key] = latex[yml_key]

    sphinx_config["latex_doc_overrides"] = {}
    if "title" in yaml:
        sphinx_config["latex_doc_overrides"]["title"] = yaml["title"]
    for key, val in yaml.get("latex", {}).get("latex_documents", {}).items():
        sphinx_config["latex_doc_overrides"][key] = val

    # Sphinx Configuration
    extra_extensions = yaml.get("sphinx", {}).get("extra_extensions")
    if extra_extensions:
        sphinx_config["extensions"] = get_default_sphinx_config()["extensions"]

        if not isinstance(extra_extensions, list):
            extra_extensions = [extra_extensions]

        for extension in extra_extensions:
            if extension not in sphinx_config["extensions"]:
                sphinx_config["extensions"].append(extension)

    local_extensions = yaml.get("sphinx", {}).get("local_extensions")
    # add_paths collects additional paths for sys.path
    add_paths = []
    if local_extensions:
        if "extensions" not in sphinx_config:
            sphinx_config["extensions"] = get_default_sphinx_config()["extensions"]
        for extension, path in local_extensions.items():
            if extension not in sphinx_config["extensions"]:
                sphinx_config["extensions"].append(extension)
            if path not in sys.path:
                add_paths.append(path)

    # Citations
    sphinxcontrib_bibtex_configs = ["bibtex_bibfiles", "bibtex_reference_style"]
    if any(ii in yaml for ii in sphinxcontrib_bibtex_configs):
        # Load sphincontrib-bibtex
        if "extensions" not in sphinx_config:
            sphinx_config["extensions"] = get_default_sphinx_config()["extensions"]
        sphinx_config["extensions"].append("sphinxcontrib.bibtex")

        # Pass through configuration
        if yaml.get("bibtex_bibfiles"):
            if isinstance(yaml.get("bibtex_bibfiles"), str):
                yaml["bibtex_bibfiles"] = [yaml["bibtex_bibfiles"]]
            sphinx_config["bibtex_bibfiles"] = yaml["bibtex_bibfiles"]

    # items in sphinx.config will override defaults,
    # rather than recursively updating them
    return sphinx_config, yaml.get("sphinx", {}).get("config") or {}, add_paths
Example #39
def run():

    ids = read_input()
    logger = get_logger()
    yaml = read_yaml()

    with get_driver() as driver:

        driver.get(yaml.get("url_login"))
        driver.maximize_window()

        logger.info("url: %s", yaml.get("url_login"))

        actions = WebActions(driver, logger)

        actions.send_by_css("[ng-model=\"user.username\"]", yaml.get("user"))
        actions.send_by_css("[ng-model=\"user.password\"]",
                            yaml.get("password"))
        actions.click_by_css("[data-automation-id=\"login-button\"]")

        actions.wait_for_element_by_css(".dropdown-click", visible=True)

        logger.info("logged in: %s %s", yaml.get("user"), yaml.get("password"))

        open_product(driver, logger)

        for idx, id_key in enumerate(ids, 1):

            try:
                completed = ""
                failed_to_select = ""

                if yaml.get("process_items") < idx:
                    logger.info(
                        "completed %s items set in config: process_items",
                        yaml.get("process_items"))
                    break

                append_idx_key(id_key)

                logger.info("processing: %s", id_key)

                actions.send_by_css("input.bem-TextInput", id_key)
                time.sleep(1)

                for _ in range(3):
                    found = actions.get_all_elements_by_css_no_error(
                        ".bem-Table_Row", wait_time=5)
                    if len(found) == 1:
                        logger.info("ok: one matching row found")
                        break
                    else:
                        logger.info(
                            "matching rows found  (%s) => wait and retry",
                            len(found))
                        time.sleep(1)
                else:
                    logger.info("failed matching rows ...")

                    if len(found) == 0:
                        logger.info("0 key found %s. not processed", id_key)
                        set_status(id_key, "0 items found")
                        open_product(driver, logger)
                        continue
                    elif not yaml.get("process_multiple"):
                        logger.info("incorrect items found %s. not processed",
                                    len(found))
                        set_status(id_key,
                                   "errored: items found: (%s)" % len(found))
                        continue

                name = actions.get_element_by_css(
                    ".bem-Table_Row .bem-Text").text
                logger.info("processing item in table with name: %s", name)

                actions.click_by_css(".bem-Table_Row")
                actions.wait_for_element_by_xpath(
                    "//button/span[contains(text(),'Save')]", visible=True)

                # read inputs
                all_required = actions.get_all_elements_by_css(
                    ".bem-TextInput-required")
                required = [
                    item.get_attribute("value") for item in all_required
                ]

                all_not_required = actions.get_all_elements_by_css(
                    ".bem-TextInput")
                not_required = [
                    item.get_attribute("value") for item in all_not_required
                ]

                logger.info("brand hardcode is: %s", required[2])

                collections = not_required[5].split("; ")
                types = not_required[6].split("; ")
                categories = not_required[7].split("; ")

                logger.info("collections: %s, type: %s, category: %s",
                            collections, types, categories)

                select_collections = []
                for category in categories:
                    for collection in collections:
                        select_collections.append(category + " " + collection)

                logger.info("select collection built: %s" % select_collections)

                select_types = []
                for category in categories:
                    for t in types:
                        select_types.append(category + " " + t)

                logger.info("select types built: %s" % select_collections)

                actions.scroll_into_view(".bem-Pane_Section:nth-of-type(2)")

                for collection in select_collections:
                    all_selects = actions.get_all_elements_by_css(
                        ".selectize-input")

                    actions.click_if_clickable(all_selects[0])
                    actions.wait_for_element_by_css(
                        ".selectize-input.dropdown-active")

                    all_drop_down = actions.get_all_elements_by_css(
                        ".selectize-dropdown-content>div")
                    drops = [drop.text for drop in all_drop_down]

                    if collection in drops:
                        logger.info("found collection: |%s| in drop down: %s",
                                    collection, drops)

                        actions.click_by_xpath(
                            "//div[contains(@class, 'selectize-dropdown-content')]/div[text()='"
                            + collection + "']")
                        actions.wait_for_element_by_xpath(
                            "//div[contains(@class, 'selectize-input')]/div[text()='"
                            + collection + "']")
                        completed += collection
                        completed += ";"

                    else:
                        logger.info(
                            "not found collection: |%s| in drop down: %s",
                            collection, drops)
                        failed_to_select += collection
                        failed_to_select += ";"

                esc_select(driver, logger)

                actions.wait_for_element_not_present_by_css(
                    ".selectize-input.dropdown-active")

                for t in select_types:

                    all_selects = actions.get_all_elements_by_css(
                        ".selectize-input")

                    actions.click_if_clickable(all_selects[1])
                    actions.wait_for_element_by_css(
                        ".selectize-input.dropdown-active")

                    all_drop_down = actions.get_all_elements_by_css(
                        ".selectize-dropdown-content>div")
                    drops = [drop.text for drop in all_drop_down]

                    if t in drops:
                        logger.info("found type: |%s| in drop down: %s", t,
                                    drops)

                        actions.click_by_xpath(
                            "//div[contains(@class, 'selectize-dropdown-content')]/div[text()='"
                            + t + "']")
                        actions.wait_for_element_by_xpath(
                            "//div[contains(@class, 'selectize-input')]/div[text()='"
                            + t + "']")
                        completed += t
                        completed += ";"

                    else:
                        logger.info("not found type: |%s| in drop down: %s", t,
                                    drops)
                        failed_to_select += t
                        failed_to_select += ";"

                # to test only (type)
                # all_selects = actions.get_all_elements_by_css(".selectize-input")
                # actions.click_if_clickable(all_selects[1])
                # actions.wait_for_element_by_css(".selectize-input.dropdown-active")
                # actions.click_by_xpath(
                #     "//div[contains(@class, 'selectize-dropdown-content')]/div[contains(text(),'Women Tops')]")
                # actions.wait_for_element_by_xpath(
                #     "//div[contains(@class, 'selectize-input')]/div[contains(text(),'Women Tops')]")

                esc_select(driver, logger)

                for categ in categories:

                    all_selects = actions.get_all_elements_by_css(
                        ".selectize-input")

                    actions.click_if_clickable(all_selects[2])
                    actions.wait_for_element_by_css(
                        ".selectize-input.dropdown-active")

                    all_drop_down = actions.get_all_elements_by_css(
                        ".selectize-dropdown-content>div")
                    drops = [drop.text for drop in all_drop_down]

                    if categ in drops:
                        logger.info("found category: |%s| in drop down: %s",
                                    categ, drops)

                        actions.click_by_xpath(
                            "//div[contains(@class, 'selectize-dropdown-content')]/div[text()='"
                            + categ + "']")
                        actions.wait_for_element_by_xpath(
                            "//div[contains(@class, 'selectize-input')]/div[text()='"
                            + categ + "']")
                        completed += categ
                        completed += ";"

                    else:
                        logger.info(
                            "not found category: |%s| in drop down: %s", categ,
                            drops)
                        failed_to_select += categ
                        failed_to_select += ";"

                esc_select(driver, logger)

                if yaml.get("save"):
                    actions.click_by_xpath("//button/span[text()='Save ']")
                    actions.wait_for_element_not_present_by_css(
                        "//button/span[contains(text(),'Save')]")

                    logger.info("saved product id: %s", id_key)
                else:
                    actions.click_by_xpath("//button/span[text()='Cancel']")

                    actions.wait_for_element_by_css(
                        ".bem-ConfirmWrapper .bem-ConfirmWrapper_Modal")

                    actions.click_by_xpath(
                        "//button[text()='Discard Changes']")
                    actions.wait_for_element_not_present_by_css(
                        ".bem-ConfirmWrapper .bem-ConfirmWrapper_Modal")

                set_status(id_key, "processed")
                set_completed(id_key, completed)
                set_failed_to_select(id_key, failed_to_select)

            except Exception as err:
                logger.error("%s: error: %s", err.__class__.__name__, err)
                set_status(id_key, "errored: (%s)" % err.__class__.__name__)

                open_product(driver, logger)
Example #40
 def __init__(self, yaml, db, parent = None):
     db.append(self)
     self.name = yaml["name"]
     self.parent = parent
     self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
     valid_until = None
     if "valid_until" in yaml:
         valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
     if valid_until is None and "valid_for" in yaml:
         valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
     self.base = rpki.resource_set.resource_bag(
         asn         = str(yaml.get("asn", "")),
         v4          = yaml.get("ipv4"),
         v6          = yaml.get("ipv6"),
         valid_until = valid_until)
     if "crl_interval" in yaml:
         self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
     if "regen_margin" in yaml:
         self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
     self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
     self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
     if "ghostbusters" in yaml:
         self.ghostbusters = yaml.get("ghostbusters")
     elif "ghostbuster" in yaml:
         self.ghostbusters = [yaml.get("ghostbuster")]
     else:
         self.ghostbusters = []
     for r in self.roa_requests:
         if r.v4:
             self.base.v4 |= r.v4.to_resource_set()
         if r.v6:
             self.base.v6 |= r.v6.to_resource_set()
     for r in self.router_certs:
         self.base.asn |= r.asn
     self.hosted_by = yaml.get("hosted_by")
     self.hosts = []
     if not self.is_hosted:
         self.engine = self.allocate_engine()
         self.rpkid_port = self.allocate_port()
         self.irdbd_port = self.allocate_port()
     if self.runs_pubd:
         self.pubd_port  = self.allocate_port()
         self.rsync_port = self.allocate_port()
         self.rrdp_port  = self.allocate_port()
Example #41
def i_am_in_google_website(driver):
    driver.get(yaml.get("set_url"))