コード例 #1
0
def process_map(file_in, validate):
    """Iteratively process each XML element and write to csv(s)"""

    with codecs.open(NODES_PATH, 'w') as nodes_file, \
         codecs.open(NODE_TAGS_PATH, 'w') as nodes_tags_file, \
         codecs.open(WAYS_PATH, 'w') as ways_file, \
         codecs.open(WAY_NODES_PATH, 'w') as way_nodes_file, \
         codecs.open(WAY_TAGS_PATH, 'w') as way_tags_file:

        # One DictWriter per output CSV, keyed by the shaped-element key
        # whose rows it consumes (insertion order fixes header order).
        writers = {
            'node': UnicodeDictWriter(nodes_file, NODE_FIELDS),
            'node_tags': UnicodeDictWriter(nodes_tags_file, NODE_TAGS_FIELDS),
            'way': UnicodeDictWriter(ways_file, WAY_FIELDS),
            'way_nodes': UnicodeDictWriter(way_nodes_file, WAY_NODES_FIELDS),
            'way_tags': UnicodeDictWriter(way_tags_file, WAY_TAGS_FIELDS),
        }
        for writer in writers.values():
            writer.writeheader()

        validator = cerberus.Validator()

        for element in get_element(file_in, tags=('node', 'way')):
            el = shape_element(element)
            if not el:
                continue
            if validate is True:
                validate_element(el, validator)

            if element.tag == 'node':
                writers['node'].writerow(el['node'])
                writers['node_tags'].writerows(el['node_tags'])
            elif element.tag == 'way':
                writers['way'].writerow(el['way'])
                writers['way_nodes'].writerows(el['way_nodes'])
                writers['way_tags'].writerows(el['way_tags'])
コード例 #2
0
def process_map(file_in, validate):
    """Iteratively process each XML element and write to csv(s)"""

    with codecs.open(NODES_PATH, 'w') as nodes_fh, \
         codecs.open(NODE_TAGS_PATH, 'w') as node_tags_fh, \
         codecs.open(WAYS_PATH, 'w') as ways_fh, \
         codecs.open(WAY_NODES_PATH, 'w') as way_nodes_fh, \
         codecs.open(WAY_TAGS_PATH, 'w') as way_tags_fh:

        # https://discussions.udacity.com/t/problem-with-column-names-and-values-in-the-generated-csv-files/316584/2
        # For Python 2
        nodes_out = UnicodeDictWriter(nodes_fh, NODE_FIELDS)
        node_tags_out = UnicodeDictWriter(node_tags_fh, NODE_TAGS_FIELDS)
        ways_out = UnicodeDictWriter(ways_fh, WAY_FIELDS)
        way_nodes_out = UnicodeDictWriter(way_nodes_fh, WAY_NODES_FIELDS)
        way_tags_out = UnicodeDictWriter(way_tags_fh, WAY_TAGS_FIELDS)

        # Emit the header row of every CSV up front.
        for out in (nodes_out, node_tags_out, ways_out,
                    way_nodes_out, way_tags_out):
            out.writeheader()

        validator = cerberus.Validator()

        for element in get_element(file_in, tags=('node', 'way')):
            shaped = shape_element(element)
            if not shaped:
                continue
            if validate is True:
                validate_element(shaped, validator)
            if element.tag == 'node':
                nodes_out.writerow(shaped['node'])
                node_tags_out.writerows(shaped['node_tags'])
            elif element.tag == 'way':
                ways_out.writerow(shaped['way'])
                way_nodes_out.writerows(shaped['way_nodes'])
                way_tags_out.writerows(shaped['way_tags'])
コード例 #3
0
def get_config(config_file_path: 'str | None' = None) -> Config:
    """Return the application configuration as a ``Config`` object.

    Reads the YAML file at *config_file_path* (if given), validates it
    against ``ConfigurationSettings.schema`` with cerberus, and merges it
    over the default settings.  On validation failure, read/parse failure,
    or when no path is supplied, the defaults are used instead.

    :param config_file_path: Path to the YAML file containing the
        application configuration, or ``None`` to use the defaults.
    :return: ``Config`` instance wrapping the resulting settings dict.
    """
    result = dict()
    settings = ConfigurationSettings()
    if config_file_path:
        try:
            with open(config_file_path, 'r') as conf:
                config = yaml.safe_load(conf.read())
                v = cerberus.Validator(settings.schema)
                # Unknown keys in the user's file are tolerated, not rejected.
                v.allow_unknown = True
                if v.validate(config):
                    log('Configuration validation passed!')
                    result.update(__merge_conf_dicts(settings.default, config))
                    log('Working with configuration:')
                else:
                    log('Configuration validation failed!', logging.WARNING)
                    log(v.errors, logging.WARNING)
                    result.update(settings.default)
                    log('Using default configuration:', logging.WARNING)
        except Exception as exc:
            # Any I/O or YAML-parse error falls back to the defaults.
            log('Configuration file reading failed with following exception:',
                logging.WARNING)
            log(str(exc), logging.WARNING)
            log('Using default configuration:', logging.WARNING)
            result.update(settings.default)
    else:
        log('Configuration file not specified, using default configuration:',
            logging.WARNING)
        result.update(settings.default)
    log(str(result))
    return Config(result)
コード例 #4
0
async def write_to_user(sid, data):
    """Notify the user named in ``data['user']`` that the caller wrote to them.

    Validates the payload, looks up the receiver, emits a ``wrote.to.user``
    event (carrying the caller's public profile) to the receiver's room,
    and returns the receiver's public profile.  On failure a dict with an
    ``errors`` key is returned instead.
    """
    json = {}

    v = cerberus.Validator()
    v.schema = {'user': {'type': 'UUID', 'required': True, 'coerce': UUID}}

    if not v.validate(data):
        # BUG FIX: the original assigned to (and returned) an undefined
        # name ``result`` here, so every invalid payload raised NameError
        # instead of reporting the validation errors.
        json['errors'] = v.errors

        return json

    user = await sio.get_session(sid)

    receiver = database.session.query(User).get(data['user'])

    if not receiver:
        json['errors'] = {'to': "Reciever doesn't exist"}

        return json

    # Public profile of the *sender*, delivered to the receiver's room.
    u = {
        'uuid': user['uuid'],
        'name': user['name'],
        'public_key': user['public_key']
    }

    await sio.emit('wrote.to.user', u, room=str(receiver.uuid))

    # Public profile of the receiver, returned to the caller.
    json['user'] = {
        'uuid': str(receiver.uuid),
        'name': receiver.name,
        'public_key': receiver.public_key
    }

    return json
コード例 #5
0
def test_update_add_authorized_session(base_url):
    """End-to-end test: an authorized session can add a user via /update/add."""
    # Create session
    session = requests.Session()

    # Authorization
    # NOTE(review): "login" is sent as the literal HTTP method token
    # (requests passes custom methods through verbatim) — confirm the
    # server really accepts a LOGIN method rather than POST here.
    session.request("login", base_url + "/auth/login", json=AUTH_DATA)

    # Create user with a randomized name so repeated runs don't collide.
    data_to_make = {
        "name": "Test" + str(random.randint(10, 1000)),
        "surname": "TestSurname",
        "grade": 10
    }
    response = session.post(base_url + "/update/add", json=data_to_make)

    # Verify addition and response; re-raise with the response body so a
    # failing run shows what the server actually returned.
    try:
        assert response.json().get("status") == "ok"
    except AssertionError:
        raise AssertionError(response.json())

    # Validate the returned payload against the module-level cerberus schema.
    data = response.json().get("data")
    v = cerberus.Validator()
    assert v.validate(data, schema)
コード例 #6
0
def process_map(file_in, validate):
    """Iteratively process each XML element and write to csv(s)."""
    from street_map_csv_writer import StreetMapCsvWriter

    # All CSV output is delegated to a single writer object.
    csv_writer = StreetMapCsvWriter(add_csv_headers=False,
                                    output_directory='generated_data')

    validator = cerberus.Validator()

    for element in get_element(file_in, tags=('node', 'way')):
        shaped = shape_element(element)
        if not shaped:
            continue
        if validate is True:
            validate_element(shaped, validator)

        if element.tag == 'node':
            csv_writer.add_node(shaped['node'])
            csv_writer.add_node_tags(shaped['node_tags'])
        elif element.tag == 'way':
            csv_writer.add_way(shaped['way'])
            csv_writer.add_way_nodes(shaped['way_nodes'])
            csv_writer.add_way_tags(shaped['way_tags'])
コード例 #7
0
ファイル: Decorator.py プロジェクト: my-webchat-learning/chat
def validateInputByName(name, rules, error_msg=dict(), default=''):
    """Validate a single request parameter *name* against *rules*.

    Returns the (possibly defaulted) request-parameter mapping on success,
    or an error envelope (``msg``/``error_code``/``error``/``show``) on
    failure.  Request parameters all arrive as strings, so rules using the
    ``int`` type cannot be validated here.

    :param name: Parameter name to validate; ``'error'`` is reserved.
    :param rules: cerberus schema keyed by parameter name.
    :param error_msg: Custom messages passed to CustomErrorHandler.
    :param default: Value substituted when the parameter is absent.
    """
    # 'error' is reserved as a key of our own error envelope, so it may
    # not be used as a request-parameter name.
    if name == 'error':
        return {
            'msg': '不能使用error关键字作用请求参数',
            'error_code': CONST['CODE']['ERROR']['value'],
            'error': True,
            'show': True,
        }

    # Parameters come from the query string for GET requests and from the
    # JSON body for POST requests.
    method = request.method
    requests = None
    if method == 'GET':
        requests = request.values
    if method == 'POST':
        requests = request.get_json()
    if requests is None:
        requests = dict()

    # An explicitly optional parameter that is absent needs no validation.
    if not rules[name].get('required', True) and name not in requests:
        return requests

    v = cerberus.Validator(
        rules, error_handler=CustomErrorHandler(custom_messages=error_msg))
    if name not in requests:
        requests[name] = default
    cookedReqVal = {name: requests[name]}
    if v.validate(cookedReqVal):
        return requests

    return {
        'msg': v.errors,
        'error_code': CONST['CODE']['ERROR']['value'],
        'error': True,
        'show': True,
    }
コード例 #8
0
def process_map(file_in, validate=True):
    """Parse the OSM file and stream shaped elements into the CSV files."""
    with codecs.open(NODES_PATH, 'wb') as nodes_fh, \
         codecs.open(NODE_TAGS_PATH, 'wb') as node_tags_fh, \
         codecs.open(WAYS_PATH, 'wb') as ways_fh, \
         codecs.open(WAY_NODES_PATH, 'wb') as way_nodes_fh, \
         codecs.open(WAY_TAGS_PATH, 'wb') as way_tags_fh:
        node_writer = UnicodeDictWriter(nodes_fh, NODE_FIELDS)
        node_tag_writer = UnicodeDictWriter(node_tags_fh, NODE_TAGS_FIELDS)
        way_writer = UnicodeDictWriter(ways_fh, WAY_FIELDS)
        way_node_writer = UnicodeDictWriter(way_nodes_fh, WAY_NODES_FIELDS)
        way_tag_writer = UnicodeDictWriter(way_tags_fh, WAY_TAGS_FIELDS)

        # Write each CSV's header row before any data rows.
        for writer in (node_writer, node_tag_writer, way_writer,
                       way_node_writer, way_tag_writer):
            writer.writeheader()

        validator = cerberus.Validator()

        for element in get_element(file_in, tags=('node', 'way')):
            shaped = shape_element(element, file_in)
            if not shaped:
                continue
            if validate is True:
                validate_element(shaped, validator)

            if element.tag == 'node':
                node_writer.writerow(shaped['node'])
                node_tag_writer.writerows(shaped['node_tags'])
            elif element.tag == 'way':
                way_writer.writerow(shaped['way'])
                way_node_writer.writerows(shaped['way_nodes'])
                way_tag_writer.writerows(shaped['way_tags'])
コード例 #9
0
import copy

from pi_mqtt_gpio import CONFIG_SCHEMA
from pi_mqtt_gpio.modules import BASE_SCHEMA

# Build a validator for the "digital_inputs" section: the base key from the
# shared CONFIG_SCHEMA plus whatever rules the GPIO module itself adds.
gpio_module = import_module("pi_mqtt_gpio.modules.raspberrypi")

# Doesn't need to be a deep copy because we won't modify the base
# validation rules, just add more of them.
module_config_schema_input = CONFIG_SCHEMA.copy()
module_config_schema_input = {
    "digital_inputs": module_config_schema_input["digital_inputs"]
}
module_config_schema_input.update(getattr(gpio_module, "CONFIG_SCHEMA", {}))
# print yaml.dump(module_config_schema_input)
module_validator_input = cerberus.Validator(module_config_schema_input)

# Same again for the "digital_outputs" section.
module_config_schema_output = CONFIG_SCHEMA.copy()
module_config_schema_output = {
    "digital_outputs": module_config_schema_output["digital_outputs"]
}
module_config_schema_output.update(getattr(gpio_module, "CONFIG_SCHEMA", {}))
# print yaml.dump(module_config_schema_output)
module_validator_output = cerberus.Validator(module_config_schema_output)

digital_inputs = {}
digital_outputs = {}
config = {}
# Load the example configuration that the validators will be run against.
with open("config.example.yml") as f:
    config = yaml.safe_load(f)
コード例 #10
0
ファイル: fit_all.py プロジェクト: gregreen/deep-potential
def load_params(fname):
    """Load fit hyperparameters from a JSON file.

    Any keys missing from the file are filled in with the defaults below
    via ``cerberus.Validator.normalized``.  Passing ``fname=None`` yields
    a fully-defaulted parameter dict.
    """
    def spec(kind, default=None):
        # Build one cerberus field rule: {'type': kind[, 'default': default]}.
        rule = {'type': kind}
        if default is not None:
            rule['default'] = default
        return rule

    raw = {}
    if fname is not None:
        with open(fname, 'r') as f:
            raw = json.load(f)

    schema = {
        "df": {
            'type': 'dict',
            'schema': {
                "n_flows": spec('integer', 1),
                "n_hidden": spec('integer', 4),
                "hidden_size": spec('integer', 32),
                "reg": {
                    'type': 'dict',
                    'schema': {
                        "dv_dt_reg": spec('float'),
                        "kinetic_reg": spec('float'),
                        "jacobian_reg": spec('float')
                    }
                },
                "n_epochs": spec('integer', 64),
                "batch_size": spec('integer', 512),
                "lr_init": spec('float', 0.02),
                "lr_final": spec('float', 0.0001)
            }
        },
        "Phi": {
            'type': 'dict',
            'schema': {
                "n_samples": spec('integer', 524288),
                "grad_batch_size": spec('integer', 1024),
                "n_hidden": spec('integer', 3),
                "hidden_size": spec('integer', 256),
                "xi": spec('float', 1.0),
                "lam": spec('float', 1.0),
                "n_epochs": spec('integer', 64),
                "batch_size": spec('integer', 1024),
                "lr_init": spec('float', 0.001),
                "lr_final": spec('float', 0.000001)
            }
        }
    }
    return cerberus.Validator(schema).normalized(raw)
コード例 #11
0
    def test_canary(self):
        """Smoke test: a default cerberus Validator can be constructed."""
        import cerberus

        validator = cerberus.Validator()
        # Delegate the actual assertions to the shared canary helper.
        self._canary_test(validator)
コード例 #12
0
async def start_secret_chat(sid, data):
    """Start (or return the existing) secret chat between the session's user
    and the user named in ``data['user']``.

    On success returns ``{'conference': {...}}``; on validation or lookup
    failure returns ``{'errors': {...}}``.  When a new conference is
    created, both participants are notified via socket.io events.
    """
    json = {}

    v = cerberus.Validator()

    v.schema = {'user': {'type': 'UUID', 'required': True, 'coerce': UUID}}

    if not v.validate(data):
        json['errors'] = v.errors

        return json

    user = await sio.get_session(sid)

    requester = database.session.query(User).get(user['uuid'])

    participant = database.session.query(User).get(data['user'])

    # NOTE(review): the error strings below appear redacted ("******") in
    # this copy of the source — restore the real messages from upstream.
    if not participant:
        json['errors'] = {'user': "******"}

        return json

    # A user cannot open a secret chat with themselves.
    if participant.uuid == requester.uuid:
        json['errors'] = {'user': "******"}

        return json

    # Look for an already-existing secret conference between the two users.
    conference = database.session.query(Conference) \
        .join(Conference_Reference) \
        .filter(and_(
            Conference.type == 'secret',
            Conference_Reference.conference_uuid == Conference.uuid,
            Conference_Reference.user_uuid == requester.uuid,
            Conference_Reference.participant_uuid == participant.uuid
        )) \
        .one_or_none()

    if conference:
        # Chat already exists: return its state as seen by the requester.
        conference_reference = database.session.query(Conference_Reference) \
            .filter(and_(
                Conference_Reference.user_uuid == requester.uuid,
                Conference_Reference.conference_uuid == conference.uuid
            )) \
            .one_or_none()

        json['conference'] = {
            'uuid': str(conference.uuid),
            'type': conference.type,
            'updated_at': conference_reference.updated_at.timestamp(),
            'messages_count': conference_reference.messages_count,
            'unread_messages_count':
            conference_reference.unread_messages_count,
            'participant': {
                'uuid': str(conference_reference.participant.uuid),
                'name': conference_reference.participant.name,
                'public_key': conference_reference.participant.public_key
            }
        }

        # Attach the last message, if any, in the same wire format.
        if conference_reference.last_message is not None:
            last_message = conference_reference.last_message

            json['conference']['last_message'] = {
                'uuid':
                str(last_message.uuid),
                'author': {
                    'uuid': str(last_message.author.uuid),
                    'name': last_message.author.name,
                    'public_key': last_message.author.public_key
                },
                'conference': {
                    'uuid': str(conference.uuid),
                    'type': conference.type,
                    'updated_at': conference_reference.updated_at.timestamp(),
                    'messages_count': conference_reference.messages_count,
                    'unread_messages_count':
                    conference_reference.unread_messages_count,
                    'participant': {
                        'uuid': str(conference_reference.participant.uuid),
                        'name': conference_reference.participant.name,
                        'public_key':
                        conference_reference.participant.public_key
                    }
                },
                'read':
                last_message.read,
                'readAt':
                last_message.read_at.timestamp()
                if last_message.read_at is not None else last_message.read_at,
                'date':
                last_message.date.timestamp(),
                'type':
                last_message.type,
                'content':
                last_message.content,
                'consumed':
                last_message.consumed,
                'edited':
                last_message.edited
            }

        return json

    # if not conference:
    # No chat yet: create the conference plus the per-user reference and
    # participant rows, flushing between steps so FK values are assigned.
    conference = Conference(uuid=uuid4(), type='secret')

    requester_conference_reference = Conference_Reference(
        user=requester, conference=conference, participant=participant)

    participant_conference_reference = Conference_Reference(
        user=participant, conference=conference, participant=requester)

    requester_participant = Participant(conference=conference,
                                        user_uuid=requester.uuid)

    participant_participant = Participant(conference=conference,
                                          user=participant)

    database.session.add(conference)
    database.session.flush()

    database.session.add_all([
        requester_conference_reference, participant_conference_reference,
        requester_participant, participant_participant
    ])
    database.session.flush()

    requester.conferences_count += 1
    participant.conferences_count += 1

    database.session.add_all([requester, participant])
    database.session.flush()

    # Tell both users their conference counters changed.
    await sio.emit('user.conferences_count.updated',
                   {'conferences_count': requester.conferences_count},
                   room=str(requester.uuid))
    await sio.emit('user.conferences_count.updated',
                   {'conferences_count': participant.conferences_count},
                   room=str(participant.uuid))

    conference_references = [
        requester_conference_reference, participant_conference_reference
    ]

    # Announce the new chat to each side, shaped from their own reference.
    for conference_reference in conference_references:
        c = {
            'uuid': str(conference.uuid),
            'type': conference.type,
            'updated_at': conference_reference.updated_at.timestamp(),
            'messages_count': conference_reference.messages_count,
            'unread_messages_count':
            conference_reference.unread_messages_count,
            'participant': {
                'uuid': str(conference_reference.participant.uuid),
                'name': conference_reference.participant.name,
                'public_key': conference_reference.participant.public_key
            }
        }

        await sio.emit('secret.chat.started',
                       c,
                       room=str(conference_reference.user.uuid))

    # The caller gets the conference as seen from their own reference.
    json['conference'] = {
        'uuid': str(conference.uuid),
        'type': conference.type,
        'updated_at': requester_conference_reference.updated_at.timestamp(),
        'messages_count': requester_conference_reference.messages_count,
        'unread_messages_count':
        requester_conference_reference.unread_messages_count,
        'participant': {
            'uuid': str(requester_conference_reference.participant.uuid),
            'name': requester_conference_reference.participant.name,
            'public_key': requester_conference_reference.participant.public_key
        }
    }

    return json
コード例 #13
0
def verify_versions_acceptability(containers: List[ContainerInfo],
                                  acceptable: pathlib.Path,
                                  quiet: bool) -> int:
    """Function for verification of software versions installed in containers.

    Note: the return annotation was corrected to ``int`` — the function
    returns the exit-code-style values 0/1 described below, not booleans.

    Args:
        containers: List of items, that represents containers in k8s cluster.
        acceptable: Path to the YAML file, with the software verification parameters.
        quiet: Determines if output should be printed, to stdout.

    Returns:
        0 if the verification was successful or 1 otherwise.
    """

    # No acceptability file supplied -> nothing to verify.
    if not acceptable:
        return 0

    if not acceptable.is_file():
        raise FileNotFoundError(
            "File with configuration for acceptable does not exists!")

    # The file must be a mapping of language -> list of version strings.
    schema = {
        "python": {
            "type": "list",
            "schema": {
                "type": "string"
            }
        },
        "java": {
            "type": "list",
            "schema": {
                "type": "string"
            }
        },
    }

    validator = cerberus.Validator(schema)

    with open(acceptable) as stream:
        data = yaml.safe_load(stream)

    if not validator.validate(data):
        raise cerberus.SchemaError(
            "Schema of file with configuration for acceptable is not valid.")

    python_acceptable = data.get("python", [])
    java_acceptable = data.get("java", [])

    # Collect every (container, language, version) triple whose version is
    # not on the corresponding allow-list.
    python_not_acceptable = [(container, "python", version)
                             for container in containers
                             for version in container.versions.python
                             if version not in python_acceptable]

    java_not_acceptable = [(container, "java", version)
                           for container in containers
                           for version in container.versions.java
                           if version not in java_acceptable]

    if not python_not_acceptable and not java_not_acceptable:
        return 0

    if quiet:
        return 1

    print("List of not acceptable versions")
    pprint.pprint(python_not_acceptable)
    pprint.pprint(java_not_acceptable)

    return 1
コード例 #14
0
ファイル: config_parser.py プロジェクト: qoneci/notify
 def _validate(self, yml, service):
     """Validate *yml* against the schema registered for *service*.

     Returns the cerberus validation result, or ``False`` when no
     schema is registered for the service.
     """
     schema = self.get_schema(service)
     if not schema:
         return False
     return cerberus.Validator(schema).validate(yml)
コード例 #15
0
ファイル: config.py プロジェクト: jeromepin/esctl
    def _ensure_config_file_is_valid(self, document):
        """Validate the parsed config *document* against the esctl schema.

        Logs every cerberus error and raises ``SyntaxError`` (mentioning
        ``self.path``) when the document does not match.  Returns nothing
        on success.
        """
        # Settings shared by the top-level and per-cluster "settings" keys.
        settings_schema = {
            "no_check_certificate": {
                "type": "boolean"
            },
            "max_retries": {
                "type": "integer"
            },
            "timeout": {
                "type": "integer"
            },
        }

        # Credentials fetched by running an external command.
        external_credentials_schema = {
            "type": "dict",
            "schema": {
                "command": {
                    "type": "dict",
                    "schema": {
                        "run": {
                            "type": "string",
                            "required": True,
                        }
                    },
                },
            },
        }

        schema = {
            "settings": {
                "type": "dict",
                "schema": settings_schema
            },
            "clusters": {
                "type": "dict",
                "required": True,
                "keysrules": {},
                "valuesrules": {
                    "type": "dict",
                    "schema": {
                        "servers": {
                            "type": "list"
                        },
                        "settings": {
                            "type": "dict",
                            "schema": settings_schema
                        },
                    },
                },
            },
            "users": {
                "type": "dict",
                "keysrules": {
                    "type": "string"
                },
                "valuesrules": {
                    "type": "dict",
                    "schema": {
                        "username": {
                            "type": "string"
                        },
                        "password": {
                            "type": "string"
                        },
                        "external_username": external_credentials_schema,
                        "external_password": external_credentials_schema,
                    },
                },
            },
            "contexts": {
                "type": "dict",
                "required": True,
                "keysrules": {
                    "type": "string"
                },
                "valuesrules": {
                    "type": "dict",
                    "schema": {
                        "cluster": {
                            "type": "string"
                        },
                        "user": {
                            "type": "string"
                        },
                        "pre_commands": {
                            "type": "list",
                            "schema": {
                                "type": "dict",
                                "schema": {
                                    "command": {
                                        "type": "string"
                                    },
                                    "wait_for_exit": {
                                        "type": "boolean"
                                    },
                                    "wait_for_output": {
                                        "type": "string"
                                    },
                                },
                            },
                        },
                    },
                },
            },
            "default-context": {
                "type": "string"
            },
        }
        cerberus_validator = cerberus.Validator(schema)

        if not cerberus_validator.validate(document):
            # Log each error twice: once per unknown/offending path, once
            # with the expected constraint vs. the actual value.
            for root_error in cerberus_validator._errors:
                for inner_error in root_error.info[0]:
                    self.log.error("Unknown configuration : {}".format(
                        ".".join(inner_error.document_path)))
                self.log.error(
                    ("Invalid type or schema for configuration field '{0}'."
                     " Should be {1}. Got '{2}'").format(
                         root_error.field, root_error.constraint,
                         root_error.value))

            raise SyntaxError("{} doesn't match expected schema".format(
                self.path))
コード例 #16
0
        open(WAYS_PATH, 'w') as ways_file, \
        open(WAY_NODES_PATH, 'w') as way_nodes_file, \
        open(WAY_TAGS_PATH, 'w') as way_tags_file:

    nodes_writer = UnicodeDictWriter(nodes_file, NODE_FIELDS)
    node_tags_writer = UnicodeDictWriter(nodes_tags_file, NODE_TAGS_FIELDS)
    ways_writer = UnicodeDictWriter(ways_file, WAY_FIELDS)
    way_nodes_writer = UnicodeDictWriter(way_nodes_file, WAY_NODES_FIELDS)
    way_tags_writer = UnicodeDictWriter(way_tags_file, WAY_TAGS_FIELDS)

    nodes_writer.writeheader()
    node_tags_writer.writeheader()
    ways_writer.writeheader()
    way_nodes_writer.writeheader()
    way_tags_writer.writeheader()

    validator = cerberus.Validator()

    for element in get_element(file_name, tags=("node", "way")):
        el = tool.inital_csvs(element)
        print(el)
        if el:
            tool.validate_element(el, validator)
        if element.tag == "node":
            nodes_writer.writerow(el['node'])
            node_tags_writer.writerows(el['node_tags'])
        elif element.tag == 'way':
            ways_writer.writerow(el['way'])
            way_nodes_writer.writerows(el['way_nodes'])
            way_tags_writer.writerows(el['way_tags'])
コード例 #17
0
async def create_job(request):  # pylint: disable=R0912
    """Create a batch job from the JSON request body.

    Validates the payload, resolves the optional batch and parent jobs,
    checks the pod spec and the input/output-copy invariants, then builds
    a ``Job`` and returns it as JSON.  Aborts with 400/404 on any invalid
    request.
    """
    parameters = await request.json()

    # NOTE(review): 'keyschema'/'valueschema' are the pre-1.3 cerberus
    # spellings (renamed keysrules/valuesrules) — confirm the pinned
    # cerberus version still accepts them.
    schema = {
        # will be validated when creating pod
        'spec': schemas.pod_spec,
        'copy_service_account_name': {
            'type': 'string'
        },
        'batch_id': {
            'type': 'integer'
        },
        'parent_ids': {
            'type': 'list',
            'schema': {
                'type': 'integer'
            }
        },
        'scratch_folder': {
            'type': 'string'
        },
        'input_files': {
            'type': 'list',
            'schema': {
                'type': 'list',
                'items': 2 * ({
                    'type': 'string'
                }, )
            }
        },
        'output_files': {
            'type': 'list',
            'schema': {
                'type': 'list',
                'items': 2 * ({
                    'type': 'string'
                }, )
            }
        },
        'always_run': {
            'type': 'boolean'
        },
        'attributes': {
            'type': 'dict',
            'keyschema': {
                'type': 'string'
            },
            'valueschema': {
                'type': 'string'
            }
        },
        'callback': {
            'type': 'string'
        }
    }
    validator = cerberus.Validator(schema)
    if not validator.validate(parameters):
        abort(400, 'invalid request: {}'.format(validator.errors))

    # Deserialize the raw 'spec' dict into a kubernetes V1PodSpec.
    pod_spec = v1.api_client._ApiClient__deserialize(parameters['spec'],
                                                     kube.client.V1PodSpec)

    batch_id = parameters.get('batch_id')
    if batch_id:
        batch = batch_id_batch.get(batch_id)
        if batch is None:
            abort(404, f'invalid request: batch_id {batch_id} not found')
        if not batch.is_open:
            abort(400, f'invalid request: batch_id {batch_id} is closed')

    # Every parent must exist and live in the same batch as the child.
    parent_ids = parameters.get('parent_ids', [])
    for parent_id in parent_ids:
        parent_job = job_id_job.get(parent_id, None)
        if parent_job is None:
            abort(400, f'invalid parent_id: no job with id {parent_id}')
        if parent_job.batch_id != batch_id or parent_job.batch_id is None or batch_id is None:
            abort(
                400, f'invalid parent batch: {parent_id} is in batch '
                f'{parent_job.batch_id} but child is in {batch_id}')

    scratch_folder = parameters.get('scratch_folder')
    input_files = parameters.get('input_files')
    output_files = parameters.get('output_files')
    copy_service_account_name = parameters.get('copy_service_account_name')
    always_run = parameters.get('always_run', False)

    if len(pod_spec.containers) != 1:
        abort(400, f'only one container allowed in pod_spec {pod_spec}')

    if pod_spec.containers[0].name != 'main':
        abort(
            400,
            f'container name must be "main" was {pod_spec.containers[0].name}')

    # The copy service account must be given iff files are to be copied.
    if not both_or_neither(input_files is None and output_files is None,
                           copy_service_account_name is None):
        abort(
            400,
            # BUG FIX: error message said 'ouput_files' (typo).
            f'invalid request: if either input_files or output_files is set, '
            f'then the service account must be specified; otherwise the '
            f'service account must not be specified. input_files: {input_files}, '
            f'output_files: {output_files}, copy_service_account_name: '
            f'{copy_service_account_name}')

    job = Job(pod_spec, batch_id, parameters.get('attributes'),
              parameters.get('callback'), parent_ids, scratch_folder,
              input_files, output_files, copy_service_account_name, always_run)
    return jsonify(job.to_dict())
コード例 #18
0
def yaml_validator() -> cerberus.Validator:
    """Build a Validator for mod-pack data loaded from YAML.

    The schema is looked up in cerberus' global schema registry under
    the name ``'pack'``.
    """
    pack_schema = cerberus.schema_registry.get('pack')
    return cerberus.Validator(pack_schema)
コード例 #19
0
class ModeloProveedor(QtCore.QAbstractTableModel):
    """Qt table model backed by the ``proveedores`` (suppliers) table.

    Provides CRUD helpers on top of a :class:`querier.Querier` plus the
    read-only overrides Qt needs to display the supplier list.
    """

    # Shared, class-level validator instance (one for all model instances).
    __v = cerberus.Validator()

    def __init__(self, propiedades=None, parent=None):
        super(ModeloProveedor, self).__init__()
        self.__querier = querier.Querier(tabla="proveedores", prefijo="prov_")

        # cerberus schema used by crearProveedor / modificarProveedor.
        self.__scProveedor = {
            'prov_id': {
                'type': 'integer',
                'nullable': True
            },
            'prov_nombre': {
                'type': 'string',
                'required': True,
                'maxlength': 60
            },
            'prov_razon_social': {
                'type': 'string',
                'maxlength': 60
            },
            'prov_cuit': {
                'type': 'string',
                'maxlength': 20
            },
            'prov_direccion': {
                'type': 'string',
                'maxlength': 30
            },
            'prov_telefono': {
                'type': 'string',
                'maxlength': 30
            },
            'prov_telefono_dos': {
                'type': 'string',
                'maxlength': 30
            },
            'prov_email': {
                'type': 'string',
                'maxlength': 40
            },
            'prov_activo': {
                'type': 'integer',
                'allowed': [0, 1]
            },
            'prov_notas': {
                'type': 'string'
            },
            'prov_nombre_contacto': {
                'type': 'string',
                'maxlength': 30
            }
        }

        # Default display columns; may be overridden via *propiedades*.
        self.__propiedades = [
            'Codigo', 'Nombre', 'Razon Social', 'Cuit', 'Direccion',
            'Teléfono', 'Teléfono secundario', 'Email'
        ]

        if propiedades:
            self.__propiedades = propiedades

        # Maps display column name -> database column name.
        self.relacion = {
            'Codigo': 'prov_id',
            'Nombre': 'prov_nombre',
            'Razon Social': 'prov_razon_social',
            'Cuit': 'prov_cuit',
            'Direccion': 'prov_direccion',
            'Teléfono': 'prov_telefono',
            'Teléfono secundario': 'prov_telefono_dos',
            'Email': 'prov_email'
        }

        # Database columns actually queried, in display order.
        self.__busqueda = []
        for propiedad in self.__propiedades:
            self.__busqueda.append(self.relacion[propiedad])

        self.proveedores = self.__querier.traerElementos(self.__busqueda)
        self.proveedor = {}

    def crearProveedor(self, proveedorNuevo):
        """Validate *proveedorNuevo* against the schema and insert it.

        Returns True when the record validated (and was inserted),
        False otherwise (errors are printed).
        """
        v = self.__v.validate(proveedorNuevo, self.__scProveedor)
        if v:
            self.__querier.insertarElemento(proveedorNuevo)
        else:
            print("ERRORES: ", self.__v.errors)
        return v

    def verListaProveedores(self,
                            campos=None,
                            condiciones=None,
                            limite=None,
                            uniones=None,
                            orden=None):
        """Reload the supplier list from the database and refresh the view."""
        if not campos:
            campos = self.__busqueda

        self.proveedores = self.__querier.traerElementos(
            campos, condiciones, limite, uniones, orden)
        self.layoutChanged.emit()

    def verDetallesProveedor(self, proveedor, condiciones=None, campos=None):
        """Fetch and cache the full record for one supplier.

        *proveedor* may be a Qt model index (row is resolved against the
        cached list) or a row tuple.  Returns the record, or None when
        no match is found.
        """
        if proveedor.row() >= 0:
            proveedor = self.proveedores[proveedor.row()]
        if not condiciones:
            condiciones = [('prov_id', '=', proveedor[0])]
        resultado = self.__querier.traerElementos(campos, condiciones, 1)
        if resultado:
            self.proveedor = resultado[0]
        else:
            return None
        return self.proveedor

    def modificarProveedor(self, proveedor):
        """Validate *proveedor* and persist the update; return validity."""
        v = self.__v.validate(proveedor, self.__scProveedor)
        if v:
            self.__querier.actualizarElemento(proveedor)
        else:
            print("ERRORES: ", self.__v.errors)
        return v

    def getId(self):
        """Return the id of the currently cached supplier record."""
        return self.proveedor[0]

    def getIdByNombre(self, nombre):
        """Look up a supplier id by (partial) name; return 0 when not found."""
        campos = ["prov_id"]
        condiciones = [("prov_nombre", "LIKE", "'%{}%'".format(nombre))]
        try:
            resultado = self.__querier.traerElementos(campos=campos,
                                                      condiciones=condiciones)
            return resultado[0][0]
        # BUG FIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt; keep the best-effort fallback but only for
        # ordinary errors (empty result, DB failure).
        except Exception:
            return 0

    def agregarNone(self):
        """Prepend a placeholder row (used as a combo-box default)."""
        self.proveedores.insert(0, ["Proveedor"])
        self.layoutChanged.emit()

    # def asociarProveedor(self, proveedor = { 'prov_nombre' : 'Indeterminado' }):
    #     # The default supplier id should not be 0000 but the one created
    #     # for the supplier named "Indeterminado"
    #
    #     prov_id = proveedor.fetchID()
    #     art_id = self.fetchID()
    #
    #     if prov_id:
    #         QUERY = "UPDATE proveedores SET prov_ID = " + prov_id
    #         + " WHERE proveedores.art_ID = " + art_id
    #     else:
    #         print("El proveedor no existe")

    def toggleProveedorActivo(self, proveedor):
        """Flip the supplier's active flag (0 <-> 1) and persist it."""
        if proveedor['prov_activo']:
            proveedor['prov_activo'] = 0
        else:
            proveedor['prov_activo'] = 1
        self.__querier.actualizarElemento(proveedor)

# ===============================================================
# Qt table-model overrides
# ===============================================================

    def rowCount(self, parent):
        return len(self.proveedores)

    def columnCount(self, parent):
        if len(self.proveedores):
            return len(self.proveedores[0])
        else:
            return 0

    def flags(self, index):
        return QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled

    def data(self, index, role):
        # Only the display role is handled; other roles fall through to None.
        if role == QtCore.Qt.DisplayRole:
            row = index.row()
            column = index.column()
            value = self.proveedores[row][column]

            return value

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        # NOTE(review): this does not actually store *value*; it returns the
        # current cell content instead. Editing is effectively disabled
        # (flags() omits ItemIsEditable), so this is never exercised.
        if role == QtCore.Qt.EditRole:
            row = index.row()
            column = index.column()

            value = self.proveedores[row][column]

            return value

    def headerData(self, section, orientation, role):
        if role == QtCore.Qt.DisplayRole:
            if orientation == QtCore.Qt.Horizontal:
                return self.__propiedades[section]
コード例 #20
0
 def validator(self):
     """Return a cerberus Validator for this class's schema.

     The validator is built lazily on first use and cached on the class
     (in ``_validator``) so all instances share one.
     """
     cached = getattr(type(self), "_validator", None)
     if cached is None:
         cached = cerberus.Validator(self.schema)
         type(self)._validator = cached
     return cached
コード例 #21
0
 def __validate(cls, schema, param):
     """Validate *param* against *schema*; raise with the error dump on failure."""
     checker = cerberus.Validator(schema)
     if not checker.validate(param):
         raise Exception(json.dumps(checker.errors))
コード例 #22
0
def process_map(file_in, validate):
    """Iteratively process each XML element of *file_in* and write csv(s).

    Shaped ``node``/``way`` records go to the module-level csv paths.
    Node tag values are cleaned before writing: street names and amenity
    names are normalized, postcodes are truncated, and tags with invalid
    postcodes or city names are dropped entirely. When *validate* is
    truthy, each shaped element is first checked against the cerberus
    schema via ``validate_element``.
    """
    print('starting to open files...')
    with codecs.open(NODES_PATH, 'wt') as nodes_file, \
         codecs.open(NODE_TAGS_PATH, 'wt') as nodes_tags_file, \
         codecs.open(WAYS_PATH, 'wt') as ways_file, \
         codecs.open(WAY_NODES_PATH, 'wt') as way_nodes_file, \
         codecs.open(WAY_TAGS_PATH, 'wt') as way_tags_file:

        nodes_writer = csv.DictWriter(nodes_file, NODE_FIELDS)
        node_tags_writer = csv.DictWriter(nodes_tags_file, NODE_TAGS_FIELDS)
        ways_writer = csv.DictWriter(ways_file, WAY_FIELDS)
        way_nodes_writer = csv.DictWriter(way_nodes_file, WAY_NODES_FIELDS)
        way_tags_writer = csv.DictWriter(way_tags_file, WAY_TAGS_FIELDS)

        nodes_writer.writeheader()
        node_tags_writer.writeheader()
        ways_writer.writeheader()
        way_nodes_writer.writeheader()
        way_tags_writer.writeheader()

        validator = cerberus.Validator()

        for idx, element in enumerate(
                get_element(file_in, tags=('node', 'way'))):
            if idx % 1000 == 0:
                print("progress -> {}".format(idx))
            el = shape_element(element)
            if el:
                if validate:
                    validate_element(el, validator)

                if element.tag == 'node':
                    nodes_writer.writerow(el['node'])

                    # BUG FIX: the inner loop previously reused the name
                    # `element`, shadowing the outer XML element; use a
                    # distinct name for the tag dicts.
                    for tag in el['node_tags']:
                        if tag.get('key') == 'street':
                            street_name = tag.get('value')
                            tag['value'] = street_names.update_name(
                                street_name)

                        elif tag.get('key') == 'amenity':
                            amenity_name = tag.get('value')
                            tag['value'] = amenities.normalize_amenity(
                                amenity_name)

                        elif tag.get('key') == 'postcode':
                            postcode = tag.get('value')
                            if not postcodes.is_valid_postcode(postcode):
                                # drop tags with invalid postcodes
                                continue

                            tag['value'] = postcodes.truncate_postcode(
                                postcode)

                        elif tag.get('key') == 'city':
                            city_name = tag.get('value')
                            if not city.is_valid_city(city_name):
                                # drop tags with invalid city names
                                continue

                        node_tags_writer.writerow(tag)

                elif element.tag == 'way':
                    ways_writer.writerow(el['way'])
                    way_nodes_writer.writerows(el['way_nodes'])
                    way_tags_writer.writerows(el['way_tags'])
コード例 #23
0
ファイル: test_dog.py プロジェクト: maximgonchar/hw3_test_API
 def test_json_schema_multiply_random_dogs(self,
                                           response_multiply_random_dog):
     """Check, via cerberus, the response structure of a request for a
     random list of several dogs."""
     expected_schema = {"message": {"type": "list"}, "status": {"type": "string"}}
     checker = cerberus.Validator()
     assert checker.validate(response_multiply_random_dog, expected_schema)
コード例 #24
0
class ModeloProcesador(QtCore.QAbstractTableModel):
    """Qt table model that reconciles bank debit results from a source
    file against the ``debitos`` database table, then applies the
    outcome (processed / rejected) back to the database and exports a
    summary spreadsheet.
    """

    # Shared, class-level helpers (one instance for the whole class).
    __querier = querier.Querier()
    __v = cerberus.Validator()

    # The idea now is to keep the validation schema separate from the data
    # and fields that will actually be used; we will not show every field
    # in the table. The chosen columns are listed in 'cosas para hacer.md':
    # | Month | Provider | Amount | Installment N° | Total installments

    def __init__(self, propiedades=None, parent=None):
        super(ModeloProcesador, self).__init__()
        if parent:
            self.__parent = parent
        # Default display column headers.
        self.__propiedades = [
            "Estado", "Fecha", "Legajo", "Banco", "CBU", "Importe", "CUIT",
            "Orden de movimiento", "Codigo de rechazo", "Motivo de rechazo",
            "Empresa"
        ]

        if propiedades:
            self.__propiedades = self.validarPropiedades(propiedades)

        self.__debitosBet = [
        ]  # rows taken from the source (bank result) file

        self.__debitosDatabase = [
        ]  # rows loaded from the database
        self.__debitosNoProcesados = []
        self.__debitosRechazados = []
        self.__debitosProcesados = []
        # NOTE(review): __debitosProcesadosDB is first assigned in
        # verListaDebitosAProcesar, not here — confirm that is always
        # called before apllicarCambios.
        self.__codigosDeRechazo = {}

        self.verListaCodigosDeRechazo()

        # self.setConfig(banco = "P", cuit = "30561600194", empresa = "SIND T MUN MERLO")

    def verListaCodigosDeRechazo(self):
        """Load the rejection-code table into a code -> description dict."""

        self.__codigosDeRechazo = self.__querier.traerElementos(
            campos=["codigo", "descripcion"], tabla="codigos_rechazo")
        codigosDeRechazo = dict(
            (codigo, descripcion)
            for codigo, descripcion in self.__codigosDeRechazo)
        self.__codigosDeRechazo = codigosDeRechazo
        # Map the "no code" markers (None and three spaces) to an empty
        # description so lookups never fail on unrejected rows.
        self.__codigosDeRechazo[None] = ""
        self.__codigosDeRechazo['   '] = ""
        # __codigosDeRechazo now maps each DB rejection code to its
        # description (it started as a list of row tuples).

    def verListaDebitosAProcesar(self, lista):
        """Take the file rows in *lista*, resolve each against the
        database, and partition them into processed / rejected buckets.
        Always returns True and refreshes the view."""

        self.__debitosBet = lista
        self.verListaDebitosDatabase()
        self.__debitosProcesados = []
        self.__debitosProcesadosDB = []
        self.__debitosRechazados = []
        # self.procesables = []

        for index, debito in enumerate(self.__debitosBet):
            # Column 8 of a file row holds the rejection code ('   ' = none).
            codigo = debito[8]

            if codigo != "   ":
                # Column 9 gets the human-readable rejection description.
                self.__debitosBet[index][9] = self.__codigosDeRechazo[codigo]

            self.obtenerDebitoProcesable(debito)

        # if self.procesables:
        print("DEBUG - Debitos a procesar: ", self.__debitosBet)
        print("DEBUG - Procesados: ", self.__debitosProcesados)
        print("DEBUG - Rechazados, para clonar: ", self.__debitosRechazados)
        print("DEBUG - No Procesados, para clonar: ",
              self.__debitosNoProcesados)
        # self.__debitosBet = self.procesables
        self.layoutChanged.emit()
        return True
        # return False

    def verListaDebitosDatabase(self):
        """Reload pending debits (those with a temporary id) from the
        database, joined with the affiliate table."""

        self.__debitosDatabase = self.__querier.traerElementos(
            campos=[
                "id", "id_temporal", "legajo_afiliado", "cbu",
                "fecha_descuento", "importe_actual", "fecha_carga_inicial",
                "proveedor_id", "cuota_actual", "total_cuotas",
                "importe_total", "n_orden", "estado", "motivo"
            ],
            tabla="debitos",
            uniones=[("afiliados",
                      "afiliados.legajo = debitos.legajo_afiliado")],
            condiciones=[("id_temporal", "IS NOT", "NULL")],
            orden=("id_temporal", "ASC"))

        print(self.__debitosDatabase)

    def apllicarCambios(self):
        """Persist the processed/rejected partition to the database and
        export the summary spreadsheet.

        Processed debits are marked done (temporary id cleared); rejected
        ones are cloned for next month and the original flagged as
        'Rechazado'.

        NOTE(review): the method name has a typo ('apllicar'); renaming
        would break existing callers, so it is kept as-is.
        """
        self.guardarXls()
        for debito in self.__debitosProcesados:
            print("Estado a actualizar: " + debito[0])
            print("Motivo a actualizar: " + debito[8])
            print("DEBITO PROCESADO a actulizar: ", debito)
            self.__querier.actualizarElemento(tabla="debitos",
                                              elemento={
                                                  "id_temporal": None,
                                                  "estado": debito[0],
                                                  "motivo": debito[8]
                                              },
                                              condiciones=[("id_temporal", "=",
                                                            int(debito[7])),
                                                           ("legajo_afiliado",
                                                            "=", debito[2])])

        for debito in self.__debitosRechazados:
            # Clone the rejected debit so it is retried next month.
            self.__querier.insertarElemento(
                tabla="debitos",
                elemento={
                    "id_temporal": None,
                    "legajo_afiliado": debito[2],
                    "fecha_descuento": debito[4] +
                    relativedelta(months=1),  # new date for next month
                    "importe_actual": debito[5],
                    "fecha_carga_inicial": debito[6],
                    "proveedor_id": debito[7],
                    "cuota_actual": debito[8],
                    "total_cuotas": debito[9],
                    "importe_total": debito[10],
                    "n_orden": debito[11],
                    "estado": None,
                    "motivo": None
                })
            # Mark the original row as rejected.
            self.__querier.actualizarElemento(tabla="debitos",
                                              elemento={
                                                  "id_temporal": None,
                                                  "estado": "Rechazado",
                                                  "motivo": debito[13]
                                              },
                                              condiciones=[("id", "=",
                                                            debito[0])])

        self.verListaDebitosDatabase()
        self.limpiarTabla()

    def actualizarDebito(self, debito, condiciones):
        """Thin wrapper: update one ``debitos`` row matching *condiciones*."""
        self.__querier.actualizarElemento(tabla="debitos",
                                          elemento=debito,
                                          condiciones=condiciones)

    def obtenerDebitoProcesable(self, debito):
        """Match one file row against the cached database rows and sort it
        into the processed or rejected bucket.

        Returns True when a matching DB row was found, False otherwise.
        Mismatched CBU / date / amount values are only reported, not
        treated as errors.
        """
        # We compare the elements of self.__debitosBet against those of
        # self.__debitosDatabase. The latter is fetched sorted so lookups
        # are faster; the former needs no sorting since we iterate over it
        # record by record.

        # noProcesados = []
        # ignorados = []

        match = []

        # Positional column indices: f_* for file rows, db_* for DB rows.
        f_idTemporal, f_legajoAfiliado, f_cbu, f_fecha, f_importe, f_estado, f_codigo_error = 7, 2, 4, 1, 5, 0, 8
        db_idTemporal, db_legajoAfiliado, db_cbu, db_fecha, db_importe, db_estado, db_motivo = 1, 2, 3, 4, 5, 12, 13

        for index, possMatch in enumerate(self.__debitosDatabase):
            if possMatch[db_idTemporal] == int(
                    debito[f_idTemporal]
            ) and possMatch[db_legajoAfiliado] == debito[f_legajoAfiliado]:
                match = self.__debitosDatabase[index]

                print("\nDEBUG - El item file contiene: ")
                print("Legajo de Afiliado: ", debito[f_legajoAfiliado])
                print("Id Temporal: ", debito[f_idTemporal])
                print("Estado: ", debito[f_estado])
                print("Codigo de error: ", debito[f_codigo_error])

                print("\nDEBUG - El item en db contiene: ")
                print("Legajo de Afiliado: ", match[db_legajoAfiliado])
                print("Id Temporal", match[db_idTemporal])

                print("")

                if debito[f_cbu] != match[db_cbu]:  # CBU
                    print("Los CBU no coinciden en ", match[db_legajoAfiliado])
                    print(debito[f_cbu])
                    print(match[db_cbu])
                if debito[f_fecha] != match[db_fecha]:  # date
                    print("Las fechas no coinciden en ",
                          match[db_legajoAfiliado])
                    print(debito[f_fecha])
                    print(match[db_fecha].strftime('%d/%m/%Y'))
                if debito[f_importe] != match[db_importe]:  # amount
                    print("Los importes no coinciden en ",
                          match[db_legajoAfiliado])
                    print(debito[f_importe])
                    print(match[db_importe])

                # Anything not cleanly 'Procesado' (or carrying an error
                # code) counts as rejected.
                if debito[f_estado] != "Procesado" or debito[
                        f_codigo_error] != "   ":
                    match = list(match)
                    match[db_estado] = debito[f_estado]
                    match[db_motivo] = debito[f_codigo_error]
                    self.__debitosRechazados.append(match)
                else:
                    self.__debitosProcesadosDB.append(list(match))
                    self.__debitosProcesados.append(list(debito))
                return True
        return False

    def guardarXls(self):
        """Build the summary workbook (processed rows first, then rejected
        rows with their rejection description) and ask the user where to
        save it."""
        wb = openpyxl.Workbook()
        ws = wb.worksheets[0]

        registrosProcesados = len(self.__debitosProcesados)
        registrosRechazados = len(self.__debitosRechazados)
        # registrosModificados = registrosProcesados + registrosRechazados

        debitosXls = self.__querier.traerElementos(
            tabla="debitos",
            campos=[
                "CONCAT(afiliados.nombre, ' ', afiliados. apellido)",
                "importe_actual", "cuota_actual", "total_cuotas", "motivo",
                "proveedores.nombre", "id_temporal", "legajo_afiliado"
            ],
            uniones=[('afiliados',
                      'afiliados.legajo = debitos.legajo_afiliado'),
                     ('proveedores', 'proveedores.id = debitos.proveedor_id')],
            condiciones=[('id_temporal', 'IS NOT', ' NULL')],
            # limite = registrosModificados,
            orden=("debitos.id", "DESC"))

        ws['A1'] = 'Afiliado'
        ws['B1'] = 'Importe'
        ws['C1'] = 'Cuota'
        ws['D1'] = 'Cantidad de cuotas'
        ws['E1'] = 'Rechazado'
        ws['F1'] = 'Empresa'

        print("DBG - COSAS Proc: ", self.__debitosProcesados)
        print("DBG - COSAS Rech: ", self.__debitosRechazados)
        print("DBG - registros procesados: ", registrosProcesados)

        # Positional indices: db_* in debitosXls rows, fp_* in processed
        # file rows, fr_* in rejected (DB-shaped) rows.
        db_id_temporal, db_legajo = 6, 7
        fp_id_temporal, fp_legajo = 7, 2
        fr_id_temporal, fr_legajo = 1, 2

        index = 2
        for possMatch in self.__debitosProcesados:
            for debito in debitosXls:

                print(possMatch[fp_legajo], debito[db_legajo])
                print(possMatch[fp_id_temporal], debito[db_id_temporal])
                if possMatch[fp_legajo] == debito[db_legajo] and int(
                        possMatch[fp_id_temporal]) == debito[db_id_temporal]:

                    a = 'A{}'.format(index)
                    b = 'B{}'.format(index)
                    c = 'C{}'.format(index)
                    d = 'D{}'.format(index)
                    e = 'E{}'.format(index)  # NOTE(review): unused for processed rows (no rejection text)
                    f = 'F{}'.format(index)
                    ws[a] = debito[0]
                    ws[b] = debito[1]
                    ws[c] = debito[2]
                    ws[d] = debito[3]
                    ws[f] = debito[5]

                    print(debito, index)
                    index += 1

        # Rejected rows are written below the processed block.
        index = 3
        for possMatch in self.__debitosRechazados:
            for debito in debitosXls:
                if int(possMatch[fr_id_temporal]
                       ) == debito[db_id_temporal] and possMatch[
                           fr_legajo] == debito[db_legajo]:
                    a = 'A{}'.format(index + registrosProcesados)
                    b = 'B{}'.format(index + registrosProcesados)
                    c = 'C{}'.format(index + registrosProcesados)
                    d = 'D{}'.format(index + registrosProcesados)
                    e = 'E{}'.format(index + registrosProcesados)
                    f = 'F{}'.format(index + registrosProcesados)
                    ws[a] = debito[0]
                    ws[b] = debito[1]
                    ws[c] = debito[2]
                    ws[d] = debito[3]
                    ws[e] = self.__codigosDeRechazo[possMatch[13]]
                    ws[f] = debito[5]

                    print(debito, index)
                    index += 1

# Open a dialog asking the user where to save the workbook.
        self.handleSave(wb)
        wb.close()

    def handleSave(self, workbook):
        """Prompt for a destination path and save *workbook* (no-op if
        the dialog is cancelled)."""
        path = QFileDialog.getSaveFileName(None, 'Save File', '/comercios/',
                                           'Excel(*.xlsx)')
        if not path[0]: return
        workbook.save(path[0])

    def limpiarTabla(self):
        """Clear the file-row cache and refresh the view."""
        self.__debitosBet = []
        self.layoutChanged.emit()

    def __setTotales(self, indexImporte):
        """Recompute debit count and total amount from the cached rows."""
        self.total_debitos = len(self.__debitosBet)
        self.importe_total = 0
        if self.total_debitos > 0:
            for debito in self.__debitosBet:
                self.importe_total += debito[indexImporte]

    def __toString(self, index):
        """Coerce column *index* of every cached row to str, in place."""
        for debito in self.__debitosBet:
            debito[index] = str(debito[index])

    def validarPropiedades(self, propiedades):
        """Filter *propiedades* down to known column names, warning about
        (and dropping) unknown ones."""
        # This now makes sure the properties exist in the list; there may
        # well be a more elegant way to do this.
        if propiedades:
            prop = []
            for propiedad in propiedades:
                if propiedad in self.__propiedades:
                    prop.append(propiedad)
                else:
                    print("Propiedad '{}' es inválida, no se agregará".format(
                        propiedad))
            return prop

# Qt table-model overrides

    def rowCount(self, parent):
        return len(self.__debitosBet)

    def columnCount(self, parent):
        if self.__debitosBet:
            return len(self.__debitosBet[0])
        else:
            return 0

    def flags(self, index):
        return QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled

    def data(self, index, role):
        # The display data comes from the cached file rows (__debitosBet).
        if role == QtCore.Qt.DisplayRole:
            row = index.row()
            column = index.column()
            value = self.__debitosBet[row][
                column]  # list-of-lists holding the affiliate rows

            return value  # the value returned is what appears in the table

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        # This function is not currently used.
        # NOTE(review): self.articulos is never defined on this class, so
        # calling this would raise AttributeError — confirm before enabling
        # editing.
        if role == QtCore.Qt.EditRole:
            row = index.row()
            column = index.column()

            value = self.articulos[row][column]

            return value

    def headerData(self, section, orientation, role):
        if role == QtCore.Qt.DisplayRole:
            if orientation == QtCore.Qt.Horizontal:
                # dict objects do not keep a meaningful order for this use,
                # so dict.keys() would yield column names in arbitrary order;
                # the __propiedades list is used instead — ordered, and it
                # also matches the intended number of columns.
                keys = list(self.__propiedades)
                return keys[section]
コード例 #25
0
ファイル: schema_v1.py プロジェクト: uSpike/molecule
def validate(c):
    """Validate *c* against the v1 base schema and return the error dict
    (empty when the document is valid)."""
    checker = cerberus.Validator()
    checker.validate(c, base_schema)
    return checker.errors
コード例 #26
0
def params_from_file(path):
    """Load and validate setup parameters from the JSON file at *path*.

    Returns a ``(params, error, code)`` tuple: on success the parsed
    document, an empty error and code 0; on read/parse failure the
    raised exception and code 1; on schema failure the cerberus error
    dict and code 1.
    """
    params = {}
    error = {}
    status = 0

    try:
        with open(path) as handle:
            document = json.loads(handle.read())
    except (IOError, ValueError) as exc:
        return params, exc, 1

    schema = {
        "hostname": {
            "type": "string",
            "required": True,
            "check_with": validate_hostname,
        },
        "email": {
            "type": "string",
            "required": True,
            "check_with": validate_email,
        },
        "admin_pw": {
            "type": "string",
            "required": True,
            "check_with": validate_admin_pw,
        },
        "ldap_pw": {
            "type": "string",
            "required": True,
            "check_with": validate_admin_pw,
        },
        "org_name": {
            "type": "string",
            "required": True,
            "empty": False,
        },
        "country_code": {
            "type": "string",
            "required": True,
            "minlength": 2,
            "maxlength": 2,
        },
        "state": {
            "type": "string",
            "required": True,
            "empty": False,
        },
        "city": {
            "type": "string",
            "required": True,
            "empty": False,
        },
    }

    checker = cerberus.Validator(schema)
    checker.allow_unknown = True

    if checker.validate(document):
        params = document
    else:
        error = checker.errors
        status = 1
    return params, error, status
コード例 #27
0
class BatchClient:
    """Thin client for the batch service REST API.

    Wraps the low-level ``api`` module and converts the raw job/batch
    dicts it returns into :class:`Job` / :class:`Batch` helpers.
    """

    def __init__(self, url=None, api=api.DEFAULT_API):
        if not url:
            url = 'http://batch.default'
        self.url = url
        self.api = api

    def _create_job(
            self,  # pylint: disable=R0912
            image,
            command,
            args,
            env,
            ports,
            resources,
            tolerations,
            volumes,
            security_context,
            service_account_name,
            attributes,
            batch_id,
            callback,
            parent_ids):
        """Build a Kubernetes pod spec from the given pieces and submit it.

        Returns a :class:`Job` for the created job. ``POD_IP`` and
        ``POD_NAME`` env vars are always injected via the downward API.
        """
        if env:
            env = [{'name': k, 'value': v} for (k, v) in env.items()]
        else:
            env = []
        env.extend([{
            'name': 'POD_IP',
            'valueFrom': {
                'fieldRef': {
                    'fieldPath': 'status.podIP'
                }
            }
        }, {
            'name': 'POD_NAME',
            'valueFrom': {
                'fieldRef': {
                    'fieldPath': 'metadata.name'
                }
            }
        }])

        container = {'image': image, 'name': 'default'}
        if command:
            container['command'] = command
        if args:
            container['args'] = args
        if env:
            container['env'] = env
        if ports:
            container['ports'] = [{
                'containerPort': p,
                'protocol': 'TCP'
            } for p in ports]
        if resources:
            container['resources'] = resources
        if volumes:
            container['volumeMounts'] = [v['volume_mount'] for v in volumes]
        spec = {'containers': [container], 'restartPolicy': 'Never'}
        if volumes:
            spec['volumes'] = [v['volume'] for v in volumes]
        if tolerations:
            spec['tolerations'] = tolerations
        if security_context:
            spec['securityContext'] = security_context
        if service_account_name:
            spec['serviceAccountName'] = service_account_name

        j = self.api.create_job(self.url, spec, attributes, batch_id, callback,
                                parent_ids)
        return Job(self, j['id'], j.get('attributes'), j.get('parent_ids', []))

    def _get_job(self, id):
        return self.api.get_job(self.url, id)

    def _get_job_log(self, id):
        return self.api.get_job_log(self.url, id)

    def _delete_job(self, id):
        self.api.delete_job(self.url, id)

    def _cancel_job(self, id):
        self.api.cancel_job(self.url, id)

    def _get_batch(self, batch_id):
        return self.api.get_batch(self.url, batch_id)

    def _close_batch(self, batch_id):
        return self.api.close_batch(self.url, batch_id)

    def _refresh_k8s_state(self):
        self.api.refresh_k8s_state(self.url)

    def list_jobs(self):
        """Return all jobs known to the service as :class:`Job` objects."""
        jobs = self.api.list_jobs(self.url)
        return [
            Job(self, j['id'], j.get('attributes'), j.get('parent_ids', []), j)
            for j in jobs
        ]

    def get_job(self, id):
        """Fetch one job by id; raises through the api layer if missing."""
        # make sure job exists
        j = self.api.get_job(self.url, id)
        return Job(self, j['id'], j.get('attributes'), j.get('parent_ids', []),
                   j)

    def create_job(self,
                   image,
                   command=None,
                   args=None,
                   env=None,
                   ports=None,
                   resources=None,
                   tolerations=None,
                   volumes=None,
                   security_context=None,
                   service_account_name=None,
                   attributes=None,
                   callback=None,
                   parent_ids=None):
        """Create a standalone job (no batch). See ``_create_job``."""
        if parent_ids is None:
            parent_ids = []
        return self._create_job(image, command, args, env, ports, resources,
                                tolerations, volumes, security_context,
                                service_account_name, attributes, None,
                                callback, parent_ids)

    def create_batch(self, attributes=None, callback=None):
        """Create an empty batch and return its :class:`Batch` wrapper."""
        batch = self.api.create_batch(self.url, attributes, callback)
        return Batch(self, batch['id'])

    # Schema for the per-document YAML job definitions accepted by
    # create_batch_from_file.
    job_yaml_schema = {
        'spec': schemas.pod_spec,
        'type': {
            'type': 'string',
            'allowed': ['execute']
        },
        'name': {
            'type': 'string'
        },
        'dependsOn': {
            'type': 'list',
            'schema': {
                'type': 'string'
            }
        },
    }
    job_yaml_validator = cerberus.Validator(job_yaml_schema)

    def create_batch_from_file(self, file):
        """Create a batch with one job per YAML document in *file*.

        Documents may reference earlier jobs by name via ``dependsOn``;
        a forward or missing reference raises ValueError, as does a
        document that fails schema validation.
        """
        job_id_by_name = {}

        def job_id_by_name_or_error(id, self_id):
            job = job_id_by_name.get(id)
            if job:
                return job
            # BUG FIX: the message previously lacked the f-prefix, so the
            # placeholders were never interpolated.
            raise ValueError(
                f'"{self_id}" must appear in the file after its dependency "{id}"'
            )

        batch = self.create_batch()
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input — consider yaml.safe_load if *file* may be
        # user-supplied.
        for doc in yaml.load(file):
            if not BatchClient.job_yaml_validator.validate(doc):
                # BUG FIX: previously raised the error dict itself, which is
                # a TypeError (only BaseException subclasses can be raised).
                raise ValueError(str(BatchClient.job_yaml_validator.errors))
            spec = doc['spec']
            type = doc['type']
            name = doc['name']
            dependsOn = doc.get('dependsOn', [])
            if type == 'execute':
                job = batch.create_job(parent_ids=[
                    job_id_by_name_or_error(x, name) for x in dependsOn
                ],
                                       **spec)
                job_id_by_name[name] = job.id
        return batch
コード例 #28
0
def process_map(osm_file, validate):
    """Shape each element of the OSM file and write it to the CSV files.

    Skips the work entirely when nodes.csv already exists and the
    overpassCSV override is off.  Sets the module-level part_message /
    end_message fragments used for status reporting.
    """
    global part_message
    global end_message

    if validate:
        end_message = " validated. Please process the map."
    else:
        end_message = " processed. You can continue creation and importing."

    # Guard clause: the CSVs already exist and no rewrite was requested.
    if os.path.isfile('nodes.csv') and not overpassCSV:
        print(
            "The CSV file process has been done. Set overpassCSV to true to rewrite CSV files or to validate."
        )
        part_message = " was"
        return

    print("Beginning Process...")
    part_message = " has been"
    # codecs.open returns a wrapped file object providing transparent
    # encoding/decoding; open every target CSV for writing at once.
    with codecs.open(NODES_FILE, 'wb') as nodes_file, \
         codecs.open(NODE_TAGS_FILE, 'wb') as nodes_tags_file, \
         codecs.open(WAYS_FILE, 'wb') as ways_file, \
         codecs.open(WAY_NODES_FILE, 'wb') as way_nodes_file, \
         codecs.open(WAY_TAGS_FILE, 'wb') as way_tags_file:

        # One dict-writer per output file.
        node_writer = UnicodeDictWriter(nodes_file, NODE_FIELDS)
        node_tag_writer = UnicodeDictWriter(nodes_tags_file, NODE_TAGS_FIELDS)
        way_writer = UnicodeDictWriter(ways_file, WAY_FIELDS)
        way_node_writer = UnicodeDictWriter(way_nodes_file, WAY_NODES_FIELDS)
        way_tag_writer = UnicodeDictWriter(way_tags_file, WAY_TAGS_FIELDS)

        # Emit the header row of every CSV.
        for writer in (node_writer, node_tag_writer, way_writer,
                       way_node_writer, way_tag_writer):
            writer.writeheader()

        # Validator used to check integrity against the schema.
        validator = cerberus.Validator()

        print("Shaping & Writing Elements...")
        for element in get_element(osm_file):
            # Shape the element according to the schema.
            shaped = shape_element(element)
            if not shaped:
                continue

            # Optionally validate the shaped element for consistency.
            if validate:
                validate_element(shaped, validator)

            # Write the row(s) for nodes or ways.
            if element.tag == 'node':
                node_writer.writerow(shaped['node'])
                node_tag_writer.writerows(shaped['node_tags'])
            elif element.tag == 'way':
                way_writer.writerow(shaped['way'])
                way_node_writer.writerows(shaped['way_nodes'])
                way_tag_writer.writerows(shaped['way_tags'])
コード例 #29
0
ファイル: config.py プロジェクト: koopa/egress0r
def validate(config):
    """Validate the egress0r configuration mapping against its schema.

    Parameters:
        config: the parsed configuration (a dict).

    Returns:
        (is_valid, errors) -- ``errors`` is cerberus' error dict on a
        normal run, or a descriptive string when the document itself is
        malformed (e.g. not a mapping at all).
    """
    # Reusable rule fragments.  Cerberus treats schema dicts as read-only,
    # so sharing one fragment object between entries is safe and removes
    # the heavy duplication the schema otherwise has.
    required_bool = {'type': 'boolean', 'required': True}
    positive_int = {'type': 'integer', 'required': True, 'min': 1}
    required_str = {'type': 'string', 'required': True, 'empty': False}
    nullable_str = {
        'type': 'string',
        'required': True,
        'nullable': True,
        'empty': False
    }
    email_regex = r'.+?@.+\..+'
    # Loosely matches either a dotted-quad IPv4 or an IPv6 address.
    ip_regex = r'^((\d{1,3}\.){3}\d{1,3}|[0-9a-fA-F:]+)$'
    toggle = {'allowed': [None, 'enable', 'disable'], 'nullable': True}
    proxy_url = {
        'type': 'string',
        'nullable': True,
        'regex': r'^(http(s)?|socks5)://.+'
    }
    record_type = {
        'type': 'string',
        'required': True,
        'empty': False,
        'allowed': ['A', 'AAAA', 'MX', 'TXT']
    }
    # List of IPv4/IPv6 host addresses.
    host_list = {
        'type': 'list',
        'required': True,
        'schema': {
            'type': 'string',
            'required': True,
            'empty': False,
            'regex': ip_regex
        }
    }
    # Exfiltration settings that only name a payload file.
    exfil_file_only = {
        'type': 'dict',
        'required': True,
        'empty': False,
        'schema': {
            'filename': required_str
        }
    }

    schema = {
        'sanity': {
            'type': 'dict',
            'required': True,
            'schema': {
                'override': {
                    'type': 'dict',
                    'required': True,
                    'schema': {
                        'ipv4': toggle,
                        'ipv6': toggle
                    }
                }
            }
        },
        'check': {
            'type': 'dict',
            'required': True,
            # One boolean switch per supported check.
            'schema': {
                check: required_bool
                for check in ('port', 'icmp', 'http', 'smtp', 'dns', 'ftp')
            }
        },
        'smtp': {
            'type': 'dict',
            'required': True,
            'schema': {
                'timeout': positive_int,
                'host': required_str,
                'port': {
                    'type': 'integer',
                    'required': True
                },
                'encryption': {
                    'allowed': [None, 'tls', 'ssl'],
                    'nullable': True
                },
                'from_addr': {
                    'type': 'string',
                    'required': True,
                    'empty': False,
                    'regex': email_regex
                },
                'to_addr': {
                    'type': 'string',
                    'required': True,
                    'regex': email_regex
                },
                'username': nullable_str,
                'password': nullable_str,
                'exfil': {
                    'type': 'dict',
                    'required': True,
                    'empty': False,
                    'schema': {
                        'filename': required_str,
                        'payload_mode': {
                            'type': 'string',
                            'empty': False,
                            'required': True,
                            'allowed': ['attachment', 'inline']
                        }
                    }
                },
                'message': {
                    'type': 'string',
                    'empty': True,
                    'required': False,
                    'nullable': True
                },
                'subject': required_str,
            }
        },
        'port': {
            'type': 'dict',
            'required': True,
            'schema': {
                'mode': {
                    'type': 'string',
                    'allowed': ['top10', 'top100', 'all']
                },
                'ipv4_addr': {
                    'type': 'string',
                    'required': True,
                    'empty': False,
                    'regex': r'^(\d{1,3}\.){3}\d{1,3}$'
                },
                'ipv6_addr': {
                    'type': 'string',
                    'required': True,
                    'empty': False,
                    'regex': r'^[0-9a-fA-F:]+$'
                },
                'with_tcp': required_bool,
                'tcp_timeout': positive_int,
                'with_udp': required_bool,
                'udp_timeout': positive_int
            }
        },
        'http': {
            'type': 'dict',
            'required': True,
            'schema': {
                'timeout': positive_int,
                'exfil': exfil_file_only,
                'verbs': {
                    'type': 'list',
                    'required': True,
                    'allowed': ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']
                },
                'urls': {
                    'type': 'list',
                    'required': True,
                    'schema': {
                        'type': 'string',
                        'regex': r'^http(s)?://.*$'
                    }
                },
                'proxies': {
                    'type': 'dict',
                    'required': True,
                    'schema': {
                        'http': proxy_url,
                        'https': proxy_url
                    }
                }
            }
        },
        'icmp': {
            'type': 'dict',
            'required': True,
            'schema': {
                'timeout': positive_int,
                'exfil': {
                    'type': 'dict',
                    'required': True,
                    'empty': False,
                    'schema': {
                        'filename': required_str,
                        'max_chunks': positive_int,
                        'chunk_size': positive_int
                    }
                },
                'target_hosts': host_list
            }
        },
        'dns': {
            'type': 'dict',
            'required': True,
            'schema': {
                'timeout': positive_int,
                'servers': host_list,
                'exfil': {
                    'type': 'dict',
                    'required': True,
                    'empty': False,
                    'schema': {
                        'filename': required_str,
                        'nameserver': {
                            'type': 'string',
                            'required': True,
                            'empty': False,
                            'regex': ip_regex
                        },
                        'domain': required_str,
                        'record_type': record_type,
                        'max_chunks': positive_int,
                        'chunk_size': positive_int
                    }
                },
                'queries': {
                    'type': 'list',
                    'required': True,
                    'schema': {
                        'type': 'dict',
                        'required': True,
                        'empty': False,
                        'schema': {
                            'record': required_str,
                            'record_type': record_type,
                            'expected_answers': {
                                'type': 'list',
                                'required': False
                            }
                        }
                    }
                }
            }
        },
        'ftp': {
            'type': 'dict',
            'required': True,
            'empty': False,
            'schema': {
                'timeout': positive_int,
                'host': required_str,
                'username': required_str,
                'password': required_str,
                'upload_dir': {
                    'type': 'string',
                    'required': True,
                    'nullable': True
                },
                'exfil': exfil_file_only
            }
        }
    }
    try:
        validator = cerberus.Validator(schema)
        is_valid = validator.validate(config)
        return is_valid, validator.errors
    except cerberus.validator.DocumentError as e:
        # Raised when the document is not a mapping at all.
        return False, f'Config file error: {e}'
コード例 #30
0
        prize_email:
          type:     string
          regex:    '[\w]+@.*'

    - rule_caption:     check-for-allergies
      rule_vpath:       "@|@.person_allergies"
      validation_schema:
        person_allergies:
          required:   True
          type:       string
    ''')
    pass
##endif

if (True and "iterate"):
    vcc = cerberus.Validator(allow_unknown=True)
    for dataroot in aadocuments:

        print(
            "## -------------------------------------------------------------------"
        )
        print("## {person_fname} {person_lname}".format(**dataroot))
        pass

        for myruleset in validationrules_table:
            print("----")
            ddresult = dict()
            ddresult.update(myruleset)
            ddresult['rule_vpath_hasdata'] = (not jmespath.compile(
                myruleset['rule_vpath']).search(dataroot) is None)
            ddresult['rule_vpath_dataval'] = (jmespath.compile(