def load(context: Context, prop: Property, data: dict, manifest: Manifest):
    load_node(context, prop, data, manifest)

    # Load property type. For datasets, property type is optional.
    if prop.type:
        prop.type = load_type(context, prop, data, manifest)
    else:
        prop.type = None

    # Load property source.
    if isinstance(prop.source, list):
        prop.source = load(
            context,
            CommandList(),
            [
                _get_source(s, prop.parent.source or prop.parent.parent.source)
                for s in prop.source
            ],
            parent=prop,
            scope='source',
            argname='source',
        )
    elif prop.source:
        prop.source = load(
            context,
            Command(),
            _get_source(prop.source, prop.parent.source or prop.parent.parent.source),
            parent=prop,
            scope='source',
            argname='source',
        )

    return prop
def context(mocker, config, postgresql, mongo):
    mocker.patch.dict(os.environ, {
        'AUTHLIB_INSECURE_TRANSPORT': '1',
    })

    Context = config.get('components', 'core', 'context', cast=importstr)
    Context = type('ContextForTests', (ContextForTests, Context), {})
    context = Context()

    context.set('config', components.Config())
    store = context.set('store', Store())

    load_commands(config.get('commands', 'modules', cast=list))
    load(context, context.get('config'), config)
    check(context, context.get('config'))
    load(context, store, config)
    check(context, store)

    prepare(context, store.internal)
    migrate(context, store)
    prepare(context, store)
    migrate(context, store)

    context.bind('auth.server', AuthorizationServer, context)
    context.bind('auth.resource_protector', ResourceProtector, context, BearerTokenValidator)

    yield context

    with context.enter():
        # FIXME: quick and dirty workaround for `context.wipe` missing a
        # connection when an exception is thrown in spinta's logic.
        context.set('transaction', store.manifests['default'].backend.transaction(write=True))
        context.set('auth.token', AdminToken())

        # Remove all data after each test run.
        graph = collections.defaultdict(set)
        for model in store.manifests['default'].objects['model'].values():
            if model.name not in graph:
                graph[model.name] = set()
            for prop in model.properties.values():
                if prop.type.name == 'ref':
                    graph[prop.type.object].add(model.name)

        for models in toposort(graph):
            for name in models:
                context.wipe(name)

        # Datasets do not have foreign key constraints, so there is no need
        # to sort them topologically. At least for now.
        for dataset in store.manifests['default'].objects['dataset'].values():
            for model in dataset.objects.values():
                context.wipe(model)

        context.wipe(store.internal.objects['model']['transaction'])
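# A minimal standalone sketch of the wipe-ordering trick used above, assuming
# the `toposort` package is the one imported by the fixture. The model names
# and 'ref' relations are made up for illustration: `comment` references
# `post`, and `post` references `author`, so rows must be wiped child-first.
import collections
from toposort import toposort

graph = collections.defaultdict(set)
for model, referenced in [('comment', 'post'), ('post', 'author')]:
    graph.setdefault(model, set())
    # Key the referenced model on the referencing one, so toposort yields
    # the FK-holding models before the models they point to.
    graph[referenced].add(model)

for models in toposort(graph):
    for name in sorted(models):
        print('wipe', name)
# Prints: wipe comment, then wipe post, then wipe author.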
def load(context: Context, type: Array, value: object) -> list:
    # Load value into a native Python list, including all list items.
    array_item_type = type.items.type
    loaded_array = type.load(value)
    new_loaded_array = []
    for item in loaded_array:
        new_loaded_array.append(load(context, array_item_type, item))
    return new_loaded_array
def main(ctx, option):
    c = Config()
    c.read(cli_args=option)

    load_commands(c.get('commands', 'modules', cast=list))

    Context = c.get('components', 'core', 'context', cast=importstr)
    context = Context()
    config = context.set('config', components.Config())
    store = context.set('store', Store())

    commands.load(context, config, c)
    commands.check(context, config)
    commands.load(context, store, c)
    commands.check(context, store)

    ctx.ensure_object(dict)
    ctx.obj['context'] = context
def load(context: Context, model: Model, data: dict, manifest: Manifest):
    load_node(context, model, data, manifest)

    # Load source.
    if isinstance(model.source, list):
        model.source = load(
            context,
            CommandList(),
            [_get_source(s, model.parent.source) for s in model.source],
            parent=model,
            scope='source',
            argname='source',
        )
    elif model.source:
        model.source = load(
            context,
            CommandList(),
            [_get_source(model.source, model.parent.source)],
            parent=model,
            scope='source',
            argname='source',
        )

    # 'type' is reserved for the object type.
    props = {'type': {'type': 'string'}}
    props.update(data.get('properties') or {})

    # 'id' is reserved for the primary key.
    props['id'] = props.get('id') or {'type': 'string'}
    if props['id'].get('type') is None or props['id'].get('type') == 'pk':
        props['id']['type'] = 'string'

    # Load model properties.
    for name, params in props.items():
        params = {
            'name': name,
            'path': model.path,
            'parent': model,
            **(params or {}),
        }
        model.properties[name] = load(context, Property(), params, manifest)

    return model
def push(context: Context, model: Model, backend: PostgreSQL, data: dict, *, action: str):
    authorize(context, action, model, data=data)

    # Load and check that `data` is valid for its model.
    data = load(context, model, data)
    check(context, model, data)
    data = prepare(context, model, data)

    transaction = context.get('transaction')
    connection = transaction.connection
    table = backend.tables[model.manifest.name][model.name]

    data = {k: v for k, v in data.items() if k in table.main.columns}

    if action == INSERT_ACTION:
        result = connection.execute(
            table.main.insert().values(data),
        )
        row_id = result.inserted_primary_key[0]

    elif action == UPDATE_ACTION:
        data['id'] = int(data['id'])
        result = connection.execute(
            table.main.update().
            where(table.main.c.id == data['id']).
            values(data)
        )
        if result.rowcount == 1:
            row_id = data['id']
        elif result.rowcount == 0:
            raise Exception(f"Update failed, {model} with id {data['id']} was not found.")
        else:
            raise Exception(f"Update failed, {model} with id {data['id']} matched and updated {result.rowcount} rows.")

    elif action == DELETE_ACTION:
        raise NotImplementedError

    else:
        raise Exception(f"Unknown action {action!r}.")

    # Track changes.
    connection.execute(
        table.changes.insert().values(
            transaction_id=transaction.id,
            id=row_id,
            datetime=utcnow(),
            action=action,
            change={k: v for k, v in data.items() if k not in {'id'}},
        ),
    )

    return prepare(context, action, model, backend, {'id': str(row_id)})
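# A minimal, self-contained sketch of the same insert-then-log-change pattern,
# using plain SQLAlchemy core against an in-memory SQLite database. The table
# and column names here are illustrative assumptions, not the actual spinta
# schema.
import datetime
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()

main = sa.Table(
    'country', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('title', sa.String),
)
changes = sa.Table(
    'country_changes', metadata,
    sa.Column('change_id', sa.Integer, primary_key=True),
    sa.Column('transaction_id', sa.Integer),
    sa.Column('id', sa.Integer),          # id of the changed row in `country`
    sa.Column('datetime', sa.DateTime),
    sa.Column('action', sa.String),
    sa.Column('change', sa.JSON),
)
metadata.create_all(engine)

with engine.begin() as conn:
    # Insert the row itself.
    result = conn.execute(main.insert().values({'title': 'Lithuania'}))
    row_id = result.inserted_primary_key[0]
    # Record what changed in the changelog table, inside the same transaction.
    conn.execute(changes.insert().values(
        transaction_id=1,
        id=row_id,
        datetime=datetime.datetime.utcnow(),
        action='insert',
        change={'title': 'Lithuania'},
    ))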
def _dependencies(context: Context, model, deps):
    if deps:
        command_calls = {}
        model_names = set()
        prop_names = []
        prop_name_mapping = {}
        for name, dep in deps.items():
            if isinstance(dep, dict):
                command_calls[name] = dep
                continue
            if '.' not in dep:
                context.error(
                    f"Dependency must be in 'object/name.property' form, got: {dep}."
                )
            model_name, prop_name = dep.split('.', 1)
            model_names.add(model_name)
            prop_names.append(prop_name)
            prop_name_mapping[prop_name] = name

        if len(model_names) > 1:
            names = ', '.join(sorted(model_names))
            context.error(
                f"Dependencies are allowed only from a single model, but more than one model was found: {names}."
            )

        if len(command_calls) > 1:
            context.error("Only one command call is allowed.")

        if len(command_calls) > 0:
            if len(model_names) > 0:
                context.error(
                    "Only one command call or one model is allowed in dependencies."
                )
            for name, cmd in command_calls.items():
                cmd = load(context, Command(), cmd, parent=model, scope='service')
                for value in cmd(context):
                    yield {name: value}
        else:
            model_name = list(model_names)[0]
            params = parse_url_path(model_name)
            depmodel = get_model_from_params(model.manifest, params['path'], params)
            for row in getall(context, depmodel, depmodel.backend, show=prop_names):
                yield {prop_name_mapping[k]: v for k, v in row.items()}
    else:
        yield {}
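# A hypothetical example of the `deps` mapping this generator consumes. String
# values must be in 'object/name.property' form and may only reference a
# single model; a dict value is treated as one external command call. The
# concrete model, property, and command names below are illustrative
# assumptions only.
deps_from_model = {
    'continent_id': 'datasets/geo/continent.id',
    'continent_title': 'datasets/geo/continent.title',
}
deps_from_command = {
    'version': {'range': {'start': 1, 'stop': 10}},
}
# With `deps_from_model`, each row of the referenced model yields one
# `{'continent_id': ..., 'continent_title': ...}` dict; with
# `deps_from_command`, each value produced by the command yields
# `{'version': ...}`.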
def load(context: Context, dataset: Dataset, data: dict, manifest: Manifest):
    load_node(context, dataset, data, manifest)

    if dataset.source:
        dataset.source = load(
            context,
            CommandList(),
            [_get_source(dataset.source, None)],
            parent=dataset,
            scope='source',
            argname='source',
        )

    for name, params in (data.get('objects') or {}).items():
        params = {
            'path': dataset.path,
            'name': name,
            'parent': dataset,
            **(params or {}),
        }
        dataset.objects[name] = load(context, Model(), params, manifest)

    return dataset
def load(context: Context, type: Object, data: dict, manifest: Manifest) -> Type:
    type.properties = {}
    for name, prop in data.get('properties', {}).items():
        prop = {
            'name': name,
            'path': type.prop.path,
            'parent': type.prop,
            **prop,
        }
        type.properties[name] = load(context, type.prop.__class__(), prop, type.prop.manifest)
    return type
def load(context: Context, manifest: Manifest, c: Config):
    config = context.get('config')
    ignore = c.get('ignore', default=[], cast=list)

    # Add all supported node types.
    for name in config.components['nodes'].keys():
        manifest.objects[name] = {}

    for file in manifest.path.glob('**/*.yml'):
        if is_ignored(ignore, manifest.path, file):
            continue

        try:
            data = yaml.load(file.read_text())
        except (ParserError, ScannerError) as e:
            context.error(f"{file}: {e}.")

        if not isinstance(data, dict):
            context.error(f"{file}: expected dict, got {data.__class__.__name__}.")

        if 'type' not in data:
            raise Exception(f"'type' is not defined in {file}.")

        if data['type'] not in manifest.objects:
            raise Exception(f"Unknown type {data['type']!r} in {file}.")

        node = config.components['nodes'][data['type']]()
        data = {
            'path': file,
            'parent': manifest,
            'backend': manifest.backend,
            **data,
        }
        load(context, node, data, manifest)

        if node.name in manifest.objects[node.type]:
            raise Exception(f"Object {node.type} with name {node.name} already exists.")

        manifest.objects[node.type][node.name] = node
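# A hypothetical example of the node data this loader expects from
# `yaml.load()` of a manifest file: a dict with a mandatory 'type' key naming
# one of the configured node types, plus whatever the node-specific loader
# consumes (e.g. 'properties' for models). The concrete keys and values below
# are illustrative assumptions, not taken from a real manifest.
example_model_node = {
    'type': 'model',            # must match a key in config.components['nodes']
    'properties': {
        'title': {'type': 'string'},
        'notes': {'type': 'array', 'items': {'type': 'string'}},
    },
}
# The loader rejects the file if 'type' is missing or unknown, and refuses to
# register two nodes of the same type under the same name.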
def load(context: Context, type: Array, data: dict, manifest: Manifest) -> Type:
    if 'items' in data:
        prop = {
            'name': type.prop.name,
            'path': type.prop.path,
            'parent': type.prop,
            **data['items'],
        }
        type.items = load(context, type.prop.__class__(), prop, type.prop.manifest)
    else:
        type.items = None
    return type
def load(context: Context, type: Object, value: object) -> dict:
    # Load value into a native Python dict, including all of the dict's items.
    loaded_obj = type.load(value)

    # Check that the given object does not have more keys than the type's schema.
    unknown_params = set(loaded_obj.keys()) - set(type.properties.keys())
    if unknown_params:
        raise DataError("Unknown params: %s" % ', '.join(map(repr, sorted(unknown_params))))

    new_loaded_obj = {}
    for k, v in type.properties.items():
        # Only load keys which are declared in the schema.
        if k in loaded_obj:
            new_loaded_obj[k] = load(context, v.type, loaded_obj[k])
    return new_loaded_obj
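# A minimal standalone sketch of the same unknown-key check, assuming a plain
# dict-based schema instead of spinta's Type/Property objects. The schema and
# field names are illustrative only.
schema = {'title': str, 'count': int}

def load_dict(value: dict) -> dict:
    unknown = set(value) - set(schema)
    if unknown:
        raise ValueError("Unknown params: %s" % ', '.join(map(repr, sorted(unknown))))
    # Keep only keys declared in the schema, coercing each value to its type.
    return {k: schema[k](value[k]) for k in schema if k in value}

print(load_dict({'title': 'spam', 'count': '3'}))   # {'title': 'spam', 'count': 3}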
def push(context: Context, model: Model, backend: Mongo, data: dict, *, action: str):
    authorize(context, action, model, data=data)

    # Load and check that `data` is valid for its model.
    data = load(context, model, data)
    check(context, model, data)
    data = prepare(context, model, data)

    # Push data to the Mongo backend; this can be an insert, update or delete.
    # If `id` is not given, it is an insert; if `id` is given, it is an update.
    #
    # Deletes are not yet implemented, but for deletes `data` must be equal to
    # `{'id': 1, '_delete': True}`.
    #
    # Also, this must return the inserted/updated/deleted id.
    #
    # Also, this command must write information to the changelog after the
    # change is done.
    transaction = context.get('transaction')
    model_collection = backend.db[model.get_type_value()]

    # Make a copy of data, because `pymongo` changes the referenced `data`
    # object on the `insert_one()` call.
    #
    # We want to keep our data intact from whatever mongo-specific metadata
    # MongoDB may add to our object.
    raw_data = copy.deepcopy(data)

    # FIXME: before creating a revision, check that there is no id collision.
    revision_id = get_new_id('revision id')
    raw_data['revision'] = revision_id

    if 'id' in data:
        result = model_collection.update_one({'_id': ObjectId(raw_data['id'])}, {'$set': raw_data})
        assert result.matched_count == 1 and result.modified_count == 1
        data_id = data['id']
    else:
        data_id = model_collection.insert_one(raw_data).inserted_id

    # Convert `ObjectId` to string and add it to our object.
    raw_data['id'] = str(data_id)

    return prepare(context, action, model, backend, raw_data)
def load(
    context: Context,
    command: CommandList,
    data: list,
    *,
    parent: Node,
    scope: str,
    argname: str = None,
) -> CommandList:
    command.parent = parent
    command.commands = [
        load(context, Command(), x, parent=parent, scope=scope, argname=argname)
        for x in data
    ]
    return command
def load_type(context: Context, prop: Node, data: dict, manifest: Manifest):
    na = object()
    config = context.get('config')

    if prop.type not in config.components['types']:
        raise Exception(f"Unknown property type {prop.type!r}.")

    type = config.components['types'][prop.type]()
    type_schema = resolve_schema(type, Type)
    for name in type_schema:
        schema = type_schema[name]
        value = data.get(name, na)
        if schema.get('required', False) and value is na:
            raise Exception(f"Missing required option {name!r}.")
        if value is na:
            value = schema.get('default')
        setattr(type, name, value)

    type.prop = prop
    type.name = data['type']

    return load(context, type, data, manifest)
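# A minimal standalone sketch of the same schema-driven option filling: walk a
# per-class option schema, require what is marked required, fall back to
# defaults otherwise, and use a private sentinel so that an explicit None is
# distinguishable from "not given". The Size class and its schema are
# illustrative assumptions, not part of spinta.
class Size:
    schema = {
        'unit': {'required': True},
        'maximum': {'default': 255},
    }

def load_options(obj, data: dict):
    na = object()   # sentinel: distinguishes "missing" from an explicit None
    for name, schema in obj.schema.items():
        value = data.get(name, na)
        if schema.get('required', False) and value is na:
            raise Exception(f"Missing required option {name!r}.")
        if value is na:
            value = schema.get('default')
        setattr(obj, name, value)
    return obj

size = load_options(Size(), {'unit': 'bytes'})
print(size.unit, size.maximum)   # bytes 255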
def load(context: Context, store: Store, config: Config) -> Store:
    """Load backends and manifests from configuration."""

    # Load backends.
    store.backends = {}
    for name in config.keys('backends'):
        Backend = config.get('backends', name, 'backend', cast=importstr)
        backend = store.backends[name] = Backend()
        backend.name = name
        load(context, backend, config)

    # Load internal manifest.
    internal = store.internal = Manifest()
    internal.name = 'internal'
    internal.path = pathlib.Path(pres.resource_filename('spinta', 'manifest'))
    internal.backend = store.backends['default']
    load(context, internal, config)

    # Load manifests.
    store.manifests = {}
    for name in config.keys('manifests'):
        manifest = store.manifests[name] = Manifest()
        manifest.name = name
        manifest.path = config.get('manifests', name, 'path', cast=pathlib.Path, required=True)
        manifest.backend = store.backends[config.get('manifests', name, 'backend', required=True)]
        load(context, manifest, config)

    if 'default' not in store.manifests:
        raise Exception("'default' manifest must be set in the configuration.")

    return store
logging.basicConfig(
    level=logging.INFO,
    format='%(levelname)s: %(message)s',
)

c = Config()
c.read()

load_commands(c.get('commands', 'modules', cast=list))

Context = c.get('components', 'core', 'context', cast=importstr)
context = Context()
config = context.set('config', components.Config())
store = context.set('store', Store())

load(context, config, c)
check(context, config)
load(context, store, c)
check(context, store)
wait(context, store, c)

prepare(context, store.internal)
prepare(context, store)

context.set('auth.server', AuthorizationServer(context))
context.set('auth.resource_protector', ResourceProtector(context, BearerTokenValidator))

set_context(context)