def specific_field(fieldname, value):
    """Return True if the schema field named `fieldname` on the current
    context object matches `value` (via `matches`), else False.

    Returns False when the context has no field with that name.
    """
    # NOTE: the original assigned `schema = get_schema_fields(self.context)`
    # and never used it (immediately shadowed below) — dead call removed.
    result = [(name, field, schema)
              for name, field, schema in get_schema_fields(self.context)
              if name == fieldname]
    if not result:
        return False
    name, field, schema = result[0]
    # Bind the schema to the context to read the stored field value.
    fieldvalue = field.get(schema(self.context))
    return matches(value, fieldvalue)
def arguments(self, parser, args, rest):
    """Declare one '=<fieldname>' keyword argument on `parser` for every
    writable schema field of the target model/object, then return the parser.

    In 'mk' mode the target is the creatable model class named by
    `args.type` and its required fields become required arguments;
    otherwise the target is the existing object at `args.path` and no
    argument is required.
    """
    parser.declare_argument('keywords', {})
    # Pick the target and whether required schema fields must be supplied.
    model_or_obj, args_required = ((creatable_models.get(args.type), True)
                                   if self.context.name == 'mk'
                                   else (self.context.traverse(args.path), False))
    schema_fields = get_schema_fields(model_or_obj, marker=getattr(
        self.context.current_obj, '__contains__', None))
    for name, field, schema in schema_fields:
        # Read-only fields cannot be set from the command line.
        if field.readonly:
            continue
        field = field.bind(model_or_obj)
        # Choice fields constrain the argument to the vocabulary values.
        choices = ([i.value.encode('utf-8') for i in field.vocabulary]
                   if isinstance(field, zope.schema.Choice) else None)
        # Int fields get automatic int conversion; everything else is a string.
        type = (int if isinstance(field, zope.schema.Int) else None)
        kwargs = {}
        if isinstance(field, Path):
            kwargs['is_path'] = True
            base_path = '.'
            if field.relative_to == Path.PARENT:
                if self.context.name == 'mk':
                    # Object not created yet: resolve relative to the shell cwd.
                    base_path = self.context.protocol._cwd()
                else:
                    base_path = canonical_path(model_or_obj.__parent__)
            kwargs['base_path'] = os.path.join(base_path, field.base_path)
        parser.add_argument('=%s' % name,
                            required=(args_required and field.required),
                            type=type,
                            action=GroupDictAction,
                            group='keywords',
                            help=field.title.encode('utf8'),
                            choices=choices,
                            **kwargs)
    return parser
def arguments(self, parser, args, rest):
    """Declare one '=<fieldname>' keyword argument on `parser` for every
    writable schema field of the target model/object, then return the parser.

    In 'mk' mode the target is the creatable model class named by
    `args.type` and its required fields become required arguments;
    otherwise the target is the existing object at `args.path` and no
    argument is required.
    """
    parser.declare_argument('keywords', {})
    # Pick the target and whether required schema fields must be supplied.
    model_or_obj, args_required = ((creatable_models.get(args.type), True)
                                   if self.context.name == 'mk'
                                   else (self.context.traverse(args.path), False))
    schema_fields = get_schema_fields(model_or_obj,
                                      marker=getattr(self.context.current_obj,
                                                     '__contains__', None))
    for name, field, schema in schema_fields:
        # Read-only fields cannot be set from the command line.
        if field.readonly:
            continue
        field = field.bind(model_or_obj)
        # Choice fields constrain the argument to the vocabulary values.
        choices = ([i.value.encode('utf-8') for i in field.vocabulary]
                   if isinstance(field, zope.schema.Choice) else None)
        # Int fields get automatic int conversion; everything else is a string.
        type = (int if isinstance(field, zope.schema.Int) else None)
        kwargs = {}
        if isinstance(field, Path):
            kwargs['is_path'] = True
            base_path = '.'
            if field.relative_to == Path.PARENT:
                if self.context.name == 'mk':
                    # Object not created yet: resolve relative to the shell cwd.
                    base_path = self.context.protocol._cwd()
                else:
                    base_path = canonical_path(model_or_obj.__parent__)
            kwargs['base_path'] = os.path.join(base_path, field.base_path)
        parser.add_argument('=%s' % name,
                            required=(args_required and field.required),
                            type=type,
                            action=GroupDictAction,
                            group='keywords',
                            help=field.title.encode('utf8'),
                            choices=choices,
                            **kwargs)
    return parser
def __init__(self, data, model, marker=None):
    """Hold the input `data` dict alongside the schemas and schema
    fields resolved for `model` (optionally filtered by `marker`)."""
    assert isinstance(data, dict)
    self.data = data
    self.model = model
    # Materialize both generators up front so they can be iterated repeatedly.
    self.schemas = list(get_schemas(model, marker=marker))
    self.fields = list(get_schema_fields(model, marker=marker))
def __init__(self, data, obj, marker=None):
    """Hold the input `data` dict alongside the schemas and schema
    fields resolved for `obj` (optionally filtered by `marker`)."""
    assert isinstance(data, dict)
    self.data = data
    self.obj = obj
    # Materialize both generators up front so they can be iterated repeatedly.
    self.schemas = list(get_schemas(obj, marker=marker))
    self.fields = list(get_schema_fields(obj, marker=marker))
class ExportMetadataCmd(Cmd, SetAclMixin):
    """Console command ('importexport') that exports object metadata to a
    JSON/YAML file or imports it back, recreating/updating objects under
    the well-known container paths."""

    implements(ICmdArgumentsSyntax)
    command('importexport')

    # Format name -> serializer / deserializer.
    serialize_action_map = {'json': json.dumps, 'yaml': yaml.dump}
    # NOTE(review): yaml.load without SafeLoader executes arbitrary Python
    # from the input file — acceptable only because import is admin-only.
    deserialize_action_map = {'json': json.loads, 'yaml': yaml.load}

    # (container path, traverse recursively?) pairs processed on import.
    traverse_paths = (('/machines/', True), ('/ippools/', False),
                      ('/templates/', False), ('/home/', False))
    # Class names that are never imported.
    type_blacklist = ('IncomingMachines', 'ByNameContainer', 'ActionsContainer')

    def arguments(self):
        """Declare the command-line arguments for this command."""
        parser = VirtualConsoleArgumentParser()
        parser.add_argument(
            'filename',
            help='OS file path where the data to import from or export to')
        parser.add_argument('-i', '--import-data', action='store_true',
                            help='Import data')
        parser.add_argument('-f', '--format', choices=['json', 'yaml'],
                            help='Input/output file format', default='yaml')
        parser.add_argument('-p', '--full-path', action='store_true',
                            help='Add full OMS paths (export-only)',
                            default=False)
        parser.add_argument('-m', '--max-depth', type=int,
                            help='Max path recursion depth', default=5)
        parser.add_argument('-a', '--attributes', type=list,
                            help='List of attributes to import/export',
                            default=[])
        return parser

    @require_admins_only
    @defer.inlineCallbacks
    def execute(self, args):
        """Dispatch to import or export depending on -i/--import-data."""
        # NOTE(review): this message says 'Exporting' even on import.
        log.msg('Exporting all object ownership data...')
        if args.import_data:
            yield self.import_data(args)
        else:
            yield self.export_data(args)

    @db.transact
    def import_data(self, args):
        """Read the dump file, deserialize it, and apply each configured
        container subtree found in the data."""
        with open(args.filename, 'r') as f:
            serialized = f.read()
        data = self.deserialize_action_map.get(args.format)(serialized)
        for path, recursive in self.traverse_paths:
            container = traverse1(path)
            pdata = data.get(path)
            # Skip paths missing either in the DB or in the dump.
            if container and pdata:
                self.write(
                    'Importing %s (%s)...\n' %
                    (path, 'recursive' if recursive else 'non-recursive'))
                self.traverse_level_set(pdata, container, args.attributes,
                                        recursive=recursive,
                                        maxlevel=args.max_depth)

    @db.assert_transact
    def traverse_level_set(self, data, container, attrs, recursive=False,
                           maxlevel=5, level=0):
        """Apply one level of dump `data` into `container`, creating missing
        objects and recursing into children up to `maxlevel`."""

        def import_cls(module, name):
            # Resolve the class `name` inside dotted `module` path.
            mod = __import__(module)
            for comp in module.split('.')[1:]:
                mod = getattr(mod, comp)
            return getattr(mod, name)

        for name, di in data.iteritems():
            self.write('%s%s\n' % (' ' * level, name))
            # Existing child, if any; presumably falsy when absent —
            # TODO(review): confirm container[...] doesn't raise on a miss.
            element = container[name]
            if di['__classname__'] in self.type_blacklist:
                continue
            # Reuse the existing object or import its class for creation.
            obj = import_cls(di['__module__'],
                             di['__classname__']) if not element else element
            if obj.__transient__:
                continue
            cobj = self._do_create_or_set(di, obj, attrs=attrs,
                                          marker=getattr(
                                              container, '__contains__', None))
            if cobj is None:
                continue
            if not element:
                container.add(cobj)
            if IContainer.providedBy(cobj) and recursive and level < maxlevel:
                chdata = di.get('children')
                if chdata is not None:
                    self.traverse_level_set(chdata, cobj, attrs,
                                            recursive=recursive,
                                            maxlevel=maxlevel,
                                            level=level + 1)

    # Dump keys that are metadata, not settable object attributes.
    attr_blacklist = (
        '__module__', '__name__', '__classname__', 'children', 'ctime',
        'features', 'module', 'mtime', 'owner', 'permissions', 'tags', 'type',
    )

    def apply_form(self, form_class, data, obj, action, marker=None):
        """Validate `data` against `obj` via `form_class`, repair what can be
        repaired (WrongType on null values), then invoke `action` on the form.

        Returns None when no schema is defined for `obj`; otherwise returns
        the result of the form action. Raises AssertionError if unrepairable
        validation errors remain.
        """
        # Sentinel distinguishing "key absent" from "value is None".
        DoesNotExist = "<NOVALUE>"

        def format_error_message(errors):
            return ('ERROR: %s while importing data for %s\n' %
                    ([(attr, data.get(attr, DoesNotExist), err)
                      for attr, err in errors], obj))

        form = form_class(data, obj, marker=marker)

        # No schema at all: warn and bail out quietly.
        if form.errors and any(
                map(lambda (f, err): isinstance(err, NoSchemaFound),
                    form.errors)):
            self.write('WARNING: import of %s failed: no schema is defined\n'
                       % (obj))
            return

        # WrongType errors where the dump explicitly holds None are fixable.
        wrong_type_errors = filter(
            lambda (f, err): isinstance(err, WrongType) and data.get(
                f, DoesNotExist) is None, form.errors)

        fields = dict(
            map(lambda (f, t, i): (f, t),
                get_schema_fields(obj, marker=marker)))

        # Attempt to fix wrong type errors by setting default values of the respective types
        for field_name, err in wrong_type_errors:
            # NOTE(review): 'WrontType' below is a typo for 'WrongType' in
            # this user-visible message (left as-is: doc-only change).
            self.write(
                'Attempting to fix field "%s" of %s with a WrontType error... '
                % (field_name, obj))
            try:
                field = fields[field_name]
                # _type may be a tuple of acceptable types; use the first.
                if isinstance(field._type, tuple):
                    default = field._type[0]()
                else:
                    default = field._type()
                data[field_name] = default
            except ValueError:
                self.write('Failed!\n')
            else:
                self.write('Done.\n')

        # List missing required fields
        missing_fields = filter(
            lambda (f, err): isinstance(err, RequiredMissing), form.errors)
        for field_name, err in missing_fields:
            self.write('Missing required field: %s %s in %s'
                       % (obj, field_name, data.keys()))

        # Force renewal of the validation
        delattr(form, '_errors')
        assert not form.errors, format_error_message(form.errors)
        return getattr(form, action)(ignore_readonly=True)
def any_field(keyword):
    """Return True if `keyword` matches the value of any schema field
    on the current context object."""
    for _name, field, schema in get_schema_fields(self.context):
        if matches(keyword, field.get(schema(self.context))):
            return True
    return False
def any_field(keyword):
    """True when at least one schema field value of the current context
    matches `keyword`."""
    # Lazy pipeline: values are fetched one by one and any() short-circuits.
    values = (field.get(schema(self.context))
              for _name, field, schema in get_schema_fields(self.context))
    return any(matches(keyword, value) for value in values)