def __call__(self):
    """Validate both ends of the relation being added.

    Evaluates the 'special search' restrictions declared by
    ``AutoCompleteEntityRetriever`` for the subject side and then the
    object side of ``self.rtype``. If the linked entity does not satisfy
    the restriction, raises :exc:`ValidationError` with a generic help
    message, or a dedicated one looked up in ``HELP_MESSAGES``.
    """
    #print 'eidfrom: %s, eidto: %s, rtype: %s' % (self.eidfrom, self.eidto, self.rtype)
    # When the relation already exists, the special conditions are not
    # evaluated.
    srql = 'Any X, Y WHERE X %s Y, X eid %s, Y eid %s' % (
        self.rtype, self.eidfrom, self.eidto)
    if self._cw.execute(srql).rowcount > 0:
        return
    eidfrom = self._cw.entity_from_eid(self.eidfrom)
    eidto = self._cw.entity_from_eid(self.eidto)
    #Evaluate the direct relation
    target = ''
    specialsearch = AutoCompleteEntityRetriever().getSpecialSearch(
        self._cw, eidfrom, self.rtype, type(eidto).__name__, 'subject')
    if specialsearch != ' ':
        # Restrict the 'unrelated' rql with the special search and pin the
        # object eid: zero rows means the pair violates the restriction.
        unrelated = eidfrom.cw_unrelated_rql(self.rtype,
                                             type(eidto).__name__, 'subject')
        srql = ((unrelated[0] % unrelated[1]) + specialsearch +
                ', O eid ' + str(self.eidto))
        if self._cw.execute(srql).rowcount < 1:
            # Key used to look up a dedicated help message.
            target = ('%(entity)s|%(relation)s%(role)s|%(etype_search)s' % {
                'entity': type(eidfrom).__name__,
                'relation': self.rtype,
                'role': '',
                'etype_search': type(eidto).__name__
            })
            helpmsg = self._cw._('Validation error, relation not valid')
            if target in AutoCompleteEntityRetriever().HELP_MESSAGES:
                helpmsg = self._cw._(
                    AutoCompleteEntityRetriever().HELP_MESSAGES[target])
            raise ValidationError(self.eidfrom, {self.rtype: helpmsg})
    #Evaluate the reverse relation
    target = ''
    specialsearch = AutoCompleteEntityRetriever().getSpecialSearch(
        self._cw, eidto, self.rtype, type(eidfrom).__name__, 'object')
    if specialsearch != ' ':
        unrelated = eidto.cw_unrelated_rql(self.rtype,
                                           type(eidfrom).__name__, 'object')
        srql = ((unrelated[0] % unrelated[1]) + specialsearch +
                ', S eid ' + str(self.eidfrom))
        if self._cw.execute(srql).rowcount < 1:
            target = ('%(entity)s|%(relation)s%(role)s|%(etype_search)s' % {
                'entity': type(eidto).__name__,
                'relation': self.rtype,
                'role': '_object',
                'etype_search': type(eidfrom).__name__
            })
            helpmsg = self._cw._('Validation error, relation not valid')
            if target in AutoCompleteEntityRetriever().HELP_MESSAGES:
                helpmsg = self._cw._(
                    AutoCompleteEntityRetriever().HELP_MESSAGES[target])
            raise ValidationError(self.eidto, {self.rtype: helpmsg})
def _default_publish(self):
    """Generic edition entry point: edit/create every entity described by
    the posted form, then resolve the pending inter-entity relations.

    Raises ValidationError if any field-level error was collected, binding
    the errors to the main entity (``__maineid``) when known.
    """
    req = self._cw
    self.errors = []
    self.relations_rql = []
    form = req.form
    # so we're able to know the main entity from the repository side
    if '__maineid' in form:
        req.transaction_data['__maineid'] = form['__maineid']
    # no specific action, generic edition
    self._to_create = req.data['eidmap'] = {}
    # those three data variables are used to handle relation from/to entities
    # which doesn't exist at time where the entity is edited and that
    # deserves special treatment
    req.data['pending_inlined'] = defaultdict(set)
    req.data['pending_others'] = set()
    req.data['pending_composite_delete'] = set()
    req.data['pending_values'] = dict()
    try:
        for formparams in self._ordered_formparams():
            self.edit_entity(formparams)
    except (RequestError, NothingToEdit) as ex:
        # an empty form may still carry a linkto or delete action
        if '__linkto' in req.form and 'eid' in req.form:
            self.execute_linkto()
        elif '__delete' not in req.form:
            raise ValidationError(None, {None: str(ex)})
    # all pending inlined relations to newly created entities have been
    # treated now (pop to ensure there are no attempt to add new ones)
    pending_inlined = req.data.pop('pending_inlined')
    assert not pending_inlined, pending_inlined
    pending_values = req.data.pop('pending_values')
    # handle all other remaining relations now
    while req.data['pending_others']:
        form_, field = req.data['pending_others'].pop()
        # attempt to retrieve values and original values if they have already gone through
        # handle_formfield (may not if there has been some not yet known eid at the first
        # processing round). In the later case we've to go back through handle_formfield.
        try:
            values, origvalues = pending_values.pop((form_, field))
        except KeyError:
            self.handle_formfield(form_, field)
        else:
            self.handle_relation(form_, field, values, origvalues)
    assert not pending_values, 'unexpected remaining pending values %s' % pending_values
    del req.data['pending_others']
    # then execute rql to set all relations
    for querydef in self.relations_rql:
        self._cw.execute(*querydef)
    # delete pending composite
    for entity in req.data['pending_composite_delete']:
        entity.cw_delete()
    # XXX this processes *all* pending operations of *all* entities
    if '__delete' in req.form:
        todelete = req.list_form_param('__delete', req.form, pop=True)
        if todelete:
            autoform.delete_relations(self._cw, todelete)
    self._cw.remove_pending_operations()
    if self.errors:
        errors = dict((f.name, str(ex)) for f, ex in self.errors)
        raise ValidationError(valerror_eid(form.get('__maineid')), errors)
def load_forms(cw_config, form_name):
    """ Load the forms structures from the file declared in the 'menu_json'
    cubicweb instance parameter.

    Parameters
    ----------
    cw_config: dict
        the cubicweb configuration built from the instance
        'all-in-one.conf' file.
    form_name: str
        the name of the registered form to be loaded.

    Returns
    -------
    config: dict
        the forms descriptions defined in the 'menu_json' setting file.
    """
    settings_path = cw_config[form_name]
    # Fail early with an explicit message when the declared file is missing.
    if not os.path.isfile(settings_path):
        raise ValidationError(
            "CWUpload", {
                "settings": unicode("cannot find the 'menu_json' "
                                    "configuration file at location "
                                    "'{0}'".format(settings_path))
            })
    return byteify(load_json(settings_path))
def _set_container_parent(cnx, rtype, eid, peid):
    """Attach entity `eid` to container parent `peid` through `rtype`.

    Handles three special situations before actually setting the
    `container_parent` relation: entities supporting multiple parents,
    relinking to the same parent (no-op), and replacement of the parent
    (only allowed within the same container and for the same rtype).
    """
    target = cnx.entity_from_eid(eid)
    if target.container_parent:
        mp_protocol = target.cw_adapt_to('container.multiple_parents')
        if mp_protocol:
            # entity accepts several parents: just record the candidate
            mp_protocol.possible_parent(rtype, peid)
            return
        cparent = target.container_parent[0]
        if cparent.eid == peid:
            # same parent: nothing to do beyond a warning
            cnx.warning('relinking %s (eid:%s parent:%s)', rtype, eid, peid)
            return
        # this is a replacement: we allow replacing within the same container
        # for the same rtype
        other_rtype = find_valued_parent_rtype(target, but=rtype)
        if other_rtype:
            container = target.cw_adapt_to('Container').related_container
            parent = cnx.entity_from_eid(peid)
            parent_container = parent.cw_adapt_to(
                'Container').related_container
            if container.eid != parent_container.eid or rtype != other_rtype:
                cnx.warning(
                    '%s is already in container %s, cannot go into %s '
                    ' (rtype from: %s, rtype to: %s)',
                    target, parent_container, container, other_rtype, rtype)
                msg = (cnx._('%s is already in a container through %s') %
                       (target.e_schema, rtype))
                raise ValidationError(target.eid, {rtype: msg})
    target.cw_set(container_parent=peid)
def check_config(self, source_entity):
    """Validate the configuration of *source_entity*.

    Delegates typed-config extraction to the parent class, then enforces a
    minimum synchronization interval of one minute. Raises ValidationError
    when the interval is too small.
    """
    config = super(DataFeedSource, self).check_config(source_entity)
    interval = config['synchronization-interval']
    if interval < 60:
        raise ValidationError(
            source_entity.eid,
            {'config': _('synchronization-interval must be greater than 1 minute')})
    return config
def rql(self, rql, parameter_name):
    """ Patch the rql so that the file paths come first in the result set.

    note::
        The patched rql returned first elements are then the file pathes.
        Reserved keys are 'PATH', 'PROCESSING', 'SCORE'.
    """
    # Reject requests that already bind one of our reserved variable names.
    tokens = re.split(r"[ ,]", rql)
    for reserved in ("SCORE", "PATH"):
        if reserved in tokens:
            raise ValidationError(
                "CWSearch", {
                    "rql": _(
                        'cannot edit the rql "{0}", "{1}" is a reserved key, '
                        'choose another name'.format(rql, reserved))})
    # Drop the initial selection keyword ('Any') before rewriting.
    body = " ".join(rql.split()[1:])
    # Prepend the file path selection and join through the score files.
    return ("Any PATH, {0}, {1} results SCORE, SCORE is ExternalResource, "
            "SCORE filepath PATH".format(body, parameter_name))
def check_urls(self, source_entity):
    """Validate the LDAP URL of *source_entity*.

    Ensures there is exactly one URL, that it is well formed
    (``proto://host[:port]``) and that the protocol is supported.
    Raises ValidationError otherwise; returns the validated url list.
    """
    eid = source_entity.eid
    urls = super(LDAPFeedSource, self).check_urls(source_entity)
    if len(urls) > 1:
        raise ValidationError(eid, {'url': _('can only have one url')})
    try:
        scheme, location = urls[0].split('://')
    except ValueError:
        raise ValidationError(eid, {'url': _('badly formatted url')})
    if scheme not in PROTO_PORT:
        raise ValidationError(eid, {'url': _('unsupported protocol')})
    return urls
def interpret_constant(entity, str_value):
    """Convert *str_value* into the typed value expected by *entity*.

    Raises ValidationError when the value is empty or cannot be coerced to
    the entity's data type. Falls back to ``float`` when the data type is
    not yet known (KeyError from ``output_value``).
    """
    _ = entity._cw._
    if not str_value:
        raise ValidationError(entity.eid, {'data': _('required field')})
    try:
        return entity.output_value(str_value)
    except (ValueError, TypeError):
        raise ValidationError(
            entity.eid,
            {'data': _('accepted type: %s') % _(entity.data_type)})
    except KeyError:
        # this can happen at creation time if data_type has not been provided
        # e.g.: data_type is automatically handled in a hook to be executed later
        # (yes, there are use case/applications that want to do this)
        # here we might also look into the form and extract the given
        # data_type value; for now, let's be dumb
        assert entity.data_type is None, 'failed with data_type %s' % entity.data_type
        return float(str_value)
def publish(self, rset=None):
    """Undo the transaction identified by the posted 'txuuid'.

    On failure, raises ValidationError (which triggers a rollback in
    main_publish); on success, redirects (which raises Redirect).
    """
    uuid = self._cw.form['txuuid']
    try:
        self._cw.cnx.undo_transaction(uuid)
    except UndoTransactionException as exc:
        # This will cause a rollback in main_publish.
        raise ValidationError(None, {None: '\n'.join(exc.errors)})
    self.redirect()  # Will raise Redirect
def check_urls(self, source_entity):
    """Validate the URL(s) of *source_entity*.

    The `url` attribute is a string that may hold one URL per line; blank
    lines are ignored. Returns the list of stripped URLs, raising
    ValidationError when none is given.
    """
    raw = source_entity.url or ''
    checked = [line.strip() for line in raw.splitlines() if line.strip()]
    if not checked:
        raise ValidationError(
            source_entity.eid,
            {role_name('url', 'subject'): _('specifying an URL is mandatory')})
    return checked
def edit_entity(self, formparams, multiple=False):
    """edit / create / copy an entity and return its eid"""
    req = self._cw
    etype = formparams['__type']
    entity = req.vreg['etypes'].etype_class(etype)(req)
    entity.eid = valerror_eid(formparams['eid'])
    is_main_entity = req.form.get('__maineid') == formparams['eid']
    # let a chance to do some entity specific stuff
    entity.cw_adapt_to('IEditControl').pre_web_edit()
    # create a rql query from parameters
    rqlquery = RqlQuery()
    # process inlined relations at the same time as attributes
    # this will generate less rql queries and might be useful in
    # a few dark corners
    if is_main_entity:
        formid = req.form.get('__form_id', 'edition')
    else:
        # XXX inlined forms formid should be saved in a different formparams entry
        # inbetween, use cubicweb standard formid for inlined forms
        formid = 'edition'
    form = req.vreg['forms'].select(formid, req, entity=entity)
    eid = form.actual_eid(entity.eid)
    editedfields = formparams['_cw_entity_fields']
    form.formvalues = {}  # init fields value cache
    for field in form.iter_modified_fields(editedfields, entity):
        self.handle_formfield(form, field, rqlquery)
    # if there are some inlined field which were waiting for this entity's
    # creation, add relevant data to the rqlquery
    for form_, field in req.data['pending_inlined'].pop(entity.eid, ()):
        rqlquery.set_inlined(field.name, form_.edited_entity.eid)
    if not rqlquery.canceled:
        if self.errors:
            errors = dict(
                (f.role_name(), str(ex)) for f, ex in self.errors)
            raise ValidationError(valerror_eid(entity.eid), errors)
        if eid is None:  # creation or copy
            entity.eid = eid = self._insert_entity(etype, formparams['eid'],
                                                   rqlquery)
        elif rqlquery.edited:  # edition of an existant entity
            # reject the edition when the entity changed concurrently
            self.check_concurrent_edition(formparams, eid)
            self._update_entity(eid, rqlquery)
    else:
        self.errors = []
    if is_main_entity:
        self.notify_edited(entity)
    if '__delete' in formparams:
        # XXX deprecate?
        todelete = req.list_form_param('__delete', formparams, pop=True)
        autoform.delete_relations(req, todelete)
    if '__cloned_eid' in formparams:
        entity.copy_relations(int(formparams['__cloned_eid']))
    if is_main_entity:  # only execute linkto for the main entity
        self.execute_linkto(entity.eid)
    return eid
def grok_data(self):
    """ self.data is something such as an excel file or CSV data or a pickled
    numpy array or an already processed binary. Ensure it's a pickle numpy
    array before storing object in db.

    If data seems to be already processed, return True, else return False.
    """
    entity = self.entity
    try:
        filename = entity.data.filename.lower()
    except AttributeError:
        # no filename metadata: data is either already a Binary (processed)
        # or assumed to be a raw numpy array
        data = entity.data
        if isinstance(data, Binary):
            return True
        # if not isinstance(data, numpy.ndarray):
        #     raise TypeError('data is neither a Binary nor a numpy array (%s)' % type(data))
        numpy_array = data
    else:
        # a file was uploaded: find an adapter able to convert it
        adapter = self._cw.vreg['adapters'].select_or_none(
            'source_to_numpy_array', self._cw, entity=entity,
            filename=filename)
        if adapter is None:
            msg = self._cw._(
                'Unsupported file type %s') % entity.data.filename
            raise ValidationError(entity.eid, {'data': msg})
        numpy_array = adapter.to_numpy_array(entity.data, filename)
    # only 1-D, non-empty series are accepted
    if numpy_array.ndim != 1:
        raise ValidationError(
            entity.eid, {'data': _('data must be a 1-dimensional array')})
    if numpy_array.size == 0:
        raise ValidationError(
            entity.eid, {'data': _('data must have at least one value')})
    # store a zlib-compressed pickle of the array in the db
    data = Binary()
    compressed_data = zlib.compress(pickle.dumps(numpy_array, protocol=2))
    data.write(compressed_data)
    entity.cw_edited['data'] = data
    entity.array = numpy_array
    return False
def _check_config_dict(self, eid, confdict, raise_on_error=True):
    """Check configuration of source entity and return config dict properly
    typed with defaults set. If `raise_on_error` is True (the default), a
    ValidationError will be raised if some error is encountered, else the
    problem will be ignored.
    """
    processed = {}
    for optname, optdict in self.options:
        value = confdict.pop(optname, optdict.get('default'))
        if value is configuration.REQUIRED:
            if not raise_on_error:
                continue
            msg = _('specifying %s is mandatory')
            msgargs = optname
            raise ValidationError(eid, {role_name('config', 'subject'): msg},
                                  msgargs)
        elif value is not None:
            # type check
            try:
                value = configuration._validate(value, optdict, optname)
            except Exception as ex:
                if not raise_on_error:
                    continue
                msg = str(ex)
                raise ValidationError(eid,
                                      {role_name('config', 'subject'): msg})
        processed[optname] = value
    # cw < 3.10 bw compat
    try:
        processed['adapter'] = confdict['adapter']
    except KeyError:
        pass
    # check for unknown options
    if confdict and tuple(confdict) != ('adapter',):
        if raise_on_error:
            msg = _('unknown options %s')
            msgargs = ', '.join(confdict)
            raise ValidationError(eid, {role_name('config', 'subject'): msg},
                                  msgargs)
        else:
            self.warning('unknown options %s', ', '.join(confdict))
            # add options to processed, they may be necessary during migration
            processed.update(confdict)
    return processed
def __call__(self):
    """Reject separator settings where the thousands or column separator
    collides with the decimal separator; collects all problems before
    raising a single ValidationError.
    """
    self.debug('hook %s', self.__class__.__name__)
    entity = self.entity
    _ = self._cw._
    decimal = entity.decimal_separator
    problems = {}
    if entity.thousands_separator == decimal:
        problems['thousands_separator'] = _(
            'thousands separator must not be the same as decimal separator')
    if entity.csv_separator == decimal:
        problems['csv_separator'] = _(
            'column separator must not be the same as decimal separator')
    if problems:
        raise ValidationError(entity.eid, problems)
def rql(self, rql, parameter_name, identifier=1):
    """ Patch the rql so the attached files come first in the result set.

    .. note::

        * reserved keys are 'PATH', 'FENTRIES' postfixed with the
          identifier number.
        * returned files are necessary at the beginning of the adapted rql.

    Parameters
    ----------
    rql: str (mandatory)
        the request to adapt.
    parameter_name: str (mandatory)
        the label of the entity to adapt.
    identifier: int (optional)
        postfix the reserved keys with this identifier.

    Returns
    -------
    global_rql: str
        the adapted rql.
    nb_files: int
        the number of files returned at the begining of the result set.
    """
    path_label = "PATH{0}".format(identifier)
    fentries_label = "FENTRIES{0}".format(identifier)
    # Reject requests that already bind one of the reserved labels.
    tokens = re.split(r"[ ,]", rql)
    for label in (path_label, fentries_label):
        if label in tokens:
            raise ValidationError(
                "CWSearch", {
                    "rql": _('cannot edit the rql "{0}", "{1}" is a reserved key, '
                             'choose another name.'.format(rql, label))
                })
    # Drop the initial selection keyword ('Any') before rewriting.
    body = " ".join(rql.split()[1:])
    # Prepend the file path selection, joined through external_files.
    global_rql = ("Any {0}, {1}, {2} external_files {3}, "
                  "{3} filepath {0}".format(path_label, body,
                                            parameter_name, fentries_label))
    return global_rql, 1
def raise_user_exception(self):
    """Translate the unicity-constraint violation carried by ``self.exc``
    into a user-facing ValidationError, with one error entry per violated
    relation type plus a global one.
    """
    errors = {}
    msgargs = {}
    i18nvalues = []
    for rtype in self.exc.rtypes:
        argkey = rtype + '-rtype'
        errors[rtype] = _(
            '%(KEY-rtype)s is part of violated unicity constraint')
        msgargs[argkey] = rtype
        i18nvalues.append(argkey)
    errors[''] = _('some relations violate a unicity constraint')
    raise ValidationError(self.entity.eid, errors, msgargs=msgargs,
                          i18nvalues=i18nvalues)
def check_inlined_allowed(self): """check inlining is possible, raise ValidationError if not possible """ # don't use the persistent schema, we may miss cardinality changes # in the same transaction for rdef in self.reverse_relation_type: card = rdef.cardinality[0] if not card in '?1': qname = role_name('inlined', 'subject') rtype = self.name stype = rdef.stype otype = rdef.otype msg = self._cw._("can't set inlined=True, " "%(stype)s %(rtype)s %(otype)s " "has cardinality=%(card)s") raise ValidationError(self.eid, {qname: msg % locals()})
def new_fs_path(self, entity, attr):
    """Allocate a fresh, unique filesystem path for storing *attr* of
    *entity*; return the (fd, path) pair from `uniquify_path`.

    Raises ValidationError when a unique path could not be produced.
    """
    # Build a readable basename: "<eid>_<attr>[_<original filename>]".
    # Keeping the real file name (hence its extension) helps tools such as
    # PIL that sniff content-type from the extension, and keeps the fs
    # human-readable.
    parts = [str(entity.eid), attr]
    original_name = entity.cw_attr_metadata(attr, 'name')
    if original_name is not None:
        parts.append(original_name)
    basename = '_'.join(parts)
    fd, fspath = uniquify_path(self.default_directory, basename)
    if fspath is None:
        msg = entity._cw._('failed to uniquify path (%s, %s)') % (
            self.default_directory, basename)
        raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg})
    assert isinstance(fspath, str)
    return fd, fspath
def process_posted(self):
    """use this method to process the content posted by a simple form.

    it will return a dictionary with field names as key and typed value as
    associated value.
    """
    with tempattr(self, 'formvalues', {}):  # init fields value cache
        errors = []
        processed = {}
        for field in self.iter_modified_fields():
            try:
                # NOTE: `field` is deliberately rebound by the inner loop;
                # when process_posted fails after yielding, the error is
                # attributed to the last yielded (sub)field, not the outer
                # one -- do not "fix" the shadowing without checking callers.
                for field, value in field.process_posted(self):
                    processed[field.role_name()] = value
            except ProcessFormError as exc:
                errors.append((field, exc))
        if errors:
            errors = dict((f.role_name(), str(ex)) for f, ex in errors)
            raise ValidationError(None, errors)
        return processed
def check_concurrent_edition(self, formparams, eid):
    """Raise ValidationError when entity *eid* was modified after the form
    was generated (concurrent edition detection).

    A missing '__form_generation_time' parameter is accepted for backward
    and test compatibility: the edition is then considered OK.
    """
    req = self._cw
    try:
        generated_at = datetime.utcfromtimestamp(
            float(formparams['__form_generation_time']))
    except KeyError:
        # Backward and tests compatibility: if no timestamp consider edition OK
        return
    modified = req.execute(
        "Any X WHERE X modification_date > %(fts)s, X eid %(eid)s",
        {'eid': eid, 'fts': generated_at})
    if modified:
        # We only mark the message for translation but the actual
        # translation will be handled by the Validation mechanism...
        msg = _("Entity %(eid)s has changed since you started to edit it."
                " Reload the page and reapply your changes.")
        # ... this is why we pass the formats' dict as a third argument.
        raise ValidationError(eid, {None: msg}, {'eid': eid})
def raise_user_exception(self):
    """Map a failed schema constraint (identified by its db-level name)
    back to the attribute definition that declared it, then raise a
    ValidationError with the constraint's own failure message.
    """
    cstrname = self.exc.cstrname
    eschema = self.entity.e_schema
    # search every attribute definition for the constraint whose generated
    # name matches; the nested for/else/break ladder exits both loops as
    # soon as it is found
    for rschema, attrschema in eschema.attribute_definitions():
        rdef = rschema.rdef(eschema, attrschema)
        for constraint in rdef.constraints:
            if cstrname == constraint_name_for(constraint, rdef):
                break
        else:
            continue
        break
    else:
        assert 0
    key = rschema.type + '-subject'
    # use .get since a constraint may be associated to an attribute that isn't edited (e.g.
    # constraint between two attributes). This should be the purpose of an api rework at some
    # point, we currently rely on the fact that such constraint will provide a dedicated user
    # message not relying on the `value` argument
    value = self.entity.cw_edited.get(rschema.type)
    msg, args = constraint.failed_message(key, value, self.entity)
    raise ValidationError(self.entity.eid, {key: msg}, args)
def rql(self, rql, parameter_name):
    """ Return the rql patched so the score values come first in the
    result set. Reserved keys are 'PROCESSING' and 'SCORE'.
    """
    # Reject requests that already bind one of the reserved variable names.
    tokens = re.split(r"[ ,]", rql)
    for reserved in ("PROCESSING", "SCORE"):
        if reserved in tokens:
            raise ValidationError(
                "CWSearch", {
                    "rql": _(
                        'cannot edit the rql "{0}", "{1}" is a reserved key, '
                        'choose another name'.format(rql, reserved))})
    # Drop the initial selection keyword ('Any') before rewriting.
    body = " ".join(rql.split()[1:])
    # Prepend the score selection, joined through related_processing.
    return ("Any SCORE, {0}, {1} related_processing PROCESSING, "
            "PROCESSING results SCORE, SCORE is ScoreValue".format(
                body, parameter_name))
def __call__(self):
    """ Before adding the CWSearch entity, create a 'rset' and a
    'result.json' File entities that contain all the filepath attached to
    the current rql request.

    Filepath are found by patching the rql request with the declared
    'rqldownload-adaptors' actions. The '__rset_type__' adaptor attribute
    is used to export the rset. When an 'ecsvexport' is used, no file are
    then attached in the 'result.json' file.

    Raises ValidationError when: no entity can be found for the request,
    an entity cannot be matched to a single rql label, no download action
    applies, the actions disagree on the export type, or the export view
    fails to render.

    .. warning::

        For the moment we assume the database intergrity (ie. all file
        paths inserted in the db exist on the file system) and thus do not
        check to speed up the hook.
    """
    # Get the rql/export type from the CWSearch form
    rql = self.entity.cw_edited.get("path")

    # Execute the rql
    # ToDo: try to get the current request cw_rset
    rset = self._cw.execute(rql)

    # Map each column of the first row to its entity type name
    entities = {}
    if rset.rowcount > 0:
        for rowindex in range(len(rset[0])):
            try:
                entity = rset.get_entity(0, rowindex)
                entities[rowindex] = entity.__class__.__name__
            except NotAnEntity:
                # non-entity columns (attributes, aggregates) are skipped
                pass
    if len(entities) == 0:
        raise ValidationError(
            "CWSearch", {
                "entities": _('cannot find any entity for the request '
                              '{0}'.format(rql))})

    # Find the constant nodes
    constant_nodes = {}
    self._find_constant_nodes(rset.syntax_tree().children, constant_nodes)

    # Check we can associate rset entities with their rql labels: each
    # entity type must be bound to exactly one label in the request
    rql_etypes = constant_nodes.get("etype", {})
    for etype in entities.values():
        if etype not in rql_etypes or len(rql_etypes[etype]) != 1:
            raise ValidationError(
                "CWSearch", {
                    "rql": _('cannot find entity description in the '
                             'request {0}. Expect something like "Any X '
                             'Where X is {1}, ..."'.format(rql, etype))})

    # Get all the rqldownload declared adapters
    possible_actions = self._cw.vreg["actions"]["rqldownload-adapters"]

    # Keep only actions that respect the current context
    actions = {}
    export_vids = set()
    for etype in entities.values():
        entity_label = rql_etypes[etype][0]
        for action in possible_actions:
            for selector in action.__select__.selectors:
                if (isinstance(selector, is_instance) and
                        etype in selector.expected_etypes):
                    actions.setdefault(etype, []).append(
                        (action, entity_label))
                    export_vids.add(unicode(action.__rset_type__))

    # Check that at least one action has been found for this request.
    # BUG FIX: 'actions' is a dict, so the former 'actions == []' test was
    # always False and this error could never be reported.
    if not actions:
        raise ValidationError(
            "CWSearch", {
                "actions": _('cannot find an action for this request '
                             '{0}'.format(rql))})

    # Check that the export types are homogeneous
    if len(export_vids) != 1:
        raise ValidationError(
            "CWSearch", {
                "actions": _('cannot deal with different or no action '
                             'export types: {0}'.format(export_vids))})
    export_vid = export_vids.pop()

    # Create an empty result structure
    result = {"rql": rql, "files": [], "nonexistent-files": [],
              "upper_file_index": 0}

    # Here we want to execute rql request with user permissions: user who
    # is creating this entity
    with self._cw.security_enabled(read=True, write=True):

        # Set the adaptor rset type
        self.entity.cw_edited["rset_type"] = export_vid

        # Create the global rql from the declared actions: each action
        # patches the request so its files come first in the result set
        global_rql = rql
        cnt = 1
        upper_file_index = 0
        for etype, action_item in actions.items():
            for action, entity_label in action_item:
                global_rql, nb_files = action(self._cw).rql(
                    global_rql, entity_label, cnt)
                upper_file_index += nb_files
                cnt += 1
        result["upper_file_index"] = upper_file_index

        # Execute the global rql
        rset = self._cw.execute(global_rql)
        result["rql"] = global_rql

        # Because self._cw is not a cubicwebRequest add an empty form
        # parameter
        self._cw.__dict__["form"] = {}
        try:
            view = self._cw.vreg["views"].select(
                export_vid, self._cw, rset=rset)
            rset_view = Binary()
            view.w = rset_view.write
            view.call()
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are no longer converted into a ValidationError.
            raise ValidationError(
                "CWSearch", {
                    "rset_type": _('cannot apply this view "{0}" on this '
                                   'rset, choose another view '
                                   'id'.format(export_vid))})

        # Save the rset in a File entity
        f_eid = self._cw.create_entity(
            "File", data=rset_view, data_format=u"text",
            data_name=u"rset").eid

        # Entity modification related event: specify that the rset has
        # been modified
        self.entity.cw_edited["rset"] = f_eid

        # Get all the files attached to the current request
        # Note: we assume the database integrity (ie. all file paths
        # inserted in the db exist on the file system) and thus do not
        # check to speed up this process.
        files_set = set()
        non_existent_files_set = set()
        if export_vid != "ecsvexport":
            for rset_row in rset.rows:
                for rset_index in range(upper_file_index):
                    files_set.add(rset_row[rset_index])

        # Update the result structure
        result["files"] = list(files_set)
        result["nonexistent-files"] = list(non_existent_files_set)

        # Save the result in a File entity
        f_eid = self._cw.create_entity(
            "File", data=Binary(json.dumps(result)),
            data_format=u"text/json", data_name=u"result.json").eid

        # Entity modification related event: specify that the result has
        # been modified
        self.entity.cw_edited["result"] = f_eid
def __call__(self):
    """Forbid a constant TimeSeries from holding more than one value.

    Raises ValidationError bound to the edited entity when
    ``is_constant`` is set but ``count`` differs from 1.
    """
    entity = self.entity
    if entity.is_constant and entity.count != 1:
        # BUG FIX: pass the eid (not the entity object) as the first
        # ValidationError argument, consistently with every other hook.
        raise ValidationError(
            entity.eid,
            {'granularity': 'TimeSeries is constant, but has more than one value'})
def raise_user_exception(self):
    # Test/demo implementation: unconditionally reject the edition with a
    # fixed (untranslated) field error.
    raise ValidationError(self.entity.eid, {'hip': 'hop'})
def __call__(self):
    # Unconditionally abort the transaction for this entity, with an empty
    # error dict (no per-field message).
    raise ValidationError(self.entity.eid, {})
def rql(self, rql, parameter_name, identifier=1):
    """ Patch the rql so the file-set files come first in the result set.

    .. note::

        * reserved keys are 'PATH', 'FENTRIES', 'FILES' postfixed with the
          identifier number.
        * returned files are necessary at the beginning of the adapted rql.
        * detect if a filter is activated on the FileSets by checking the
          unique 'filesets' relation: in this case the reserved 'FENTRIES'
          postfixed label is the label associated to the FileSets.

    Parameters
    ----------
    rql: str (mandatory)
        the request to adapt.
    parameter_name: str (mandatory)
        the label of the entity to adapt.
    identifier: int (optional)
        postfix the reserved keys with this identifier.

    Returns
    -------
    global_rql: str
        the adapted rql.
    nb_files: int
        the number of files returned at the begining of the result set.
    """
    path_label = "PATH{0}".format(identifier)
    files_label = "FILES{0}".format(identifier)
    fentries_label = "FENTRIES{0}".format(identifier)
    # Reject requests that already bind one of the reserved labels.
    tokens = re.split(r"[ ,]", rql)
    for label in (path_label, files_label, fentries_label):
        if label in tokens:
            raise ValidationError(
                "CWSearch", {
                    "rql": _('cannot edit the rql "{0}", "{1}" is a reserved key, '
                             'choose another name.'.format(rql, label))
                })
    # Drop the initial selection keyword ('Any') before rewriting.
    body = " ".join(rql.split()[1:])
    # Complete the rql in order to access file pathes:
    # case 1: a filter is activated on the FileSets
    # case 2: download all the file sets associated files
    if "filesets" in body:
        fileset_label = body.split("filesets")[1].split(",")[0].strip()
        global_rql = ("Any {0}, {1}, {2} external_files {3}, "
                      "{3} filepath {0}".format(path_label, body,
                                                fileset_label,
                                                fentries_label))
    else:
        global_rql = (
            "Any {0}, {1}, {2} filesets {3}, {3} external_files {4}, "
            "{4} filepath {0}".format(path_label, body, parameter_name,
                                      files_label, fentries_label))
    return global_rql, 1
def call(self, **kwargs):
    """ Create the form fields and process a posted upload.

    .. note::

        At upload, all field inputs are checked to match the 'check_value'
        regular expressions defined in the 'upload_structure_json' instance
        parameter.

    Returns -1 (after rendering an error panel) when the form
    configuration cannot be loaded or parsed; otherwise renders the form.
    """
    # Get some parameters: the form name is carried in the query string
    path = self._cw.relative_path()
    if "?" in path:
        path, param = path.split("?", 1)
        kwargs.update(parse_qs(param))
    form_name = kwargs["form_name"][0]

    # Get the form fields from configuration file
    # NOTE(review): the checks below imply load_forms returns -1 when the
    # json file is missing and -2 when it cannot be read -- confirm in
    # load_forms.
    config = load_forms(self._cw.vreg.config)

    # Create a structure to store values that must be checked before the
    # insertion in the data base
    check_struct = {}
    required_file_fields = {}

    # Update shortcut to access the uploaded files
    # NOTE(review): deliberately disabled ('if 0') -- apparently kept from
    # a previous implementation that cached uploaded file paths.
    if 0:
        with self._cw.cnx._cnx.repo.internal_cnx() as cnx:
            rset = cnx.execute("Any X Where X is UploadFile")
            storage = cnx.repo.system_source._storages["UploadFile"]["data"]
            for index in range(rset.rowcount):
                entity = rset.get_entity(index, 0)
                eid = entity.eid
                if eid not in self._cw.vreg.uploaded_file_names:
                    fpath = storage.current_fs_path(entity, "data")
                    self._cw.vreg.uploaded_file_names[eid] = fpath

    # If json file missing, generate error page
    if config == -1:
        self.w(u'<div class="panel panel-danger">')
        self.w(u'<div class="panel-heading">')
        self.w(u'<h2 class="panel-title">ERROR</h2>')
        self.w(u'</div>')
        self.w(u'<div class="panel-body">')
        self.w(u"<h3>Configuration file not found</h3>")
        self.w(u"Check that the path 'upload_structure_json' "
                "declared in all-in-one.conf file is set.<br>")
        self.w(u"Then check that the path declared "
                "(current path:'{0}') corresponds to a "
                "json file and restart the instance.".format(
                    self._cw.vreg.config["upload_structure_json"]))
        self.w(u'</div>')
        self.w(u'</div>')
        return -1

    # If json can't be read, generate error page
    if config == -2:
        self.w(u'<div class="panel panel-danger">')
        self.w(u'<div class="panel-heading">')
        self.w(u'<h2 class="panel-title">ERROR</h2>')
        self.w(u'</div>')
        self.w(u'<div class="panel-body">')
        self.w(u"<h3>Configuration unknown</h3>")
        self.w(u"The json file configuring the form can't be "
                "read: {0}".format(
                    self._cw.vreg.config["upload_structure_json"]))
        self.w(u'</div>')
        self.w(u'</div>')
        return -1

    # Create the form
    form = self._cw.vreg["forms"].select(
        "upload-form", self._cw, action="", form_name=form_name)
    # Keep the declared type/label of each field so posted values can be
    # stored alongside their metadata later on
    fields_types = {}
    fields_labels = {}
    error_to_display = None
    try:
        # Go through each field description
        for field in config[form_name]["Fields"]:
            # Remove reserved field keys
            # > rql: a RQL that will be used to initialize another field.
            #   The current field must contain a list.
            #   Must be of the form <RQL>:<field_name>.
            #   Format the RQL string with the user login: use '{}' format
            #   synthax in your RQL to inherit from this functionality.
            if "rql" in field:
                rql, dest_name = field.pop("rql").split(":")
                rql = rql.format(self._cw.user.login)
                if dest_name not in field:
                    raise ValueError(
                        "'{0}' not in field attributes.".format(dest_name))
                if not isinstance(field[dest_name], list):
                    raise ValueError(
                        "'{0}' field attribute is not a list.".format(
                            dest_name))
                rset = self._cw.execute(rql)
                for row in rset.rows:
                    field[dest_name].extend(row)

            # > type: the field type that must be declared in the registry
            field_type = field.pop("type")
            fields_types[field["name"]] = field_type
            fields_labels[field["name"]] = field["label"]

            # > style: the css style that will be applied to the field div
            style = None
            if "style" in field:
                style = field.pop("style")

            # Store the fields that must be checked using a Regex
            if "check_value" in field:
                check_struct[field["name"]] = field.pop("check_value")

            # Check that the upload directory is created
            # If not display a danger message
            # Store also required file fields
            if field_type in ("FileField", "MultipleFileField"):
                if not os.path.isdir(
                        self._cw.vreg.config["upload_directory"]):
                    self.w(u"<p class='label label-danger'>{0}: File "
                            "field can't be used because the "
                            "'upload_directory' has not been set in "
                            "all-in-ine.conf file or its path cannot be "
                            "created ({1})</p>".format(
                                field.pop("label"),
                                self._cw.vreg.config["upload_directory"]))
                    # Skip this field entirely: it cannot be stored
                    continue
                if "required" in field and field["required"]:
                    required_file_fields[field["name"]] = field["label"]

            # If the field is in the registry add the field to the form
            # If requested add some custom styles to the field
            if field_type in DECLARED_FIELDS:
                form.append_field(DECLARED_FIELDS[field_type](**field))
                if style is not None:
                    widget = form.field_by_name(
                        field["name"]).get_widget(form)
                    widget.attrs["style"] = unicode(style)
            # Otherwise display a danger message
            else:
                self.w(
                    u"<p class='label label-danger'>'{0}': Unknown field "
                    "type.</p>".format(field_type))

    # If something goes wrong during the form creation, display a danger
    # message and print the trace in the terminal
    except ValueError as error:
        print traceback.format_exc()
        error_to_display = error.message
    except:
        print traceback.format_exc()
        error_to_display = "The configuration file can't be read."

    # Display the error message
    if error_to_display is not None:
        self.w(u'<div class="panel panel-danger">')
        self.w(u'<div class="panel-heading">')
        self.w(u'<h2 class="panel-title">ERROR</h2>')
        self.w(u'</div>')
        self.w(u'<div class="panel-body">')
        self.w(u'<h3>Configuration file syntax error</h3>')
        self.w(u'{0}<br>'.format(error_to_display))
        self.w(u'Please refer to the documentation and make corrections')
        self.w(u'</div>')
        self.w(u'</div>')
        return -1

    # Form processings
    error_to_display = None
    try:
        # Retrieve the posted form field values
        posted = form.process_posted()

        # Check posted fields
        errors = self.check_posted(posted, required_file_fields,
                                   check_struct)
        if errors != {}:
            # Raised with empty errors: the ValidationError handler below
            # recomputes and concatenates the application errors
            raise ValidationError(None, {})

        # Create the CWUpload entity
        upload = self._cw.create_entity(
            "CWUpload", form_name=unicode(form_name),
            status=u"Quarantine")

        # Go through the posted form parameters. Deported fields are
        # stored in UploadFile entities, other fields in UploadField
        # entities
        file_eids = []
        field_eids = []
        file_entities = []
        field_entities = []
        for field_name, field_value in posted.items():
            # > files are deported
            if isinstance(field_value, Binary):
                # Create an UploadFile entity
                extension = ".".join(field_value.filename.split(".")[1:])
                entity = self._cw.create_entity(
                    "UploadFile", name=field_name, data=field_value,
                    data_extension=unicode(extension),
                    data_name=field_value.filename)
                file_eids.append(entity.eid)
                file_entities.append(entity)
                # Add relation with the CWUpload entity
                self._cw.execute("SET U upload_files F WHERE "
                                 "U eid %(u)s, F eid %(f)s",
                                 {"u": upload.eid, "f" : file_eids[-1]})
            # > other fields are stored in the database
            else:
                # Create an UploadField entity
                entity = self._cw.create_entity(
                    "UploadField", name=unicode(field_name),
                    value=unicode(field_value),
                    type=unicode(fields_types[field_name]),
                    label=unicode(fields_labels[field_name]))
                field_eids.append(entity.eid)
                field_entities.append(entity)
                # Add relation with the CWUpload entity
                self._cw.execute("SET U upload_fields F WHERE "
                                 "U eid %(u)s, F eid %(f)s",
                                 {"u": upload.eid, "f" : field_eids[-1]})

        # Call synchrone check function: a dotted path
        # '<module>.<function>' declared in the form configuration
        check_func_desc = config[form_name].get("SynchroneCheck")
        if check_func_desc is not None:
            module_name = check_func_desc[:check_func_desc.rfind(".")]
            func_name = check_func_desc[check_func_desc.rfind(".") + 1:]
            module = import_module(module_name)
            check_func = getattr(module, func_name)
            try:
                # The check function returns an error message (or None)
                error_to_display = check_func(
                    self._cw.cnx, posted, upload, file_entities,
                    field_entities)
            except:
                exc_type, exc_value, exc_tb = sys.exc_info()
                raise Exception(traceback.format_exc())
            finally:
                # Turn a returned error message into a ValidationError so
                # the transaction is aborted
                if error_to_display is not None:
                    raise ValidationError(
                        None, {None: "<br><br>" + error_to_display})

        # Redirection to the created CWUpload entity
        raise Redirect(self._cw.build_url(eid=upload.eid))

    # Handle exceptions
    # > nothing posted yet: simply render the empty form below
    except RequestError:
        pass
    except ValueError as error:
        error_to_display = error.message
    except ValidationError as error:
        # Check posted fields to concatenate the CW and application errors
        posted = {}
        for field in form.iter_modified_fields():
            posted[field.name] = form._cw.form[field.name]
        errors = self.check_posted(posted, required_file_fields,
                                   check_struct)
        concatenated_errors = {}
        for dict_struct in (errors, error.errors):
            for key, value in dict_struct.items():
                concatenated_errors.setdefault(key, []).append(value)
        concatenated_errors = dict(
            (key, " - ".join(value))
            for key, value in concatenated_errors.items())
        raise ValidationError(None, concatenated_errors)
    except Redirect:
        # Success path: propagate the redirection to the created entity
        raise
    except Unauthorized:
        error_to_display = "You are not allowed to upload data."
    except:
        print traceback.format_exc()
        error_to_display = ("Unexpected error, please contact the service "
                            "administrator.")
        raise ValidationError(
            None, {None: "<br><br>" + error_to_display})

    # Form rendering
    self.w(u"<legend>'{0}' upload form</legend>".format(
        form_name))
    form.render(w=self.w, formvalues=self._cw.form)

    # Display the error message in the page
    if error_to_display is not None:
        # Abort the partially-applied transaction before reporting
        self._cw.cnx.rollback()
        self.w(u'<div class="panel panel-danger">')
        self.w(u'<div class="panel-heading">')
        self.w(u'<h2 class="panel-title">ULPLOAD ERROR</h2>')
        self.w(u'</div>')
        self.w(u'<div class="panel-body">')
        self.w(u"{0}".format(error_to_display))
        self.w(u'</div>')
        self.w(u'</div>')