class RQLExpression(AnyEntity):
    """Entity wrapping an RQL expression used in permission checks."""
    __regid__ = 'RQLExpression'
    fetch_attrs, cw_fetch_order = fetch_config(
        ['exprtype', 'mainvars', 'expression'])

    def dc_title(self):
        return self.expression or u''

    def dc_long_title(self):
        return '%s(%s)' % (self.exprtype, self.expression or u'')

    @property
    def expression_of(self):
        """First entity this expression is a condition/permission of."""
        for rtype in ('read_permission', 'add_permission', 'delete_permission',
                      'update_permission', 'condition'):
            related = getattr(self, 'reverse_%s' % rtype)
            if related:
                return related[0]

    @cached
    def _rqlexpr(self):
        # NOTE(review): returns None for non-ERQL expression types, which
        # would make check_expression fail with AttributeError — confirm
        # only ERQLExpression instances reach this code path.
        if self.exprtype == 'ERQLExpression':
            return ERQLExpression(self.expression, self.mainvars, self.eid)
        #if self.exprtype == 'RRQLExpression': return RRQLExpression(self.expression, self.mainvars, self.eid)

    def check_expression(self, *args, **kwargs):
        return self._rqlexpr().check(*args, **kwargs)
class CWSource(_CWSourceCfgMixIn, AnyEntity):
    """Data source entity, merging per-host configuration overrides."""
    __regid__ = 'CWSource'
    fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])

    @property
    def host_config(self):
        # start from this source's own config, then layer on top every
        # host-specific config whose pattern matches the current host name
        config = self.dictconfig
        host = gethostname()
        for hostcfg in self.host_configs:
            if hostcfg.match(host):
                self.info('matching host config %s for source %s',
                          hostcfg.match_host, self.name)
                config.update(hostcfg.dictconfig)
        return config

    @property
    def host_configs(self):
        return self.reverse_cw_host_config_of

    @property
    def repo_source(self):
        """repository only property, not available from the web side (eg
        self._cw is expected to be a server session)
        """
        return self._cw.repo.source_by_eid(self.eid)
class Lieu(AnyEntity):
    """A place, identified by its town and region."""
    __regid__ = 'Lieu'
    fetch_attrs, cw_fetch_order = fetch_config(['ville', 'region'])

    def dc_title(self):
        return u'%s' % self.ville

    def dc_long_title(self):
        #self.complete()
        return u'%s (%s)' % (self.ville, self.region)
class MateriauxParure(AnyEntity):
    """Link between an adornment and the material it is made of."""
    __regid__ = 'MateriauxParure'
    fetch_attrs, cw_fetch_order = fetch_config(
        ['usage', 'type_mesure', 'quantite', 'unite',
         'provenance_mesure', 'conversion', 'materiaux_achete', 'materiaux'])

    def dc_title(self):
        # titled after the related material
        return self.materiaux[0].dc_title()
class PaidByAccount(AnyEntity):
    """Account an expense was paid from."""
    __regid__ = 'PaidByAccount'
    fetch_attrs, cw_fetch_order = fetch_config(['label', 'account'])

    def dc_title(self):
        return self.label

    def dc_long_title(self):
        return u'%s (%s)' % (self.label, self.account)
class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity):
    """Host-specific configuration chunk attached to a CWSource."""
    __regid__ = 'CWSourceHostConfig'
    fetch_attrs, cw_fetch_order = fetch_config(['match_host', 'config'])

    @property
    def cwsource(self):
        return self.cw_host_config_of[0]

    def match(self, hostname):
        """Return the regex match of `hostname` against this entry's
        `match_host` pattern (None when it does not match).
        """
        return re.match(self.match_host, hostname)
class EmailAddress(AnyEntity):
    """customized class for EmailAddress entities"""
    __regid__ = 'EmailAddress'
    fetch_attrs, cw_fetch_order = fetch_config(['address', 'alias'])
    rest_attr = 'eid'

    def dc_title(self):
        if self.alias:
            return '%s <%s>' % (self.alias, self.display_address())
        return self.display_address()

    @property
    def email_of(self):
        # entity using this address through use_email, or None
        return self.reverse_use_email and self.reverse_use_email[0] or None

    @property
    def prefered(self):
        # prefered form of this address, defaulting to self
        return self.prefered_form and self.prefered_form[0] or self

    def related_emails(self, skipeids=None):
        """Yield email entities sent to or from this address, most recent
        first, skipping eids already in `skipeids` (which is updated).
        """
        # XXX move to eemail
        # check email relations are in the schema first
        subjrels = self.e_schema.object_relations()
        if not ('sender' in subjrels and 'recipients' in subjrels):
            return
        rset = self._cw.execute(
            'DISTINCT Any X, S, D ORDERBY D DESC '
            'WHERE X sender Y or X recipients Y, '
            'X subject S, X date D, Y eid %(y)s', {'y': self.eid})
        if skipeids is None:
            skipeids = set()
        # enumerate instead of range(len(rset)): we need both the row
        # index (for get_entity) and the row's first column (the eid)
        for i, row in enumerate(rset):
            eid = row[0]
            if eid in skipeids:
                continue
            skipeids.add(eid)
            yield rset.get_entity(i, 0)

    def display_address(self):
        """Return the address, mangled if the instance is configured so."""
        if self._cw.vreg.config['mangle-emails']:
            return mangle_email(self.address)
        return self.address

    def printable_value(self, attr, value=_marker, attrtype=None,
                        format='text/html'):
        """overriden to return displayable address when necessary"""
        if attr == 'address':
            address = self.display_address()
            if format == 'text/html':
                address = xml_escape(address)
            return address
        return super(EmailAddress, self).printable_value(attr, value,
                                                         attrtype, format)
class Materiaux(AnyEntity):
    """A material (cloth, fur, ...), titled with its name, colour and
    provenance.
    """
    __regid__ = 'Materiaux'
    fetch_attrs, _ = fetch_config(
        ['type', 'famille', 'nom', 'couleur', 'carac_couleur',
         'carac_facture'])
    # one-letter type codes mapped to human readable names
    type_names = {'E': u'étoffe', 'F': u'fourrure', 'M': u'mercerie',
                  'O': u'orfèvrerie', 'B': u'broderie', 'P': u'peau',
                  '?': u'inconnu'}

    def _provenance_suffix(self):
        # " de <place>" suffix when a provenance is known, else ''
        # (was duplicated in dc_title and dc_long_title)
        if self.provenance:
            return u' de %s' % self.provenance[0].dc_title()
        return ''

    def dc_title(self):
        #self.complete()
        return u'%s %s%s' % (self.nom, self.couleur,
                             self._provenance_suffix())

    def dc_long_title(self):
        #self.complete()
        return u'[%s-%s] %s %s%s' % (self.type, self.famille, self.nom,
                                     self.couleur, self._provenance_suffix())

    @property
    def long_type(self):
        return self.type_names[self.type]

    @property
    def long_famille(self):
        # renamed local from 'type' which shadowed the builtin
        type_name = self.long_type
        if self.famille == u'NA' or self.famille is None:
            return type_name
        return u" - ".join([type_name, self.famille])

    def get_provenance(self):
        if self.provenance:
            return self.provenance[0].dc_title()
        return None

    @classmethod
    def cw_fetch_order(cls, select, attr, var):
        if attr in ('type', 'famille', 'nom', 'couleur'):
            select.add_sort_var(var, asc=True)
class Card(AnyEntity):
    """Wiki-like card, addressed by its wikiid when it has one."""
    __regid__ = 'Card'
    rest_attr = 'wikiid'
    fetch_attrs, cw_fetch_order = fetch_config(['title'])

    def rest_path(self):
        # fall back on the default rest path when no wikiid is set
        if not self.wikiid:
            return super(Card, self).rest_path()
        return '%s/%s' % (str(self.e_schema).lower(),
                          self._cw.url_quote(self.wikiid, safe='/'))
class Transaction(AnyEntity):
    """An account-book transaction."""
    __regid__ = 'Transaction'
    fetch_attrs, cw_fetch_order = fetch_config((
        'pagination', 'date', 'date_ordre', 'date_recette',
        'occasion', 'lieu', 'prix_ensemble',
    ))

    @property
    def _date(self):
        # first non-None date in preference order, else a marker label
        for candidate in (self.date, self.date_ordre, self.date_recette):
            if candidate is not None:
                return candidate
        return _('pas de date')

    def dc_title(self):
        return u'p. %s [n° %d, %s]' % (self.pagination, self.eid, self._date)

    def dc_long_title(self):
        return u'%s p. %s [n° %d, %s]' % (self.compte[0].inventaire,
                                          self.pagination, self.eid,
                                          self._date)

    def get_best_date(self):
        # same preference order, but only truthy dates count here;
        # the account's start date is the last resort
        for candidate in (self.date, self.date_ordre, self.date_recette):
            if candidate:
                return candidate
        return self.compte[0].debut
class Travail(AnyEntity):
    """A piece of work performed by a craftsman."""
    __regid__ = 'Travail'
    fetch_attrs, cw_fetch_order = fetch_config(
        ('tache', 'artisan', 'salaire_argent'))

    def dc_title(self):
        # flag tasks shared between several craftsmen with a '(+)' suffix
        nb_artisans = self._cw.execute(
            'Any COUNT(Y) WHERE X eid %(eid)s, T travaux X, T travaux Y',
            {'eid': self.eid})[0][0]
        if nb_artisans > 1:
            return self.tache + u' (+)'
        return self.tache
class State(AnyEntity):
    """customized class for State entities"""
    __regid__ = 'State'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])
    rest_attr = 'eid'

    def dc_long_title(self):
        return '%s (%s)' % (self.name, self._cw._(self.name))

    @property
    def workflow(self):
        # take care, may be missing in multi-sources configuration
        return self.state_of and self.state_of[0] or None
class CWProperty(AnyEntity):
    """A (pkey, value) configuration property."""
    __regid__ = 'CWProperty'
    fetch_attrs, cw_fetch_order = fetch_config(['pkey', 'value'])
    rest_attr = 'pkey'

    def typed_value(self):
        """Value coerced according to the key's declared property type."""
        return self._cw.vreg.typed_value(self.pkey, self.value)

    def dc_description(self, format='text/plain'):
        # unknown keys simply have no description
        try:
            return self._cw._(self._cw.vreg.property_info(self.pkey)['help'])
        except UnknownProperty:
            return u''
class CWConstraint(AnyEntity):
    """A schema constraint, titled like 'ConstraintType(value)'."""
    __regid__ = 'CWConstraint'
    fetch_attrs, cw_fetch_order = fetch_config(['value'])

    @property
    def type(self):
        return self.cstrtype[0].name

    def __repr__(self):
        return '<Entity %s %s of type %s %s at %s>' % (
            self.e_schema, self.eid, self.type,
            list(self.cw_attr_cache), id(self))

    def dc_title(self):
        return '%s(%s)' % (self.type, self.value or u'')
class CWEType(AnyEntity):
    """Entity type definition."""
    __regid__ = 'CWEType'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])

    def dc_title(self):
        return u'%s (%s)' % (self.name, self._cw._(self.name))

    def dc_long_title(self):
        # decorate final types with a UML-like '<<final>>' stereotype
        title = self.dc_title()
        if self.final:
            return u'%s <<%s>>' % (title, self._cw._('final'))
        return title
class Expense(LineContainerMixIn, AnyEntity):
    """An expense report made of ExpenseLine entities."""
    __regid__ = 'Expense'
    fetch_attrs, cw_fetch_order = fetch_config(['title'])

    def dc_long_title(self):
        # reuse the already fetched users instead of calling paid_by() twice
        users = self.paid_by()
        if users:
            return u'%s (%s)' % (self.title,
                                 ', '.join(euser.login for euser in users))
        return self.title

    def euro_taxes(self):
        """Total taxes in euros over all lines."""
        return sum(line.euro_taxes() for line in self.has_lines)

    def euro_total(self):
        """Total amount in euros over all lines."""
        return sum(line.euro_amount() for line in self.has_lines)
class CWGroup(AnyEntity):
    """User group."""
    __regid__ = 'CWGroup'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])
    cw_fetch_unrelated_order = cw_fetch_order

    def dc_long_title(self):
        # show the translation alongside the name when it differs
        name = self.name
        translated = self._cw._(name)
        if translated == name:
            return name
        return '%s (%s)' % (name, translated)

    @cached
    def num_users(self):
        """return the number of users in this group"""
        return self._cw.execute('Any COUNT(U) WHERE U in_group G, G eid %(g)s',
                                {'g': self.eid})[0][0]
class Bookmark(AnyEntity):
    """customized class for Bookmark entities"""
    __regid__ = 'Bookmark'
    fetch_attrs, cw_fetch_order = fetch_config(['title', 'path'])

    def actual_url(self):
        # append the bookmark title as a 'vtitle' query argument when set
        url = self._cw.build_url(self.path)
        if not self.title:
            return url
        parts = list(urlsplit(url))
        vtitle = 'vtitle=%s' % self._cw.url_quote(self.title)
        parts[3] = '%s&%s' % (parts[3], vtitle) if parts[3] else vtitle
        return urlunsplit(parts)

    def action_url(self):
        return self.absolute_url() + '/follow'
class File(AnyEntity):
    """customized class for File entities"""
    __regid__ = 'File'
    fetch_attrs, cw_fetch_order = fetch_config(['data_name', 'title'])

    def set_format_and_encoding(self):
        """try to set format and encoding according to known values (filename,
        file content, format, encoding).

        This method must be called in a before_[add|update]_entity hook else it
        won't have any effect.
        """
        assert 'data' in self.cw_edited, "missing mandatory attribute data"
        if self.cw_edited.get('data'):
            # new content: default the file name from the uploaded object's
            # filename when no explicit data_name was edited
            if (hasattr(self.data, 'filename')
                    and not self.cw_edited.get('data_name')):
                self.cw_edited['data_name'] = self.data.filename
        else:
            # content removed: reset all related metadata and stop here
            self.cw_edited['data_format'] = None
            self.cw_edited['data_encoding'] = None
            self.cw_edited['data_name'] = None
            return
        # only consider format/encoding values explicitly present in this
        # edit, not values persisted before it
        if 'data_format' in self.cw_edited:
            format = self.cw_edited.get('data_format')
        else:
            format = None
        if 'data_encoding' in self.cw_edited:
            encoding = self.cw_edited.get('data_encoding')
        else:
            encoding = None
        if not (format and encoding):
            format, encoding = guess_mimetype_and_encoding(
                data=self.cw_edited.get('data'),
                # use get and not get_value since data has changed, we only
                # want to consider explicitly specified values, not old ones
                filename=self.cw_edited.get('data_name'),
                format=format,
                encoding=encoding,
                fallbackencoding=self._cw.encoding)
            if format:
                self.cw_edited['data_format'] = str(format)
            if encoding:
                self.cw_edited['data_encoding'] = str(encoding)
class FabriqueAvecMat(AnyEntity):
    """Link between a manufactured item and the bought material used."""
    __regid__ = 'FabriqueAvecMat'
    fetch_attrs, cw_fetch_order = fetch_config([
        'usage', 'type_mesure', 'quantite', 'unite',
        'provenance_mesure', 'conversion',
        #'achat_matiere',
    ])

    def dc_title(self):
        return self.achat_matiere[0].dc_title()

    def dc_long_title(self):
        # list every item manufactured with this material
        made_items = [fab.dc_title() for fab in self.reverse_avec_mat]
        return u'fabrique avec %s: %s' % (
            self.achat_matiere[0].dc_long_title(), u', '.join(made_items))
class TrInfo(AnyEntity):
    """customized class for Transition information entities
    """
    __regid__ = 'TrInfo'
    # don't want modification_date
    fetch_attrs, cw_fetch_order = fetch_config(['creation_date', 'comment'],
                                               pclass=None)

    @property
    def for_entity(self):
        """Entity whose workflow this record belongs to."""
        return self.wf_info_for[0]

    @property
    def previous_state(self):
        """State the entity left."""
        return self.from_state[0]

    @property
    def new_state(self):
        """State the entity entered."""
        return self.to_state[0]

    @property
    def transition(self):
        """Transition fired, or None when not recorded."""
        return self.by_transition and self.by_transition[0] or None
class Compte(AnyEntity):
    """An account book, titled after its inventory number."""
    __regid__ = 'Compte'
    fetch_attrs, cw_fetch_order = fetch_config(
        ['debut', 'fin', 'type_compte', 'inventaire'])

    def dc_title(self):
        return self.inventaire

    def dc_long_title(self):
        # avoid a redundant "Compte de" prefix when the inventory label
        # already starts with "compte"
        type_compte = self.type_compte + u' '
        if self.inventaire.lower().startswith('compte'):
            prefix = ''
            type_compte = ''
        elif self.type_compte.lower().startswith(u'hôtel'):
            prefix = u"Compte de l'"
        else:
            prefix = u"Compte de "
        return u'%s%s%s [%s %s]' % (prefix, type_compte, self.inventaire,
                                    self.debut, self.fin)

    def dc_description(self, format='text/plain'):
        title = self.dc_long_title()
        rset = self._cw.execute(
            'Any C, COUNT(CDE) GROUPBY C WHERE CDE is Commande, CDE compte C, C eid %(eid)s',
            {'eid': self.eid})
        nb_cdes = rset[0][1] if rset else 0
        rset = self._cw.execute(
            'Any C, COUNT(CDE) GROUPBY C WHERE CDE is Transaction, CDE compte C, C eid %(eid)s',
            {'eid': self.eid})
        nb_trans = rset[0][1] if rset else 0
        description = '%d commandes, %d transactions' % (nb_cdes, nb_trans)
        if format == 'text/plain':
            return u'\n\n'.join([title, description])
        elif format == 'text/html':
            return u'<h2>%s</h2><p>%s</p>' % (xml_escape(title),
                                              xml_escape(description))
        return u''
class Intervenant(AnyEntity):
    """Someone taking part in a transaction."""
    __regid__ = 'Intervenant'
    fetch_attrs, cw_fetch_order = fetch_config((
        'indemnite',
        #'nb_moyen_transport',
        #'moyen_transport',
        #'prix_transport',
        #'nombre_valets',
        #'prix_valet',
        #'duree',
        'payeur', 'pris', 'commandement', 'relation_de', 'donne_par',
        'par_la_main', 'present', 'delivre_a', 'fait_compte_avec',
        'intervenant'))

    def dc_title(self):
        # titled after the related person
        return self.intervenant[0].dc_title()
class CWRelation(AnyEntity):
    """Relation definition between two entity types."""
    __regid__ = 'CWRelation'
    fetch_attrs = fetch_config(['cardinality'])[0]

    @property
    def rtype(self):
        return self.relation_type[0]

    @property
    def stype(self):
        return self.from_entity[0]

    @property
    def otype(self):
        return self.to_entity[0]

    def dc_title(self):
        return u'%s %s %s' % (self.from_entity[0].name,
                              self.relation_type[0].name,
                              self.to_entity[0].name)

    def dc_long_title(self):
        # show non-'1' cardinalities as '[card]' around the relation name
        card = self.cardinality
        scard = '[%s]' % card[0] if card[0] != '1' else u''
        ocard = '[%s]' % card[1] if card[1] != '1' else u''
        return u'%s %s%s%s %s' % (self.from_entity[0].name, scard,
                                  self.relation_type[0].name, ocard,
                                  self.to_entity[0].name)

    def yams_schema(self):
        """Return the yams relation definition for this relation."""
        rschema = self._cw.vreg.schema.rschema(self.rtype.name)
        return rschema.rdefs[(self.stype.name, self.otype.name)]
class CWRType(AnyEntity):
    """Relation type definition."""
    __regid__ = 'CWRType'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])

    def dc_title(self):
        return u'%s (%s)' % (self.name, self._cw._(self.name))

    def dc_long_title(self):
        # decorate with UML-like stereotypes for special relation types
        stereotypes = []
        _ = self._cw._
        if self.symmetric:
            stereotypes.append(_('symmetric'))
        if self.inlined:
            stereotypes.append(_('inlined'))
        if self.final:
            stereotypes.append(_('final'))
        if stereotypes:
            return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes))
        return self.dc_title()

    def check_inlined_allowed(self):
        """check inlining is possible, raise ValidationError if not possible
        """
        # don't use the persistent schema, we may miss cardinality changes
        # in the same transaction
        for rdef in self.reverse_relation_type:
            card = rdef.cardinality[0]
            # idiomatic 'not in' (was 'not card in')
            if card not in '?1':
                qname = role_name('inlined', 'subject')
                msg = self._cw._("can't set inlined=True, "
                                 "%(stype)s %(rtype)s %(otype)s "
                                 "has cardinality=%(card)s")
                # explicit mapping instead of the fragile `msg % locals()`
                raise ValidationError(self.eid, {qname: msg % {
                    'stype': rdef.stype, 'rtype': self.name,
                    'otype': rdef.otype, 'card': card}})
class ExpenseLine(AnyEntity):
    """A single line of an expense report."""
    __regid__ = 'ExpenseLine'
    fetch_attrs, cw_fetch_order = fetch_config(
        ['diem', 'type', 'title', 'amount', 'currency'])

    @property
    def parent_expense(self):
        # first reverse has_lines entity that is actually an Expense
        for entity in self.reverse_has_lines:
            if entity.e_schema == 'Expense':
                return entity
        return None

    def dc_title(self):
        return u'%s - %s - %s - %s %s' % (
            self._cw.format_date(self.diem), self._cw._(self.type),
            self.title, self.amount, self.currency)

    def dc_long_title(self):
        expense = self.parent_expense
        if expense:
            return u'%s - %s' % (self.title, expense.dc_title())
        return self.dc_title()

    def euro_amount(self):
        """Amount converted to euros through the exchange rate if needed."""
        if self.currency == 'EUR':
            return self.amount
        return self.exchange_rate * self.amount

    def euro_taxes(self):
        """Taxes converted to euros through the taxes exchange rate."""
        if self.taxes_currency == 'EUR':
            return self.taxes
        return self.taxes_exchange_rate * self.taxes
class Commande(AnyEntity):
    """An order line in an account book."""
    __regid__ = 'Commande'
    fetch_attrs, cw_fetch_order = fetch_config(
        ('numero', 'prix_str', 'date_ordre_str'))

    def dc_title(self):
        return u'item %d' % self.numero
class TimeSeries(abstract.AbstractTSMixin, AnyEntity):
    """Periodic time series: values spaced by a fixed granularity from
    `start_date`.
    """
    __regid__ = 'TimeSeries'
    fetch_attrs, cw_fetch_order = fetch_config(
        ['data_type', 'unit', 'granularity', 'start_date'])
    # storage dtypes (numpy) keyed by declared data type
    _dtypes_in = {
        'Float': numpy.float64,
        'Integer': numpy.int32,
        'Boolean': numpy.bool
    }
    # presentation types (plain python) keyed by declared data type
    _dtypes_out = {'Float': float, 'Integer': int, 'Boolean': utils.boolint}

    def dc_title(self):
        return u'TS %s' % self.eid

    @property
    def is_constant(self):
        # a 'constant' granularity means a single repeated value
        return self.granularity == u'constant'

    def dc_long_title(self):
        if self.is_constant:
            return self._cw._(u'Constant time series (value: %s)' %
                              self._cw.format_float(self.first))
        return self._cw._(u'Time series %s starting on %s with %d values' %
                          (self.dc_title(), self.start_date, self.count))

    @cached
    def timestamped_array(self):
        """Return the series as a list of (datetime, value) pairs."""
        date = self.start_date  # pylint:disable-msg=E1101
        data = []
        for v in self.array:
            data.append((date, self.output_value(v)))
            date = self.get_next_date(date)
        return data

    @property
    def end_date(self):
        # fast path for fixed-length granularities; otherwise step once
        # past the last timestamp
        if self.granularity in TIME_DELTAS:
            return self.start_date + self.count * TIME_DELTAS[self.granularity]
        return self.get_next_date(self.timestamped_array()[-1][0])

    def _check_intervals(self, intervals):
        # reject intervals ending before the series even starts
        for start, end in intervals:
            if end < self.start_date:
                raise IndexError("%s date is before the time series's "
                                 "start date (%s)" % (end, self.start_date))

    supported_modes = frozenset(('sum', 'average', 'weighted_average',
                                 'last', 'sum_realized', 'max', 'min'))

    def aggregated_value(self, intervals, mode, use_last_interval=False):
        """Aggregate the series over `intervals` using `mode`; return a
        (timestamp, value) pair.
        """
        # pylint:disable-msg=E1101
        assert mode in self.supported_modes, 'unsupported mode'
        if use_last_interval and mode != 'last':
            raise AssertionError(
                '"use_last_interval" may be True only if mode is "last"')
        if self.is_constant:
            if mode == 'sum':
                raise ValueError(
                    "sum can't be computed with a constant granularity")
            return intervals[0][0], self.first
        if mode == 'last' and len(intervals) != 1 and not use_last_interval:
            raise ValueError(
                '"last" aggregation method cannot be used with more than 1 interval'
            )
        self._check_intervals(intervals)
        values = []
        flat_values = []
        for start, end in intervals:
            interval_date_values = self.get_by_date(slice(start, end),
                                                    with_dates=True)
            values.append((start, end, numpy.array(interval_date_values)))
            interval_values = [
                date_value[1] for date_value in interval_date_values
            ]
            flat_values += interval_values
            # an empty interval makes every aggregation meaningless
            if len(interval_values) == 0:
                raise IndexError()
        flat_values = numpy.array(flat_values)
        start = intervals[0][0]
        end = intervals[-1][1]
        if mode == 'last':
            # take the value just before the end of the (single) interval
            last_index = self.get_rel_index(end - timedelta(seconds=1))
            tstamp = end - timedelta(seconds=1)
            value = self.timestamped_array()[last_index][1]
            return tstamp, value
        elif mode == 'max':
            return start, flat_values.max()
        elif mode == 'min':
            return start, flat_values.min()
        elif mode == 'sum_realized':
            return start, flat_values.sum()
        elif mode in ('sum', 'average', 'weighted_average'):
            nums = []
            denoms = []
            for start, end, interval_date_values in values:
                interval_values = interval_date_values[:, 1]
                # coefs weight each sample by the fraction of its period
                # actually covered by the interval
                coefs = numpy.ones(interval_values.shape, float)
                start_frac = self.get_frac_offset(start)
                end_frac = self.get_frac_offset(end)
                coefs[0] -= start_frac
                if end_frac != 0:
                    coefs[-1] -= 1 - end_frac
                if mode == 'weighted_average':
                    # additionally weight by each period's length in days
                    interval_dates = interval_date_values[:, 0]
                    weights = [
                        self.get_duration_in_days(date)
                        for date in interval_dates
                    ]
                    coefs *= weights
                num = (interval_values * coefs).sum()
                nums.append(num)
                denom = coefs.sum()
                denoms.append(denom)
            if mode == 'sum':
                return start, sum(nums)
            elif mode in ('average', 'weighted_average'):
                return start, sum(nums) / sum(denoms)
        else:
            raise ValueError('unknown mode %s' % mode)

    def get_offset(self, date):
        return self.calendar.get_offset(date, self.granularity)

    def get_frac_offset(self, date):
        return self.calendar.get_frac_offset(date, self.granularity)

    def get_duration_in_days(self, date):
        return self.calendar.get_duration_in_days(self.granularity, date)

    def get_next_date(self, date):
        return utils.get_next_date(self.granularity, date)

    def get_next_month(self, date):
        return utils.get_next_month(date)

    def get_next_year(self, date):
        return utils.get_next_year(date)

    def compressed_timestamped_array(self):
        """ eliminates duplicated values in piecewise constant timeseries """
        data = self.timestamped_array()
        compressed_data = [data[0]]
        delta = timedelta(seconds=1)
        last_date = data[-1][0]
        if len(data) != 1:
            for date, value in data[1:]:
                previous_value = compressed_data[-1][1]
                if value != previous_value:
                    # close the previous plateau one second before the
                    # change, then open the new one
                    compressed_data.append((date - delta, previous_value))
                    compressed_data.append((date, value))
                if date == last_date:
                    # append a closing point one period after the last date
                    if value != previous_value:
                        compressed_data.append((date, value))
                        compressed_data.append(
                            (self.get_next_date(date), value))
                    else:
                        compressed_data.append(
                            (self.get_next_date(date), value))
        else:
            # single-point series: just add the end sentinel
            end_date = self.get_next_date(last_date)
            value = data[-1][1]
            compressed_data.append((end_date, value))
        return compressed_data

    def python_value(self, v):
        self.warning('python_value is deprecated, use output_value instead')
        return self.output_value(v)

    def output_value(self, v):
        """ use this for external representation purposes, but NOT as
        an entry/input method as Boolean really should be a boolean
        internally """
        return self._dtypes_out[self.data_type](v)  # pylint:disable-msg=E1101

    def input_value(self, v):
        """ if you need to update some data piecewise, use this to get
        it to the correct input type """
        return self._dtypes_in[self.data_type](v)  # pylint:disable-msg=E1101

    @property
    def dtype(self):
        """ provides the correct python data type for input purposes """
        return self._dtypes_in.get(self.data_type, numpy.float64)

    @property
    def safe_unit(self):
        # XXX maybe we just want '' as default ?
        if self.unit is None:
            return u''
        return self.unit

    @property
    def first(self):
        return self.array[0]

    @property
    def first_unit(self):
        return '%s%s' % (self.first, self.safe_unit)

    @property
    def last(self):
        return self.array[-1]

    @property
    def last_unit(self):
        return '%s%s' % (self.last, self.safe_unit)

    @property
    def count(self):
        return self.array.size

    # NOTE: the min/max/sum properties below shadow the builtins of the
    # same name inside this class body only
    @property
    def min(self):
        return self.array.min()

    @property
    def min_unit(self):
        return '%s%s' % (self.output_value(self.min), self.safe_unit)

    @property
    def max(self):
        return self.array.max()

    @property
    def max_unit(self):
        return '%s%s' % (self.output_value(self.max), self.safe_unit)

    @property
    def sum(self):
        return self.array.sum()

    @property
    def sum_unit(self):
        return '%s%s' % (self.sum, self.safe_unit)

    @property
    def average(self):
        return self.array.mean()

    @property
    def average_unit(self):
        return '%s%s' % (self.average, self.safe_unit)

    @property
    def use_calendar(self):
        return 'gregorian'

    @property
    def calendar(self):
        return get_calendar(self.use_calendar)  # pylint:disable-msg=E1101

    def get_values_between(self, start_date, end_date):
        """Return (timestamp, value) pairs with start_date <= t < end_date."""
        # pylint:disable-msg=E1101
        if start_date is None:
            start_date = self.start_date
        if self.is_constant:
            return [
                (start_date, self.first),
            ]
        values = []
        for tstamp, value in self.timestamped_array():
            if tstamp < start_date:
                continue
            elif end_date is not None and tstamp >= end_date:
                break
            values.append((tstamp, value))
        return values

    def get_absolute(self, abs_index, with_dates=False):
        index = self._make_relative_index(abs_index)
        return self.get_relative(index, with_dates)

    def get_rel_index(self, date):
        abs_index = self.get_offset(date)
        return self._make_relative_index(abs_index)

    def get_by_date(self, date, with_dates=False):
        """Index the series by a date or a slice of dates."""
        # pylint:disable-msg=E1101
        if type(date) is slice:
            assert date.step is None
            if self.is_constant:
                date = slice(None, None)
            if date.start is None:
                start = None
            else:
                # start = self.get_rel_index(date.start)
                start = self.get_offset(date.start)
            if date.stop is None:
                stop = None
            else:
                # stop = self.get_rel_index(date.stop)
                stop = self.get_offset(date.stop)
            index = slice(start, stop, None)
        else:
            # index = self.get_rel_index(date)
            index = self.get_offset(date)
        # return self.get_relative(index, with_dates)
        return self.get_absolute(index, with_dates)

    def _make_relative_index(self, abs_index):
        # translate an absolute calendar offset into an index relative to
        # this series' start
        if isinstance(abs_index, (int, float)):
            return int(floor(abs_index - self._start_offset))
        elif type(abs_index) is slice:
            if abs_index.start is None:
                start = None
            else:
                start = max(0, int(floor(abs_index.start - self._start_offset)))
            if abs_index.stop is None:
                stop = None
            else:
                stop = max(0, int(ceil(abs_index.stop - self._start_offset)))
            # NOTE(review): when abs_index.start is None, `start` is None
            # and this comparison would fail on python 3 — confirm open
            # start slices never reach this check
            if start > len(self.array):
                raise IndexError('start is too big')
            return slice(start, stop, abs_index.step)
        else:
            raise TypeError('Unsupported index type %s' % type(abs_index))

    def get_relative(self, index, with_dates=False):
        try:
            if with_dates:
                return self.timestamped_array()[index]
            else:
                return self.array[index]
        except IndexError as exc:
            # enrich the error with the offending index
            raise IndexError(exc.args + (index, ))

    @property
    @cached
    def _start_offset(self):
        return self.get_offset(self.start_date)
class BaseTransition(AnyEntity):
    """customized class for abstract transition

    provides a specific may_be_fired method to check if the relation may be
    fired by the logged user
    """
    __regid__ = 'BaseTransition'
    fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])

    def __init__(self, *args, **kwargs):
        # abstract type: only concrete subclasses may be instantiated
        if self.cw_etype == 'BaseTransition':
            raise WorkflowException('should not be instantiated')
        super(BaseTransition, self).__init__(*args, **kwargs)

    @property
    def workflow(self):
        return self.transition_of[0]

    def has_input_state(self, state):
        """Return True if `state` (entity or eid) allows this transition."""
        if hasattr(state, 'eid'):
            state = state.eid
        return any(s for s in self.reverse_allowed_transition
                   if s.eid == state)

    def may_be_fired(self, eid):
        """return true if the logged user may fire this transition

        `eid` is the eid of the object on which we may fire the transition
        """
        # enable debug traces only when security debugging is turned on
        DBG = False
        if server.DEBUG & server.DBG_SEC:
            if 'transition' in server._SECURITY_CAPS:
                DBG = True
        user = self._cw.user
        # check user is at least in one of the required groups if any
        groups = frozenset(g.name for g in self.require_group)
        if groups:
            matches = user.matching_groups(groups)
            if matches:
                if DBG:
                    print('may_be_fired: %r may fire: user matches %s' %
                          (self.name, groups))
                return matches
            # 'owners' group: owning the target entity is enough
            if 'owners' in groups and user.owns(eid):
                if DBG:
                    print('may_be_fired: %r may fire: user is owner' %
                          self.name)
                return True
        # check one of the rql expression conditions matches if any
        if self.condition:
            if DBG:
                print('may_be_fired: %r: %s' %
                      (self.name,
                       [(rqlexpr.expression,
                         rqlexpr.check_expression(self._cw, eid))
                        for rqlexpr in self.condition]))
            for rqlexpr in self.condition:
                if rqlexpr.check_expression(self._cw, eid):
                    return True
        # some restriction existed but none was satisfied
        if self.condition or groups:
            return False
        # unrestricted transition
        return True

    def set_permissions(self, requiredgroups=(), conditions=(), reset=True):
        """set or add (if `reset` is False) groups and conditions for this
        transition
        """
        if reset:
            # drop existing permissions before installing the new ones
            self._cw.execute('DELETE T require_group G WHERE T eid %(x)s',
                             {'x': self.eid})
            self._cw.execute('DELETE T condition R WHERE T eid %(x)s',
                             {'x': self.eid})
        for gname in requiredgroups:
            rset = self._cw.execute(
                'SET T require_group G '
                'WHERE T eid %(x)s, G name %(gn)s', {
                    'x': self.eid,
                    'gn': gname
                })
            assert rset, '%s is not a known group' % gname
        # accept a single expression string as a one-element tuple
        if isinstance(conditions, str):
            conditions = (conditions, )
        for expr in conditions:
            if isinstance(expr, str):
                kwargs = {'expr': expr}
            else:
                # already a mapping of query arguments
                assert isinstance(expr, dict)
                kwargs = expr
            kwargs['x'] = self.eid
            kwargs.setdefault('mainvars', u'X')
            self._cw.execute(
                'INSERT RQLExpression X: X exprtype "ERQLExpression", '
                'X expression %(expr)s, X mainvars %(mainvars)s, '
                'T condition X WHERE T eid %(x)s', kwargs)
class NonPeriodicTimeSeries(timeseries.TimeSeries):
    """Time series whose points carry explicit timestamps instead of being
    spaced by a fixed granularity.
    """
    __regid__ = 'NonPeriodicTimeSeries'
    fetch_attrs, cw_fetch_order = fetch_config(['data_type', 'unit',
                                                'granularity'])
    # timestamps are explicit, so this series is never 'constant'
    is_constant = False

    @cachedproperty
    def timestamps_array(self):
        # XXX turn into datetime here ?
        # timestamps are stored zlib-compressed and pickled
        raw_data = self.timestamps.getvalue()
        raw_data = zlib.decompress(raw_data)
        return pickle.loads(raw_data)

    @cached
    def timestamped_array(self):
        # (datetime, output value) pairs for every stored point
        data = []
        for t, v in izip(self.timestamps_array, self.array):
            data.append((self.calendar.timestamp_to_datetime(t),
                         self.output_value(v)))
        return data

    @cachedproperty
    def start_date(self):
        return self.calendar.timestamp_to_datetime(self.timestamps_array[0])

    def get_next_date(self, date):
        index = bisect_left(self.timestamps_array,
                            self.calendar.datetime_to_timestamp(date))
        # XXX what if out of bound
        return self.calendar.timestamp_to_datetime(
            self.timestamps_array[index])

    def get_rel_index(self, date, offset=-1):
        timestamp = self.calendar.datetime_to_timestamp(date)
        array = self.timestamps_array
        idx = bisect_left(array, timestamp)
        # unless this is an exact match, add offset if any to mimick
        # periodic ts behaviour
        if timestamp != array[idx]:
            return max(idx + offset, 0)
        return idx

    def get_by_date(self, date, with_dates=False):
        """Index the series by a date or a slice of dates."""
        #pylint:disable-msg=E1101
        if type(date) is slice:
            assert date.step is None
            if date.start is None:
                start = None
            else:
                start = self.get_rel_index(date.start, -1)
            if date.stop is None:
                stop = None
            else:
                stop = self.get_rel_index(date.stop, 0)
            index = slice(start, stop, None)
        else:
            index = self.get_rel_index(date)
        return self.get_relative(index, with_dates)

    def get_duration_in_days(self, date):
        # length in days of the interval containing `date`
        idx = self.get_rel_index(date)
        array = self.timestamped_array()
        return timedelta_to_days(array[idx + 1][0] - array[idx][0])

    def get_frac_offset(self, date):
        # fraction of its containing interval elapsed at `date`
        idx = self.get_rel_index(date)
        array = self.timestamped_array()
        try:
            totalsecs = timedelta_to_seconds(array[idx + 1][0] -
                                             array[idx][0])
        except IndexError:
            # date out of bound, consider previous interval
            totalsecs = timedelta_to_seconds(array[idx][0] -
                                             array[idx - 1][0])
        deltasecs = timedelta_to_seconds(date - array[idx][0])
        return deltasecs / max(totalsecs, deltasecs)

    @property
    def _start_offset(self):
        return self.calendar.get_offset(self.start_date, self.granularity)

    def get_offset(self, datetime):
        # position of `datetime` in the explicit timestamps array
        timestamp = self.calendar.datetime_to_timestamp(datetime)
        array = self.timestamps_array
        idx = bisect_left(array, timestamp)
        return idx

    def cw_clear_all_caches(self):
        super(NonPeriodicTimeSeries, self).cw_clear_all_caches()
        # also drop cachedproperty values stored on the instance itself
        if 'start_date' in vars(self):
            del self.start_date
        if 'timestamps_array' in vars(self):
            del self.timestamps_array
        clear_cache(self, 'timestamped_array')