class CatalogSearch(orm.BaseModel):
    '''Runs a raw search-service query against the configured document index
    and stores the matched documents (as dicts) plus paging state on the
    context.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        index_name = self.cfg.get('index', None)
        search_arguments = context.input.get('search')
        # Merge static ('s') and context-resolved dynamic ('d') overrides on
        # top of the caller-supplied search arguments.
        overrides = dict(self.cfg.get('s', {}))
        for target, source_path in self.cfg.get('d', {}).iteritems():
            overrides[target] = tools.get_attr(context, source_path)
        tools.override_dict(search_arguments, overrides)
        query = search_arguments['property'].build_search_query(search_arguments)
        result = search.Index(name=index_name).search(query)
        context._total_matches = result.number_found
        context._entities_count = len(result.results)
        context._entities = map(context.model.search_document_to_dict,
                                result.results)
        cursor = result.cursor
        if cursor is None:
            context._cursor = None
            context._more = False
        else:
            context._cursor = cursor.web_safe_string
            context._more = True
class CatalogCronDelete(orm.BaseModel):
    '''Cron plugin: finds stale catalogs and enqueues their deletion.

    Picks up to `page` catalogs per state that are either drafts older than
    `unpublished_life` days or discontinued longer than `discontinued_life`
    days ago, and appends a `delete` callback for each.
    '''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        limit = self.cfg.get('page', 10)
        unpublished_days = self.cfg.get('unpublished_life', 7)
        discontinued_days = self.cfg.get('discontinued_life', 180)
        Catalog = context.models['31']
        stale = Catalog.query(
            Catalog.state == 'draft',
            Catalog.created < (datetime.datetime.now()
                               - datetime.timedelta(days=unpublished_days))
        ).fetch(limit=limit)
        stale += Catalog.query(
            Catalog.state == 'discontinued',
            Catalog.discontinued_date < (datetime.datetime.now()
                                         - datetime.timedelta(days=discontinued_days))
        ).fetch(limit=limit)
        for catalog in stale:
            context._callbacks.append(('callback', {
                'action_id': 'delete',
                'action_model': '31',
                'key': catalog.key.urlsafe()
            }))
class CatalogDiscontinue(orm.BaseModel):
    '''Discontinues up to 4 published/indexed catalogs belonging to the input
    account and, when any were found, schedules another pass via an
    `account_discontinue` callback.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        Catalog = context.models['31']
        account_key = context.input.get('account')
        account = account_key.get()
        if account is None:
            return
        catalogs = Catalog.query(
            Catalog.state.IN(['published', 'indexed']),
            ancestor=account.key).fetch(limit=4)
        for catalog in catalogs:
            context._callbacks.append(('callback', {
                'action_id': 'sudo_discontinue',
                'action_model': '31',
                'key': catalog.key.urlsafe()
            }))
        if catalogs:
            # more catalogs may remain — re-run discontinuation for the account
            context._callbacks.append(('callback', {
                'action_id': 'account_discontinue',
                'action_model': '31',
                'key': account_key.urlsafe()
            }))
class Notify(orm.BaseModel):
    '''Sends a notification about the target entity through the configured
    transports ('mail', 'http', 'channel'), optionally gated by a cfg
    'condition' callable.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        methods = self.cfg.get('method', ['mail'])
        if not isinstance(methods, (list, tuple)):
            methods = [methods]
        condition = self.cfg.get('condition', None)
        entity = tools.get_attr(context, self.cfg.get('path', default_path))
        # values passed both to the condition and to the transport functions
        values = {
            'account': context.account,
            'input': context.input,
            'action': context.action,
            'entity': entity
        }
        values.update(self.cfg.get('s', {}))
        for name, source in self.cfg.get('d', {}).iteritems():
            values[name] = tools.get_attr(context, source)
        if condition is not None and not condition(**values):
            return
        for transport, send in (('mail', tools.mail_send),
                                ('http', tools.http_send),
                                ('channel', tools.channel_send)):
            if transport in methods:
                send(values)
class Set(orm.BaseModel):
    '''Generic context mutator driven by cfg sections, applied in order:

    'f'   — call a zero-arg function, store its result at the path,
    's'   — store static values,
    'd'   — copy values between context paths (skips missing sources),
    'rm'  — delete context paths,
    'rms' — mark structured property values at the paths as removed.
    '''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        cfg = self.cfg
        for path, producer in cfg.get('f', {}).iteritems():
            tools.set_attr(context, path, producer())
        for path, value in cfg.get('s', {}).iteritems():
            tools.set_attr(context, path, value)
        for path, source in cfg.get('d', {}).iteritems():
            resolved = tools.get_attr(context, source, tools.Nonexistent)
            if resolved is not tools.Nonexistent:
                tools.set_attr(context, path, resolved)
        for path in cfg.get('rm', []):
            tools.del_attr(context, path)
        for path in cfg.get('rms', []):
            target = tools.get_attr(context, path)
            if isinstance(target, list):
                for item in target:
                    item._state = 'removed'
            else:
                target._state = 'removed'
class Write(orm.BaseModel):
    '''Persists the entity found at cfg 'path', recording the acting agent and
    action; an optional cfg 'condition' callable can veto the write.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        entity = tools.get_attr(context, self.cfg.get('path', default_path))
        condition = self.cfg.get('condition', None)
        if condition:
            condition_args = {'entity': entity}
            for name, source in self.cfg.get('condition_kwargs', {}).iteritems():
                condition_args[name] = tools.get_attr(context, source)
            if not condition(**condition_args):
                return  # condition vetoed this write
        if entity and isinstance(entity, orm.Model):
            record_arguments = {
                'agent': context.account.key,
                'action': context.action.key
            }
            record_arguments.update(self.cfg.get('sra', {}))
            for name, source in self.cfg.get('dra', {}).iteritems():
                record_arguments[name] = tools.get_attr(context, source)
            entity.write(record_arguments)
class AccountCacheGroupUpdate(orm.BaseModel):
    '''Task-queue plugin that records memcache keys on AccountCacheGroup
    ('135') entities so whole groups of cached responses can be invalidated
    together. With `delete` input, keys are removed from the groups;
    otherwise keys are added and their '<key>_active' markers are set.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        AccountCacheGroup = context.models['135']
        delete = context.input.get('delete')
        keys = context.input.get('keys')
        if keys:
            # NOTE(review): this re-fetch and re-check duplicates the two
            # lines directly above — looks like a leftover; behavior is
            # unchanged either way.
            keys = context.input.get('keys')
            if keys:
                # keys arrive zlib-compressed and b64-encoded (taskqueue
                # payload limit), comma-separated — see the scheduling side.
                keys = zlib.decompress(base64.b64decode(keys)).split(',')
        ids = [
            AccountCacheGroup.build_key(id)
            for id in context.input.get('ids')
        ]
        groups = orm.get_multi(ids)
        save = []
        active = []          # '<key>_active' markers to set True in memcache
        delete_active = []   # '<key>_active' markers to drop from memcache

        def make_active(k):
            # memcache marker name signalling the cache entry is fully stored
            return '%s_active' % k

        for i, group in enumerate(groups):
            changes = False
            if not group:
                # group entity does not exist yet — create it empty
                changes = True
                group = AccountCacheGroup(id=context.input.get('ids')[i],
                                          keys=[])
            for k in keys:
                if k in group.keys:
                    changes = True
                    if delete:
                        group.keys.remove(k)
                        delete_active.extend(
                            [make_active(kk) for kk in group.keys])
                else:
                    changes = True
                    group.keys.append(k)
                    active.extend([make_active(kk) for kk in group.keys])
            if changes:
                save.append(group)
        try:
            orm.put_multi(save)
            tools.mem_delete_multi(delete_active)
            tools.mem_set_multi(dict((k, True) for k in active))
        except RequestTooLargeError as e:  # size of entity exceeded
            if not delete:
                # fallback: keep only the new keys on each group, and flush
                # the memcache entries that the dropped keys referred to
                delete_keys = []
                for s in save:
                    delete_keys.extend(s.keys)
                    s.keys = keys
                orm.put_multi(save)
                if delete_keys:
                    tools.mem_delete_multi(delete_keys)
class RuleExec(orm.BaseModel):
    '''Runs the rule engine for the entity and action resolved from the
    context (cfg 'path' / 'action' override the default locations).'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        target = tools.get_attr(context, self.cfg.get('path', default_path))
        acting = tools.get_attr(context, self.cfg.get('action', 'action'))
        tools.rule_exec(target, acting)
class CatalogSearchDocumentWrite(orm.BaseModel):
    '''Builds search documents for a catalog and every product on its images,
    queuing them on the catalog under the configured custom index name.
    Catalogs without products are not indexed (an empty document list is
    stored for the index).'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        documents = []
        index_name = self.cfg.get('index', None)
        context._catalog.parent_entity.read()  # read seller in-memory
        # extra entity paths folded into the catalog's search document
        catalog_fields = {
            'parent_entity.name':
                orm.SuperStringProperty(
                    search_document_field_name='seller_name'),
            'parent_entity.key._root':
                orm.SuperKeyProperty(
                    kind='23',
                    search_document_field_name='seller_account_key'),
            'parent_entity.logo.value.serving_url':
                orm.SuperStringProperty(
                    search_document_field_name='seller_logo'),
            'cover.value.serving_url':
                orm.SuperStringProperty(search_document_field_name='cover'),
            'cover.value.proportion':
                orm.SuperStringProperty(
                    search_document_field_name='cover_proportion')
        }
        # extra entity paths folded into each product's search document
        product_fields = {
            'key_parent._parent.entity.name':
                orm.SuperStringProperty(
                    search_document_field_name='catalog_name'),
            'key_parent._parent._parent.entity.name':
                orm.SuperStringProperty(
                    search_document_field_name='seller_name'),
            'key_parent._parent._parent.entity.logo.value.serving_url':
                orm.SuperStringProperty(
                    search_document_field_name='seller_logo')
        }
        products = []
        for image in context._catalog._images.value:
            products.extend(image.products.value)
        context._catalog._images = []  # dismember images from put queue to avoid too many rpcs
        write_index = True
        if not len(products):
            write_index = False  # catalogs with no products are not allowed to be indexed
        results = None  # NOTE(review): unused local — kept as-is
        if write_index:
            documents.extend(
                [context._catalog.get_search_document(catalog_fields)])
            documents.extend([
                product.get_search_document(product_fields)
                for product in products
            ])
        context._catalog._write_custom_indexes = {}
        context._catalog._write_custom_indexes[index_name] = documents
        context._catalog._products = []
class UploadImages(orm.BaseModel):
    '''Appends the uploaded images found at cfg 'images_path' to the target
    entity's image-holding property, when that property exposes a callable
    add().'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        holder = tools.get_attr(context, self.cfg.get('path', default_path))
        images_path = self.cfg.get('images_path')
        if holder is None:
            return
        add = getattr(holder, 'add', None)
        if callable(add):
            add(tools.get_attr(context, images_path))
class BlobURL(orm.BaseModel):
    '''Creates a blobstore upload URL targeting a Cloud Storage bucket path
    and stores it at context._blob_url.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        bucket = self.cfg.get('bucket', None)
        # NOTE(review): the cfg key is spelled 'sufix' — existing configs may
        # rely on that spelling, so it must not be "fixed" here.
        suffix = self.cfg.get('sufix', '/' + context.account.key_urlsafe)
        upload_url = context.input.get('upload_url')
        if upload_url and bucket:
            context._blob_url = tools.blob_create_upload_url(
                upload_url, bucket + suffix)
class Search(orm.BaseModel):
    '''Runs the model's search and normalizes the result onto the context.

    Three result shapes are handled: a plain entity list (keys-only search),
    a search-service result object (models using the search engine), and a
    datastore (entities, cursor, more) triple. Profiled when a run takes
    longer than one second.
    '''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    @tools.detail_profile(
        'Search.%s slow %s',
        satisfy=lambda profiler, ctime: ctime.miliseconds > 1000)
    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        static_arguments = self.cfg.get('s', {})
        dynamic_arguments = self.cfg.get('d', {})
        search_arguments = context.input.get('search')
        overide_arguments = {}
        overide_arguments.update(static_arguments)
        for key, value in dynamic_arguments.iteritems():
            overide_arguments[key] = tools.get_attr(context, value)
        # forcefully apply configured overrides over user-supplied arguments
        tools.override_dict(search_arguments, overide_arguments)
        result = context.model.search(search_arguments)
        if search_arguments.get('keys'):
            # keys-only search: result already is the entity list, no paging
            context._entities = result
            context._cursor = None
            context._more = False
        elif context.model._use_search_engine:
            # search-service result object
            context._total_matches = result.number_found
            context._entities_count = len(result.results)
            context._entities = map(context.model.search_document_to_entity,
                                    result.results)
            more = False
            cursor = result.cursor
            if cursor is not None:
                cursor = cursor.web_safe_string
                more = True
            context._cursor = cursor
            context._more = more
        else:
            # datastore query result: (entities, cursor, more)
            context._entities_count = len(result[0])
            context._entities = result[0]
            cursor = result[1]
            if cursor is not None:
                cursor = cursor.urlsafe()
            context._cursor = cursor
            context._more = result[2]
        # if we dont call .read() it wont load any properties that depend on it. e.g. localstructured ones.
        make_original = {'self_reference': True}
        map(lambda ent: ent.read(make_original=make_original),
            context._entities)
class Read(orm.BaseModel):
    '''Reads (or constructs) an entity and stores it on the context.

    cfg:
      source    — context path holding an orm.Key to fetch (default 'input.key')
      model     — context path of the model class (default 'model')
      parent    — context path of a parent key (optional)
      namespace — context path of the namespace (default 'namespace')
      read      — context path of read arguments, or a literal dict
      path      — context path to store the entity (default '_<model name>')
    '''

    # Fixed: default was [], but run() only accepts a dict (a list default
    # was silently coerced to {} on every run) — {} matches the dict-only
    # guard and the default used by every sibling plugin.
    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    @tools.detail_profile(
        'Read.%s slow %s',
        satisfy=lambda profiler, ctime: ctime.miliseconds > 1000)
    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        source_path = self.cfg.get('source', 'input.key')
        model_path = self.cfg.get('model', 'model')
        parent_path = self.cfg.get('parent', None)
        namespace_path = self.cfg.get('namespace', 'namespace')
        read_arguments_path = self.cfg.get('read', 'input.read_arguments')
        save_path = self.cfg.get('path', '_' + context.model.__name__.lower())
        source = tools.get_attr(context, source_path, None)
        model = tools.get_attr(context, model_path)
        parent = tools.get_attr(context, parent_path)
        namespace = tools.get_attr(context, namespace_path)
        if isinstance(read_arguments_path, dict):
            # literal read arguments; deep-copy so the stored cfg is never
            # mutated by downstream readers
            read_arguments = copy.deepcopy(read_arguments_path)
        else:
            read_arguments = tools.get_attr(context, read_arguments_path, {})
        if parent is not None:
            namespace = None  # a parent key already implies the namespace
        if source and isinstance(source, orm.Key):
            # explicit key supplied — fetch and read it
            entity = source.get()
            entity.read(read_arguments)
        elif hasattr(model, 'prepare_key'):
            model_key = model.prepare_key(
                context.input, parent=parent, namespace=namespace
            )  # @note Perhaps, all context system wide variables should be passed to prepare_key (input, output, action, model, models, domain, namespace...)
            if model_key.id() is not None:
                entity = model_key.get()
                if entity is None:
                    # no stored entity under this key yet — build a fresh one
                    entity = model(key=model_key)
                else:
                    entity.read(read_arguments)
            else:
                entity = model(key=model_key)
        else:
            # no key information at all — construct an unsaved entity
            entity = model(parent=parent, namespace=namespace)
        entity.make_original()
        tools.set_attr(context, save_path, entity)
class CatalogSearchDocumentDelete(orm.BaseModel):
    '''Collects the catalog's own key plus every product key from its images
    and schedules them for removal from the configured search index.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        index_name = self.cfg.get('index', None)
        catalog = context._catalog
        doc_ids = [catalog.key_urlsafe]
        for image in catalog._images.value:
            doc_ids.extend(
                product.key_urlsafe for product in image.products.value)
        catalog._images = []  # drop images so they are not re-put later
        catalog._delete_custom_indexes = {index_name: doc_ids}
class Delete(orm.BaseModel):
    '''Deletes the entity at cfg 'path', recording the acting agent/action,
    and flags it as 'deleted' so the response knows it no longer exists.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        entity = tools.get_attr(context, self.cfg.get('path', default_path))
        if not (entity and isinstance(entity, orm.Model)):
            return
        record_arguments = {
            'agent': context.account.key,
            'action': context.action.key
        }
        record_arguments.update(self.cfg.get('sra', {}))
        for name, source in self.cfg.get('dra', {}).iteritems():
            record_arguments[name] = tools.get_attr(context, source)
        entity.delete(record_arguments)
        entity._state = 'deleted'  # signal the response that this is no longer existing
class RulePrepare(orm.BaseModel):
    '''Prepares rule-engine state for the target entity, passing account,
    action and input plus any static ('s') / dynamic ('d') keyword args.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        default_path = '_' + context.model.__name__.lower()
        kwargs = {
            'account': context.account,
            'action': context.action,
            'input': context.input
        }
        kwargs.update(self.cfg.get('s', {}))
        for name, source in self.cfg.get('d', {}).iteritems():
            kwargs[name] = tools.get_attr(context, source)
        target = tools.get_attr(context, self.cfg.get('path', default_path))
        tools.rule_prepare(target, **kwargs)
class Duplicate(orm.BaseModel):
    '''Duplicates either the whole entity at cfg 'source', or a nested child
    addressed by cfg 'duplicate_path', storing the result at cfg 'path'.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        entity_path = self.cfg.get('source',
                                   '_' + context.model.__name__.lower())
        save_path = self.cfg.get('path', '_' + context.model.__name__.lower())
        duplicate_path = self.cfg.get('duplicate_path')
        entity = tools.get_attr(context, entity_path)
        if duplicate_path:
            # state entity._images.value.0.pricetags.value.0
            split_duplicate_path = duplicate_path.split('.')
            child_entity = tools.get_attr(entity, duplicate_path)
            duplicated_child_entity = child_entity.duplicate()
            context.duplicated_entity = duplicated_child_entity
            duplicate_entity = entity
            # gets _images.value.0.pricetags
            child_entity_path = ".".join(split_duplicate_path[:-2])
            # sets entity._images.value.0.pricetags => [duplicated_child_entity]
            try:
                int(split_duplicate_path[-1])  # this is a case of repeated property, put the duplicated child into list
                duplicated_child_entity = [duplicated_child_entity]
            except ValueError:
                pass
            if isinstance(duplicated_child_entity, list):
                # place the duplicate at the end of the repeated list
                length = len(
                    tools.get_attr(entity, '%s.value' % child_entity_path))
                duplicated_child_entity[0]._sequence = length
            tools.set_attr(entity, child_entity_path, duplicated_child_entity)
        else:
            if entity and isinstance(entity, orm.Model):
                duplicate_entity = entity.duplicate()
                context.duplicated_entity = duplicate_entity
        # NOTE(review): if `entity` is falsy in the else branch above,
        # duplicate_entity is unbound here — presumably callers always supply
        # a real entity; confirm.
        tools.set_attr(context, save_path, duplicate_entity)
class CreateChannel(orm.BaseModel):
    '''Creates a channel token for the client, reusing a memcached one while
    it is still considered fresh.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        # NOTE(review): the cfg key 'static_token_refere' looks like a
        # truncated 'static_token_reference'; kept as-is since stored configs
        # may already use this spelling.
        static_reference = self.cfg.get('static_token_refere', None)
        if static_reference is None:
            dynamic_path = self.cfg.get('dynamic_token_reference',
                                        'account.key_urlsafe')
            token_reference = 'channel_%s' % tools.get_attr(
                context, dynamic_path)
        else:
            token_reference = 'channel_%s' % static_reference
        cached = tools.mem_rpc_get(token_reference)
        if cached and cached[1] > time.time():
            # cached token still within its freshness window
            context._token = cached[0]
        else:
            context._token = tools.channel_create(token_reference)
            # token treated as fresh for 6000s; cache entry lives 9600s
            tools.mem_rpc_set(
                token_reference, [context._token, time.time() + 6000], 9600)
class CallbackExec(orm.BaseModel):
    '''Builds callback payloads from cfg entries, stamps caller info on every
    queued callback, dispatches the queue to the task engine and clears it.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default=[])

    def run(self, context):
        if not isinstance(self.cfg, list):
            self.cfg = []
        for queue_name, static_values, dynamic_values, condition in self.cfg:
            payload = dict(static_values)
            for name, source in dynamic_values.iteritems():
                payload[name] = tools.get_attr(context, source)
            if condition is None or condition(**payload):
                context._callbacks.append((queue_name, payload))
        for _, payload in context._callbacks:
            # ensure every callback carries who triggered it
            if payload.get('caller_account') is None:
                payload['caller_account'] = context.account.key_urlsafe
            if payload.get('caller_action') is None:
                payload['caller_action'] = context.action.key_urlsafe
        tools.callback_exec('/api/task/io_engine_run', context._callbacks)
        context._callbacks = []
class UnitCurrencyUpdateWrite(orm.BaseModel):
    '''Rebuilds all Currency measurement units (kind '17') from an XML data
    file (Tryton-style `currency.currency` records): deletes every existing
    currency, then parses and re-creates them.

    NOTE(review): uses eval() on attribute values from the configured file —
    acceptable only because the file is a trusted, bundled data file.
    '''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        update_file_path = self.cfg.get('file', None)
        if not update_file_path:
            raise orm.TerminateAction()
        Unit = context.models['17']
        keys = Unit.query(Unit.measurement == 'Currency').fetch(keys_only=True)
        orm.delete_multi(keys)  # delete all currencies
        with file(update_file_path) as f:
            tree = ElementTree.fromstring(f.read())
        root = tree.findall('data')
        uoms = []

        def __text(item, key):
            # returns the element's text, or None when missing/'None'
            value = item.get(key)
            if value is not None:
                if value.text == 'None' or value.text is None:
                    return None
                return str(value.text)
            return value

        def __eval(item, key):
            # evaluates the element's 'eval' attribute, or None when missing
            value = item.get(key)
            if value == 'None':
                value = None
            if value is not None:
                evaled = value.attrib.get('eval')
                if evaled == 'None' or evaled is None:
                    return None
                return eval(evaled)
            return value

        # NOTE(review): root[1] — the second <data> section holds the
        # currency records in this data file; confirm against the file.
        for child in root[1]:
            if child.attrib.get('model') == 'currency.currency':
                uom = {'id': child.attrib.get('id')}
                uom_data = {}
                for sub_child in child:
                    # field elements keyed by their 'name' attribute
                    uom_data[sub_child.attrib.get('name')] = sub_child
                rounding = uom_data.get('rounding')
                digits = uom_data.get('digits')
                grouping = uom_data.get('mon_grouping')
                if rounding is not None:
                    rounding = Decimal(eval(rounding.attrib.get('eval')))
                if digits is not None:
                    digits = long(eval(digits.attrib.get('eval')))
                if grouping is not None:
                    grouping = eval(grouping.text)
                else:
                    grouping = []
                if digits is None:
                    digits = 2
                uom.update({
                    'measurement': 'Currency',
                    'name': uom_data['name'].text,
                    'code': uom_data['code'].text,
                    'numeric_code': uom_data['numeric_code'].text,
                    'symbol': uom_data['symbol'].text,
                    'rounding': rounding,
                    'digits': digits,
                    'grouping': grouping,
                    'decimal_separator': __text(uom_data, 'mon_decimal_point'),
                    'thousands_separator': __text(uom_data,
                                                  'mon_thousands_sep'),
                    'positive_sign_position': __eval(uom_data, 'p_sign_posn'),
                    'negative_sign_position': __eval(uom_data, 'n_sign_posn'),
                    'positive_sign': __text(uom_data, 'positive_sign'),
                    'negative_sign': __text(uom_data, 'negative_sign'),
                    'positive_currency_symbol_precedes': __eval(
                        uom_data, 'p_cs_precedes'),
                    'negative_currency_symbol_precedes': __eval(
                        uom_data, 'n_cs_precedes'),
                    'positive_separate_by_space': __eval(uom_data,
                                                         'p_sep_by_space'),
                    'negative_separate_by_space': __eval(uom_data,
                                                         'n_sep_by_space'),
                    'active': True
                })
                uoms.append(uom)
        put_entities = [Unit(**d) for d in uoms]
        for entity in put_entities:
            # bypass the rule engine for this bulk administrative write
            entity._use_rule_engine = False
        orm.put_multi(put_entities)
class BaseCache(orm.BaseModel):
    '''Response-cache plugin base (subclassed for get and delete modes —
    `self.getter` distinguishes them; presumably set by the subclass,
    confirm).

    Getter mode: computes cache "driver" names (per-account / auth / guest /
    admin / all) from cfg, looks the response up in memcache and terminates
    the action on a hit; on a miss it exposes a `do_save` saver and, for
    grouped caches, schedules an AccountCacheGroup ('135') update task.

    Deleter mode: flushes the memcache entries for the computed drivers and,
    when cache groups are involved, deletes the group entities and every key
    they recorded.
    '''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        active = self.cfg.get('active', True)
        if not active:
            return
        CacheGroup = context.models[self.cfg.get('kind', '135')]
        cache = self.cfg.get('cache', [])
        group_id = self.cfg.get('group', None)
        if not isinstance(group_id, (list, tuple)) and group_id is not None:
            group_id = [group_id]  # normalize a single group id to a list
        if group_id is not None:
            # resolve callable group ids against the context; drop the ones
            # that resolve to None
            group_ids = group_id[:]
            for i, g in enumerate(group_ids):
                if callable(g):
                    thing = g(context)
                    if thing is not None:
                        group_ids[i] = g(context)
                    else:
                        group_ids.remove(g)
            group_id = group_ids
        dcache_driver = self.cfg.get('dcache', [])
        cache_drivers = []
        # driver names that make an additional 'all' driver redundant
        all_prequesits = ['auth', 'guest', context.account.key_id_str]
        for driver in cache:
            if callable(driver):
                driver = driver(context)
                if driver is None:
                    continue
            user = driver == 'account'
            if not context.account._is_guest:
                # NOTE(review): the inner guest re-check is redundant here —
                # kept as-is
                if user and not context.account._is_guest:
                    cache_drivers.append(context.account.key_id_str)
                if driver == 'auth':
                    cache_drivers.append('auth')
                if driver == 'admin' and context.account._root_admin:
                    cache_drivers.append('admin')
            elif driver == 'guest':
                cache_drivers.append('guest')
            if driver == 'all' and not any(
                    baddie in cache_drivers for baddie in all_prequesits):
                cache_drivers.append('all')
        for d in dcache_driver:
            # dynamic drivers resolved from context paths
            cache_drivers.append(tools.get_attr(context, d))
        cache_drivers = set(cache_drivers)
        key = self.cfg.get('key')
        if callable(key):
            key = key(context)
        if not key:
            # default cache key: digest of the raw request input
            key = hashlib.md5(tools.json_dumps(context.raw_input)).hexdigest()
        data = None

        def build_key(driver, key, group_key):
            # memcache key: md5 of driver + cache key (+ group id when grouped)
            out = '%s_%s' % (driver, key)
            if group_key:
                out += '_%s' % group_key._id_str
            return hashlib.md5(out).hexdigest()

        if self.getter:
            group_key = None
            if group_id:
                first_group_id = group_id[0]
                group_key = CacheGroup.build_key(first_group_id)

            def do_save(data):
                # store the (compressed) response under every driver's key
                queue = {}
                saved_keys = []
                for driver in cache_drivers:
                    k = build_key(driver, key, group_key)
                    queue[k] = zlib.compress(data)
                try:
                    tools.mem_set_multi(queue)
                except ValueError as e:
                    tools.log.error(
                        'Failed saving response because it\'s over 1mb, with queue keys %s, using group %s, with drivers %s. With input %s'
                        % (queue, group_key, cache_drivers, context.input))
                    write = False  # failed writing this one, because size is over 1mb -- this can be fixed by chunking the `data`, but for now we dont need it

            saver = {'do_save': do_save}
            found = None
            for driver in cache_drivers:
                k = build_key(driver, key, group_key)
                active_k = '%s_active' % k
                data = tools.mem_get_multi([active_k, k])
                if data:
                    cache_hit = k in data
                    if not cache_hit:
                        continue
                    if group_key and cache_hit and not data.get(active_k):
                        tools.log.debug(
                            'Cache hit at %s but waiting for %s'
                            % (k, active_k))
                        return  # this means that taskqueue did not finish storing the key and cache will be available as soon as possible
                    try:
                        found = zlib.decompress(data[k])
                    except Exception as e:
                        # corrupt entry — drop it and fall through to a miss
                        found = None
                        tools.log.warn(
                            'Failed upacking memcache data for key %s in context of: using group %s, with driver %s. With input %s. Memory key deleted.'
                            % (k, group_key, driver, context.input))
                        tools.mem_delete_multi([k, active_k])
                    break
            if found:
                context.cache = {'value': found}
                raise orm.TerminateAction(
                    'Got cache with key %s from %s drivers using group %s.'
                    % (k, cache_drivers, group_key))
            else:
                keys = []
                for driver in cache_drivers:
                    keys.append(build_key(driver, key, group_key))
                if keys:
                    keys = base64.b64encode(
                        zlib.compress(','.join(keys))
                    )  # we compress keys because of taskqueues limit of 100k request payload
                    if group_key:
                        tools.log.info(
                            'Scheduling group cache storage for group %s and cache drivers %s'
                            % (group_key, cache_drivers))
                        context._callbacks.append(('cache', {
                            'action_id': 'update',
                            'keys': keys,
                            'ids': [group_key._id_str],
                            'action_model': '135'
                        }))
                    else:
                        tools.log.warn(
                            'No cache for group %s with cache drivers %s'
                            % (group_key, cache_drivers))
                context.cache = saver
        else:
            # deleter mode: flush ungrouped driver keys first
            tools.mem_delete_multi(
                [build_key(driver, key, None) for driver in cache_drivers])
            if hasattr(context, 'delete_cache_groups'):
                if not group_id:
                    group_id = []
                group_id.extend(context.delete_cache_groups)
            if group_id:
                keys = []
                satisfy = self.cfg.get('satisfy', {})
                for spec in satisfy:
                    # (groups, callback) pairs may veto individual group flushes
                    groups, callback = spec
                    for group in group_id[:]:
                        if group in groups:
                            if not callback(context, group):
                                group_id.remove(group)
                group_keys = [CacheGroup.build_key(id) for id in group_id]
                groups = orm.get_multi(
                    group_keys
                )  # this can cause operating on multiple groups error
                # however if that happens, just move the DeleteCache plugin away from the transaction, since it does not need it
                # anyway 25 entity groups is the limit and usually we operate on max 5 groups per flush
                for group in groups:
                    if group:
                        keys.extend(group.keys)
                for k in keys[:]:
                    keys.append('%s_active' % k)
                tools.mem_delete_multi(keys)
                tools.log.info('Deleted cache for group(s) %s' % group_id)
                orm.delete_multi(group_keys)
class Buyer(orm.BaseExpando):
    '''Buyer profile (kind 19), child entity of an Account; stores the
    buyer's addresses and exposes cached `read` and transactional `update`
    actions, both restricted to the owning, authenticated account.'''

    _kind = 19

    _use_record_engine = True

    ''' read: buyer_<account.id> '''

    # response-cache policy: keyed 'buyer', cached per authenticated account
    READ_CACHE_POLICY = {'key': 'buyer', 'cache': ['account']}

    addresses = orm.SuperLocalStructuredProperty(BuyerAddress, '1',
                                                 repeated=True)

    _default_indexed = False

    _virtual_fields = {'_records': orm.SuperRecordProperty('19')}

    def condition_not_guest_and_owner(account, entity, **kwargs):
        # permission predicate (no `self` — invoked by the rule engine)
        return not account._is_guest and entity._original.key_root == account.key

    _permissions = [
        orm.ExecuteActionPermission(('update', 'read'),
                                    condition_not_guest_and_owner),
        orm.ReadFieldPermission(('addresses'), condition_not_guest_and_owner),
        orm.WriteFieldPermission(('addresses', '_records'),
                                 condition_not_guest_and_owner)
    ]

    _actions = [
        orm.Action(
            id='update',
            arguments={
                'account': orm.SuperKeyProperty(kind='11', required=True),
                'addresses': orm.SuperLocalStructuredProperty(BuyerAddress,
                                                              repeated=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'d': {
                        '_buyer.addresses': 'input.addresses'
                    }}),
                    RulePrepare(),
                    RuleExec()
                ]),
                # transactional group: persist, flush cache, emit output
                orm.PluginGroup(transactional=True,
                                plugins=[
                                    Write(),
                                    DeleteCache(cfg=READ_CACHE_POLICY),
                                    Set(cfg={'d': {
                                        'output.entity': '_buyer'
                                    }})
                                ])
            ]),
        orm.Action(id='read',
                   arguments={
                       'account': orm.SuperKeyProperty(kind='11',
                                                       required=True),
                       'read_arguments': orm.SuperJsonProperty()
                   },
                   _plugin_groups=[
                       orm.PluginGroup(plugins=[
                           Context(),
                           GetCache(cfg=READ_CACHE_POLICY),
                           Read(),
                           RulePrepare(),
                           RuleExec(),
                           Set(cfg={'d': {
                               'output.entity': '_buyer'
                           }})
                       ])
                   ])
    ]

    @classmethod
    def prepare_key(cls, input, **kwargs):
        # one buyer profile per account: fixed id 'buyer' under the account
        return cls.build_key('buyer', parent=input.get('account'))
class CatalogProductCategoryUpdateWrite(orm.BaseModel):
    '''Rebuilds the product category taxonomy (kind '24') from a text file of
    ' > '-separated paths: deletes all existing categories, then re-creates
    one entity per path, marking leaf categories 'visible'.'''

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        # This code builds leaf categories for selection with complete names, 3.8k of them.
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        update_file_path = self.cfg.get('file', None)
        debug_environment = self.cfg.get('debug_environment', False)
        if not update_file_path:
            raise orm.TerminateAction()
        Category = context.models['24']
        # wipe all existing category entities via low-level datastore ops
        gets = datastore.Query('24', namespace=None, keys_only=True).Run()
        keys = list(gets)
        datastore.Delete(keys)
        categories = []
        put_entities = []
        structure = {}  # nested dict mirroring the category tree
        with file(update_file_path) as f:
            for line in f:
                if not line.startswith('#'):
                    item = line.replace('\n', '')
                    categories.append(item)
                    full_path = item.split(' > ')
                    current_structure = structure
                    for xi, path in enumerate(full_path):
                        if path not in current_structure:
                            current_structure[path] = {}
                        current_structure = current_structure[path]
        for i, item in enumerate(categories):
            full_path = item.split(' > ')
            path_map = structure
            current = full_path
            parent = current[:-1]
            category = {}
            # deterministic id derived from the full path
            category['id'] = hashlib.md5(''.join(current)).hexdigest()
            if parent:
                category['parent_record'] = Category.build_key(
                    hashlib.md5(''.join(parent)).hexdigest())
            else:
                category['parent_record'] = None
            category['name'] = ' / '.join(current)
            category['state'] = ['indexable']
            leaf = False
            # walk the tree to this category's node; empty node => leaf
            for path in full_path:
                if path in path_map:
                    path_map = path_map[path]
            if not len(path_map):
                leaf = True
            if leaf:
                category['state'].append('visible')  # marks the category as leaf
            category = Category(**category)
            # bulk administrative write — bypass all engines and caches
            category._use_rule_engine = False
            category._use_record_engine = False
            category._use_memcache = False
            category._use_cache = False
            put_entities.append(category)
        tools.log.debug('Writing %s categories' % len(put_entities))
        orm.put_multi(put_entities)
class Seller(orm.BaseExpando):
    '''Seller profile (kind 23), child entity of an Account; holds the shop
    name, logo and a remote container of payment/plugin configuration, with
    cached `read`, transactional `update` and cache-flush actions.'''

    _kind = 23

    _use_record_engine = True

    name = orm.SuperStringProperty('1', required=True)
    logo = orm.SuperImageLocalStructuredProperty(orm.Image, '2',
                                                 required=True)

    _default_indexed = False

    _virtual_fields = {
        '_plugin_group': orm.SuperRemoteStructuredProperty(
            SellerPluginContainer),
        '_records': orm.SuperRecordProperty('23'),
        # derived from the stripe plugin ('114') in the plugin group
        '_stripe_publishable_key': orm.SuperComputedProperty(
            lambda self: self.get_stripe_publishable_key()),
        # derived from the active currency plugin ('117') in the plugin group
        '_currency': orm.SuperReferenceProperty(
            '17',
            autoload=True,
            callback=lambda self: self.get_currency_callback(),
            format_callback=lambda self, value: value)
    }

    # permission predicates (no `self` — invoked by the rule engine)
    def condition_not_guest_and_owner(action, account, entity, **kwargs):
        return not account._is_guest and entity._original.key_root == account.key

    def condition_owner_active(action, account, entity, **kwargs):
        return entity._original.root_entity.state == "active"

    def condition_taskqueue(account, **kwargs):
        return account._is_taskqueue

    _permissions = [
        orm.ExecuteActionPermission(('create', 'read', 'update', 'prepare'),
                                    condition_not_guest_and_owner),
        orm.ExecuteActionPermission('far_cache_groups_flush',
                                    condition_owner_active),
        orm.ReadFieldPermission(('name', 'logo', '_plugin_group', '_currency',
                                 '_stripe_publishable_key'),
                                condition_not_guest_and_owner),
        orm.WriteFieldPermission(('name', 'logo', '_plugin_group',
                                  '_records'),
                                 condition_not_guest_and_owner)
    ]

    _actions = [
        orm.Action(
            id='read',
            arguments={
                'account': orm.SuperKeyProperty(kind='11', required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[
                        Context(),
                        # cached per-owner ('account') and publicly ('all')
                        GetCache(cfg={
                            'group': lambda context: 'read_23_%s'
                            % context.input['account']._id_str,
                            'cache': [
                                lambda context: 'account'
                                if context.account.key
                                == context.input['account'] else None, 'all'
                            ]
                        }),
                        Read(),
                        RulePrepare(),
                        RuleExec(),
                        SellerSetupDefaults(),
                        Set(cfg={'d': {
                            'output.entity': '_seller'
                        }}),
                        CallbackExec()
                    ])
            ]),
        orm.Action(
            id='far_cache_groups_flush',
            arguments={'key': orm.SuperKeyProperty(kind='23', required=True)},
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[
                        Context(),
                        Read(),
                        RulePrepare(),
                        RuleExec(),
                        SellerDeleteFarCacheGroups(),
                        Set(cfg={'d': {
                            'output.entity': '_seller'
                        }}),
                        DeleteCache()
                    ])
            ]),
        orm.Action(
            id='update',
            arguments={
                'account': orm.SuperKeyProperty(kind='11', required=True),
                'name': orm.SuperStringProperty(required=True),
                'logo': orm.SuperImageLocalStructuredProperty(
                    orm.Image,
                    upload=True,
                    process_config={
                        'measure': False,
                        'transform': True,
                        'width': 720,
                        'height': 300,
                        'crop_to_fit': True
                    }),
                '_plugin_group': orm.SuperLocalStructuredProperty(
                    SellerPluginContainer),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[
                        Context(),
                        Read(),
                        Set(cfg={'d': {
                            '_seller.name': 'input.name',
                            '_seller.logo': 'input.logo',
                            '_seller._plugin_group': 'input._plugin_group'
                        }}),
                        SellerInterceptEncryptedValue(),
                        SellerSetupDefaults(),
                        SellerMaybeDeleteFarCacheGroups(),
                        RulePrepare(),
                        RuleExec()
                    ]),
                # transactional group: persist, flush per-owner cache, output
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg={
                            'group': lambda context: 'read_23_%s'
                            % context.input['account']._id_str
                        }),
                        Set(cfg={'d': {
                            'output.entity': '_seller'
                        }}),
                        CallbackExec()
                    ])
            ])
    ]

    def get_stripe_publishable_key(self):
        # scans the plugin group for the stripe plugin (kind '114')
        stripe_publishable_key = None
        if self.key:
            self._plugin_group.read()
            for plugin in self._plugin_group.value.plugins:
                if plugin.get_kind() == '114':
                    stripe_publishable_key = plugin.publishable_key
        return stripe_publishable_key

    def get_currency_callback(self):
        # scans the plugin group for an active currency plugin (kind '117')
        currency = None
        if self.key:
            self._plugin_group.read()
            for plugin in self._plugin_group.value.plugins:
                if ((plugin.get_kind() == '117') and (plugin.active)):
                    currency = plugin.currency
        if currency is not None:
            currency = currency.get_async()
        return currency

    @classmethod
    def prepare_key(cls, input, **kwargs):
        # one seller profile per account: fixed id 'seller' under the account
        return cls.build_key('seller', parent=input.get('account'))
class Account(orm.BaseExpando):
    """User account entity (kind 11) with OAuth2 login and session handling."""
    _kind = 11
    _use_record_engine = True
    '''
    Cache:
    11_<account.id>
    '''
    # Cache policy used by the 'read' action: one group per account id.
    READ_CACHE_POLICY = {
        'group': lambda context: '11_%s' % context.account.key_id_str,
        'cache': ['account']
    }
    # Groups invalidated by mutating actions; covers both the target account
    # (context._account) and the acting account (context.account).
    DELETE_CACHE_POLICY = {
        'group': [
            'admin',
            lambda context: '11_%s' % context._account.key_id_str,
            lambda context: '11_%s' % context.account.key_id_str
        ]
    }
    created = orm.SuperDateTimeProperty('1', required=True, auto_now_add=True)
    updated = orm.SuperDateTimeProperty('2', required=True, auto_now=True)
    state = orm.SuperStringProperty('3', required=True, default='active',
                                    choices=('active', 'suspended'))
    identities = orm.SuperStructuredProperty(
        AccountIdentity, '4', repeated=True)  # Soft limit 100 instances.
    sessions = orm.SuperLocalStructuredProperty(
        AccountSession, '5', repeated=True)  # Soft limit 100 instances.
    _default_indexed = False
    _virtual_fields = {
        'ip_address': orm.SuperComputedProperty(lambda self: tools.get_remote_addr()),
        '_primary_email': orm.SuperComputedProperty(lambda self: self.primary_email()),
        '_csrf': orm.SuperComputedProperty(lambda self: self.get_csrf()),
        '_records': orm.SuperRecordProperty('11')
    }

    def condition_guest_and_active(entity, **kwargs):
        # Guests may log in; existing accounts only while 'active'.
        return entity._is_guest or entity._original.state == "active"

    def condition_true(entity, **kwargs):
        return True

    def condition_not_guest_and_owner(account, entity, **kwargs):
        return not account._is_guest and account.key == entity._original.key

    def condition_not_guest(account, **kwargs):
        return not account._is_guest

    def condition_root(account, **kwargs):
        return account._root_admin

    def condition_sudo_action_and_root(account, action, **kwargs):
        return action.key_id_str == "sudo" and account._root_admin

    _permissions = [
        orm.ExecuteActionPermission('login', condition_guest_and_active),
        orm.ExecuteActionPermission('current_account', condition_true),
        orm.ExecuteActionPermission(('read', 'update', 'logout'),
                                    condition_not_guest_and_owner),
        orm.ExecuteActionPermission(('blob_upload_url', 'create_channel'),
                                    condition_not_guest),
        orm.ExecuteActionPermission(('read', 'search', 'sudo'), condition_root),
        orm.ReadFieldPermission(('created', 'updated', 'state', 'identities',
                                 'sessions', '_primary_email'),
                                condition_not_guest_and_owner),
        orm.ReadFieldPermission(
            ('created', 'updated', 'state', 'identities', 'sessions',
             '_primary_email', 'ip_address', '_records'), condition_root),
        orm.WriteFieldPermission(
            ('state', 'identities', 'sessions', '_primary_email', '_records'),
            condition_not_guest_and_owner),
        orm.WriteFieldPermission(('state', 'sessions', '_records'),
                                 condition_sudo_action_and_root)
    ]
    _actions = [
        # OAuth2 login; CSRF is skipped because the provider redirects here.
        orm.Action(
            id='login',
            skip_csrf=True,
            arguments={
                'login_method': orm.SuperStringProperty(
                    required=True,
                    choices=[login_method['type'] for login_method in settings.LOGIN_METHODS]),
                'code': orm.SuperStringProperty(),
                'error_message': orm.SuperStringProperty(),
                'state': orm.SuperStringProperty(),
                'error': orm.SuperStringProperty(),
                'redirect_to': orm.SuperStringProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    AccountLoginInit(cfg={'methods': settings.LOGIN_METHODS,
                                          'get_host_url': settings.get_host_url})
                ]),
                orm.PluginGroup(transactional=True, plugins=[
                    AccountLoginWrite(),
                    DeleteCache(cfg=DELETE_CACHE_POLICY)
                ])
            ]),
        # Return the account bound to the current request (may be a guest).
        orm.Action(
            id='current_account',
            skip_csrf=True,
            arguments={'read_arguments': orm.SuperJsonProperty()},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(cfg={'source': 'account.key'}),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_account'}}),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='read',
            arguments={
                'key': orm.SuperKeyProperty(kind='11', required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg=READ_CACHE_POLICY),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_account'}}),
                    CallbackExec()
                ])
            ]),
        # Change primary identity / disassociate identities; fires the
        # catalog 'account_discontinue' callback when the account ends up
        # suspended.
        orm.Action(
            id='update',
            arguments={
                'key': orm.SuperKeyProperty(kind='11', required=True),
                'primary_identity': orm.SuperStringProperty(),
                'disassociate': orm.SuperStringProperty(repeated=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    AccountUpdateSet(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_account'}}),
                        CallbackExec(cfg=[('callback',
                                           {'action_id': 'account_discontinue',
                                            'action_model': '31'},
                                           {'account': '_account.key_urlsafe',
                                            'account_state': '_account.state'},
                                           lambda account, account_state, **kwargs: account_state == 'suspended')])
                    ])
            ]),
        # Root-admin only account search.
        orm.Action(
            id='search',
            arguments={
                'search': orm.SuperSearchProperty(
                    default={'filters': [],
                             'orders': [{'field': 'created', 'operator': 'desc'}]},
                    cfg={
                        'search_arguments': {'kind': '11',
                                             'options': {'limit': settings.SEARCH_PAGE}},
                        'filters': {
                            'key': orm.SuperVirtualKeyProperty(kind='11', searchable=False),
                            'state': orm.SuperStringProperty(choices=('active', 'suspended')),
                            'identities.email': orm.SuperStringProperty(searchable=False)
                        },
                        'indexes': [
                            {'orders': [('created', ['asc', 'desc'])]},
                            {'orders': [('updated', ['asc', 'desc'])]},
                            {'filters': [('key', ['=='])]},
                            {'filters': [('identities.email', ['=='])]},
                            {'filters': [('state', ['=='])],
                             'orders': [('created', ['asc', 'desc'])]},
                            {'filters': [('state', ['=='])],
                             'orders': [('updated', ['asc', 'desc'])]}
                        ]
                    })
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={'group': 'admin', 'cache': ['admin']}),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Search(),
                    RulePrepare(cfg={'path': '_entities'}),
                    Set(cfg={'d': {'output.entities': '_entities',
                                   'output.cursor': '_cursor',
                                   'output.more': '_more'}})
                ])
            ]),
        # Root-admin state change; drops all sessions, notifies the owner and
        # triggers catalog discontinuation when suspended.
        orm.Action(
            id='sudo',
            arguments={
                'key': orm.SuperKeyProperty(kind='11', required=True),
                'state': orm.SuperStringProperty(required=True,
                                                 choices=('active', 'suspended')),
                'message': orm.SuperTextProperty(required=True),
                'note': orm.SuperTextProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'rm': ['_account.sessions'],
                             'd': {'_account.state': 'input.state'}}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        Set(cfg={'d': {'output.entity': '_account'}}),
                        Notify(cfg={'s': {'subject': notifications.ACCOUNT_SUDO_SUBJECT,
                                          'body': notifications.ACCOUNT_SUDO_BODY,
                                          'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': '_account._primary_email'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        CallbackExec(cfg=[('callback',
                                           {'action_id': 'account_discontinue',
                                            'action_model': '31'},
                                           {'account': '_account.key_urlsafe',
                                            'account_state': '_account.state'},
                                           lambda account, account_state, **kwargs: account_state == 'suspended')])
                    ])
            ]),
        # Remove all sessions; record keeps the ip address that logged out.
        orm.Action(
            id='logout',
            arguments={'key': orm.SuperKeyProperty(kind='11', required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'rm': ['_account.sessions']}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(cfg={'dra': {'ip_address': '_account.ip_address'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        AccountLogoutOutput()
                    ])
            ]),
        orm.Action(
            id='blob_upload_url',
            arguments={'upload_url': orm.SuperStringProperty(required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    BlobURL(cfg={'bucket': settings.BUCKET_PATH}),
                    Set(cfg={'d': {'output.upload_url': '_blob_url'}})
                ])
            ]),
        orm.Action(
            id='create_channel',
            arguments={},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    CreateChannel(),
                    Set(cfg={'d': {'output.token': '_token'}})
                ])
            ])
    ]

    def get_output(self):
        """Extend the serialized form with guest/system/csrf/admin flags and,
        when available, the request's geolocation data."""
        dic = super(Account, self).get_output()
        dic.update({'_is_guest': self._is_guest,
                    '_is_system': self._is_system,
                    '_csrf': self._csrf,
                    '_root_admin': self._root_admin})
        location = self.current_location_data()
        if isinstance(location, dict):
            dic.update(location)
        return dic

    @property
    def _root_admin(self):
        # Root admins are configured by primary email address.
        return self._primary_email in settings.ROOT_ADMINS

    @property
    def _is_taskqueue(self):
        return tools.mem_temp_get('current_request_is_taskqueue')

    @property
    def _is_cron(self):
        return tools.mem_temp_get('current_request_is_cron')

    @property
    def _is_system(self):
        return self.key_id_str == 'system'

    @property
    def _is_guest(self):
        # A guest account is an unsaved Account() with no key.
        return self.key is None

    def primary_email(self):
        """Return the email of the identity flagged primary, or None."""
        self.identities.read()
        if not self.identities.value:
            return None
        for identity in self.identities.value:
            if identity.primary:
                return identity.email

    def get_csrf(self):
        """CSRF token: salted hash of the session id, or a generic token for
        requests without a session."""
        session = self.current_account_session()
        if not session:
            return tools.get_csrf_token()
        return hashlib.md5(
            '%s-%s' % (session.session_id, settings.CSRF_SALT)).hexdigest()

    @classmethod
    def current_account(cls):
        """Return the account bound to this request, creating a guest
        placeholder on first access."""
        current_account = tools.mem_temp_get('current_account')
        if not current_account:
            current_account = cls()
            cls.set_current_account(current_account)
        return current_account

    @classmethod
    def system_account(cls):
        """Return the singleton 'system' account, creating it on first use
        with the rule engine bypassed."""
        account_key = cls.build_key('system')
        account = account_key.get()
        if not account:
            identities = [
                AccountIdentity(email='System', identity='1-0', primary=True)
            ]
            account = cls(key=account_key, state='active', identities=identities)
            account._use_rule_engine = False
            account.put()
            account._use_rule_engine = True
        return account

    @classmethod
    def current_account_session(cls):
        return tools.mem_temp_get('current_account_session')

    def session_by_id(self, session_id):
        """Return the stored session matching session_id, or None."""
        for session in self.sessions.value:
            if session.session_id == session_id:
                return session
        return None

    def new_session(self):
        """Create a fresh session, expiring sessions older than 10 days.

        The new session id is an md5 of random characters, re-rolled until it
        does not collide with any existing session id.
        """
        account = self
        session_ids = set()
        for session in account.sessions.value:
            if session.created < (datetime.datetime.now() -
                                  datetime.timedelta(days=10)):
                session._state = 'deleted'
            session_ids.add(session.session_id)
        while True:
            session_id = hashlib.md5(tools.random_chars(30)).hexdigest()
            if session_id not in session_ids:
                break
        session = AccountSession(session_id=session_id, ip_address=self.ip_address)
        # presumably the repeated-property setter appends rather than
        # replaces here — TODO confirm against the property implementation
        account.sessions = [session]
        return session

    @classmethod
    def current_location_data(cls):
        return tools.mem_temp_get('current_request_location_data')

    @classmethod
    def set_location_data(cls, data):
        """Normalize and stash request geolocation data in request-local memory.

        Country/region strings become kind '12'/'13' keys; 'zz' (unknown)
        clears both.
        """
        if data:
            if data.get('_country') and data.get('_country').lower() != 'zz':
                data['_country'] = orm.Key('12', data['_country'].lower())
                if data.get('_region'):
                    data['_region'] = orm.Key(
                        '13',
                        '%s-%s' % (data['_country']._id_str,
                                   data['_region'].lower()),
                        parent=data['_country'])
            else:
                data['_region'] = None
                data['_country'] = None
        return tools.mem_temp_set('current_request_location_data', data)

    @classmethod
    def set_taskqueue(cls, flag):
        return tools.mem_temp_set('current_request_is_taskqueue', flag)

    @classmethod
    def set_cron(self, flag):
        # NOTE: first argument is named 'self' but this is a classmethod, so
        # it actually receives the class.
        return tools.mem_temp_set('current_request_is_cron', flag)

    @classmethod
    def set_current_account(cls, account, session=None):
        tools.mem_temp_set('current_account', account)
        tools.mem_temp_set('current_account_session', session)

    @classmethod
    def set_current_account_from_access_token(cls, access_token):
        """Authenticate from an '<urlsafe account key>|<session id>' token.

        Returns the account on success, False on any validation failure
        (all failures are silent by design).
        """
        try:
            account_key, session_id = access_token.split('|')
        except:
            return False  # Fail silently if the authorization code is not set properly, or it is corrupted somehow.
        if not session_id:
            return False  # Fail silently if the session id is not found in the split sequence.
        account_key = orm.Key(urlsafe=account_key)
        if account_key.kind() != cls.get_kind() or account_key.id() == 'system':
            return False  # Fail silently if the kind is not valid
        account = account_key.get()
        if account:
            account.read()
            session = account.session_by_id(session_id)
            if session:
                cls.set_current_account(account, session)
                return account
class UnitUpdateWrite(orm.BaseModel):
    """Plugin that rebuilds the measurement-unit entities (kind '17') from a
    Tryton-style XML fixture.

    ``cfg['file']`` must point at the fixture; otherwise the action is
    terminated. Every existing unit whose measurement is NOT 'Currency' is
    deleted first (currencies are left untouched), then each ``product.uom``
    record in the fixture is re-created.
    """
    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        xml_path = self.cfg.get('file', None)
        if not xml_path:
            raise orm.TerminateAction()
        Unit = context.models['17']
        # Wipe all non-currency units; they are fully replaced below.
        stale_keys = Unit.query(Unit.measurement != 'Currency').fetch(keys_only=True)
        orm.delete_multi(stale_keys)
        with file(xml_path) as xml_file:
            document = ElementTree.fromstring(xml_file.read())
        records = document.findall('data')[0]
        # First pass: map category record ids to their name text.
        categories = {}
        for record in records:
            if record.attrib.get('model') != 'product.uom.category':
                continue
            for field in record:
                categories[record.attrib.get('id')] = field.text
        # Second pass: build a keyword dict per product.uom record.
        units = []
        for record in records:
            if record.attrib.get('model') != 'product.uom':
                continue
            fields = {}
            for field in record:
                fields[field.attrib.get('name')] = field
            rounding = fields.get('rounding')
            if rounding is not None:
                # NOTE(review): eval() of fixture attribute expressions —
                # acceptable only because the fixture file is trusted input.
                rounding = Decimal(eval(rounding.attrib.get('eval')))
            digits_field = fields.get('digits')
            digits = 3 if digits_field is None else long(eval(digits_field.attrib.get('eval')))
            units.append({
                'id': record.attrib.get('id')[4:],  # strip fixture id prefix
                'name': fields['name'].text,
                'active': True,
                'symbol': fields['symbol'].text,
                'measurement': categories.get(fields['category'].attrib.get('ref')),
                'factor': Decimal(eval(fields['factor'].attrib.get('eval'))),
                'rate': Decimal(eval(fields['rate'].attrib.get('eval'))),
                'rounding': rounding,
                'digits': digits
            })
        entities = [Unit(**unit) for unit in units]
        for entity in entities:
            # Bypass the rule engine for this bulk system write.
            entity._use_rule_engine = False
        orm.put_multi(entities)
class Catalog(orm.BaseExpando):
    """Seller catalog entity (kind 31), child of a Seller.

    Lifecycle: draft -> published -> indexed -> discontinued. Drafts are
    editable by the owner; published/indexed catalogs are publicly readable;
    cron eventually deletes stale drafts and discontinued catalogs.
    """
    _kind = 31
    DELETE_CACHE_POLICY = {
        # only delete public cache when user saves published or indexed catalog
        'satisfy': [(['search_31'],
                     lambda context, group_id: True if
                     (context._catalog.state == 'indexed' or
                      (context._catalog.state != 'indexed' and
                       (hasattr(context, 'catalog_original_state') and
                        context.catalog_original_state == 'indexed'))) else False)],
        'group': [
            'search_31', 'search_31_admin',
            lambda context: 'read_31_%s' % context._catalog.key._id_str,
            lambda context: 'search_31_%s' % context._catalog.key._root._id_str
        ]
    }
    created = orm.SuperDateTimeProperty('1', required=True, auto_now_add=True)
    updated = orm.SuperDateTimeProperty('2', required=True, auto_now=True)
    name = orm.SuperStringProperty('3', required=True)
    published_date = orm.SuperDateTimeProperty('4', required=False)
    discontinued_date = orm.SuperDateTimeProperty('5', required=False)
    state = orm.SuperStringProperty('6', required=True, default='draft',
                                    choices=('draft', 'published', 'indexed',
                                             'discontinued'))
    _default_indexed = False
    _expando_fields = {
        # Cover image, auto-derived as a 240x312 crop copy of a catalog image.
        'cover': orm.SuperImageLocalStructuredProperty(
            CatalogImage, '7',
            process_config={'copy': True, 'copy_name': 'cover',
                            'transform': True, 'width': 240, 'height': 312,
                            'crop_to_fit': True})
    }
    _virtual_fields = {
        '_images': orm.SuperImageRemoteStructuredProperty(
            CatalogImage,
            repeated=True,
            search={
                'default': {'filters': [],
                            'orders': [{'field': 'sequence', 'operator': 'desc'}]},
                'cfg': {
                    'indexes': [{'ancestor': True,
                                 'filters': [],
                                 'orders': [('sequence', ['desc'])]}],
                }
            }),
        # Parent seller, loaded asynchronously from the catalog's parent key.
        '_seller': orm.SuperReferenceStructuredProperty(
            '23',
            autoload=True,
            callback=lambda self: self.key.parent().get_async())
    }

    def condition_not_guest(account, **kwargs):
        return not account._is_guest

    def condition_not_guest_and_owner_or_root(account, entity, **kwargs):
        return not account._is_guest and (
            entity._original.key_root == account.key or account._root_admin)

    def condition_search(account, entity, action, input, **kwargs):
        # Root admins may search anything; otherwise the query must match one
        # of three whitelisted shapes (home page, seller catalogs view,
        # seller profile view).
        def valid_search():
            if action.key_id == 'search':
                _ancestor = input['search'].get('ancestor')
                _filters = input['search'].get('filters')
                if _filters:
                    field = _filters[0]['field']
                    op = _filters[0]['operator']
                    value = _filters[0]['value']
                    if field == 'state' and op == 'IN':
                        if value == ['indexed']:
                            # home page
                            return True
                        else:
                            if _ancestor:
                                if 'discontinued' not in value:
                                    # seller catalogs view
                                    if not account._is_guest and _ancestor._root == account.key:
                                        return True
                                if value == ['published', 'indexed']:
                                    # seller profile view
                                    return True
            return False
        return account._root_admin or valid_search()

    def condition_published_or_indexed(entity, **kwargs):
        return entity._original.state in ("published", "indexed")

    def condition_update(account, entity, **kwargs):
        return not account._is_guest and entity._original.key_root == account.key \
            and (entity._original.state in ("draft", "published", "indexed"))

    def condition_not_guest_and_owner_and_draft(account, entity, **kwargs):
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "draft"

    def condition_deny_write_field_permission(account, entity, action, **kwargs):
        # The raw image blob fields must never be written via 'update';
        # uploads go through 'catalog_upload_images' instead.
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "draft" and action.key_id_str == "update"

    def condition_not_guest_and_owner_and_published(account, entity, **kwargs):
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state in ("published", "indexed")

    def condition_root(account, **kwargs):
        return account._root_admin

    def condition_taskqueue(account, **kwargs):
        return account._is_taskqueue

    def condition_cron(account, **kwargs):
        return account._is_cron

    def condition_true(**kwargs):
        return True

    def condition_false(**kwargs):
        return False

    def condition_write_images(account, entity, action, **kwargs):
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "draft" \
            and action.key_id_str \
            in ("read", "catalog_upload_images", "prepare")

    def condition_write_state(entity, action, **kwargs):
        # Each action may only write the specific state it transitions to.
        return (action.key_id_str == "create" and entity.state == "draft") \
            or (action.key_id_str == "publish" and entity.state == "published") \
            or (action.key_id_str == "sudo_discontinue" and entity.state == "discontinued") \
            or (action.key_id_str == "discontinue" and entity.state == "discontinued") \
            or (action.key_id_str == "sudo" and entity.state != "draft")

    def condition_write_discontinued_date(entity, action, **kwargs):
        return action.key_id_str in ("sudo_discontinue", "discontinue",
                                     "sudo") and entity.state == "discontinued"

    def condition_write_published_date(entity, action, **kwargs):
        return action.key_id_str == "sudo" and entity.state in ("published",
                                                                "indexed")

    def condition_duplicate(action, **kwargs):
        # NOTE: ("catalog_process_duplicate") is a plain string, not a tuple,
        # so this is a substring test — it still behaves correctly because an
        # exact match is the only key id that is a substring here, but a
        # trailing comma would make the intent explicit.
        return action.key_id_str in ("catalog_process_duplicate")

    def cache_read(context):
        # Owner or root admin reads hit the 'account' cache bucket.
        if context.input[
                'key']._root == context.account.key or context.account._root_admin:
            return 'account'
        else:
            return None

    def cache_search(context):
        _ancestor = context.input['search'].get('ancestor')
        if context.account._root_admin or (_ancestor and
                                           _ancestor._root == context.account.key):
            return 'account'
        return None

    def cache_group_search(context):
        # Pick the cache group: admin-wide, per-seller, or the public group.
        key = 'search_31'
        _ancestor = context.input['search'].get('ancestor')
        if context.account._root_admin:
            return '%s_admin' % key
        if _ancestor and _ancestor._root == context.account.key:
            return '%s_%s' % (key, context.account.key_id_str)
        return key

    _permissions = [
        orm.ExecuteActionPermission('prepare', condition_not_guest),
        orm.ExecuteActionPermission('create', condition_not_guest_and_owner_or_root),
        orm.ExecuteActionPermission('search', condition_search),
        orm.ExecuteActionPermission('read', condition_published_or_indexed),
        orm.ExecuteActionPermission('update', condition_update),
        orm.ExecuteActionPermission(
            ('read', 'publish', 'catalog_upload_images'),
            condition_not_guest_and_owner_and_draft),
        orm.ExecuteActionPermission(
            ('discontinue', 'catalog_duplicate'),
            condition_not_guest_and_owner_and_published),
        orm.ExecuteActionPermission(('read', 'sudo'), condition_root),
        orm.ExecuteActionPermission('cron', condition_cron),
        orm.ExecuteActionPermission(('account_discontinue', 'sudo_discontinue',
                                     'catalog_process_duplicate', 'delete'),
                                    condition_taskqueue),
        # field permissions
        orm.ReadFieldPermission(
            ('created', 'updated', 'name', 'published_date',
             'discontinued_date', 'state', 'cover', '_images'),
            condition_not_guest_and_owner_or_root),
        orm.WriteFieldPermission(('name', 'published_date', 'discontinued_date',
                                  'cover', '_images'),
                                 condition_not_guest_and_owner_and_draft),
        orm.DenyWriteFieldPermission(
            ('_images.image', '_images.content_type', '_images.size',
             '_images.gs_object_name', '_images.serving_url'),
            condition_deny_write_field_permission),
        orm.WriteFieldPermission(('_images'), condition_write_images),
        orm.WriteFieldPermission(('_images.products.availability', ),
                                 condition_not_guest_and_owner_and_published),
        orm.WriteFieldPermission('state', condition_write_state),
        orm.WriteFieldPermission('discontinued_date',
                                 condition_write_discontinued_date),
        orm.WriteFieldPermission('published_date',
                                 condition_write_published_date),
        orm.ReadFieldPermission(
            ('created', 'updated', 'name', 'published_date',
             'discontinued_date', 'state', 'cover', '_images'),
            condition_published_or_indexed),
        orm.ReadFieldPermission(
            ('_seller.name', '_seller.logo', '_seller._currency'),
            condition_true),
        orm.WriteFieldPermission(
            ('created', 'updated', 'name', 'published_date',
             'discontinued_date', 'state', 'cover', '_images'),
            condition_duplicate)
    ]
    _actions = [
        orm.Action(
            id='prepare',
            arguments={'seller': orm.SuperKeyProperty(kind='23', required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(), Read(), RulePrepare(), RuleExec(),
                    Set(cfg={'d': {'output.entity': '_catalog'}})
                ])
            ]),
        orm.Action(
            id='create',
            arguments={
                'seller': orm.SuperKeyProperty(kind='23', required=True),
                'name': orm.SuperStringProperty(required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'s': {'_catalog.state': 'draft'},
                             'd': {'_catalog.name': 'input.name'}}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_catalog'}})
                    ])
            ]),
        orm.Action(
            id='read',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={
                        'group': lambda context: 'read_31_%s' % context.input['key']._id_str,
                        'cache': [cache_read, 'all']
                    }),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_catalog'}}),
                    CallbackExec()
                ])
            ]),
        # Update name and image structure; the original state is kept so the
        # cache policy can detect indexed -> other transitions.
        orm.Action(
            id='update',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                'name': orm.SuperStringProperty(required=True),
                '_images': orm.SuperImageRemoteStructuredProperty(CatalogImage,
                                                                  repeated=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'d': {'_catalog.name': 'input.name',
                                   'catalog_original_state': '_catalog._original.state',
                                   '_catalog._images': 'input._images'}}),
                    CatalogProcessCoverSet(),
                    CatalogProcessProducts(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_catalog'}})
                    ])
            ]),
        orm.Action(
            id='catalog_upload_images',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                '_images': orm.SuperImageLocalStructuredProperty(CatalogImage,
                                                                 upload=True,
                                                                 repeated=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    UploadImages(cfg={'path': '_catalog._images',
                                      'images_path': 'input._images'}),
                    CatalogProcessCoverSet(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_catalog'}})
                    ])
            ]),
        # Physical delete; only reachable from the task queue (see permissions).
        orm.Action(
            id='delete',
            arguments={'key': orm.SuperKeyProperty(kind='31', required=True)},
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Delete(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_catalog'}})
                    ])
            ]),
        orm.Action(
            id='search',
            arguments={
                'search': orm.SuperSearchProperty(
                    default={'filters': [],
                             'orders': [{'field': 'created', 'operator': 'desc'}]},
                    cfg={
                        'search_arguments': {'kind': '31',
                                             'options': {'limit': settings.SEARCH_PAGE}},
                        'ancestor_kind': '23',
                        'search_by_keys': True,
                        'filters': {
                            'name': orm.SuperStringProperty(),
                            'key': orm.SuperVirtualKeyProperty(kind='31',
                                                               searchable=False),
                            'state': orm.SuperStringProperty(
                                repeated=True,
                                choices=('published', 'indexed',
                                         'discontinued', 'draft'))
                        },
                        'indexes': [
                            {'ancestor': True,
                             'orders': [('created', ['desc'])]},
                            {'ancestor': True,
                             'filters': [('state', ['IN'])],
                             'orders': [('created', ['desc']), ('key', ['desc'])]},
                            {'ancestor': True,
                             'filters': [('state', ['IN'])],
                             'orders': [('published_date', ['desc']),
                                        ('key', ['desc'])]},
                            {'orders': [('created', ['asc', 'desc'])]},
                            {'orders': [('updated', ['asc', 'desc'])]},
                            {'orders': [('published_date', ['asc', 'desc'])]},
                            {'orders': [('discontinued_date', ['asc', 'desc'])]},
                            {'filters': [('state', ['IN'])],
                             'orders': [('published_date', ['desc'])]},
                            {'filters': [('key', ['=='])]}
                        ]
                    })
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={'group': cache_group_search,
                                  'cache': ['admin', cache_search, 'all']}),
                    Read(),
                    RulePrepare(cfg={'d': {'input': 'input'}}),
                    RuleExec(),
                    Search(),
                    RulePrepare(cfg={'path': '_entities'}),
                    Set(cfg={'d': {'output.entities': '_entities',
                                   'output.cursor': '_cursor',
                                   'output.more': '_more'}}),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='publish',
            arguments={'key': orm.SuperKeyProperty(kind='31', required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'s': {'_catalog.state': 'published'},
                             'd': {'catalog_original_state': '_catalog._original.state'},
                             'f': {'_catalog.published_date': lambda: datetime.datetime.now()}}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_catalog'}}),
                        # notify when user publishes catalog
                        Notify(cfg={'s': {'subject': notifications.CATALOG_PUBLISH_SUBJECT,
                                          'body': notifications.CATALOG_PUBLISH_BODY,
                                          'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': '_catalog.root_entity._primary_email'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        orm.Action(
            id='sudo_discontinue',
            arguments={'key': orm.SuperKeyProperty(kind='31', required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'s': {'_catalog.state': 'discontinued'},
                             'd': {'catalog_original_state': '_catalog._original.state'},
                             'f': {'_catalog.discontinued_date': lambda: datetime.datetime.now()}}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_catalog'}}),
                        # notify owner when catalog gets discontinued
                        Notify(cfg={'s': {'subject': notifications.CATALOG_SUDO_SUBJECT,
                                          'body': notifications.CATALOG_SUDO_DISCONTINUE_BODY,
                                          'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': '_catalog.root_entity._primary_email'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        orm.Action(
            id='discontinue',
            arguments={'key': orm.SuperKeyProperty(kind='31', required=True)},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'s': {'_catalog.state': 'discontinued'},
                             'd': {'catalog_original_state': '_catalog._original.state'},
                             'f': {'_catalog.discontinued_date': lambda: datetime.datetime.now()}}),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_catalog'}}),
                        # notify owner when catalog gets discontinued
                        Notify(cfg={'s': {'subject': notifications.CATALOG_DISCONTINUE_SUBJECT,
                                          'body': notifications.CATALOG_DISCONTINUE_BODY,
                                          'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': '_catalog.root_entity._primary_email'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        # Task-queue fan-out: discontinue all catalogs of a suspended account.
        orm.Action(
            id='account_discontinue',
            arguments={
                'account': orm.SuperKeyProperty(kind='11', required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(transactional=True,
                                plugins=[CatalogDiscontinue(),
                                         CallbackExec()])
            ]),
        orm.Action(
            id='sudo',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                'state': orm.SuperStringProperty(required=True,
                                                 choices=('published', 'indexed',
                                                          'discontinued')),
                'message': orm.SuperTextProperty(required=True),
                'note': orm.SuperTextProperty(required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    Set(cfg={'d': {'_catalog.state': 'input.state',
                                   'catalog_original_state': '_catalog._original.state'},
                             'f': {'_catalog.published_date': lambda: datetime.datetime.now(),
                                   '_catalog.discontinued_date': lambda: datetime.datetime.now()}}),  # ATM permissions handle if this field is writable.
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_catalog'}}),
                        # use 1 notify plugin with dynamic email
                        Notify(cfg={'s': {'subject': notifications.CATALOG_SUDO_SUBJECT,
                                          'body': notifications.CATALOG_SUDO_BODY,
                                          'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': '_catalog.root_entity._primary_email'}}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        # Cron sweep: queue deletion of stale drafts / old discontinued catalogs.
        orm.Action(
            id='cron',
            arguments={},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    CatalogCronDelete(cfg={
                        'page': 100,
                        'unpublished_life': settings.CATALOG_UNPUBLISHED_LIFE,
                        'discontinued_life': settings.CATALOG_DISCONTINUED_LIFE
                    }),
                    CallbackExec()
                ])
            ]),
        # User-facing duplicate: only schedules catalog_process_duplicate.
        orm.Action(
            id='catalog_duplicate',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                'channel': orm.SuperStringProperty(required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_catalog'}}),
                    CallbackExec(cfg=[('callback',
                                       {'action_id': 'catalog_process_duplicate',
                                        'action_model': '31'},
                                       {'key': '_catalog.key_urlsafe',
                                        'channel': 'input.channel'},
                                       None)])
                ])
            ]),
        # Task-queue worker performing the actual duplication into a new draft.
        orm.Action(
            id='catalog_process_duplicate',
            arguments={
                'key': orm.SuperKeyProperty(kind='31', required=True),
                'channel': orm.SuperStringProperty(required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Duplicate(),
                        Set(cfg={'s': {'_catalog.state': 'draft'},
                                 'rm': ['_catalog.created']}),
                        Write(),
                        # notify duplication process complete via channel
                        Notify(cfg={'s': {'sender': settings.NOTIFY_EMAIL},
                                    'd': {'recipient': 'input.channel',
                                          'catalog_key': '_catalog.key_urlsafe'},
                                    'method': 'channel'}),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ])
    ]

    @classmethod
    def prepare_key(cls, input, **kwargs):
        # Auto-allocated id, parented under the seller key.
        return cls.build_key(None, parent=input.get('seller'))
class AccountLoginInit(orm.BaseModel):
    """Plugin that drives the OAuth2 login flow for an account.

    Configured via ``cfg``:
      - 'get_host_url': callable mapping the request host to an absolute base
        URL; used to build each provider's redirect URI.
      - 'methods': list of OAuth2 provider configs. Each entry is a dict read
        below for 'type', 'redirect_uri' and 'account_info' (plus whatever
        tools.OAuth2Client accepts as keyword arguments).

    Raises OAuth2Error on CSRF state mismatch, provider-reported errors,
    failed token exchange, missing email, or an identity already bound to a
    different account.
    """

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def parse_result_3(self, result):
        # Provider-specific normalizer, looked up dynamically in run() as
        # 'parse_result_<login_method>'. Method "3" reports the email under
        # 'emailAddress'; map it to the common {'id', 'email'} shape.
        return {'id': result['id'], 'email': result['emailAddress']}

    def run(self, context):
        # Defensive reset: cfg must always behave like a dict.
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        get_host_url = self.cfg.get('get_host_url')
        login_methods = self.cfg.get('methods', [])
        context._account = context.model.current_account()
        context.account = context.model.current_account()
        # First permission pass, for the current (possibly guest) account.
        kwargs = {'account': context.account, 'action': context.action}
        tools.rule_prepare(context._account, **kwargs)
        tools.rule_exec(context._account, context.action)
        login_method = context.input.get('login_method')
        error = context.input.get('error')

        def get_redirect_uri(cfg):
            # Absolute redirect URI for a provider, based on the request host.
            return '%s%s' % (get_host_url(
                context.input['__request__'].host), cfg['redirect_uri'])

        if not error:
            # Some providers report failures under 'error_message' instead.
            error = context.input.get('error_message')
        code = context.input.get('code')
        state = context.input.get('state')
        # CSRF protection: the 'state' echoed back by the provider must match
        # the token issued for this account session.
        if code and state != context.account._csrf:
            raise OAuth2Error('state_error')
        # Select the provider config matching the requested login method.
        # NOTE(review): if 'login_method' matches none of the configured
        # methods, 'oauth2_cfg' stays unbound and the client construction
        # below raises NameError — presumably the action's input validation
        # restricts 'login_method' to configured types; confirm.
        for login in login_methods:
            if login['type'] == login_method:
                oauth2_cfg = login.copy()
                oauth2_cfg['redirect_uri'] = get_redirect_uri(oauth2_cfg)
                oauth2_cfg['state'] = context.account._csrf
                break
        client = tools.OAuth2Client(**oauth2_cfg)
        context.output[
            'authorization_url'] = client.get_authorization_code_uri()
        # Also expose authorization URLs for every configured provider, keyed
        # by provider type, so the UI can offer all login options.
        urls = {}
        for cfg in login_methods:
            urls_oauth2_cfg = cfg.copy()
            urls_oauth2_cfg['redirect_uri'] = get_redirect_uri(urls_oauth2_cfg)
            urls_oauth2_cfg['state'] = context.account._csrf
            urls_client = tools.OAuth2Client(**urls_oauth2_cfg)
            urls[urls_oauth2_cfg[
                'type']] = urls_client.get_authorization_code_uri()
        context.output['authorization_urls'] = urls
        if error:
            # Provider reported an error (e.g. the user denied access).
            raise OAuth2Error('rejected_account_access')
        if code:
            # Exchange the authorization code for an access token.
            client.get_token(code)
            if not client.access_token:
                raise OAuth2Error('failed_access_token')
            account_info = oauth2_cfg['account_info']
            info = client.resource_request(url=account_info)
            # Optional provider-specific normalizer (see parse_result_3).
            parse = getattr(self, 'parse_result_%s' % login_method, None)
            if parse:
                info = parse(info)
            if info and 'email' in info:
                identity = oauth2_cfg['type']
                context._identity_id = '%s-%s' % (info['id'], identity)
                # we lowercase the email because datastore data searches are
                # case sensitive
                context._email = info['email'].lower()
                account = context.model.query(
                    context.model.identities.identity ==
                    context._identity_id).get()
                if account:
                    own_account = context.account.key == account.key
                    if context.account._is_guest or own_account:
                        # Guest logging in, or re-login into the same account:
                        # switch the context to the stored account.
                        account.read()
                        context._account = account
                        context.account = account
                    elif not own_account and not context.account._is_guest:
                        # Identity already bound to a different account.
                        raise OAuth2Error('taken_by_other_account')
            else:
                raise OAuth2Error('no_email_provided')
        # Second permission pass, against the possibly switched account.
        kwargs = {'account': context.account, 'action': context.action}
        tools.rule_prepare(context._account, **kwargs)
        tools.rule_exec(context._account, context.action)
class Order(orm.BaseExpando):
    '''Order / cart model (kind 34).

    Cache key conventions used by Get/DeleteCache plugins:
    read: read_<order.account.id>
    search: search_34_<order.account.id>
    '''

    _kind = 34

    # Cache groups invalidated whenever an order is written/deleted: the
    # buyer's read cache, the admin/global search caches, and the seller's
    # and buyer's search caches.
    DELETE_CACHE_POLICY = {
        'group': [
            lambda context: 'read_34_%s' % context._order.key._root._id_str,
            'search_34_admin',
            'search_34',
            lambda context: 'search_34_seller_%s' % context._order.
            seller_reference._root._id_str,
            lambda context: 'search_34_buyer_%s' % context._order.key._root._id_str
        ]
    }

    created = orm.SuperDateTimeProperty('1', required=True, auto_now_add=True)
    updated = orm.SuperDateTimeProperty('2', required=True, auto_now=True)
    state = orm.SuperStringProperty(
        '3', required=True, default='cart',
        choices=('cart', 'order'))  # 'checkout', 'completed', 'canceled'
    date = orm.SuperDateTimeProperty('4', required=True)
    seller_reference = orm.SuperKeyProperty('5', kind='23', required=True)
    billing_address = orm.SuperLocalStructuredProperty('121', '6')
    shipping_address = orm.SuperLocalStructuredProperty('121', '7')
    currency = orm.SuperLocalStructuredProperty('17', '8', required=True)
    untaxed_amount = orm.SuperDecimalProperty('9', required=True, indexed=False)
    tax_amount = orm.SuperDecimalProperty('10', required=True, indexed=False)
    total_amount = orm.SuperDecimalProperty('11', required=True, indexed=False)
    payment_method = orm.SuperStringProperty(
        '12', required=False, choices=settings.AVAILABLE_PAYMENT_METHODS)
    payment_status = orm.SuperStringProperty('13', required=False, indexed=True)
    carrier = orm.SuperLocalStructuredProperty(OrderCarrier, '14')

    _default_indexed = False

    # Virtual (computed / remote) fields: the seller entity, the notify
    # tracker, the order lines and message log (both ancestor queries), and
    # the structured form of the seller reference.
    _virtual_fields = {
        '_seller':
            orm.SuperReferenceStructuredProperty('23',
                                                 autoload=True,
                                                 target_field='seller_reference'),
        '_tracker':
            orm.SuperReferenceProperty('136',
                                       autoload=True,
                                       callback=lambda self: self.get_tracker(),
                                       format_callback=lambda self, value: value),
        '_lines':
            orm.SuperRemoteStructuredProperty(OrderLine, repeated=True, search={
                'default': {
                    'filters': [],
                    'orders': [{'field': 'sequence', 'operator': 'asc'}]
                },
                'cfg': {
                    'indexes': [{
                        'ancestor': True,
                        'filters': [],
                        'orders': [('sequence', ['asc'])]
                    }],
                }
            }),
        '_messages':
            orm.SuperRemoteStructuredProperty(OrderMessage, repeated=True, search={
                'default': {
                    'filters': [],
                    'orders': [{'field': 'created', 'operator': 'desc'}]
                },
                'cfg': {
                    'indexes': [{
                        'ancestor': True,
                        'filters': [],
                        'orders': [('created', ['desc'])]
                    }],
                }
            }),
        '_seller_reference':
            orm.SuperComputedProperty(lambda self: self.seller_reference._structure
                                      if self.seller_reference else None),
    }

    # --- Permission condition helpers -------------------------------------
    # These are plain functions (no self): the rule engine calls them with
    # keyword context (account, entity, action, input, ...).

    def condition_taskqueue(account, **kwargs):
        # Only the task queue service account.
        return account._is_taskqueue

    def condition_cron(account, **kwargs):
        # Only the cron service account.
        return account._is_cron

    def condition_not_guest_and_buyer_and_cart(account, entity, **kwargs):
        # Logged-in buyer (order root is the account) while still a cart.
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "cart"

    def condition_root_or_buyer_or_seller(account, entity, **kwargs):
        # Root admin, the buyer, or the seller of the order.
        if entity._original.seller_reference is None:
            return False
        return account._root_admin or (not account._is_guest and (
            (entity._original.key_root == account.key) or
            (entity._original.seller_reference._root == account.key)))

    def condition_buyer_or_seller(account, entity, **kwargs):
        # The buyer or the seller of the order (no admin override).
        if entity._original.seller_reference is None:
            return False
        return not account._is_guest and (
            (entity._original.key_root == account.key) or
            (entity._original.seller_reference._root == account.key))

    def condition_search(account, action, entity, input, **kwargs):
        # Search allowed for admins, for sellers filtering on their own
        # seller_reference, or for buyers searching their own ancestor.
        return action.key_id_str == "search" and (account._root_admin or (
            (not account._is_guest and
             input["search"]["filters"][0]["field"] == "seller_reference" and
             input["search"]["filters"][0]["value"]._root == account.key) or
            (not account._is_guest and "ancestor" in input["search"] and
             input["search"]["ancestor"]._root == account.key)))

    def condition_pay(action, entity, **kwargs):
        # Payment fields writable only while paying a cart.
        return action.key_id_str == "pay" and entity._original.state == "cart"

    def condition_notify(action, entity, **kwargs):
        # Payment-processor notifications apply only to placed orders.
        return action.key_id_str == "notify" and entity._original.state == "order"

    def condition_update_line(account, entity, action, **kwargs):
        # Buyer editing lines of their own cart.
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "cart" and action.key_id_str == "update_line"

    def condition_state(action, entity, **kwargs):
        # 'state' is writable to "cart" via update_line and to "order" via pay.
        return (action.key_id_str == "update_line" and entity.state == "cart") \
            or (action.key_id_str == "pay" and entity.state == "order")

    def condition_update_and_view_order(account, entity, action, **kwargs):
        # Buyer viewing/updating their own cart.
        return not account._is_guest and entity._original.key_root == account.key \
            and entity._original.state == "cart" \
            and action.key_id_str in ("view_order", "update")

    def cache_group_search(context):
        # Pick the search cache group matching the caller's role: admin,
        # seller (filter on own seller_reference), buyer (own ancestor),
        # or the shared default group.
        key = 'search_34'
        _ancestor = context.input['search'].get('ancestor')
        filters = context.input['search'].get('filters')
        if context.account._root_admin:
            return '%s_admin' % key
        if filters and filters[0]['field'] == 'seller_reference' and filters[
                0]['value']._root == context.account.key:
            return '%s_seller_%s' % (key, context.account.key_id_str)
        if _ancestor and _ancestor._root == context.account.key:
            return '%s_buyer_%s' % (key, context.account.key_id_str)
        return key

    _permissions = [
        # action.key_id_str not in ["search"] and...
        # Included payment_status in field permissions, will have to further
        # analyse exclusion...
        orm.ExecuteActionPermission(
            ('update_line', 'view_order', 'update', 'delete', 'pay'),
            condition_not_guest_and_buyer_and_cart),
        # NOTE(review): ('read') and ('log_message') are parenthesized
        # strings, not 1-tuples — presumably the permission accepts both.
        orm.ExecuteActionPermission(('read'), condition_root_or_buyer_or_seller),
        orm.ExecuteActionPermission(('log_message'), condition_buyer_or_seller),
        orm.ExecuteActionPermission('search', condition_search),
        orm.ExecuteActionPermission('delete', condition_taskqueue),
        orm.ExecuteActionPermission(('cron', 'cron_notify'), condition_cron),
        orm.ExecuteActionPermission('see_messages', condition_buyer_or_seller),
        orm.ExecuteActionPermission('notify', condition_notify),
        orm.ReadFieldPermission(
            ('created', 'updated', 'state', 'date', 'seller_reference',
             'billing_address', 'shipping_address', 'currency', 'untaxed_amount',
             'tax_amount', 'total_amount', 'carrier', '_seller_reference',
             'payment_status', 'payment_method', '_lines', '_messages.created',
             '_messages.agent', '_messages.action', '_messages.body',
             '_messages._action', '_tracker', '_seller.name', '_seller.logo',
             '_seller._stripe_publishable_key', '_seller._currency'),
            condition_root_or_buyer_or_seller),
        orm.WriteFieldPermission(
            ('date', 'seller_reference', 'currency', 'untaxed_amount',
             'tax_amount', 'total_amount', 'payment_method', '_lines', 'carrier'),
            condition_update_line),
        orm.WriteFieldPermission('state', condition_state),
        orm.WriteFieldPermission(('payment_status', '_messages'), condition_pay),
        orm.WriteFieldPermission(('payment_status', '_messages'),
                                 condition_notify),
        orm.WriteFieldPermission('_messages', condition_buyer_or_seller),
        orm.WriteFieldPermission(
            ('date', 'shipping_address', 'billing_address', '_lines', 'carrier',
             'untaxed_amount', 'tax_amount', 'total_amount', 'payment_method'),
            condition_update_and_view_order),
        # Product snapshot fields on lines are frozen once captured.
        orm.DenyWriteFieldPermission(
            ('_lines.taxes', '_lines.product.reference', '_lines.product.name',
             '_lines.product.code', '_lines.product.description',
             '_lines.product.unit_price', '_lines.product.mass',
             '_lines.product.volume'),
            condition_update_and_view_order)
    ]

    # --- Actions ----------------------------------------------------------
    # Plugin order inside each group is significant: formatting plugins feed
    # later ones, and transactional groups run the datastore writes.
    _actions = [
        orm.Action(
            id='update_line',
            arguments={
                'buyer': orm.SuperKeyProperty(kind='19', required=True),
                'quantity': orm.SuperDecimalProperty(required=True),
                'product': orm.SuperKeyProperty(kind='28', required=True)
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    OrderInit(),
                    OrderPluginExec(cfg={'kinds': ['117']}),
                    OrderProductSpecsFormat(),
                    OrderUpdateLine(),
                    OrderLineRemove(),
                    OrderStockManagement(),
                    OrderLineFormat(),
                    OrderCarrierFormat(),
                    OrderFormat(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_order'}})
                    ])
            ]),
        orm.Action(
            id='view_order',
            arguments={
                'buyer': orm.SuperKeyProperty(kind='19', required=True),
                'seller': orm.SuperKeyProperty(kind='23', required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={
                        'group': lambda context: 'read_34_%s' % context.input[
                            'buyer']._root._id_str,
                        'cache': ['account']
                    }),
                    OrderInit(),
                    OrderPluginExec(
                        cfg={'kinds': ['117']}
                    ),  # order currency must be available for everyone
                    OrderProductSpecsFormat(),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_order'}}),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='read',
            arguments={
                'key': orm.SuperKeyProperty(kind='34', required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={
                        'group': lambda context: 'read_34_%s' % context.
                        input['key']._root._id_str,
                        'cache': ['account']
                    }),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Set(cfg={'d': {'output.entity': '_order'}}),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='update',
            arguments={
                'key': orm.SuperKeyProperty(kind='34', required=True),
                'billing_address': orm.SuperLocalStructuredProperty('14'),
                'shipping_address': orm.SuperLocalStructuredProperty('14'),
                'carrier': orm.SuperVirtualKeyProperty(kind='113'),
                '_lines': orm.SuperLocalStructuredProperty(OrderLine,
                                                           repeated=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    # limit=0 loads all lines so the full set can be replaced.
                    Read(cfg={
                        'read': {
                            '_lines': {
                                'config': {
                                    'search': {'options': {'limit': 0}}
                                }
                            }
                        }
                    }),
                    Set(cfg={'d': {'_order._lines': 'input._lines'}}),
                    OrderLineRemove(),
                    OrderStockManagement(),
                    OrderProductSpecsFormat(),
                    OrderFormat(
                    ),  # Needed for Carrier. Alternative is to break down this plugin in two, pre-carrier & post-carrier one.
                    OrderPluginExec(),
                    OrderLineFormat(),
                    OrderCarrierFormat(),
                    OrderFormat(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_order'}})
                    ])
            ]),
        orm.Action(
            id='delete',
            arguments={'key': orm.SuperKeyProperty(kind='34', required=True)},
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Delete(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_order'}})
                    ])
            ]),
        orm.Action(
            id='search',
            arguments={
                'search': orm.SuperSearchProperty(
                    default={
                        'filters': [],
                        'orders': [{'field': 'updated', 'operator': 'desc'}]
                    },
                    cfg={
                        'search_arguments': {
                            'kind': '34',
                            'options': {'limit': settings.SEARCH_PAGE}
                        },
                        'ancestor_kind': '19',
                        'search_by_keys': True,
                        'filters': {
                            'name': orm.SuperStringProperty(),
                            'key': orm.SuperVirtualKeyProperty(kind='34',
                                                               searchable=False),
                            'state': orm.SuperStringProperty(
                                repeated=True, choices=('cart', 'order')),
                            'seller_reference': orm.SuperKeyProperty(
                                kind='23', searchable=False)
                        },
                        # Allowed filter/order combinations (datastore indexes).
                        'indexes': [{
                            'orders': [('updated', ['asc', 'desc'])]
                        }, {
                            'orders': [('created', ['asc', 'desc'])]
                        }, {
                            'filters': [('key', ['=='])]
                        }, {
                            'filters': [('state', ['IN'])],
                            'orders': [('updated', ['asc', 'desc'])]
                        }, {
                            'ancestor': True,
                            'filters': [('state', ['IN'])],
                            'orders': [('updated', ['desc'])]
                        }, {
                            'filters': [('seller_reference', ['=='])],
                            'orders': [('updated', ['desc'])]
                        }]
                    })
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    GetCache(cfg={
                        'group': cache_group_search,
                        'cache': ['account']
                    }),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    Search(),
                    RulePrepare(cfg={'path': '_entities'}),
                    Set(cfg={
                        'd': {
                            'output.entities': '_entities',
                            'output.cursor': '_cursor',
                            'output.more': '_more'
                        }
                    }),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='cron',
            arguments={},
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    RulePrepare(),
                    RuleExec(),
                    # Queues deletion of stale carts / unpaid orders.
                    OrderCronDelete(cfg={
                        'page': 100,
                        'cart_life': settings.ORDER_CART_LIFE,
                        'unpaid_order_life': settings.ORDER_UNPAID_LIFE
                    }),
                    CallbackExec()
                ])
            ]),
        orm.Action(
            id='notify',
            skip_csrf=True,  # called by external payment processors
            arguments={
                'payment_method': orm.SuperStringProperty(
                    required=True, choices=settings.AVAILABLE_PAYMENT_METHODS),
                'request': orm.SuperPickleProperty(),
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    OrderNotify(cfg={
                        'options': {
                            'paypal': {'webscr': settings.PAYPAL_WEBSCR}
                        }
                    }),
                    OrderSetMessage(cfg={
                        'expando_fields': 'new_message_fields',
                        'expando_values': 'new_message'
                    }),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        RulePrepare(),
                        Set(cfg={'d': {'output.entity': '_order'}}),
                        # both seller and buyer must get the message
                        Notify(cfg={
                            's': {
                                'sender': settings.NOTIFY_EMAIL,
                                'for_seller': False,
                                'subject': notifications.ORDER_NOTIFY_SUBJECT,
                                'body': notifications.ORDER_NOTIFY_BODY
                            },
                            'd': {'recipient': '_order.buyer_email'}
                        }),
                        Notify(cfg={
                            's': {
                                'sender': settings.NOTIFY_EMAIL,
                                'for_seller': True,
                                'subject': notifications.ORDER_NOTIFY_SUBJECT,
                                'body': notifications.ORDER_NOTIFY_BODY
                            },
                            'd': {'recipient': '_order.seller_email'}
                        }),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        orm.Action(
            id='pay',
            arguments={
                'key': orm.SuperKeyProperty(kind='34', required=True),
                'token': orm.SuperStringProperty(required=True),
                'read_arguments': orm.SuperJsonProperty()
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(cfg={
                        'read': {
                            '_lines': {
                                'config': {
                                    'search': {'options': {'limit': 0}}
                                }
                            }
                        }
                    })
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        # Transaction failures can still cause payment charges to succeed.
                        # Isolate as little plugins as possible in transaction to minimize transaction failures.
                        # We can also implement a two step payment to make the payment more robust.
                        OrderPay(),
                        OrderSetMessage(cfg={
                            'expando_fields': 'new_message_fields',
                            'expando_values': 'new_message'
                        }),
                        RulePrepare(),
                        RuleExec(),
                        Write(),
                        RulePrepare(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY),
                        Set(cfg={'d': {'output.entity': '_order'}})
                    ]),
                orm.PluginGroup(plugins=[
                    # both seller and buyer must get the message
                    Notify(cfg={
                        's': {
                            'sender': settings.NOTIFY_EMAIL,
                            'for_seller': False,
                            'subject': notifications.ORDER_NOTIFY_SUBJECT,
                            'body': notifications.ORDER_NOTIFY_BODY
                        },
                        'd': {'recipient': '_order.buyer_email'}
                    }),
                    Notify(cfg={
                        's': {
                            'sender': settings.NOTIFY_EMAIL,
                            'for_seller': True,
                            'subject': notifications.ORDER_NOTIFY_SUBJECT,
                            'body': notifications.ORDER_NOTIFY_BODY
                        },
                        'd': {'recipient': '_order.seller_email'}
                    })
                ])
            ]),
        orm.Action(
            id='see_messages',
            arguments={'key': orm.SuperKeyProperty(kind='34', required=True)},
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        OrderNotifyTrackerSeen(),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        orm.Action(
            id='log_message',
            arguments={
                'key': orm.SuperKeyProperty(kind='34', required=True),
                'message': orm.SuperTextProperty(
                    required=True, max_size=settings.MAX_MESSAGE_SIZE)
            },
            _plugin_groups=[
                orm.PluginGroup(plugins=[
                    Context(),
                    Read(),
                    OrderSetMessage(),
                    RulePrepare(),
                    RuleExec()
                ]),
                orm.PluginGroup(
                    transactional=True,
                    plugins=[
                        Write(),
                        Set(cfg={'d': {'output.entity': '_order'}}),
                        OrderNotifyTrackerSet(
                            cfg=settings.ORDER_CRON_NOTIFY_TIMER),
                        DeleteCache(cfg=DELETE_CACHE_POLICY)
                    ])
            ]),
        orm.Action(
            id='cron_notify',
            arguments={},
            _plugin_groups=[
                orm.PluginGroup(
                    plugins=[Context(), Read(), RulePrepare(), RuleExec()]),
                orm.PluginGroup(
                    transactional=False,
                    plugins=[
                        # This plugin isolates its parts in transaction, so the group is wrapped in transaction.
                        OrderCronNotify(cfg={
                            's': {
                                'sender': settings.NOTIFY_EMAIL,
                                'subject': notifications.ORDER_LOG_MESSAGE_SUBJECT,
                                'body': notifications.ORDER_LOG_MESSAGE_BODY
                            },
                            'hours': settings.ORDER_CRON_NOTIFY_TIMER['hours'],
                            'minutes': settings.ORDER_CRON_NOTIFY_TIMER['minutes'],
                            'seconds': settings.ORDER_CRON_NOTIFY_TIMER['seconds']
                        }),
                        CallbackExec()
                    ])
            ])
    ]

    @property
    def buyer_email(self):
        # Primary email of the buyer (the order's root account entity).
        account = self.root_entity
        account.read()
        return account._primary_email

    @property
    def seller_email(self):
        # Primary email of the seller's root account.
        account = self.seller_reference._root.entity
        account.read()
        return account._primary_email

    @property
    def seller_and_buyer_emails(self):
        # Both notification recipients, seller first.
        emails = []
        emails.append(self.seller_email)
        emails.append(self.buyer_email)
        return emails

    def get_tracker(self):
        """Return the OrderNotifyTracker for this order, or None if the
        order has no key yet or no tracker entity exists."""
        tracker = None
        if self.key:
            tracker = OrderNotifyTracker.build_key(self.key.urlsafe()).get()
        return tracker
class CountryUpdateWrite(orm.BaseModel):
    """Plugin that (re)imports countries and their subdivisions from an XML
    fixture file into the datastore.

    Configured via ``cfg``:
      - 'file': path to the XML fixture (required; aborts the action if
        missing).
      - 'debug_environment': read but currently unused.

    The fixture is expected to contain <data> sections where — judging from
    the indexing below — section [1] holds country records and sections [2]
    and [3] hold subdivision records; confirm against the actual fixture.
    """

    cfg = orm.SuperJsonProperty('1', indexed=False, required=True, default={})

    def run(self, context):
        # Defensive reset: cfg must always behave like a dict.
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        update_file_path = self.cfg.get('file', None)
        debug_environment = self.cfg.get('debug_environment', False)  # unused
        if not update_file_path:
            raise orm.TerminateAction()
        Country = context.models['12']
        CountrySubdivision = context.models['13']
        # Py2 builtin file(); the whole fixture is read and parsed at once.
        with file(update_file_path) as f:
            tree = ElementTree.fromstring(f.read())
        root = tree.findall('data')
        put_entities = []

        def make_complete_name_for_subdivision(entity, parent_id, process):
            # Build the "A / B / C" path name by walking parent_record links
            # through already-processed subdivisions ('process' maps urlsafe
            # key -> entity). NOTE(review): 'parent_id' is unused here.
            path = entity
            names = []
            while True:
                parent = None
                parent_key = getattr(path, 'parent_record')
                if parent_key:
                    parent = process.get(parent_key.urlsafe())
                if not parent:
                    names.append(getattr(path, 'name'))
                    break
                else:
                    names.append(getattr(path, 'name'))
                    path = parent
            names.reverse()
            return unicode(' / ').join(names)

        # --- Pass 1: countries (data section [1]) -------------------------
        i = 0
        no_regions = {}  # country key -> Country with no subdivision seen yet
        for child in root[1]:
            i += 1
            dic = dict()
            dic['id'] = child.attrib['id']
            for sub_child in child:
                name = sub_child.attrib.get('name')
                if name is None:
                    continue
                if sub_child.text:
                    dic[name] = sub_child.text
            country = Country(name=dic['name'],
                              id=dic['id'],
                              code=dic['code'],
                              active=True)
            # Bulk import: bypass rule/record engines and caches.
            country._use_rule_engine = False
            country._use_record_engine = False
            country._use_memcache = False
            country._use_cache = False
            put_entities.append(country)
            no_regions[country.key] = country
        # --- Pass 2: subdivisions (data sections [2] and [3]) -------------
        processed_keys = {}  # urlsafe key -> subdivision (for name building)
        processed_ids = {}  # fixture id -> subdivision (for parent lookup)
        i = 0
        for child in [c for c in root[2]] + [c for c in root[3]]:
            i += 1
            dic = dict()
            dic['id'] = child.attrib['id']
            for sub_child in child:
                name = sub_child.attrib.get('name')
                if name is None:
                    continue
                if sub_child.text:
                    dic[name] = sub_child.text
                # Reference attributes ('ref') take precedence over text.
                if 'ref' in sub_child.attrib:
                    dic[name] = sub_child.attrib['ref']
            country_sub_division_values = dict(name=dic['name'],
                                               id=dic['id'],
                                               type=dic['type'],
                                               code=dic['code'],
                                               active=True)
            if 'country' in dic:
                country_key = Country.build_key(dic['country'])
                # This country has at least one subdivision.
                no_regions.pop(country_key, None)
                country_sub_division_values['parent'] = country_key
            if 'parent' in dic:
                # Link to the parent subdivision if it was already processed
                # (relies on fixture ordering: parents before children).
                parent = processed_ids.get(dic['parent'])
                if parent:
                    country_sub_division_values['parent_record'] = parent.key
            country_sub_division = CountrySubdivision(**country_sub_division_values)
            country_sub_division._use_cache = False
            country_sub_division._use_rule_engine = False
            country_sub_division._use_record_engine = False
            country_sub_division._use_memcache = False
            country_sub_division.complete_name = ''
            if 'parent' in dic:
                country_sub_division.complete_name = make_complete_name_for_subdivision(
                    country_sub_division, dic['parent'], processed_keys)
            processed_keys[country_sub_division.key_urlsafe] = country_sub_division
            processed_ids[dic['id']] = country_sub_division
            country_sub_division._use_rule_engine = False  # redundant (set above)
            put_entities.append(country_sub_division)
        orm.put_multi(put_entities)
        # --- Pass 3: synthesize a single subdivision for countries that
        # had none, so every country has at least one selectable region.
        put_entities = []
        for country_key, country in no_regions.iteritems():
            country_sub_division = CountrySubdivision(name=country.name,
                                                      parent=country_key,
                                                      id=country_key.id(),
                                                      type='country',
                                                      code=country.code,
                                                      active=True)
            country_sub_division._use_cache = False
            country_sub_division._use_rule_engine = False
            country_sub_division._use_record_engine = False
            country_sub_division._use_memcache = False
            country_sub_division.complete_name = country.name
            put_entities.append(country_sub_division)
        orm.put_multi(put_entities)