def run(self, context):
    # Builds the leaf categories used for selection (about 3.8k of them), each with its complete name.
    if not isinstance(self.cfg, dict):
        self.cfg = {}
    update_file_path = self.cfg.get('file', None)
    debug_environment = self.cfg.get('debug_environment', False)
    if not update_file_path:
        raise orm.TerminateAction()
    Category = context.models['24']
    gets = datastore.Query('24', namespace=None, keys_only=True).Run()
    keys = list(gets)
    datastore.Delete(keys)
    categories = []
    put_entities = []
    structure = {}
    with file(update_file_path) as f:
        for line in f:
            if not line.startswith('#'):
                item = line.replace('\n', '')
                categories.append(item)
                full_path = item.split(' > ')
                current_structure = structure
                for xi, path in enumerate(full_path):
                    if path not in current_structure:
                        current_structure[path] = {}
                    current_structure = current_structure[path]
    for i, item in enumerate(categories):
        full_path = item.split(' > ')
        path_map = structure
        current = full_path
        parent = current[:-1]
        category = {}
        category['id'] = hashlib.md5(''.join(current)).hexdigest()
        if parent:
            category['parent_record'] = Category.build_key(hashlib.md5(''.join(parent)).hexdigest())
        else:
            category['parent_record'] = None
        category['name'] = ' / '.join(current)
        category['state'] = ['indexable']
        leaf = False
        for path in full_path:
            if path in path_map:
                path_map = path_map[path]
                if not len(path_map):
                    leaf = True
        if leaf:
            category['state'].append('visible')  # marks the category as a leaf
        category = Category(**category)
        category._use_rule_engine = False
        category._use_record_engine = False
        category._use_memcache = False
        category._use_cache = False
        put_entities.append(category)
    tools.log.debug('Writing %s categories' % len(put_entities))
    orm.put_multi(put_entities)
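# A minimal stand-alone sketch (not part of the plugin above) of the taxonomy file format it
# expects and how it derives ids, complete names, and the leaf flag. The sample lines are
# hypothetical; real files follow the "A > B > C" convention with '#' comment lines, and the
# id scheme mirrors the Python 2 md5 usage above.
import hashlib

sample_lines = [
    '# hypothetical taxonomy sample',
    'Apparel & Accessories',
    'Apparel & Accessories > Clothing',
]
structure = {}
for line in sample_lines:
    if line.startswith('#'):
        continue
    node = structure
    for segment in line.split(' > '):
        node = node.setdefault(segment, {})
for line in sample_lines:
    if line.startswith('#'):
        continue
    parts = line.split(' > ')
    category_id = hashlib.md5(''.join(parts)).hexdigest()  # same id scheme as the plugin
    complete_name = ' / '.join(parts)
    node = structure
    for segment in parts:
        node = node[segment]
    is_leaf = not node  # leaf == the path has no children in the tree
    print('%s  %s  leaf=%s' % (category_id[:8], complete_name, is_leaf))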
def run(self, context):
    if not isinstance(self.cfg, dict):
        self.cfg = {}
    update_file_path = self.cfg.get('file', None)
    if not update_file_path:
        raise orm.TerminateAction()
    Unit = context.models['17']
    keys = Unit.query(Unit.measurement != 'Currency').fetch(keys_only=True)
    orm.delete_multi(keys)  # delete all units except currencies
    with file(update_file_path) as f:
        tree = ElementTree.fromstring(f.read())
        root = tree.findall('data')
        measurements = {}
        uoms = []
        for child in root[0]:
            if child.attrib.get('model') == 'product.uom.category':
                for sub_child in child:
                    name = sub_child.text
                    measurements[child.attrib.get('id')] = name
        for child in root[0]:
            if child.attrib.get('model') == 'product.uom':
                uom = {'id': child.attrib.get('id')[4:]}
                uom_data = {}
                for sub_child in child:
                    uom_data[sub_child.attrib.get('name')] = sub_child
                rounding = uom_data.get('rounding')
                digits = uom_data.get('digits')
                if rounding is not None:
                    rounding = Decimal(eval(rounding.attrib.get('eval')))
                if digits is not None:
                    digits = long(eval(digits.attrib.get('eval')))
                if digits is None:
                    digits = 3
                uom.update({
                    'name': uom_data['name'].text,
                    'active': True,
                    'symbol': uom_data['symbol'].text,
                    'measurement': measurements.get(uom_data['category'].attrib.get('ref')),
                    'factor': Decimal(eval(uom_data['factor'].attrib.get('eval'))),
                    'rate': Decimal(eval(uom_data['rate'].attrib.get('eval'))),
                    'rounding': rounding,
                    'digits': digits
                })
                uoms.append(uom)
    put_entities = [Unit(**d) for d in uoms]
    for entity in put_entities:
        entity._use_rule_engine = False
    orm.put_multi(put_entities)
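# A minimal, self-contained sketch of the XML shape the unit importer above assumes
# (OpenERP/Tryton-style <record>/<field> entries inside a <data> block). The root tag,
# record ids, and values here are hypothetical; only the structure mirrors what the
# parser reads (name, symbol, category ref, factor, rate, rounding, digits).
from decimal import Decimal
from xml.etree import ElementTree

SAMPLE_UOM_XML = """
<root>
  <data>
    <record model="product.uom.category" id="uom_cat_length">
      <field name="name">Length</field>
    </record>
    <record model="product.uom" id="uom_meter">
      <field name="name">Meter</field>
      <field name="symbol">m</field>
      <field name="category" ref="uom_cat_length"/>
      <field name="factor" eval="1.0"/>
      <field name="rate" eval="1.0"/>
      <field name="rounding" eval="0.01"/>
      <field name="digits" eval="2"/>
    </record>
  </data>
</root>
"""

data = ElementTree.fromstring(SAMPLE_UOM_XML).findall('data')[0]
for record in data:
    if record.attrib.get('model') == 'product.uom':
        fields = dict((f.attrib.get('name'), f) for f in record)
        # the plugin strips a 4-character prefix ('uom_') from the record id
        print('%s %s factor=%s' % (record.attrib['id'][4:],
                                   fields['name'].text,
                                   Decimal(fields['factor'].attrib['eval'])))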
def run(self, context):
    if not isinstance(self.cfg, dict):
        self.cfg = {}
    active = self.cfg.get('active', True)
    if not active:
        return
    CacheGroup = context.models[self.cfg.get('kind', '135')]
    cache = self.cfg.get('cache', [])
    group_id = self.cfg.get('group', None)
    if not isinstance(group_id, (list, tuple)) and group_id is not None:
        group_id = [group_id]
    if group_id is not None:
        group_ids = group_id[:]
        for i, g in enumerate(group_ids):
            if callable(g):
                thing = g(context)
                if thing is not None:
                    group_ids[i] = thing
                else:
                    group_ids.remove(g)
        group_id = group_ids
    dcache_driver = self.cfg.get('dcache', [])
    cache_drivers = []
    all_prerequisites = ['auth', 'guest', context.account.key_id_str]
    for driver in cache:
        if callable(driver):
            driver = driver(context)
        if driver is None:
            continue
        user = driver == 'account'
        if not context.account._is_guest:
            if user:
                cache_drivers.append(context.account.key_id_str)
            if driver == 'auth':
                cache_drivers.append('auth')
            if driver == 'admin' and context.account._root_admin:
                cache_drivers.append('admin')
        elif driver == 'guest':
            cache_drivers.append('guest')
        if driver == 'all' and not any(item in cache_drivers for item in all_prerequisites):
            cache_drivers.append('all')
    for d in dcache_driver:
        cache_drivers.append(tools.get_attr(context, d))
    cache_drivers = set(cache_drivers)
    key = self.cfg.get('key')
    if callable(key):
        key = key(context)
    if not key:
        key = hashlib.md5(tools.json_dumps(context.raw_input)).hexdigest()
    data = None

    def build_key(driver, key, group_key):
        out = '%s_%s' % (driver, key)
        if group_key:
            out += '_%s' % group_key._id_str
        return hashlib.md5(out).hexdigest()

    if self.getter:
        group_key = None
        if group_id:
            first_group_id = group_id[0]
            group_key = CacheGroup.build_key(first_group_id)

        def do_save(data):
            queue = {}
            for driver in cache_drivers:
                k = build_key(driver, key, group_key)
                queue[k] = zlib.compress(data)
            try:
                tools.mem_set_multi(queue)
            except ValueError:
                # failed writing because the payload exceeds 1MB -- this could be fixed by
                # chunking `data`, but for now we do not need it
                tools.log.error('Failed saving response because it exceeds 1MB. Queue keys %s, group %s, drivers %s. Input: %s'
                                % (queue, group_key, cache_drivers, context.input))

        saver = {'do_save': do_save}
        found = None
        for driver in cache_drivers:
            k = build_key(driver, key, group_key)
            active_k = '%s_active' % k
            data = tools.mem_get_multi([active_k, k])
            if data:
                cache_hit = k in data
                if not cache_hit:
                    continue
                if group_key and cache_hit and not data.get(active_k):
                    # the taskqueue has not finished storing the key yet; the cache will become
                    # available as soon as it does
                    tools.log.debug('Cache hit at %s but waiting for %s' % (k, active_k))
                    return
                try:
                    found = zlib.decompress(data[k])
                except Exception:
                    found = None
                    tools.log.warn('Failed unpacking memcache data for key %s (group %s, driver %s). Input: %s. Memcache key deleted.'
                                   % (k, group_key, driver, context.input))
                    tools.mem_delete_multi([k, active_k])
                break
        if found:
            context.cache = {'value': found}
            raise orm.TerminateAction('Got cache with key %s from %s drivers using group %s.'
                                      % (k, cache_drivers, group_key))
        else:
            keys = []
            for driver in cache_drivers:
                keys.append(build_key(driver, key, group_key))
            if keys:
                # keys are compressed because of the taskqueue limit of 100k per request payload
                keys = base64.b64encode(zlib.compress(','.join(keys)))
                if group_key:
                    tools.log.info('Scheduling group cache storage for group %s and cache drivers %s'
                                   % (group_key, cache_drivers))
                    context._callbacks.append(('cache', {'action_id': 'update',
                                                         'keys': keys,
                                                         'ids': [group_key._id_str],
                                                         'action_model': '135'}))
                else:
                    tools.log.warn('No cache for group %s with cache drivers %s'
                                   % (group_key, cache_drivers))
            context.cache = saver
    else:
        tools.mem_delete_multi([build_key(driver, key, None) for driver in cache_drivers])
        if hasattr(context, 'delete_cache_groups'):
            if not group_id:
                group_id = []
            group_id.extend(context.delete_cache_groups)
        if group_id:
            keys = []
            satisfy = self.cfg.get('satisfy', {})
            for spec in satisfy:
                groups, callback = spec
                for group in group_id[:]:
                    if group in groups:
                        if not callback(context, group):
                            group_id.remove(group)
            group_keys = [CacheGroup.build_key(id) for id in group_id]
            # this can cause an "operating on multiple entity groups" error; if that happens,
            # move the DeleteCache plugin out of the transaction, since it does not need one --
            # 25 entity groups is the limit and we usually operate on at most 5 groups per flush
            groups = orm.get_multi(group_keys)
            for group in groups:
                if group:
                    keys.extend(group.keys)
            for k in keys[:]:
                keys.append('%s_active' % k)
            tools.mem_delete_multi(keys)
            tools.log.info('Deleted cache for group(s) %s' % group_id)
            orm.delete_multi(group_keys)
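# A minimal sketch (hypothetical cfg, not taken from the codebase) of how the caching plugin
# above is typically configured and how its memcache keys are composed. The cfg values and the
# 'catalog_281' group id are illustrative only; the key scheme mirrors build_key() above:
# md5('<driver>_<key>[_<group id>]').
import hashlib

example_cfg = {
    'active': True,
    'cache': ['auth', 'guest'],   # driver names resolved per request account
    'group': 'catalog_281',       # optional cache group id (string, list, or callable)
    'key': None,                  # falls back to md5 of the JSON-encoded raw input
}

def example_build_key(driver, key, group_id=None):
    out = '%s_%s' % (driver, key)
    if group_id:
        out += '_%s' % group_id
    return hashlib.md5(out).hexdigest()

request_key = hashlib.md5('{"action_id": "search"}').hexdigest()  # stand-in for raw input
print(example_build_key('guest', request_key, example_cfg['group']))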
def run(self, context):
    if not isinstance(self.cfg, dict):
        self.cfg = {}
    update_file_path = self.cfg.get('file', None)
    if not update_file_path:
        raise orm.TerminateAction()
    Unit = context.models['17']
    keys = Unit.query(Unit.measurement == 'Currency').fetch(keys_only=True)
    orm.delete_multi(keys)  # delete all currencies
    with file(update_file_path) as f:
        tree = ElementTree.fromstring(f.read())
        root = tree.findall('data')
        uoms = []

        def __text(item, key):
            value = item.get(key)
            if value is not None:
                if value.text == 'None' or value.text is None:
                    return None
                return str(value.text)
            return value

        def __eval(item, key):
            value = item.get(key)
            if value == 'None':
                value = None
            if value is not None:
                evaled = value.attrib.get('eval')
                if evaled == 'None' or evaled is None:
                    return None
                return eval(evaled)
            return value

        for child in root[1]:
            if child.attrib.get('model') == 'currency.currency':
                uom = {'id': child.attrib.get('id')}
                uom_data = {}
                for sub_child in child:
                    uom_data[sub_child.attrib.get('name')] = sub_child
                rounding = uom_data.get('rounding')
                digits = uom_data.get('digits')
                grouping = uom_data.get('mon_grouping')
                if rounding is not None:
                    rounding = Decimal(eval(rounding.attrib.get('eval')))
                if digits is not None:
                    digits = long(eval(digits.attrib.get('eval')))
                if grouping is not None:
                    grouping = eval(grouping.text)
                else:
                    grouping = []
                if digits is None:
                    digits = 2
                uom.update({
                    'measurement': 'Currency',
                    'name': uom_data['name'].text,
                    'code': uom_data['code'].text,
                    'numeric_code': uom_data['numeric_code'].text,
                    'symbol': uom_data['symbol'].text,
                    'rounding': rounding,
                    'digits': digits,
                    'grouping': grouping,
                    'decimal_separator': __text(uom_data, 'mon_decimal_point'),
                    'thousands_separator': __text(uom_data, 'mon_thousands_sep'),
                    'positive_sign_position': __eval(uom_data, 'p_sign_posn'),
                    'negative_sign_position': __eval(uom_data, 'n_sign_posn'),
                    'positive_sign': __text(uom_data, 'positive_sign'),
                    'negative_sign': __text(uom_data, 'negative_sign'),
                    'positive_currency_symbol_precedes': __eval(uom_data, 'p_cs_precedes'),
                    'negative_currency_symbol_precedes': __eval(uom_data, 'n_cs_precedes'),
                    'positive_separate_by_space': __eval(uom_data, 'p_sep_by_space'),
                    'negative_separate_by_space': __eval(uom_data, 'n_sep_by_space'),
                    'active': True
                })
                uoms.append(uom)
    put_entities = [Unit(**d) for d in uoms]
    for entity in put_entities:
        entity._use_rule_engine = False
    orm.put_multi(put_entities)
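# A compact sketch of the currency record layout the importer above expects in the second
# <data> block. The field names are exactly the ones the parser reads; the record id and the
# USD-style values shown are illustrative only.
SAMPLE_CURRENCY_RECORD = """
<record model="currency.currency" id="usd">
  <field name="name">US Dollar</field>
  <field name="code">USD</field>
  <field name="numeric_code">840</field>
  <field name="symbol">$</field>
  <field name="rounding" eval="0.01"/>
  <field name="digits" eval="2"/>
  <field name="mon_grouping">[3, 3, 0]</field>
  <field name="mon_decimal_point">.</field>
  <field name="mon_thousands_sep">,</field>
  <field name="p_sign_posn" eval="1"/>
  <field name="n_sign_posn" eval="1"/>
  <field name="p_cs_precedes" eval="True"/>
  <field name="n_cs_precedes" eval="True"/>
  <field name="p_sep_by_space" eval="False"/>
  <field name="n_sep_by_space" eval="False"/>
</record>
"""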
def run(self, context):
    if not isinstance(self.cfg, dict):
        self.cfg = {}
    update_file_path = self.cfg.get('file', None)
    debug_environment = self.cfg.get('debug_environment', False)
    if not update_file_path:
        raise orm.TerminateAction()
    Country = context.models['12']
    CountrySubdivision = context.models['13']
    with file(update_file_path) as f:
        tree = ElementTree.fromstring(f.read())
        root = tree.findall('data')
        put_entities = []

        def make_complete_name_for_subdivision(entity, parent_id, process):
            path = entity
            names = []
            while True:
                parent = None
                parent_key = getattr(path, 'parent_record')
                if parent_key:
                    parent = process.get(parent_key.urlsafe())
                if not parent:
                    names.append(getattr(path, 'name'))
                    break
                else:
                    names.append(getattr(path, 'name'))
                    path = parent
            names.reverse()
            return unicode(' / ').join(names)

        i = 0
        no_regions = {}
        for child in root[1]:
            i += 1
            dic = dict()
            dic['id'] = child.attrib['id']
            for sub_child in child:
                name = sub_child.attrib.get('name')
                if name is None:
                    continue
                if sub_child.text:
                    dic[name] = sub_child.text
            country = Country(name=dic['name'], id=dic['id'], code=dic['code'], active=True)
            country._use_rule_engine = False
            country._use_record_engine = False
            country._use_memcache = False
            country._use_cache = False
            put_entities.append(country)
            no_regions[country.key] = country
        processed_keys = {}
        processed_ids = {}
        i = 0
        for child in [c for c in root[2]] + [c for c in root[3]]:
            i += 1
            dic = dict()
            dic['id'] = child.attrib['id']
            for sub_child in child:
                name = sub_child.attrib.get('name')
                if name is None:
                    continue
                if sub_child.text:
                    dic[name] = sub_child.text
                if 'ref' in sub_child.attrib:
                    dic[name] = sub_child.attrib['ref']
            country_sub_division_values = dict(name=dic['name'], id=dic['id'], type=dic['type'],
                                               code=dic['code'], active=True)
            if 'country' in dic:
                country_key = Country.build_key(dic['country'])
                no_regions.pop(country_key, None)
                country_sub_division_values['parent'] = country_key
            if 'parent' in dic:
                parent = processed_ids.get(dic['parent'])
                if parent:
                    country_sub_division_values['parent_record'] = parent.key
            country_sub_division = CountrySubdivision(**country_sub_division_values)
            country_sub_division._use_cache = False
            country_sub_division._use_rule_engine = False
            country_sub_division._use_record_engine = False
            country_sub_division._use_memcache = False
            country_sub_division.complete_name = ''
            if 'parent' in dic:
                country_sub_division.complete_name = make_complete_name_for_subdivision(
                    country_sub_division, dic['parent'], processed_keys)
            processed_keys[country_sub_division.key_urlsafe] = country_sub_division
            processed_ids[dic['id']] = country_sub_division
            put_entities.append(country_sub_division)
        orm.put_multi(put_entities)
        put_entities = []
        for country_key, country in no_regions.iteritems():
            country_sub_division = CountrySubdivision(name=country.name, parent=country_key,
                                                      id=country_key.id(), type='country',
                                                      code=country.code, active=True)
            country_sub_division._use_cache = False
            country_sub_division._use_rule_engine = False
            country_sub_division._use_record_engine = False
            country_sub_division._use_memcache = False
            country_sub_division.complete_name = country.name
            put_entities.append(country_sub_division)
        orm.put_multi(put_entities)
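# A minimal stand-alone sketch of how make_complete_name_for_subdivision() above walks the
# parent_record chain and joins names root-first with ' / '. The Node class and sample names
# are hypothetical; the real code operates on CountrySubdivision entities keyed through
# processed_keys.
class Node(object):
    def __init__(self, name, parent=None):
        self.name = name
        self.parent = parent

def complete_name(node):
    names = []
    while node is not None:
        names.append(node.name)
        node = node.parent
    names.reverse()
    return ' / '.join(names)

state = Node('California')
county = Node('Los Angeles', parent=state)
print(complete_name(county))  # -> 'California / Los Angeles'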