def get_all_access_restrictions():
    """Return the set of all AccessRestriction pks, cached per map update."""
    key = 'all_access_restrictions:%s' % MapUpdate.current_cache_key()
    restriction_ids = cache.get(key, None)
    if restriction_ids is None:
        restriction_ids = set(AccessRestriction.objects.values_list('pk', flat=True))
        cache.set(key, restriction_ids, 300)
    return restriction_ids
def get_fields(cls):
    """Return cls.build_fields(), cached until the map-update cache key changes.

    Uses double-checked locking: the cache key is re-checked inside the lock so
    concurrent callers rebuild the fields at most once, and the key is only
    updated after a successful build so a failed build is not marked fresh.
    """
    cache_key = MapUpdate.current_cache_key()
    if cls.fields_cache_key != cache_key:
        with cls.fields_cache_lock:
            # re-check: another thread may have rebuilt while we waited for the lock
            if cls.fields_cache_key != cache_key:
                cls.fields_cached = cls.build_fields()
                cls.fields_cache_key = cache_key
    return cls.fields_cached
def fetch(self, request, key=None):
    """Return last site/map update keys (plus user data for same-origin requests).

    Cross-origin callers, detected via the Origin header, get CORS headers and
    no user data. A per-origin-kind request counter is kept in the cache.
    """
    cross_origin = request.META.get('HTTP_ORIGIN')
    if cross_origin is not None:
        try:
            # an Origin matching our own host is not actually cross-origin
            if request.META['HTTP_HOST'] == urlparse(cross_origin).hostname:
                cross_origin = None
        except ValueError:
            pass

    counter_key = 'api_updates_fetch_requests%s' % (
        '_cross_origin' if cross_origin is not None else '')
    try:
        cache.incr(counter_key)
    except ValueError:
        # counter key did not exist yet: initialize with this request counted
        # (previously initialized to 0, silently dropping the first request)
        cache.set(counter_key, 1, None)

    from c3nav.site.models import SiteUpdate

    result = {
        'last_site_update': SiteUpdate.last_update(),
        'last_map_update': MapUpdate.current_processed_cache_key(),
    }
    if cross_origin is None:
        result.update({
            'user': get_user_data(request),
        })

    response = Response(result)
    if cross_origin is not None:
        response['Access-Control-Allow-Origin'] = cross_origin
        response['Access-Control-Allow-Credentials'] = 'true'
    set_tile_access_cookie(request, response)
    return response
def fetch(self, request, key=None):
    """Return last site/map update keys (plus user data for same-origin requests),
    adding CORS headers for cross-origin callers."""
    cross_origin = request.META.get('HTTP_ORIGIN')
    if cross_origin is not None:
        try:
            # an Origin matching our own host is not cross-origin
            if request.META['HTTP_HOST'] == urlparse(cross_origin).hostname:
                cross_origin = None
        except ValueError:
            pass

    suffix = '_cross_origin' if cross_origin is not None else ''
    increment_cache_key('api_updates_fetch_requests%s' % suffix)

    from c3nav.site.models import SiteUpdate

    result = {
        'last_site_update': SiteUpdate.last_update(),
        'last_map_update': MapUpdate.current_processed_cache_key(),
    }
    if cross_origin is None:
        result['user'] = get_user_data(request)

    response = Response(result)
    if cross_origin is not None:
        response['Access-Control-Allow-Origin'] = cross_origin
        response['Access-Control-Allow-Credentials'] = 'true'
    set_tile_access_cookie(request, response)
    return response
def wrapped_func(self, request, *args, **kwargs):
    """ETag + response-cache wrapper around the decorated API view.

    Short-circuits with 304 when the client's ETag matches, otherwise tries
    the cache before calling the wrapped view.
    """
    response_format = self.perform_content_negotiation(request)[0].format
    user_part = (':' + str(request.user.pk or 0)) if response_format == 'api' else ''
    etag_source = etag_func(request) if permissions else MapUpdate.current_cache_key()
    raw_etag = '%s%s:%s:%s' % (response_format, user_part, get_language(), etag_source)
    etag = quote_etag(raw_etag)

    response = get_conditional_response(request, etag=etag)
    if response is None:
        path_part = request.path_info[5:].replace('/', '-').strip('-')
        cache_key = 'mapdata:api:' + path_part + ':' + raw_etag
        if cache_parameters is not None:
            # fold the relevant GET parameters into the cache key
            for param, type_ in cache_parameters.items():
                if type_ == bool:
                    value = int(param in request.GET)
                else:
                    value = type_(request.GET.get(param))
                cache_key += ':' + urlsafe_base64_encode(str(value).encode()).decode()
        cached_data = cache.get(cache_key)
        if cached_data is not None:
            response = Response(cached_data)

    if response is None:
        response = func(self, request, *args, **kwargs)
        if cache_parameters is not None and response.status_code == 200:
            cache.set(cache_key, response.data, 300)

    response['ETag'] = etag
    response['Cache-Control'] = 'no-cache'
    return response
def wrapped_func(self, request, *args, **kwargs):
    """ETag + request-cache wrapper around the decorated API view.

    Also folds base-mapdata access into the ETag and suppresses keeping
    original geometries while rendering the response.
    """
    response_format = self.perform_content_negotiation(request)[0].format
    user_part = (':' + str(request.user.pk or 0)) if response_format == 'api' else ''
    etag_source = etag_func(request) if permissions else MapUpdate.current_cache_key()
    raw_etag = '%s%s:%s:%s' % (response_format, user_part, get_language(), etag_source)
    if base_mapdata_check and self.base_mapdata:
        # base-mapdata visibility changes the payload, so it is part of the ETag
        raw_etag += ':%d' % request.user_permissions.can_access_base_mapdata
    etag = quote_etag(raw_etag)

    response = get_conditional_response(request, etag=etag)
    if response is None:
        path_part = request.path_info[5:].replace('/', '-').strip('-')
        cache_key = 'mapdata:api:' + path_part + ':' + raw_etag
        if cache_parameters is not None:
            # fold the relevant GET parameters into the cache key
            for param, type_ in cache_parameters.items():
                if type_ == bool:
                    value = int(param in request.GET)
                else:
                    value = type_(request.GET.get(param))
                cache_key += ':' + urlsafe_base64_encode(str(value).encode()).decode()
        cached_data = request_cache.get(cache_key)
        if cached_data is not None:
            response = Response(cached_data)

    if response is None:
        with GeometryMixin.dont_keep_originals():
            response = func(self, request, *args, **kwargs)
            if cache_parameters is not None and response.status_code == 200:
                request_cache.set(cache_key, response.data, 900)

    if response.status_code == 200:
        response['ETag'] = etag
        response['Cache-Control'] = 'no-cache'
    return response
def process_map_updates(self):
    """Celery task: run MapUpdate.process_updates(), retrying while another run is active."""
    if self.request.called_directly:
        logger.info('Processing map updates by direct command...')
    else:
        logger.info('Processing map updates...')

    from c3nav.mapdata.models import MapUpdate

    try:
        try:
            updates = MapUpdate.process_updates()
        except MapUpdate.ProcessUpdatesAlreadyRunning:
            if self.request.called_directly:
                raise
            logger.info('Processing is already running, retrying in 30 seconds.')
            raise self.retry(countdown=30)
    except MaxRetriesExceededError:
        logger.info('Cannot retry, retries exceeded. Exiting.')
        return

    if updates:
        print()
        logger.info(ungettext_lazy('%d map update processed.',
                                   '%d map updates processed.',
                                   len(updates)) % len(updates))
        logger.info(_('Last processed update: %(date)s (#%(id)d)') % {
            'date': date_format(updates[-1].datetime, 'DATETIME_FORMAT'),
            'id': updates[-1].pk,
        })
def process_map_updates(self):
    """Celery task: process pending map updates, recording the run outcome in the cache."""
    if self.request.called_directly:
        logger.info('Processing map updates by direct command...')
    else:
        logger.info('Processing map updates...')

    from c3nav.mapdata.models import MapUpdate

    try:
        try:
            updates = MapUpdate.process_updates()
        except MapUpdate.ProcessUpdatesAlreadyRunning:
            if self.request.called_directly:
                raise
            logger.info('Processing is already running, retrying in 30 seconds.')
            raise self.retry(countdown=30)
        except Exception:
            # record a failed run (timestamp, success=False) before propagating
            cache.set('mapdata:last_process_updates_run', (int(time.time()), False), None)
            raise
        else:
            cache.set('mapdata:last_process_updates_run', (int(time.time()), True), None)
    except MaxRetriesExceededError:
        logger.info('Cannot retry, retries exceeded. Exiting.')
        return

    if updates:
        print()
        logger.info(ungettext_lazy('%d map update processed.',
                                   '%d map updates processed.',
                                   len(updates)) % len(updates))
        logger.info(_('Last processed update: %(date)s (#%(id)d)') % {
            'date': date_format(updates[-1].datetime, 'DATETIME_FORMAT'),
            'id': updates[-1].pk,
        })
def load(cls):
    """Return cached data for the last processed map update, reloading when stale.

    Uses double-checked locking: the update marker is re-checked inside the
    lock so concurrent callers load at most once, and ``cache_update`` is only
    advanced after ``load_nocache`` succeeds, so a failed load cannot leave
    stale data marked fresh.
    """
    from c3nav.mapdata.models import MapUpdate
    update = MapUpdate.last_processed_update()
    if cls.cache_update != update:
        with cls.cache_lock:
            # re-check: another thread may have reloaded while we waited
            if cls.cache_update != update:
                cls.cached = cls.load_nocache(update)
                cls.cache_update = update
    return cls.cached
def shortest_path(self, restrictions, options):
    """Compute (or fetch from cache) all-pairs shortest-path matrices.

    Returns (distances, predecessors) shaped like self.graph, honoring the
    given access restrictions and routing options. Results are cached as raw
    bytes keyed on the processed map update, the restrictions and the options.

    Fix: removed a leftover debug ``print`` of the result dtypes that wrote to
    stdout on every cache miss.
    """
    options_key = json.dumps(options.data, separators=(',', '='), sort_keys=True)[1:-1]
    cache_key = 'router:shortest_path:%s:%s:%s' % (MapUpdate.current_processed_cache_key(),
                                                   restrictions.cache_key, options_key)
    result = cache.get(cache_key)
    if result:
        distances, predecessors = result
        # cached matrices are stored as raw bytes; restore shape and dtype
        return (np.frombuffer(distances, dtype=np.float64).reshape(self.graph.shape),
                np.frombuffer(predecessors, dtype=np.int32).reshape(self.graph.shape))

    graph = self.graph.copy()

    # speeds of waytypes, if relevant
    if options['mode'] == 'fastest':
        self.waytypes[0].speed = 1
        self.waytypes[0].speed_up = 1
        self.waytypes[0].extra_seconds = 0
        self.waytypes[0].walk = True
        for waytype in self.waytypes:
            speed = float(waytype.speed)
            speed_up = float(waytype.speed_up)
            if waytype.walk:
                speed *= options.walk_factor
                speed_up *= options.walk_factor

            for indices, dir_speed in ((waytype.nonupwards_indices, speed),
                                       (waytype.upwards_indices, speed_up)):
                indices = indices.transpose().tolist()
                values = graph[indices]
                values /= dir_speed
                if waytype.extra_seconds:
                    values += int(waytype.extra_seconds)
                graph[indices] = values

    # avoid waytypes as specified in settings
    for waytype in self.waytypes[1:]:
        value = options.get('waytype_%s' % waytype.pk, 'allow')
        if value in ('avoid', 'avoid_up'):
            graph[waytype.upwards_indices.transpose().tolist()] *= 100000
        if value in ('avoid', 'avoid_down'):
            graph[waytype.nonupwards_indices.transpose().tolist()] *= 100000

    # exclude spaces and edges
    space_nodes = tuple(reduce(operator.or_, (self.spaces[space].nodes
                                              for space in restrictions.spaces), set()))
    graph[space_nodes, :] = np.inf
    graph[:, space_nodes] = np.inf
    if restrictions.additional_nodes:
        graph[tuple(restrictions.additional_nodes), :] = np.inf
        graph[:, tuple(restrictions.additional_nodes)] = np.inf
    graph[restrictions.edges.transpose().tolist()] = np.inf

    # module-level scipy csgraph shortest_path, shadowed by this method's name
    distances, predecessors = shortest_path(graph, directed=True, return_predecessors=True)

    cache.set(cache_key, (distances.astype(np.float64).tobytes(),
                          predecessors.astype(np.int32).tobytes()), 600)
    return distances, predecessors
def shortest_path(self, restrictions, options):
    # Compute (or fetch from cache) the all-pairs shortest-path matrices for the
    # routing graph, honoring the given access restrictions and routing options.
    # Results are cached as raw bytes, keyed on processed map update + restrictions + options.
    options_key = options.serialize_string()
    cache_key = 'router:shortest_path:%s:%s:%s' % (MapUpdate.current_processed_cache_key(),
                                                   restrictions.cache_key, options_key)
    result = cache.get(cache_key)
    if result:
        distances, predecessors = result
        # cached matrices are stored as raw bytes; restore shape and dtype
        return (np.frombuffer(distances, dtype=np.float64).reshape(self.graph.shape),
                np.frombuffer(predecessors, dtype=np.int32).reshape(self.graph.shape))

    graph = self.graph.copy()

    # speeds of waytypes, if relevant
    if options['mode'] == 'fastest':
        self.waytypes[0].speed = 1
        self.waytypes[0].speed_up = 1
        self.waytypes[0].extra_seconds = 0
        self.waytypes[0].walk = True
        for waytype in self.waytypes:
            speed = float(waytype.speed)
            speed_up = float(waytype.speed_up)
            if waytype.walk:
                # walking waytypes scale with the user's walking speed factor
                speed *= options.walk_factor
                speed_up *= options.walk_factor

            for indices, dir_speed in ((waytype.nonupwards_indices, speed),
                                       (waytype.upwards_indices, speed_up)):
                indices = indices.transpose().tolist()
                values = graph[indices]
                values /= dir_speed
                if waytype.extra_seconds:
                    values += int(waytype.extra_seconds)
                graph[indices] = values

    # avoid waytypes as specified in settings
    for waytype in self.waytypes[1:]:
        value = options.get('waytype_%s' % waytype.pk, 'allow')
        if value in ('avoid', 'avoid_up'):
            # heavy penalty instead of np.inf keeps the route possible as a last resort
            graph[waytype.upwards_indices.transpose().tolist()] *= 100000
        if value in ('avoid', 'avoid_down'):
            graph[waytype.nonupwards_indices.transpose().tolist()] *= 100000

    # exclude spaces and edges
    space_nodes = tuple(reduce(operator.or_, (self.spaces[space].nodes
                                              for space in restrictions.spaces), set()))
    graph[space_nodes, :] = np.inf
    graph[:, space_nodes] = np.inf
    if restrictions.additional_nodes:
        graph[tuple(restrictions.additional_nodes), :] = np.inf
        graph[:, tuple(restrictions.additional_nodes)] = np.inf
    graph[restrictions.edges.transpose().tolist()] = np.inf

    # module-level scipy csgraph shortest_path, shadowed by this method's name
    distances, predecessors = shortest_path(graph, directed=True, return_predecessors=True)

    cache.set(cache_key, (distances.astype(np.float64).tobytes(),
                          predecessors.astype(np.int32).tobytes()), 600)
    return distances, predecessors
def open(cls, filename, default_update=None):
    """Open the file, creating it with a single default update when missing.

    When ``default_update`` is None, the last processed map update is used.
    """
    try:
        return super().open(filename)
    except FileNotFoundError:
        if default_update is None:
            from c3nav.mapdata.models import MapUpdate
            default_update = MapUpdate.last_processed_update()
        fresh = cls(updates=[default_update], filename=filename)
        fresh.save()
        return fresh
def handle(self, *args, **options):
    # Import Area or Obstacle geometries from an SVG file into a space.
    # SVG coordinates are mapped onto the map rectangle (minx, miny)-(maxx, maxy);
    # the y axis is flipped because SVG's origin is top-left.
    minx = options['minx']
    miny = options['miny']
    maxx = options['maxx']
    maxy = options['maxy']

    if minx >= maxx:
        raise CommandError(_('minx has to be lower than maxx'))
    if miny >= maxy:
        raise CommandError(_('miny has to be lower than maxy'))

    width = maxx-minx
    height = maxy-miny

    model = {'areas': Area, 'obstacles': Obstacle}[options['type']]

    namespaces = {'svg': 'http://www.w3.org/2000/svg'}

    svg = ElementTree.fromstring(options['svgfile'].read())
    svg_width = float(svg.attrib['width'])
    svg_height = float(svg.attrib['height'])

    # strip clipPath and symbol subtrees so their paths are not imported as geometry
    for element in svg.findall('.//svg:clipPath/..', namespaces):
        for clippath in element.findall('./svg:clipPath', namespaces):
            element.remove(clippath)

    for element in svg.findall('.//svg:symbol/..', namespaces):
        for clippath in element.findall('./svg:symbol', namespaces):
            element.remove(clippath)

    # transforms are not interpreted, so refuse files that still contain them
    if svg.findall('.//*[@transform]'):
        raise CommandError(_('svg contains transform attributes. Use inkscape apply transforms.'))

    # import_tag identifies this import; refuse to import the same tag twice
    if model.objects.filter(space=options['space'], import_tag=options['name']).exists():
        raise CommandError(_('objects with this import tag already exist in this space.'))

    with MapUpdate.lock():
        changed_geometries.reset()
        for path in svg.findall('.//svg:path', namespaces):
            for polygon in self.parse_svg_data(path.attrib['d']):
                if len(polygon) < 3:
                    # fewer than 3 points cannot form a polygon
                    continue
                polygon = Polygon(polygon)
                # flip y (SVG grows downwards), scale into map units, then shift to (minx, miny)
                polygon = scale(polygon, xfact=1, yfact=-1, origin=(0, svg_height/2))
                polygon = scale(polygon, xfact=width / svg_width, yfact=height / svg_height,
                                origin=(0, 0))
                polygon = translate(polygon, xoff=minx, yoff=miny)
                obj = model(geometry=polygon, space=options['space'],
                            import_tag=options['name'])
                obj.save()

        MapUpdate.objects.create(type='importsvg')

    logger = logging.getLogger('c3nav')
    logger.info('Imported, map update created.')
    logger.info('Next step: go into the shell and edit them using '
                '%s.objects.filter(space_id=%r, import_tag=%r)' %
                (model.__name__, options['space'].pk, options['name']))
def max_bounds(cls):
    """Return ((min_x, min_y), (max_x, max_y)) over all objects, cached per map update.

    NOTE(review): with an empty table the aggregates are None and float()
    raises TypeError -- presumably at least one row always exists; confirm.
    """
    cache_key = 'mapdata:max_bounds:%s:%s' % (cls.__name__, MapUpdate.current_cache_key())
    cached = cache.get(cache_key, None)
    if cached is not None:
        return cached
    agg = cls.objects.all().aggregate(models.Min('left'), models.Min('bottom'),
                                      models.Max('right'), models.Max('top'))
    bounds = ((float(agg['left__min']), float(agg['bottom__min'])),
              (float(agg['right__max']), float(agg['top__max'])))
    cache.set(cache_key, bounds, 900)
    return bounds
def open_cached(cls):
    """Return a process-local cached instance, reopened after each processed map update."""
    with cls.cache_lock:
        from c3nav.mapdata.models import MapUpdate
        current_key = MapUpdate.current_processed_cache_key()
        if cls.cache_key != current_key:
            # a new update was processed: invalidate the cached instance
            cls.cache_key = current_key
            cls.cached = None
        if cls.cached is None:
            cls.cached = cls.open()
        return cls.cached
def open_level_cached(cls, level_id, mode):
    """Return open_level(level_id, mode), memoized per processed map update."""
    with cls.cache_lock:
        from c3nav.mapdata.models import MapUpdate
        current_key = MapUpdate.current_processed_cache_key()
        if cls.cache_key != current_key:
            # a new update was processed: drop everything memoized so far
            cls.cache_key = current_key
            cls.cached = {}
        else:
            hit = cls.cached.get((level_id, mode), None)
            if hit is not None:
                return hit

        value = cls.open_level(level_id, mode)
        cls.cached[(level_id, mode)] = value
        return value
def _get_keys_for_model(self, request, model, key):
    """Return the set of `key` values over all instances of `model` visible
    to `request`, cached per access permissions (or per map update when the
    model has no request-based queryset)."""
    if hasattr(model, 'qs_for_request'):
        cache_key = 'mapdata:api:keys:%s:%s:%s' % (
            model.__name__, key, AccessPermission.cache_key_for_request(request))
        queryset = model.qs_for_request(request)
    else:
        cache_key = 'mapdata:api:keys:%s:%s:%s' % (
            model.__name__, key, MapUpdate.current_cache_key())
        queryset = model.objects.all()

    cached = cache.get(cache_key, None)
    if cached is not None:
        return cached

    keys = set(queryset.values_list(key, flat=True))
    cache.set(cache_key, keys, 300)
    return keys
def get_better_space_geometries():
    # change space geometries for better representative points
    cache_key = 'mapdata:better_space_geometries:%s' % MapUpdate.current_cache_key()
    cached = proxied_cache.get(cache_key, None)
    if cached is not None:
        return cached

    geometries = {}
    for space in Space.objects.prefetch_related('columns', 'holes'):
        # subtract columns and holes so the representative point lands in usable area
        cutouts = tuple(obj.geometry for obj in chain(space.columns.all(), space.holes.all()))
        remainder = space.geometry.difference(cascaded_union(cutouts))
        if not remainder.is_empty:
            geometries[space.pk] = remainder

    proxied_cache.set(cache_key, geometries, 1800)
    return geometries
def get(cls, level):
    """Return the render data for a level (instance or pk).

    # get the current render data from local variable if no new processed mapupdate exists.
    # this is much faster than any other possible cache

    Fix: the pickle file is now opened via a ``with`` block so the file handle
    is closed deterministically instead of leaking until garbage collection.
    """
    with cls.cache_lock:
        cache_key = MapUpdate.current_processed_cache_key()
        level_pk = str(level.pk if isinstance(level, Level) else level)
        if cls.cache_key != cache_key:
            # a new update was processed: drop all memoized levels
            cls.cache_key = cache_key
            cls.cached = {}
        else:
            result = cls.cached.get(level_pk, None)
            if result is not None:
                return result

        pk = level.pk if isinstance(level, Level) else level
        with open(cls._level_filename(pk), 'rb') as f:
            result = pickle.load(f)
        cls.cached[level_pk] = result
        return result
def _get_keys_for_model(self, request, model, key):
    """Collect the distinct `key` values of `model` instances visible to the
    request; results are cached (permissions-scoped when available, otherwise
    map-update-scoped)."""
    request_scoped = hasattr(model, 'qs_for_request')
    if request_scoped:
        scope = AccessPermission.cache_key_for_request(request)
        qs = model.qs_for_request(request)
    else:
        scope = MapUpdate.current_cache_key()
        qs = model.objects.all()
    cache_key = 'mapdata:api:keys:%s:%s:%s' % (model.__name__, key, scope)

    result = cache.get(cache_key, None)
    if result is None:
        result = set(qs.values_list(key, flat=True))
        cache.set(cache_key, result, 300)
    return result
def lock_to_edit(self, request=None):
    # Context-manager generator (presumably wrapped with @contextmanager -- decorator
    # not visible here): lock this changeset for editing and yield the editable object.
    # - saved changeset: yield a row-locked fresh instance; record an update entry
    #   afterwards if anything changed.
    # - unsaved + direct editing: edit the map directly under the global map lock,
    #   creating a MapUpdate if objects changed.
    # - otherwise: yield self without locking.
    with transaction.atomic():
        user = request.user if request is not None and request.user.is_authenticated else None
        if self.pk is not None:
            # lock the database row so concurrent edit sessions serialize
            changeset = ChangeSet.objects.select_for_update().get(pk=self.pk)
            self._object_changed = False
            yield changeset
            if self._object_changed:
                update = changeset.updates.create(user=user, objects_changed=True)
                changeset.last_update = update
                changeset.last_change = update
                changeset.save()
        elif self.direct_editing:
            # no changeset row: changes go straight into the map data
            with MapUpdate.lock():
                changed_geometries.reset()
                ChangeSet.objects_changed_count = 0
                yield self
                if ChangeSet.objects_changed_count:
                    MapUpdate.objects.create(user=user, type='direct_edit')
        else:
            yield self
def raw_cache_key_by_changes(self):
    """Cache key varying with map updates and with this changeset's own changes."""
    current = MapUpdate.current_cache_key()
    if self.pk is None:
        return current
    return ':'.join((str(self.pk), current, self.last_change_cache_key))
def apply(self, user):
    # Apply this changeset to the map data under the global map-update lock:
    # clean the recorded changes, create new objects (iterating until
    # cross-references resolve), update and delete existing ones, replay m2m
    # changes, then mark the changeset applied and create a 'changeset' MapUpdate.
    with MapUpdate.lock():
        changed_geometries.reset()
        self._clean_changes()
        changed_objects = self.relevant_changed_objects()

        created_objects = []
        existing_objects = []
        for changed_object in changed_objects:
            (created_objects if changed_object.is_created else existing_objects).append(changed_object)

        objects = self.get_objects(changed_objects=changed_objects)

        # remove slugs on all changed existing objects
        slugs_updated = set(changed_object.obj_pk for changed_object in existing_objects
                            if (issubclass(changed_object.model_class, LocationSlug) and
                                'slug' in changed_object.updated_fields))
        LocationSlug.objects.filter(pk__in=slugs_updated).update(slug=None)

        # drop redirects that this changeset deletes
        redirects_deleted = set(changed_object.obj_pk for changed_object in existing_objects
                                if (issubclass(changed_object.model_class, LocationRedirect) and
                                    changed_object.deleted))
        LocationRedirect.objects.filter(pk__in=redirects_deleted).delete()

        # create created objects
        created_pks = {}
        objects_to_create = set(created_objects)
        while objects_to_create:
            # repeat until a whole pass creates nothing more; objects referencing
            # not-yet-created objects are deferred via ApplyToInstanceError
            created_in_last_run = set()
            for created_object in objects_to_create:
                model = created_object.model_class
                pk = created_object.obj_pk

                # lets try to create this object
                obj = model()
                try:
                    created_object.apply_to_instance(obj, created_pks=created_pks)
                except ApplyToInstanceError:
                    continue
                obj.save()
                created_in_last_run.add(created_object)

                # map the temporary pk to the real database pk
                created_pks.setdefault(model, {})[pk] = obj.pk
                objects.setdefault(model, {})[pk] = obj
            objects_to_create -= created_in_last_run

        # update existing objects
        for existing_object in existing_objects:
            if existing_object.deleted:
                continue
            model = existing_object.model_class
            pk = existing_object.obj_pk
            obj = objects[model][pk]
            existing_object.apply_to_instance(obj, created_pks=created_pks)
            obj.save()

        # delete existing objects
        for existing_object in existing_objects:
            if not existing_object.deleted and not issubclass(existing_object.model_class, LocationRedirect):
                continue
            model = existing_object.model_class
            pk = existing_object.obj_pk
            obj = objects[model][pk]
            obj.delete()

        # update m2m
        for changed_object in changed_objects:
            obj = objects[changed_object.model_class][changed_object.obj_pk]
            for mode, updates in (('remove', changed_object.m2m_removed),
                                  ('add', changed_object.m2m_added)):
                for name, pks in updates.items():
                    field = changed_object.model_class._meta.get_field(name)
                    pks = tuple(objects[field.related_model][pk].pk for pk in pks)
                    getattr(getattr(obj, name), mode)(*pks)

        update = self.updates.create(user=user, state='applied')
        map_update = MapUpdate.objects.create(user=user, type='changeset')
        self.state = 'applied'
        self.last_state_update = update
        self.last_update = update
        self.map_update = map_update
        self.save()
def _clean_changes(self):
    # Reconcile recorded changes with the current map state: drop changes that
    # refer to meanwhile-deleted objects, prune no-op field/m2m updates and
    # resolve duplicate slugs. A no-op when the map has not been updated since
    # the last cleaning, and skipped entirely for direct editing.
    if self.direct_editing:
        return
    with self.lock_to_edit() as changeset:
        last_map_update_pk = MapUpdate.last_update()[0]
        if changeset.last_cleaned_with_id == last_map_update_pk:
            return

        changed_objects = changeset.changed_objects_set.all()

        # delete changed objects that refer in some way to deleted objects and clean up m2m changes
        object_pks = {}
        for changed_object in changed_objects:
            changed_object.add_relevant_object_pks(object_pks)

        to_save = set()

        # pks that exist in the change records but no longer in the database
        deleted_object_pks = {}
        for model, pks in object_pks.items():
            pks = set(pk for pk in pks if not is_created_pk(pk))
            deleted_object_pks[model] = pks - set(model.objects.filter(pk__in=pks).values_list('pk', flat=True))

        repeat = True
        while repeat:
            # removing one change can orphan another, so iterate to a fixpoint
            repeat = False
            for changed_object in changed_objects:
                if changed_object.handle_deleted_object_pks(deleted_object_pks):
                    to_save.add(changed_object)
                if changed_object.pk is None:
                    repeat = True

        # remove deleted objects
        changed_objects = [obj for obj in changed_objects if obj.pk is not None]

        # clean updated fields
        objects = changeset.get_objects(many=False, changed_objects=changed_objects,
                                        prefetch_related=('groups', ))
        for changed_object in changed_objects:
            if changed_object.clean_updated_fields(objects):
                to_save.add(changed_object)

        # clean m2m
        for changed_object in changed_objects:
            if changed_object.clean_m2m(objects):
                to_save.add(changed_object)

        # remove duplicate slugs
        slugs = set()
        for changed_object in changed_objects:
            if issubclass(changed_object.model_class, LocationSlug):
                slug = changed_object.updated_fields.get('slug', None)
                if slug is not None:
                    slugs.add(slug)

        qs = LocationSlug.objects.filter(slug__in=slugs)
        if slugs:
            # NOTE(review): this AND-combines slug__in with slug__startswith filters,
            # which looks like it may have been intended as an OR -- confirm.
            qs = qs.filter(reduce(operator.or_, (Q(slug__startswith=slug+'__') for slug in slugs)))
        existing_slugs = dict(qs.values_list('slug', 'redirect__target_id'))

        slug_length = LocationSlug._meta.get_field('slug').max_length
        for changed_object in changed_objects:
            if issubclass(changed_object.model_class, LocationSlug):
                slug = changed_object.updated_fields.get('slug', None)
                if slug is None:
                    continue
                if slug in existing_slugs:
                    redirect_to = existing_slugs[slug]
                    if issubclass(changed_object.model_class, LocationRedirect) and redirect_to is not None:
                        # redirect to an already-redirected slug: drop the change
                        to_save.discard(changed_object)
                        changed_object.delete()
                        continue
                    # deduplicate by appending a '__<n>' suffix, truncated to max_length
                    new_slug = slug
                    i = 0
                    while new_slug in existing_slugs:
                        suffix = '__'+str(i)
                        new_slug = slug[:slug_length-len(suffix)]+suffix
                        i += 1
                    slug = new_slug
                    changed_object.updated_fields['slug'] = new_slug
                    to_save.add(changed_object)
                existing_slugs[slug] = (None if not issubclass(changed_object.model_class, LocationRedirect)
                                        else changed_object.updated_fields['target'])

        for changed_object in to_save:
            changed_object.save(standalone=True)

        changeset.last_cleaned_with_id = last_map_update_pk
        changeset.save()
def apply(self, user):
    # Apply this changeset to the map data under the global map-update lock:
    # clean the recorded changes, create new objects (iterating until
    # cross-references resolve), update and delete existing ones, replay m2m
    # changes, then mark the changeset applied and create a 'changeset' MapUpdate.
    with MapUpdate.lock():
        changed_geometries.reset()
        self._clean_changes()
        changed_objects = self.relevant_changed_objects()

        created_objects = []
        existing_objects = []
        for changed_object in changed_objects:
            (created_objects if changed_object.is_created else existing_objects).append(changed_object)

        objects = self.get_objects(changed_objects=changed_objects)

        # remove slugs on all changed existing objects
        slugs_updated = set(
            changed_object.obj_pk for changed_object in existing_objects
            if (issubclass(changed_object.model_class, LocationSlug) and
                'slug' in changed_object.updated_fields))
        LocationSlug.objects.filter(pk__in=slugs_updated).update(slug=None)

        # drop redirects that this changeset deletes
        redirects_deleted = set(
            changed_object.obj_pk for changed_object in existing_objects
            if (issubclass(changed_object.model_class, LocationRedirect) and
                changed_object.deleted))
        LocationRedirect.objects.filter(pk__in=redirects_deleted).delete()

        # create created objects
        created_pks = {}
        objects_to_create = set(created_objects)
        while objects_to_create:
            # repeat until a whole pass creates nothing more; objects referencing
            # not-yet-created objects are deferred via ApplyToInstanceError
            created_in_last_run = set()
            for created_object in objects_to_create:
                model = created_object.model_class
                pk = created_object.obj_pk

                # lets try to create this object
                obj = model()
                try:
                    created_object.apply_to_instance(
                        obj, created_pks=created_pks)
                except ApplyToInstanceError:
                    continue
                obj.save()
                created_in_last_run.add(created_object)

                # map the temporary pk to the real database pk
                created_pks.setdefault(model, {})[pk] = obj.pk
                objects.setdefault(model, {})[pk] = obj
            objects_to_create -= created_in_last_run

        # update existing objects
        for existing_object in existing_objects:
            if existing_object.deleted:
                continue
            model = existing_object.model_class
            pk = existing_object.obj_pk
            obj = objects[model][pk]
            existing_object.apply_to_instance(obj, created_pks=created_pks)
            obj.save()

        # delete existing objects
        for existing_object in existing_objects:
            if not existing_object.deleted and not issubclass(
                    existing_object.model_class, LocationRedirect):
                continue
            model = existing_object.model_class
            pk = existing_object.obj_pk
            obj = objects[model][pk]
            obj.delete()

        # update m2m
        for changed_object in changed_objects:
            obj = objects[changed_object.model_class][
                changed_object.obj_pk]
            for mode, updates in (('remove', changed_object.m2m_removed),
                                  ('add', changed_object.m2m_added)):
                for name, pks in updates.items():
                    field = changed_object.model_class._meta.get_field(
                        name)
                    pks = tuple(objects[field.related_model][pk].pk
                                for pk in pks)
                    getattr(getattr(obj, name), mode)(*pks)

        update = self.updates.create(user=user, state='applied')
        map_update = MapUpdate.objects.create(user=user, type='changeset')
        self.state = 'applied'
        self.last_state_update = update
        self.last_update = update
        self.map_update = map_update
        self.save()
def raw_cache_key_without_changes(self):
    """Cache key varying with map updates only (plus the changeset pk when saved)."""
    current = MapUpdate.current_cache_key()
    if self.pk is None:
        return current
    return ':'.join((str(self.pk), current))
def _clean_changes(self):
    # Reconcile recorded changes with the current map state: drop changes that
    # refer to meanwhile-deleted objects, prune no-op field/m2m updates and
    # resolve duplicate slugs. A no-op when the map has not been updated since
    # the last cleaning, and skipped entirely for direct editing.
    if self.direct_editing:
        return
    with self.lock_to_edit() as changeset:
        last_map_update_pk = MapUpdate.last_update()[0]
        if changeset.last_cleaned_with_id == last_map_update_pk:
            return

        changed_objects = changeset.changed_objects_set.all()

        # delete changed objects that refer in some way to deleted objects and clean up m2m changes
        object_pks = {}
        for changed_object in changed_objects:
            changed_object.add_relevant_object_pks(object_pks)

        to_save = set()

        # pks that exist in the change records but no longer in the database
        deleted_object_pks = {}
        for model, pks in object_pks.items():
            pks = set(pk for pk in pks if not is_created_pk(pk))
            deleted_object_pks[model] = pks - set(
                model.objects.filter(pk__in=pks).values_list('pk', flat=True))

        repeat = True
        while repeat:
            # removing one change can orphan another, so iterate to a fixpoint
            repeat = False
            for changed_object in changed_objects:
                if changed_object.handle_deleted_object_pks(
                        deleted_object_pks):
                    to_save.add(changed_object)
                if changed_object.pk is None:
                    repeat = True

        # remove deleted objects
        changed_objects = [
            obj for obj in changed_objects if obj.pk is not None
        ]

        # clean updated fields
        objects = changeset.get_objects(many=False,
                                        changed_objects=changed_objects,
                                        prefetch_related=('groups', ))
        for changed_object in changed_objects:
            if changed_object.clean_updated_fields(objects):
                to_save.add(changed_object)

        # clean m2m
        for changed_object in changed_objects:
            if changed_object.clean_m2m(objects):
                to_save.add(changed_object)

        # remove duplicate slugs
        slugs = set()
        for changed_object in changed_objects:
            if issubclass(changed_object.model_class, LocationSlug):
                slug = changed_object.updated_fields.get('slug', None)
                if slug is not None:
                    slugs.add(slug)

        qs = LocationSlug.objects.filter(slug__in=slugs)
        if slugs:
            # NOTE(review): this AND-combines slug__in with slug__startswith filters,
            # which looks like it may have been intended as an OR -- confirm.
            qs = qs.filter(
                reduce(operator.or_,
                       (Q(slug__startswith=slug + '__') for slug in slugs)))
        existing_slugs = dict(qs.values_list('slug', 'redirect__target_id'))

        slug_length = LocationSlug._meta.get_field('slug').max_length
        for changed_object in changed_objects:
            if issubclass(changed_object.model_class, LocationSlug):
                slug = changed_object.updated_fields.get('slug', None)
                if slug is None:
                    continue
                if slug in existing_slugs:
                    redirect_to = existing_slugs[slug]
                    if issubclass(
                            changed_object.model_class,
                            LocationRedirect) and redirect_to is not None:
                        # redirect to an already-redirected slug: drop the change
                        to_save.discard(changed_object)
                        changed_object.delete()
                        continue
                    # deduplicate by appending a '__<n>' suffix, truncated to max_length
                    new_slug = slug
                    i = 0
                    while new_slug in existing_slugs:
                        suffix = '__' + str(i)
                        new_slug = slug[:slug_length - len(suffix)] + suffix
                        i += 1
                    slug = new_slug
                    changed_object.updated_fields['slug'] = new_slug
                    to_save.add(changed_object)
                existing_slugs[slug] = (
                    None if not issubclass(changed_object.model_class,
                                           LocationRedirect) else
                    changed_object.updated_fields['target'])

        for changed_object in to_save:
            changed_object.save(standalone=True)

        changeset.last_cleaned_with_id = last_map_update_pk
        changeset.save()
def cache_key_for_request(cls, request, with_update=True):
    """Build a cache key from the request's access permission pks,
    optionally prefixed with the current map-update cache key."""
    prefix = (MapUpdate.current_cache_key() + ':') if with_update else ''
    permissions = sorted(AccessPermission.get_for_request(request)) or '0'
    return prefix + '-'.join(str(i) for i in permissions)
os.makedirs(tile_dirname, exist_ok=True) with open(tile_filename, 'wb') as f: f.write(data) with open(last_update_filename, 'w') as f: f.write(base_cache_key) cache.set(tile_cache_update_cache_key, base_cache_key, 60) response = HttpResponse(data, 'image/png') response['ETag'] = tile_etag response['Cache-Control'] = 'no-cache' response['Vary'] = 'Cookie' return response @etag(lambda *args, **kwargs: MapUpdate.current_processed_cache_key()) @no_language() def map_history(request, level, mode, filetype): if not request.user.is_superuser: raise PermissionDenied level = get_object_or_404(Level, pk=level) if mode == 'composite' and level.on_top_of_id is not None: raise Http404 history = MapHistory.open_level(level.pk, mode) if filetype == 'png': response = HttpResponse(content_type='image/png') history.to_image().save(response, format='PNG') elif filetype == 'data': response = HttpResponse(content_type='application/octet-stream')
def build_filename(cls, update):
    """Path of the pickled locator data for the given update tuple."""
    basename = 'locator_%s.pickle' % MapUpdate.build_cache_key(*update)
    return os.path.join(settings.CACHE_ROOT, basename)
def build_filename(cls, update):
    """Path of the pickled locator data for the given update tuple."""
    cache_key = MapUpdate.build_cache_key(*update)
    return os.path.join(settings.CACHE_ROOT, 'locator_%s.pickle' % cache_key)
def _check_mapupdate(self):
    """Drop all memoized items when the map-update cache key has changed."""
    current = MapUpdate.current_cache_key()
    if self._mapupdate != current:
        self._items = OrderedDict()
        self._mapupdate = current
def cache_key_for_request(cls, request, with_update=True):
    """Return a permissions-derived cache key for this request; with_update=True
    additionally prefixes the current map-update cache key."""
    permission_ids = sorted(AccessPermission.get_for_request(request)) or '0'
    key = '-'.join(str(i) for i in permission_ids)
    if with_update:
        key = MapUpdate.current_cache_key() + ':' + key
    return key