def test_model(self):
    """Model-method locks: decorated methods hold a per-instance lock while running.

    NOTE(review): `custom_lock_name` is a free name here — presumably a
    module-level constant; confirm it is defined elsewhere in this file.
    """
    a = Foo.objects.create(bar="a")
    b = Foo.objects.create(bar="b")
    c = Another.objects.create()
    # Decoration must preserve the wrapped methods' metadata.
    self.assertEqual(a.lock_id_no_arg.__name__, "lock_id_no_arg")
    self.assertEqual(a.lock_id.__name__, "lock_id")
    self.assertEqual(a.lock_bar.__name__, "lock_bar")

    def callback():
        # Runs while a's pk lock is held: only a's lock is taken.
        self.assertTrue(lock(a.pk_lockname).locked)
        self.assertFalse(lock(b.pk_lockname).locked)
        # Re-entering a's locked method raises.
        self.assertRaises(Locked, a.lock_id)
        # A different model instance is unaffected and can lock itself.
        self.assertFalse(lock(c.pk_lockname).locked)
        c.lock_id()
        # Custom-named lock for the same instance is independent.
        another_lock_name = a.pk_lockname.replace(a.lock_name, custom_lock_name)
        self.assertFalse(lock(another_lock_name).locked)
        a.another_lockid(
            lambda: self.assertTrue(lock(another_lock_name).locked))

    a.lock_id_no_arg(callback)
    # Lock is released once the decorated call returns.
    self.assertFalse(lock(a.pk_lockname).locked)
    a.lock_id(callback)
    self.assertFalse(lock(a.pk_lockname).locked)

    def callback():
        # Same contract for the field-based ("bar") lock.
        self.assertTrue(lock(a.bar_lockname).locked)
        self.assertFalse(lock(b.bar_lockname).locked)
        self.assertRaises(Locked, a.lock_bar)

    a.lock_bar(callback)
    self.assertFalse(lock(a.bar_lockname).locked)
def callback():
    # NOTE(review): this appears to be a duplicated fragment of the first
    # inner callback of test_model; `self`, `a`, `b`, `c` and
    # `custom_lock_name` are all free names at this level — confirm this
    # top-level copy is intentional and not a paste artifact.
    self.assertTrue(lock(a.pk_lockname).locked)
    self.assertFalse(lock(b.pk_lockname).locked)
    self.assertRaises(Locked, a.lock_id)
    self.assertFalse(lock(c.pk_lockname).locked)
    c.lock_id()
    another_lock_name = a.pk_lockname.replace(a.lock_name, custom_lock_name)
    self.assertFalse(lock(another_lock_name).locked)
    a.another_lockid(
        lambda: self.assertTrue(lock(another_lock_name).locked))
def test_release(self):
    """Releasing a lock one does not own warns instead of freeing it."""
    self.assertTrue(self.lock.acquire())
    foreign_lock = lock(self.lock_name)
    # NOTE: redis on python2.7 sometimes does not raise LockWarning here.
    self.assertWarns(LockWarning, foreign_lock.release)
    # The foreign release must not actually have freed the lock.
    self.assertTrue(self.lock.locked)
    self.lock.release()
    self.assertFalse(self.lock.locked)
def test_sleep(self):
    """Blocking acquire polls `_acquire` once per `sleep` interval."""
    count = 3
    sleep = DEFAULT_SETTINGS["SLEEP"]
    lock_a = lock(self.lock_name, sleep=sleep)
    with lock_a:
        with mock.patch.object(Lock, "_acquire"):
            # Force every low-level attempt to fail.
            Lock._acquire.return_value = False
            # Block a bit over `count` poll intervals.
            block = sleep * count + sleep / 2
            self.assertFalse(lock_a.acquire(block))
            # Presumably: one immediate attempt, `count` polls, plus a
            # final attempt when the block time expires — TODO confirm
            # against Lock.acquire's implementation.
            self.assertEqual(Lock._acquire.call_count, count + 2)
        with mock.patch.object(Lock, "_acquire"):
            # Same expectation with a custom, shorter sleep.
            sleep = 0.05
            Lock._acquire.return_value = False
            lock_b = lock(self.lock_name, sleep=sleep)
            block = sleep * count + sleep / 2
            self.assertFalse(lock_b.acquire(block))
            self.assertEqual(Lock._acquire.call_count, count + 2)
def wrap(self, *args, **kwargs):
    # Memoizing method wrapper: serializes concurrent callers on a per-key
    # lock so the wrapped method runs at most once per cache window.
    # NOTE(review): `key_prefix`, `method`, `lock_timeout` and `ttl` are
    # closed over from an enclosing decorator scope not visible here.
    key = build_cache_key(key_prefix, method, args, kwargs)
    with lock(key, timeout=lock_timeout):
        value = cache.get(key)
        if value is not None:
            # Cache hit: stored values are pickled bytes.
            # SECURITY NOTE: pickle.loads on cache contents is only safe
            # while the cache backend is fully trusted.
            value = pickle.loads(value)
        else:
            value = method(self, *args, **kwargs)
            cache.set(key, pickle.dumps(value), timeout=ttl)
    return value
def test_decorator(self):
    """A lock instance can decorate a callable, guarding each invocation."""
    guard = lock(self.lock_name, blocking=False)

    @guard
    def resource():
        # While the body runs the lock is held, so a reentrant call
        # must fail immediately (blocking=False).
        self.assertTrue(guard.locked)
        self.assertRaises(Locked, resource)

    # The decorator must preserve the wrapped function's metadata.
    self.assertEqual(resource.__name__, "resource")
    resource()
    self.assertFalse(guard.locked)
def test_acquire(self):
    """Non-blocking acquire succeeds once, then fails until released."""
    self.assertTrue(self.lock.acquire(False))
    # A second non-blocking acquire on a held lock fails...
    self.assertFalse(self.lock.acquire(False))
    # ...and acquire_raise raises instead of returning False.
    self.assertRaises(Locked, self.lock.acquire_raise, blocking=False)
    # A lock with a different name is independent.
    other = lock("lock_a")
    self.assertTrue(other.acquire(False))
    self.assertFalse(other.acquire(False))
    self.lock.release()
    other.release()
    # Once released, the lock can be taken again.
    self.assertTrue(self.lock.acquire(False))
def test_timeout_memcached(self):
    """A lock created with `timeout` expires on its own."""
    timeout = 2
    lock_a = lock(self.lock_name, timeout=timeout)
    started = time.time()
    self.assertTrue(lock_a.acquire(False))
    # Blocks until lock_a's timeout releases the key.
    self.assertTrue(self.lock.acquire())
    diff = time.time() - started
    # One second of slack below — presumably because memcached expiry
    # has whole-second granularity (TODO confirm).
    self.assertGreaterEqual(diff, timeout - 1)
    self.assertLessEqual(diff, timeout + self.lock.sleep)
    self.lock.release()
    # The expired lock is no longer held; releasing it only warns.
    self.assertFalse(lock_a.locked)
    self.assertWarns(LockWarning, lock_a.release)
def test_dynamicname(self):
    """A callable lock name is built from the decorated call's arguments."""
    name = "1.2"
    probe = lock(name)

    def name_generator(*args, **kwargs):
        return ".".join(args)

    @lock(name_generator, blocking=False)
    def resource(*args):
        # The generated name equals `probe`'s, so it is held here and a
        # reentrant call with the same arguments must raise.
        self.assertTrue(probe.locked)
        self.assertRaises(Locked, resource, *name.split("."))

    self.assertEqual(resource.__name__, "resource")
    resource(*name.split("."))
    self.assertFalse(probe.locked)
def test_block(self):
    """`blocking` may be a number of seconds to wait before giving up."""
    block = 0.2
    lock_a = lock(self.lock_name, blocking=block)
    with lock_a:
        started = time.time()
        # Default blocking time comes from the constructor argument.
        self.assertFalse(lock_a.acquire())
        diff = time.time() - started
        self.assertGreaterEqual(diff, block)
        self.assertLess(diff, block + lock_a.sleep)
        # An explicit per-call blocking value overrides the default.
        block = 0.1
        self.assertFalse(lock_a.acquire(block))
        # Elapsed time of the second acquire only.
        diff = time.time() - started - diff
        self.assertGreaterEqual(diff, block)
        self.assertLess(diff, block + lock_a.sleep)
def test_thread(self):
    """Thread-local locks cannot be released from another thread."""
    unsafe_lock = lock("lock_b", thread_local=False)

    def another_thread():
        # A thread-local lock refuses a foreign-thread release (warns)...
        self.assertWarns(LockWarning, self.lock.release)
        self.assertTrue(self.lock.locked)
        # ...while a non-thread-local lock can be released from anywhere.
        unsafe_lock.release()
        self.assertFalse(unsafe_lock.locked)

    self.assertTrue(self.lock.acquire())
    self.assertTrue(unsafe_lock.acquire())
    t = threading.Thread(target=another_thread)
    t.start()
    t.join()
    # Only the non-thread-local lock was actually released.
    self.assertTrue(self.lock.locked)
    self.assertFalse(unsafe_lock.locked)
def test_model_context(self):
    """lock_model works as a context manager, on the pk or a named field."""
    foo_a = Foo.objects.create(bar="a")
    foo_b = Foo.objects.create(bar="b")

    # Lock keyed on the primary key.
    with lock_model(foo_a, blocking=False):
        self.assertTrue(lock(foo_a.pk_lockname).locked)
        self.assertFalse(lock(foo_b.pk_lockname).locked)
        self.assertFalse(lock_model(foo_a, blocking=False).acquire())
        self.assertRaises(Locked, foo_a.lock_id)
    self.assertFalse(lock(foo_a.pk_lockname).locked)

    # Lock keyed on the "bar" field.
    with lock_model(foo_a, "bar", blocking=False):
        self.assertTrue(lock(foo_a.bar_lockname).locked)
        self.assertFalse(lock(foo_b.bar_lockname).locked)
        self.assertFalse(lock_model(foo_a, "bar", blocking=False).acquire())
        self.assertRaises(Locked, foo_a.lock_bar)
    self.assertFalse(lock(foo_a.bar_lockname).locked)
def setUp(self):
    """Create the lock under test and make sure its key is not held."""
    fresh = lock(self.lock_name)
    cache.delete(fresh.key)
    self.lock = fresh
def callback():
    # NOTE(review): this appears to be a duplicated fragment of the second
    # inner callback of test_model; `self`, `a` and `b` are free names at
    # this level — confirm this top-level copy is intentional.
    self.assertTrue(lock(a.bar_lockname).locked)
    self.assertFalse(lock(b.bar_lockname).locked)
    self.assertRaises(Locked, a.lock_bar)
def test_owned(self):
    """`owned` is True only on the instance that acquired the lock."""
    same_name = lock(self.lock_name)
    self.assertTrue(self.lock.acquire())
    self.assertTrue(self.lock.owned)
    # A second instance with the same name sees it held, but not owned.
    self.assertFalse(same_name.owned)
def get_chunk(request):
    """Serve a serialized OSM map chunk at GET params lat/lon (centi-degrees).

    Returns the cached Chunk if present; otherwise fetches nodes and ways
    from the Overpass API, packs them into a compact binary format, stores
    the Chunk, and returns the bytes (or custom-serialized data when
    settings.CUSTOM_CHUNK_SERIALIZATION_FUNCTION is configured).

    Responses: 200 with application/octet-stream, 400 on bad parameters,
    503 while the Overpass API is unavailable or rate-limited.
    """

    def get_str_id(strcache, s):
        # Return the index of `s` in the cache, appending it on first use.
        try:
            return strcache.index(s)
        except ValueError:
            strcache.append(s)
            return len(strcache) - 1

    BOUNDINGBOX_EXTRA = Decimal('0.008')

    func_path = getattr(settings, 'CUSTOM_CHUNK_SERIALIZATION_FUNCTION', None)

    # BUG FIX: the original called int() on the raw GET values before the
    # None check, so a missing parameter raised TypeError and a non-numeric
    # one raised ValueError (HTTP 500) instead of the intended 400.
    lat = request.GET.get('lat')
    lon = request.GET.get('lon')
    if lat is None or lon is None:
        return HttpResponseBadRequest('Missing GET argument "lat" or "lon"!')
    try:
        lat = int(lat)
        lon = int(lon)
    except ValueError:
        return HttpResponseBadRequest('"lat" and "lon" must be integers!')

    # Validate and clean latitude and longitude. Latitude
    # must be [-90 – 90) and longitude [-180 – 180).
    if lat < -9000 or lat >= 9000:
        return HttpResponseBadRequest('Invalid latitude!')
    lon = (lon + 18000) % 36000 - 18000
    if lat % CHUNK_WIDTH_MULTIPLIER != 0 or lon % CHUNK_WIDTH_MULTIPLIER != 0:
        return HttpResponseBadRequest('"lat" or "lon" must be multiples of {}!'.format(CHUNK_WIDTH_MULTIPLIER))
    lat_d = Decimal(lat) / Decimal(100)
    lon_d = Decimal(lon) / Decimal(100)

    # Per-chunk lock: prevents two requests from building the same chunk.
    with lock('get_chunk_{}_{}'.format(lat, lon)):
        chunk = Chunk.objects.filter(lat=lat, lon=lon).first()
        if chunk:
            # If custom data should be returned instead
            if func_path:
                return HttpResponse(chunk.custom_data, content_type='application/octet-stream')
            return HttpResponse(chunk.data, content_type='application/octet-stream')

        # Check if we should give some rest to Overpass API
        if cache.get('dont_call_overpass_api'):
            return HttpResponse(b'', content_type='application/octet-stream', status=503)

        # Make sure Overpass API has some free slots
        with lock('check_overpass_api_slots'):
            response = requests.get('http://overpass-api.de/api/status')
            if response.status_code < 200 or response.status_code > 299:
                # There was some error, so do not try again for a while
                cache.set('dont_call_overpass_api', 'x', 5)
                return HttpResponse(b'', content_type='application/octet-stream', status=503)
            if 'slots available now' not in response.content.decode('utf8'):
                # There are no free slots, so wait for a while
                cache.set('dont_call_overpass_api', 'x', 1)
                return HttpResponse(b'', content_type='application/octet-stream', status=503)

        # Chunk does not exist, so it must be loaded.
        # Pad the bounding box slightly so edge features are included.
        bb_lat_min = lat_d - BOUNDINGBOX_EXTRA
        bb_lat_max = lat_d + Decimal('0.01') * CHUNK_WIDTH_MULTIPLIER + BOUNDINGBOX_EXTRA
        bb_lon_min = lon_d - BOUNDINGBOX_EXTRA
        bb_lon_max = lon_d + Decimal('0.01') * CHUNK_WIDTH_MULTIPLIER + BOUNDINGBOX_EXTRA

        # Fetch nodes
        query = """
        <query type="node">
            <bbox-query s="{bb_lat_min}" n="{bb_lat_max}" w="{bb_lon_min}" e="{bb_lon_max}"/>
        </query>
        <print/>
        """.format(
            bb_lat_min=bb_lat_min,
            bb_lat_max=bb_lat_max,
            bb_lon_min=bb_lon_min,
            bb_lon_max=bb_lon_max,
        )
        try:
            response = requests.post('http://overpass-api.de/api/interpreter', data=query)
        except requests.exceptions.ConnectionError:
            # Allow Overpass API to have some rest
            cache.set('dont_call_overpass_api', 'x', 20)
            return HttpResponse(b'', content_type='application/octet-stream', status=503)
        nodes_xml = ETree.fromstring(response.text)

        # Fetch ways
        query = """
        <query type="way">
            <bbox-query s="{bb_lat_min}" n="{bb_lat_max}" w="{bb_lon_min}" e="{bb_lon_max}"/>
        </query>
        <print/>
        """.format(
            bb_lat_min=bb_lat_min,
            bb_lat_max=bb_lat_max,
            bb_lon_min=bb_lon_min,
            bb_lon_max=bb_lon_max,
        )
        # CONSISTENCY FIX: guard this request like the nodes request above;
        # the original let ConnectionError propagate as a 500 here.
        try:
            response = requests.post('http://overpass-api.de/api/interpreter', data=query)
        except requests.exceptions.ConnectionError:
            cache.set('dont_call_overpass_api', 'x', 20)
            return HttpResponse(b'', content_type='application/octet-stream', status=503)
        ways_xml = ETree.fromstring(response.text)

        # Prepare string cache. This is used to reduce space
        # waste of strings that occur multiple times.
        strcache = []

        # Convert nodes to bytes
        nodes_bytes = b''
        nodes_xml = list(nodes_xml.findall('node'))
        nodes_bytes += struct.pack('>I', len(nodes_xml))
        for node_xml in nodes_xml:
            # ID
            nodes_bytes += struct.pack('>Q', int(node_xml.get('id')))
            # Latitude and longitude
            nodes_bytes += struct.pack('>ii', lat_to_int(node_xml.get('lat')), lon_to_int(node_xml.get('lon')))
            # Tags
            tags_xml = list(node_xml.findall('tag'))
            nodes_bytes += struct.pack('>H', len(tags_xml))
            for tag_xml in tags_xml:
                key = get_str_id(strcache, tag_xml.get('k'))
                value = get_str_id(strcache, tag_xml.get('v'))
                nodes_bytes += struct.pack('>HH', key, value)

        # Convert ways to bytes
        ways_bytes = b''
        ways_xml = list(ways_xml.findall('way'))
        ways_bytes += struct.pack('>I', len(ways_xml))
        for way_xml in ways_xml:
            # ID
            ways_bytes += struct.pack('>Q', int(way_xml.get('id')))
            # Nodes
            way_nodes_xml = way_xml.findall('nd')
            ways_bytes += struct.pack('>H', len(way_nodes_xml))
            for way_node_xml in way_nodes_xml:
                ways_bytes += struct.pack('>Q', int(way_node_xml.get('ref')))
            # Tags
            tags_xml = list(way_xml.findall('tag'))
            ways_bytes += struct.pack('>H', len(tags_xml))
            for tag_xml in tags_xml:
                key = get_str_id(strcache, tag_xml.get('k'))
                value = get_str_id(strcache, tag_xml.get('v'))
                ways_bytes += struct.pack('>HH', key, value)

        # Form the final bytes
        data = b''
        # Version
        data += struct.pack('>H', 0)
        # String cache
        data += struct.pack('>H', len(strcache))
        for s in strcache:
            s = s.encode('utf8')
            data += struct.pack('>H', len(s))
            data += s
        # Nodes and ways
        data += nodes_bytes
        data += ways_bytes

        # If custom data should be built
        custom_data = None
        if func_path:
            mod_name, func_name = func_path.rsplit('.', 1)
            mod = importlib.import_module(mod_name)
            func = getattr(mod, func_name)
            custom_data = func(bytes_to_rich_data(data))

        Chunk.objects.create(
            lat=lat,
            lon=lon,
            data=data,
            custom_data=custom_data,
        )

        if func_path:
            return HttpResponse(custom_data, content_type='application/octet-stream')
        return HttpResponse(data, content_type='application/octet-stream')