def test_find_feeds__non_recursive(self):
    """With the default depth, only the root page's own feeds are found."""
    crawler = Scavenger()
    with self.mocked_requests(crawler):
        found = crawler.find_feeds('http://site-a.com/')
        expected = {
            'http://site-a.com/feed/',
            'http://site-a.com/comments/feed/',
        }
        self.assertEqual(found, expected)
def test_find_feeds__depth_3(self):
    """At depth 3 the crawl reaches feeds on linked pages and other hosts."""
    crawler = Scavenger(max_depth=3)
    with self.mocked_requests(crawler):
        found = crawler.find_feeds('http://site-b.com/')
        expected = {
            'http://site-b.com/hidden_feed/',
            'http://site-b.com/feed_like.xml',
            'http://site-c.com/a_feed.xml',
        }
        self.assertEqual(found, expected)
def test_requests_get(self):
    """_requests_get delegates to requests.get with the UA header and timeout."""
    with mock.patch('scavenger.requests.get', return_value='some good stuff') as patched_get:
        crawler = Scavenger(max_depth=1)
        self.assertEqual(crawler._requests_get('a url'), 'some good stuff')
        patched_get.assert_called_once_with(
            'a url',
            headers={'User-Agent': crawler._user_agent},
            timeout=crawler._request_timeout,
        )
def loadFromFile(self, path, desired_size):
    """Load the image at *path*, creating and caching a scaled preview.

    The preview is stored next to the original as ``<path>.preview`` and
    reused on subsequent loads.

    Args:
        path: filesystem path of the original image file.
        desired_size: target size handed to scale() for the preview.
    """
    # Set properties.
    self._path = path
    self._size = desired_size
    # Create the preview version if necessary.
    preview_path = path + '.preview'
    if os.path.exists(preview_path):
        self._preview = Image.open(preview_path)
    else:
        # Binary mode: image bytes must not go through newline
        # translation (text mode corrupts them on some platforms).
        with open(path, 'rb') as infile:
            original = infile.read()
        preview = scale(original, desired_size)
        # scale() may hand back a remote handle instead of raw bytes;
        # resolve it before writing the cache file.
        if isinstance(preview, RemoteDataHandle):
            preview = Scavenger.fetch_data(preview)
        with open(preview_path, 'wb') as outfile:
            outfile.write(preview)
        self._preview = Image.open(preview_path)
    # Load pixmap version.
    self._previewToPixmap()
def run(self): # Create an in-memory version of the image. sio = StringIO() self.image.save(sio, 'JPEG', quality=95) image_data = sio.getvalue() try: if self.op == 'sharpen': data_handle = imageops.sharpen(image_data) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) elif self.op == 'blur': data_handle = imageops.blur(image_data) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) elif self.op == 'color': data_handle = imageops.color(image_data, self.args) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) elif self.op == 'brightness': data_handle = imageops.brightness(image_data, self.args) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) elif self.op == 'contrast': data_handle = imageops.contrast(image_data, self.args) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) elif self.op == 'invert': data_handle = imageops.invert(image_data) image_data = Scavenger.fetch_data(data_handle) sio = StringIO(image_data) new_image = Image.open(sio) else: raise Exception('Unknown operation "%s".'%self.op) self.callback(True, new_image) except Exception, e: print e self.callback(False, e)
from scavenger import Scavenger
from time import sleep, time

# Demo script: times remote execution of an add service and the
# install + execution of a subtract service on the first known peer.

# Give peer discovery a moment before asking for surrogates.
sleep(1.2)

peers = Scavenger.get_peers()
for peer in peers:
    print peer

if len(peers) != 0:
    # Time a call to the (presumed pre-installed) add service.
    print 'start'
    start = time()
    print Scavenger.perform_service(peers[0], 'daimi.test.add', {'x':1, 'y':1}, timeout=2)
    print 'done - elapsed =', time()-start

    # Install the subtract service if the peer does not have it yet.
    # The service source deliberately burns CPU before subtracting.
    print 'installing service'
    start = time()
    if not Scavenger.has_service(peers[0], 'daimi.test.subtract'):
        Scavenger.install_service(peers[0], 'daimi.test.subtract',"""
def perform(x, y):
    z = 0
    for v in range(0,100000):
        z += v
    return x - y
""")
    print 'done - elapsed =', time()-start

    # Time a call to the freshly installed subtract service.
    print 'start'
    start = time()
    print Scavenger.perform_service(peers[0], 'daimi.test.subtract', {'x':1, 'y':1}, timeout=2)
    print 'done - elapsed =', time()-start
from scavenger import Scavenger, shutdown, scavenge
from time import sleep

# Demo script: exercises both the @scavenge decorator and the manual
# Scavenger.scavenge() API, then shuts the daemon down.

# Sleep for a little while to allow surrogates to be discovered.
print "Sleeping for a little while...",
sleep(1.2)
print "done"

print "Found", len(Scavenger.get_peers()), "surrogates"

# Decorated function: calls may be transparently offloaded to a peer.
# NOTE(review): the two string arguments look like cost/threshold
# parameters for the scheduler — confirm against the decorator's API.
@scavenge('0.00001', '0.00001')
def add(x, y):
    return x + y

print "Scavenging a little..."
print add(1,2)
print add(3,4)
print "done"

# Manual scavenging: first call supplies the service source, the second
# relies on the service already being known under that name.
print 'Doing some manual "scavenging"'
print Scavenger.scavenge('daimi.test.add', [1,2], """
def perform(x,y):
    return x+y
""")
print Scavenger.scavenge('daimi.test.add', [2,3])
print "done"

shutdown()
        # NOTE(review): fragment — the `def __init__(self, daemon, x, y):`
        # header of Worker (a Thread subclass, judging by start()/join()
        # below) lies outside this chunk; these lines are its body.
        super(Worker, self).__init__()
        self._daemon = daemon
        self._x = x
        self._y = y

    def run(self):
        # Offload a CPU-heavy nested-loop computation to a surrogate and
        # print whatever the daemon's scavenge() returns.
        print self._daemon.scavenge('daimi.test.heavy', {'x':self._x, 'y':self._y}, """
def perform(x, y):
    z = 0.0
    for _x in range(0, x):
        for _y in range(0, y):
            z += (_x * _y) / 1000.0
    return z
""")

# Time two concurrent heavy scavenge jobs sharing one Scavenger daemon.
s = Scavenger()
sleep(2)
worker1 = Worker(s, 3000, 30000)
worker2 = Worker(s, 3000, 30000)
start = time()
worker1.start()
worker2.start()
worker1.join()
worker2.join()
end = time()
                # NOTE(review): fragment — the opening `try:` and the earlier
                # operation branches (and the enclosing method header) lie
                # outside this chunk.
                self._image = fliphorizontal(self._image)
            except Exception, e:
                logger.error("Error performing horizontal flip.", exc_info=True)
                self._success = False
        elif self._operation == 'flipvertical':
            try:
                self._image = flipvertical(self._image)
            except Exception, e:
                logger.error("Error performing vertical flip.", exc_info=True)
                self._success = False
        # Fetch result data if necessary: an operation may hand back a
        # RemoteDataHandle instead of the actual image bytes.
        if type(self._image) == RemoteDataHandle:
            try:
                self._image = Scavenger.fetch_data(self._image)
            except Exception, e:
                logger.error('Error fetching remote data handle.', exc_info=True)
                self._success = False

    # Old-style property built from a locals() dict of accessors
    # (the #@NoSelf markers silence IDE warnings about the missing self).
    def image(): #@NoSelf
        def fget(self):
            return self._image
        def fset(self, value):
            self._image = value
        def fdel(self):
            del self._image
        return locals()
    image = property(**image())

    # NOTE(review): fragment — the body of this property factory continues
    # past the end of this chunk.
    def operation(): #@NoSelf
        # NOTE(review): fragment — the `if`/earlier `elif` branches and the
        # enclosing method header lie outside this chunk.
        elif self.mode == 'config':
            self.config_window.show()

    def set_mode(self, mode):
        # Swap the active window, carrying the fullscreen state over from
        # the old window before hiding it.
        old_window = self.__get_active_window()
        self.fullscreen = old_window.fullscreen
        self.__set_active_window(mode)
        old_window.hide()

    def cb_osso (self, interface, method, arguments, user_data):
        # OSSO RPC callback; currently just logs that a call arrived.
        print 'received an OSSO RPC.'

    def run (self):
        # Show the initial window and enter the GTK main loop (blocks).
        self.image_browser.show()
        gtk.main()

if __name__ == '__main__':
    # Check that the image path and the settings dirs exist.
    if not exists(Defines.IMAGE_PATH):
        mkdir(Defines.IMAGE_PATH)
    if not exists(Defines.CONFIG_DIR):
        mkdir(Defines.CONFIG_DIR)
    # Start the application.
    gtk.gdk.threads_init()
    app = AugIm_UI()
    app.run()
    # Main loop has exited: stop the Scavenger daemon before exiting.
    Scavenger.shutdown()
    sys.exit(0)
from scavenger import Scavenger
from time import sleep

# Demo script: exercises Scavenger.scavenge() with dict and tuple/list
# argument forms, plus a local fallback callable.

# Give peer discovery a moment before scavenging.
sleep(2)

def subtract(x, y):
    # Local fallback, presumably invoked when no surrogate executes the
    # service — it announces itself so runs are distinguishable.
    print 'local execution'
    return x - y

# Keyword-style arguments plus service source plus local fallback.
print Scavenger.scavenge('daimi.test.subtract', {'x':1, 'y':3}, """
def perform(x, y):
    return x - y
""", subtract)

# Positional arguments as a tuple, with service source but no fallback.
print Scavenger.scavenge('daimi.test.add', (1, 43), """
def perform(x, y):
    return x + y
""")

# Positional arguments as a list; the service must already be known.
print Scavenger.scavenge('daimi.test.add', [1, 43])

Scavenger.shutdown()
def test_find_feeds__fixed_urls(self):
    """A scheme-less URL is fixed up and the crawl still finds feeds."""
    crawler = Scavenger(max_depth=3)
    with self.mocked_requests(crawler):
        found = crawler.find_feeds('site-b.com')
        self.assertTrue(len(found) > 0)
def test_find_feeds__depth_1(self):
    """At depth 1 none of site-b's deeper feeds are reachable."""
    crawler = Scavenger(max_depth=1)
    with self.mocked_requests(crawler):
        found = crawler.find_feeds('http://site-b.com/')
        self.assertEqual(found, set())
def test_find_xmlrpc__empty(self):
    """An unresolvable site yields no XML-RPC endpoints."""
    crawler = Scavenger()
    with self.mocked_requests(crawler):
        found = crawler.find_xmlrpc('http://does-not-exist.com/')
        self.assertEqual(found, set())
def test_find_xmlrpc(self):
    """The RSD link on site-a is discovered as its XML-RPC endpoint."""
    crawler = Scavenger()
    with self.mocked_requests(crawler):
        found = crawler.find_xmlrpc('http://site-a.com/')
        self.assertEqual(found, {'http://site-a.com/xmlrpc.php?rsd'})
# Check whether a number of test iterations is given. iterations = 1 if '-i' in sys.argv: iterations = int(sys.argv[sys.argv.index('-i') + 1]) # Check whether a scheduler has been specified. scheduler = 'basic' if '-s' in sys.argv: scheduler = sys.argv[sys.argv.index('-s') + 1] # Sleep for a little while to make sure that we discover # the available surrogates. sleep(2.0) # Place the input data at the chosen peer. peers = Scavenger.get_peers() found_him = False for peer in peers: if peer.name == spiked_peer: found_him = True if not Scavenger.has_service(peer, 'std.rdh.store'): Scavenger.install_service(peer, 'std.rdh.store', """ def perform(image): return image """) data_handle = Scavenger.perform_service(peer, 'std.rdh.store', {'image':image}, store=True) data_handle.retain = True Scavenger.retain_data(data_handle) print 'just got spiked!' if not found_him: raise Exception('Unable to find the peer')