def testBlobbableBinaryFile(self):
    _file = os.path.join(os.path.dirname(__file__), 'data', 'image.gif')
    with open(_file, 'rb') as f:
        obj = Binary(f)
    obj.filename = 'image.gif'
    blobbable = IBlobbable(obj)
    target = Blob()
    blobbable.feed(target)
    self.assertEqual(target.open('r').read(), getFile('image.gif').read())
    self.assertEquals(blobbable.filename(), 'image.gif')
    self.assertEquals(blobbable.mimetype(), 'image/gif')

def put(self, argv):
    datagram = pickle.loads(argv.data)
    port = datagram['port']
    path = datagram['path']
    ID = datagram['blks']
    version = datagram['version']
    value = datagram['data']
    length = str(len(value))
    if path not in self.data[port]:
        # initialize a dict for a new file
        self.data[port][path] = {}
    if ID not in self.data[port][path]:
        # initialize a dict for a new block
        self.data[port][path][ID] = {}
    self.data[port][path][ID][version] = value + self.checksum(value)
    return Binary(length)

def get(self, argv):
    rv = False
    datagram = pickle.loads(argv.data)
    port = datagram['port']
    path = datagram['path']
    ID = datagram['blks']
    version = datagram['version']
    # Check whether the specified block exists at the given path on the given
    # server, and make sure the requested version is present.
    if port in self.data and path in self.data[port] and \
            ID in self.data[port][path] and \
            version in self.data[port][path][ID]:
        rv = Binary(self.data[port][path][ID][version])
    return rv

def r1get(self, key):
    s = shelve.open(f)  # `f` is the shelve file name defined at module level
    self.r1data = s['r1data']
    # Default return value
    rv = Binary(pickle.dumps(None))
    # If the key is in the data structure, return properly formatted results
    key = key.data
    print(key)
    if key in self.r1data:
        rv = Binary(pickle.dumps(self.r1data[key]))
    s.close()
    return rv

def test_newMediaObject(self):
    # Create a new Image using the RPC method.
    path = os.path.join(os.path.dirname(__file__),
                        'static', 'xmlrpc_wp', 'TODO.gif')
    bits = Binary(open(path, 'rb').read())
    overrides = {
        'data': {
            'name': path,
            'type': 'image/gif',
            'bits': bits,
        }
    }
    result = self.get_result(MetaWeblog.newMediaObject, **overrides)
    self.assertTrue(result)

def __init__(self, portlist):
    self.data = {}
    self.next_check = datetime.now() + timedelta(minutes=5)
    print "WELCOME"
    portlist = portlist[1:]
    url = "http://127.0.0.1:"
    server_list = []
    for each in portlist:
        server_list.append(xmlrpclib.Server(url + each))
    keylist = []
    print "server_list", server_list
    available_servers = []
    for server in server_list:
        try:
            res = server.get(Binary("keys"))
            keylist = res
            available_servers.append(server)
        except:
            print "iterate to next server"
    print "LIST OF KEYS", keylist
    for key in keylist:
        available_values = []
        print "key is: ", key
        for server in available_servers:
            res = server.get(Binary(key))
            value = pickle.loads(res["value"].data)
            available_values.append(value)
        for each in available_values:
            count = available_values.count(each)
            if count >= 2:
                value = each
                break
        print self.put(Binary(key), Binary(pickle.dumps(value)), 6000)

def test_round_trip():
    """
    Test `ipalib.rpc.xml_wrap` and `ipalib.rpc.xml_unwrap`.

    This tests the two functions together with ``xmlrpclib.dumps()`` and
    ``xmlrpclib.loads()`` in a full wrap/dumps/loads/unwrap round trip.
    """
    # We first test that our assumptions about the xmlrpclib module in the
    # Python standard library are correct:
    assert_equal(dump_n_load(utf8_bytes), unicode_str)
    assert_equal(dump_n_load(unicode_str), unicode_str)
    assert_equal(dump_n_load(Binary(binary_bytes)).data, binary_bytes)
    assert isinstance(dump_n_load(Binary(binary_bytes)), Binary)
    assert type(dump_n_load('hello')) is str
    assert type(dump_n_load(u'hello')) is str
    assert_equal(dump_n_load(''), '')
    assert_equal(dump_n_load(u''), '')
    assert dump_n_load(None) is None

    # Now we test our wrap and unwrap methods in combination with dumps, loads:
    # All str should come back str (because they get wrapped in
    # xmlrpclib.Binary()).  All unicode should come back unicode because str
    # explicitly gets decoded by rpc.xml_unwrap() if it wasn't already decoded
    # by xmlrpclib.loads().
    assert_equal(round_trip(utf8_bytes), utf8_bytes)
    assert_equal(round_trip(unicode_str), unicode_str)
    assert_equal(round_trip(binary_bytes), binary_bytes)
    assert type(round_trip('hello')) is str
    assert type(round_trip(u'hello')) is unicode
    assert_equal(round_trip(''), '')
    assert_equal(round_trip(u''), u'')
    assert round_trip(None) is None
    compound = [
        utf8_bytes, None, binary_bytes, (None, unicode_str),
        dict(utf8=utf8_bytes, chars=unicode_str, data=binary_bytes)
    ]
    assert round_trip(compound) == tuple(compound)

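The test above relies on two helpers, dump_n_load and round_trip, that are not shown in this snippet. A minimal sketch of what they might look like, assuming round_trip simply composes ipalib.rpc.xml_wrap/xml_unwrap around the same dumps/loads pipe (the exact signatures of xml_wrap/xml_unwrap are an assumption here, not the project's actual helpers):

# Hypothetical helpers assumed by test_round_trip() above.
from xmlrpclib import dumps, loads
from ipalib.rpc import xml_wrap, xml_unwrap  # single-argument forms assumed

def dump_n_load(value):
    # Marshal a single value through xmlrpclib and return what comes back out.
    (params, _method) = loads(dumps((value,), allow_none=True))
    return params[0]

def round_trip(value):
    # Full wrap -> dumps -> loads -> unwrap cycle described in the docstring.
    return xml_unwrap(dump_n_load(xml_wrap(value)))
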
def copy_file_to_machine(self, local_path, remote_path=None, netns=None):
    remote_path = self._rpc_call_x(netns, "start_copy_to", remote_path)
    with open(local_path, "rb") as f:
        while True:
            data = f.read(1024 * 1024)  # 1MB buffer
            if len(data) == 0:
                break
            self._rpc_call_x(netns, "copy_part_to", remote_path, Binary(data))
    self._rpc_call_x(netns, "finish_copy_to", remote_path)
    return remote_path

def get_data_server(self, path):
    serv_data = {}
    i = 0
    majority = 0
    # To check for the most common data.
    if self.Qr % 2 == 0:
        majority = (self.Qr / 2) + 1
    else:
        majority = (self.Qr + 1) / 2
    # while len(serv_data) < (self.Qr):
    for i in range(self.Qw):
        try:
            serv_data[i] = pickle.loads(
                self.data_server[i].get(Binary(path))["value"].data)
        except Exception, E:
            print E
            print ('Data Server %i is not reachable.' % (i + 1))

def GetUpdateApp(self, version):
    global _apk_size, _apk_time, _apk_version
    try:
        apk_state = lstat(APK_FILENAME)
        if apk_state.st_ctime != _apk_time or apk_state.st_size != _apk_size:
            _apk_time = apk_state.st_ctime
            _apk_size = apk_state.st_size
            _apk_version = GetApkVersion()
        if DEBUG_LEVEL > 0:
            Log("GetUpdateApp:\t Client version=%d, Current version: %d"
                % (version, _apk_version))
        if version < _apk_version:
            if DEBUG_LEVEL > 1:
                Log("GetUpdateApp:\tSend update APK v: %d" % _apk_version)
            with open(APK_FILENAME, "rb") as handle:
                return Binary(handle.read())
        else:
            if DEBUG_LEVEL > 1:
                Log("GetUpdateApp:\tUpdate not required")
            return Binary('0')
    except Exception as e:
        Log(e, True)
        raise

def restore_meta(self):
    dict_t = {}
    print 'restore func!!!'
    # node to be deleted
    node = self.path.rsplit('/', 1)[1]
    print self.path
    # parent node
    parent_node = ('/' + self.path.rsplit('/', 1)[0]) + '&&' + 'list_nodes'
    print parent_node, node
    # remove node from the parent's list
    rpc = xmlrpclib.Server(url + ':' + meta_port)
    res = rpc.get(Binary(parent_node))
    print res
    if "value" in res:
        list_nodes = pickle.loads(res["value"].data)
        print 'before!!', list_nodes
    else:
        print 'None in list_nodes'
        return None
    del list_nodes[node]
    print list_nodes
    rpc.put(Binary(parent_node), Binary(pickle.dumps(list_nodes)), 6000)
    return

def build_data(self, serv_id):
    #print('Flag is 1')
    x, y = self.find_adj_serv(serv_id)
    #print('here server x=', x)
    #print('and server y=', y)
    new_store = {}
    ret_obj = self.d_server[x].request_data(1, 0)
    stored = pickle.loads(ret_obj.data)
    new_store.update(stored)
    ret_obj = self.d_server[y].request_data(0, 1)
    stored = pickle.loads(ret_obj.data)
    new_store.update(stored)
    self.d_server[serv_id].load_serv(Binary(pickle.dumps(new_store)))

def corrupt(self, key):
    shelve_datastore = shelve.open(datastore)
    if key in shelve_datastore.keys():
        self.data[key.data] = Binary(
            pickle.dumps("abc12345\nabcdefghijklmnop1234567891234567"))
        print("corrupted path :")
        print(key[2:])
        print("Corrupted block no: ")
        print(key[1])
        shelve_datastore.update(self.data)
        shelve_datastore.close()
        return True
    else:
        print("Path not found on this dataserver")
        return False

def test_new_media_object(self):
    file_ = TemporaryFile()
    file_.write('My test content')
    file_.seek(0)
    media = {'name': 'zinnia_test_file.txt',
             'type': 'text/plain',
             'bits': Binary(file_.read())}
    file_.close()
    self.assertRaises(Fault, self.server.metaWeblog.newMediaObject,
                      1, 'contributor', 'password', media)
    new_media = self.server.metaWeblog.newMediaObject(
        1, 'webmaster', 'password', media)
    self.assertTrue('/zinnia_test_file' in new_media['url'])
    default_storage.delete('/'.join([
        UPLOAD_TO, new_media['url'].split('/')[-1]]))

def corrupt(self, key):
    key = Binary(key)
    key = key.data
    if key in self.data:
        ent = self.data[key]
        corruptValue = str(random.randint(100, 1000000))
        tmplist = list(self.data[key])
        tmplist[0] = corruptValue
        # since the entry is a tuple, we cannot modify it in place
        self.data[key] = tuple(tmplist)
        returnValue = []
        returnValue.append(ent[0])
        returnValue.append(corruptValue)
        print self.data[key]
        print '------------data that is corrupted -> replace value----------'
        print returnValue
        return returnValue

def get(self, key):
    # Remove expired entries
    self.check()
    # Default return value
    rv = {}
    # If the key is in the data structure, return properly formatted results
    key = key.data
    if key in self.data:
        ent = self.data[key]
        now = datetime.now()
        if ent[1] > now:
            ttl = (ent[1] - now).seconds
            rv = {"value": Binary(ent[0]), "ttl": ttl}
        else:
            del self.data[key]
    return rv

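For context, the get() above expects self.data entries of the form (raw value, absolute expiry datetime). A matching put() might look like the sketch below; the method name, signature, and TTL handling are assumptions, not part of the original snippet.

from datetime import datetime, timedelta

def put(self, key, value, ttl):
    # Hypothetical counterpart to get(): store the Binary payload's raw bytes
    # with an absolute expiry time, mirroring the (value, expiration) tuples
    # that get() reads back.
    self.data[key.data] = (value.data, datetime.now() + timedelta(seconds=ttl))
    return True
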
def get_abac_creds(root):
    '''
    Read a directory of ABAC certs and return them ready for an XMLRPC call.
    Technically this reads all the files in root into a list of xmlrpc Binary
    objects and returns them.
    '''
    creds = []
    for r, d, f in os.walk(os.path.expanduser(root)):
        _ = d  # appease eclipse
        for fn in f:
            try:
                ff = open(os.path.join(r, fn), 'r')
                c = ff.read()
                ff.close()
                creds.append(Binary(c))
            except EnvironmentError, e:
                # XXX logger??
                print >>sys.stderr, "Error on %s: %s" % (e.filename, e.strerror)
    return creds

def generate_image(event):
    png = event.replace('.xml', '.png')
    thumb = event.replace('.xml', '.thumb.png')
    png2 = 'latest_event.png'
    server = ServerProxy('http://localhost:%d' % port)
    #server = ServerProxy('https://atlas-live.cern.ch/event_files/Main/')
    #if not server.atlantis.event.xmlrpc.AServerXMLRPCEventSource.isReady():
    #    print 'Server is not ready, try again later'
    #else:
    #    print "Requesting image and thumbnail for '%s'" % event
    # `data` is assumed to hold the raw event file contents, read elsewhere.
    images = server.atlantis.event.xmlrpc.AServerXMLRPCEventSource.generateDefaultImages(
        Binary(data), event, 1024, 0.1)
    open(png2, 'wb').write(images[0].data)
    open(png, 'wb').write(images[0].data)
    open(thumb, 'wb').write(images[1].data)

def list(range=()):
    '''Lists the song queue's contents.  If a range is specified, only the
    items that fall within that range are listed.

    Arguments: Either none, or an array of integers that represents a range.
    * If no range is given, the whole list is returned.
    * If the range contains a single integer, it will represent all members
      of the queue whose index is greater than or equal to the value of the
      integer.
    * If the range contains two integers, it will represent all members of
      the queue whose index is greater than or equal to the value of the
      first integer and less than the value of the second integer.
    * If the range contains more than two integers, an error will occur.

    Return value: An array of (base64-encoded) strings, representing the
    selected range from the song queue's contents.
    '''
    start, end = split_range(range)
    return [Binary(i) for i in data.song_queue[start:end]]

def runAnalysis():
    try:
        result = {}
        modelName = optionMenuWidget.cget("text")
        print "value is", modelName
        predictDate = cal.selection.date()
        print ("Date selected:" + str(predictDate.year))
        inputValues = {'model': modelName,
                       'year': predictDate.year,
                       'month': predictDate.month,
                       'day': predictDate.day}
        prediction = diskServer.getPrediction(Binary(pickle.dumps(inputValues)))
        print "prediction:::", pickle.loads(prediction.data)
        resultWindow(pickle.loads(prediction.data)['predicted'], modelName)
        #diskUIRoot.quit()
    except xmlrpclib.Fault, errcode:
        errorMessage = str(errcode)
        if "IOError" in errorMessage:
            errorWindow("Invalid Input!! Please enter a valid Disk or Date")
        else:
            errorWindow("Remote Server Fault.. Try again later")

def upload_image(ploneServer, folderPath, imageFile, imageDescrip=''):
    '''
    Helper function that uploads the image file.
    '''
    assert os.path.isfile(imageFile), 'Oops! The image file does not exist.'
    imageTitle = os.path.splitext(os.path.split(imageFile)[1])[0]
    imagePost = {
        folderPath + '/' + imageTitle.lower(): [{
            'title': imageTitle,
            'description': imageDescrip,
            'image': Binary(open(imageFile, 'rb').read())
        }, 'Image']
    }
    imagePath = ploneServer.post_object(imagePost)
    print('Create a link to this image in Markdown with:\n' +
          '[![]({0}/@@images/image/preview)]({0})'.format(imagePath[0]))

def read(self, path, size, offset, fh):
    if self.lost == 1:
        self.lost = 0
    offset_pos = offset % blk_size
    start_blk = offset // blk_size
    last_blk = (offset + size) // blk_size
    data_dict = {}
    for key in self.data:
        key_path = key[0]
        key_blk = key[1]
        if key_path == path:
            if (key_blk >= start_blk) and (key_blk <= last_blk):
                if key not in data_dict:
                    data_dict[key] = self.data[key]
    #print("this is the dictionary", data_dict)
    #print(self.data)
    return Binary(pickle.dumps(data_dict))

def request_data(self, prev_serv, next_serv):
    new_dict = {}
    if (prev_serv == 1 and next_serv == 0):
        for key in self.data:
            dat = self.data[key][0]
            checksum = self.data[key][1]
            copyn = self.data[key][2]
            if copyn < 2:
                new_dict[key] = [dat, checksum, copyn + 1]
    elif (prev_serv == 0 and next_serv == 1):
        for key in self.data:
            dat = self.data[key][0]
            checksum = self.data[key][1]
            copyn = self.data[key][2]
            if copyn > 0:
                new_dict[key] = [dat, checksum, copyn - 1]
    return Binary(pickle.dumps(new_dict))

def _run_bzr(argv, workdir, func):
    """Actually executes the command and builds the response."""
    try:
        os.chdir(workdir)
        exitval = func(argv)
        sys.stderr.flush()
        sys.stdout.flush()
        if isinstance(exitval, Fault):
            return_val = exitval
        else:
            # use a Binary object to wrap the output to avoid NULL and other
            # non xmlrpc (or client xml parsers) friendly chars
            out = Binary(data=sys.stdout.getvalue())
            return_val = (exitval, out, sys.stderr.getvalue())
        os.chdir(run_dir)
        return return_val
    except:
        import traceback
        traceback.print_exc(file=sys.__stderr__)
        raise

def truncate(self, path, length, fh=None):
    print("truncate")
    hash_val = pickle.loads(self.ms_helper.gethashVal(Binary(path)))
    delete_blocks = pickle.loads(
        self.ms_helper.truncate(Binary(path), Binary(str(length))))
    offset = length % MaxBLOCKSIZE
    for b in delete_blocks:
        block_num = int(b[len(hash_val):])
        server_id = (int(hash_val) + block_num) % numDServers
        if offset != 0:
            self.ds_helpers[server_id].truncate(Binary(b), Binary(str(offset)))
            offset = 0
        else:
            self.ds_helpers[server_id].delete(Binary(b))

def get(self, key):
    # Remove expired entries
    self.check()
    # Default return value
    rv = {}
    # If the key is in the data structure, return properly formatted results
    #print "simpleht_server GET key", (key)
    print "simpleht_server GET key"
    key = key.data
    #print "simpleht_server GET", (key, self.data)
    if key in self.data:
        ent = self.data[key]
        now = datetime.now()
        #print "simpleht_server GET ent", (ent, Binary(ent[0]))
        print "simpleht_server GET ent"
        if ent[1] > now:
            ttl = (ent[1] - now).seconds
            rv = {"value": Binary(ent[0]), "ttl": ttl}
        else:
            del self.data[key]
    return rv

def server_dump(self, error_port, key):
    print 'in server dump'
    err_port = []
    res_port = []
    err_port.append(error_port)
    res_port = list(set(ports) - set(err_port))
    for i in res_port:
        rpc = xmlrpclib.Server(url + ':' + i)
        count = 0
        try:
            ret = rpc.check_key(Binary(key))
            print 'ret', ret
        except EnvironmentError as exc:
            if exc.errno == errno.ECONNREFUSED:
                print 'no connection to port', i
                count = count + 1
        else:
            if ret == True:
                rpc.fill(error_port)
                break
    print 'count', count

def history(limit=0):
    '''Returns a list of the items that were recently played.

    Arguments: If a positive integer argument is given, then no more than
    that number of entries will be returned.  If a number is not specified,
    or if zero is given, then the entire history is returned.  The result is
    undefined if a negative integer argument is given (but does not raise an
    exception).

    Return value: An array of triples, each representing a song that was
    played along with the times that it started and finished playing.
    * The first member of the triple is a (base64-encoded) string which
      represents the song that was previously played.
    * The second member of the triple is a floating point number which
      represents the time that the song started playing in seconds since
      the epoch.
    * The third member of the triple is a floating point number which
      represents the time that the song finished playing in seconds since
      the epoch.
    '''
    return [(Binary(item), starttime, endtime)
            for item, starttime, endtime in data.history[-limit:]]

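Both queue methods above return xmlrpclib Binary values, so a caller reads the song bytes from the .data attribute of each result. A hypothetical client-side call, assuming the functions are exposed by an XML-RPC server (the URL and port below are assumptions):

from xmlrpclib import ServerProxy

proxy = ServerProxy('http://localhost:8000')  # hypothetical endpoint
for song in proxy.list():
    print song.data  # base64 decoding is handled by xmlrpclib
for song, started, finished in proxy.history(10):
    print song.data, finished - started  # play duration in seconds
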
def uploadFile(self, local_filename):
    # Remote destination directory
    self.semaforo.acquire(True)
    messaggio, debugMsg, ret = self._server.GetRandomUploadDestinationDirectoryXmlrpc()
    self.semaforo.release()
    if self.verbose:
        print "XmlrpcConnectionProvider.uploadFile: messaggio = %s" % (messaggio)
        print "XmlrpcConnectionProvider.uploadFile: debugMsg = %s" % (debugMsg)
        print "XmlrpcConnectionProvider.uploadFile: ret = %s" % (ret)
    dest_dir = ret['dest_dir']
    self._log("XmlrpcConnectionProvider.uploadFile: dest_dir=%s" % dest_dir)
    # Short local filename
    filename_corto = local_filename[len(os.path.dirname(local_filename)):]
    if filename_corto.startswith('/'):
        filename_corto = filename_corto[1:]
    self._log("XmlrpcConnectionProvider.uploadFile: filename_corto=%s" % filename_corto)
    # Load file
    myfile = file(local_filename, 'rb').read()
    # Upload TODO split the file in chunks
    self.semaforo.acquire(True)
    messaggio, debugMsg, ret = self._server.UploadXmlrpc(
        1, 1, filename_corto, Binary(myfile), dest_dir)
    self.semaforo.release()
    if self.verbose:
        print "XmlrpcConnectionProvider.uploadFile: messaggio = %s" % (messaggio)
        print "XmlrpcConnectionProvider.uploadFile: debugMsg = %s" % (debugMsg)
        print "XmlrpcConnectionProvider.uploadFile: ret = %s" % (ret)
    filename = ret['filename']
    dest_dir = ret['dest_dir']
    if self.verbose:
        print "XmlrpcConnectionProvider.uploadFile: filename: ", filename
        print "XmlrpcConnectionProvider.uploadFile: dest_dir: ", dest_dir
    return "%s/%s" % (dest_dir, filename)

def GetParameter(self, path):
    """Gets the assigned parameter file.

    Args:
      path: A relative path for locating the parameter.

    Returns:
      Content of the parameter.  It is always wrapped in a shopfloor.Binary
      object to provide the best flexibility.

    Raises:
      ValueError if the parameter does not exist or is not under the
      parameters folder.
    """
    abspath = os.path.abspath(os.path.join(self.parameters_dir, path))
    if not abspath.startswith(self.parameters_dir):
        raise ValueError('GetParameter is limited to parameter directory')
    if not os.path.isfile(abspath):
        raise ValueError('File does not exist or it is not a file')
    return Binary(open(abspath).read())

def load_testdata():
    try:
        datafp = os.path.join(testdatadir, "fakeserver_testdata_large.yml")
        logger.debug("Attempting to load data from: %s", datafp)
        with open(datafp) as fd:
            testdata = yaml.load(fd)
        logger.debug("Data loaded, %s items", len(testdata))
    except IOError as e:
        logger.warning("Error while reading testdata: %r", e)
        testdata = dict()
    # Just create attachment data once - improves test run speed.
    attachmentfps = (os.path.join(attachmentsdir, fn)
                     for fn in os.listdir(attachmentsdir))
    attachmentfps = (fp for fp in attachmentfps if os.path.isfile(fp))
    try:
        attachments = {
            os.path.basename(fp): Binary(open(fp).read())
            for fp in attachmentfps
        }
    except (OSError, IOError) as e:
        logger.warning("%r while loading attachments", e)
        attachments = {}  # fall back to no attachments
    return testdata, attachments

#!/usr/bin/python
import sys
from xmlrpclib import ServerProxy
import datetime
from xmlrpclib import Binary

file_hash = sys.argv[1]
if len(sys.argv) != 3:
    file_count = 1
else:
    file_count = int(sys.argv[2])

s = ServerProxy('http://localhost:8085')
for i in range(file_count):
    file_struct = s.Service_RequestFileByHash(sys.argv[1])
    print str(file_struct)
    file_content = file_struct[3]
    bin = Binary(file_content)
    bin.decode(file_content)
    print str(bin)

import sys
from xmlrpclib import Binary, ServerProxy

file_name = sys.argv[1]
if len(sys.argv) == 2:
    force = False
elif sys.argv[2] == "Force":
    force = True
else:
    force = False

s = ServerProxy('http://localhost:8085')
file = open(file_name, "r")
file_content = file.read()
bin = Binary(file_content)
out_file = open("Temp_out", "w")
bin.encode(out_file)
out_file.close()
binary_content = open("Temp_out", "rb").read()
new_binary_content = ""
for line in binary_content.split('\n'):
    if not (line.startswith('<value>') or line.startswith('</base64>')):
        new_binary_content += line
print "Content: " + new_binary_content
bin1 = Binary(new_binary_content)
ret_val = s.Service_SaveFile(file_name, bin1, force)
print "Got return " + str(ret_val)
file.close()