def copy_fle(self, fle, dst_dir):
    """Copy a file record's on-disk file into dst_dir.

    Returns a shallow copy of ``fle`` with its ``'name'`` and ``'path'``
    keys updated to point at the new location. ``fle`` must provide
    ``'name'`` and ``'path'`` keys.
    """
    new_fle = copy.copy(fle)
    # Sanitize the destination file name and directory with the project
    # helpers to guard against path traversal.
    dst_name = util.secure_path(util.clean_path(fle['name']))
    dst_dir = util.secure_path(util.clean_path(dst_dir))
    # Resolve absolute source and destination paths.
    src_path = os.path.abspath(new_fle['path'])
    dst_path = os.path.abspath(os.path.join(dst_dir, dst_name))
    dst_dir = os.path.dirname(dst_path)
    msg = "envmod_local: Copying '{:s}' to '{:s}'".format(src_path, dst_path)
    logger.info(self._format_msg(msg))
    # Create the destination directory. EAFP here closes the
    # exists()/makedirs() race the old code had when two callers copied
    # into the same new directory concurrently.
    try:
        os.makedirs(dst_dir)
    except OSError:
        # Only swallow the error if the directory does in fact exist now.
        if not os.path.isdir(dst_dir):
            raise
    # Copy the file and update the returned record.
    shutil.copy(src_path, dst_path)
    new_fle['name'] = dst_name
    new_fle['path'] = dst_path
    return new_fle
def copy_fle(self, fle, dst_dir):
    """Duplicate a file record, copying its backing file into dst_dir."""
    duplicate = copy.copy(fle)

    # Sanitize the target file name.
    target_name = fle['name']
    target_name = util.clean_path(target_name)
    target_name = util.secure_path(target_name)

    # Sanitize the target directory.
    dst_dir = util.clean_path(dst_dir)
    dst_dir = util.secure_path(dst_dir)

    # Build absolute source and destination paths.
    source = os.path.abspath("{:s}".format(duplicate['path']))
    destination = os.path.abspath(os.path.join(dst_dir, target_name))
    dst_dir = os.path.dirname(destination)

    msg = "envmod_local: Copying '{:s}' to '{:s}'".format(source, destination)
    logger.info(self._format_msg(msg))

    # Make sure the destination directory exists before copying.
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)

    # Copy the file, then update and return the new record.
    shutil.copy(source, destination)
    duplicate['name'] = target_name
    duplicate['path'] = destination
    return duplicate
def delete_file(filename):
    """Remove a file from the storage folder and from the file database.

    Returns "Ok." on success; aborts with HTTP 500 when the file is
    absent from ``files_info`` or cannot be removed from disk.
    """
    filename = os.path.basename(filename)
    path = secure_path(cagibi_folder, filename)
    if filename not in files_info:
        abort(500, "File doesn't exist or is not in database.")
    # EAFP: attempt the removal instead of testing existence first,
    # which closes the exists()/remove() race the old FIXME flagged.
    try:
        os.remove(path)
    except OSError:
        abort(500, "File doesn't exist or is not in database.")
    else:
        del files_info[filename]
        save_config(files_info, filename="files.json")
        return "Ok."
def return_deltas(filename):
    """Return the rsync deltas for *filename* as a JSON string.

    The client POSTs the block hashes of its local copy in the
    "hashes" form field; the response is the JSON-encoded delta list.
    """
    hashes = json.loads(request.forms.get("hashes"))
    # "with" guarantees the handle is closed even when rsyncdelta or
    # encode_deltas raises (the old code leaked it on error).
    with open(secure_path(cagibi_folder, filename), "rb") as patchedfile:
        deltas = encode_deltas(rsyncdelta(patchedfile, hashes))
    return json.dumps(deltas)
def update_file_hashes(filename):
    """Apply client-sent rsync deltas to *filename* and bump its revision.

    Reads the "deltas" POST field, patches the stored file through an
    anonymous temporary file, and returns the updated ``files_info``
    entry (a dict with the new "rev").
    """
    deltas = decode_deltas(json.loads(request.forms.get("deltas")))
    path = secure_path(cagibi_folder, filename)
    # tempfile.TemporaryFile replaces os.tmpfile(), which was
    # Python-2-only (removed in Python 3); both yield an anonymous
    # binary temp file that vanishes on close.
    import tempfile
    save_to = tempfile.TemporaryFile()
    try:
        # Close the source even if patching fails (old code leaked it).
        with open(path, "rb") as unpatched:
            patchstream(unpatched, save_to, deltas)
        os.unlink(path)
        save_to.seek(0)
        # FIXME: rename instead of copying ?
        with open(path, "w+b") as file_copy:
            file_copy.write(save_to.read())
    finally:
        save_to.close()
    files_info[filename] = {"rev": files_info[filename]["rev"] + 1}
    save_config(files_info, filename="files.json")
    return files_info[filename]
def from_new(cls, data, **kwargs):
    """New Constructor: build a File object backed by a copy of src_path."""
    # src_path is mandatory.
    src_path = kwargs.pop('src_path', None)
    if not src_path:
        raise TypeError("src_path required")

    # Attach the file schema.
    kwargs['schema'] = set(_FILE_SCHEMA)

    # Work on a shallow copy of the caller's data.
    data = copy.copy(data)

    # Derive and sanitize the file name, falling back to the source name.
    name = data.get('name', None) or os.path.basename(src_path)
    name = os.path.normpath(util.secure_path(util.clean_path(name)))
    if not name:
        raise ValueError("Valid filename required")
    data['name'] = name

    # The storage path is filled in once the object exists.
    data['path'] = ""

    # Guess MIME type and encoding from the name.
    mime_type, mime_encoding = mimetypes.guess_type(name)
    data['type'] = str(mime_type)
    data['encoding'] = str(mime_encoding)

    # Delegate object creation to the parent class.
    fle = super(File, cls).from_new(data, **kwargs)

    # Record where the file data will live.
    dst_path = os.path.abspath(os.path.join(config.FILESTORAGE_PATH, repr(fle)))
    fle['path'] = dst_path

    # Copy the data in; roll the object back if the copy fails.
    try:
        shutil.copy(src_path, dst_path)
    except IOError:
        fle.delete(force=True)
        raise

    return fle
def from_new(cls, data, **kwargs): """New Constructor""" # Extract Args src_path = kwargs.pop('src_path', None) if not src_path: raise TypeError("src_path required") # Set Schema schema = set(_FILE_SCHEMA) kwargs['schema'] = schema # Create New Object data = copy.copy(data) # Setup Name name = data.get('name', None) if not name: name = os.path.basename(src_path) name = util.clean_path(name) name = util.secure_path(name) name = os.path.normpath(name) if not name: raise ValueError("Valid filename required") data['name'] = name # Setup Blank Path data['path'] = "" # Get Type typ = mimetypes.guess_type(name) data['type'] = str(typ[0]) data['encoding'] = str(typ[1]) # Call Parent fle = super(File, cls).from_new(data, **kwargs) # Set Path dst_path = os.path.abspath(os.path.join(config.FILESTORAGE_PATH, repr(fle))) fle['path'] = dst_path # Save File try: shutil.copy(src_path, dst_path) except IOError: # Clean up on failure fle.delete(force=True) raise # Return File return fle
def create_file(filename):
    """Create a file on the server.

    The URL component supplies the file name; the POST parameter
    ``contents`` supplies the file contents. Returns "Ok." on success;
    aborts with HTTP 400 if the file already exists.
    """
    # FIXME: handle directories
    filename = os.path.basename(filename)
    contents = request.forms.get('contents')
    # Bug fix: the old code tested os.path.exists(filename) against the
    # process working directory instead of the storage folder.
    path = secure_path(cagibi_folder, filename)
    # FIXME: possible race condition between the check and the write
    if not os.path.exists(path) and filename not in files_info:
        # "with" closes the handle even if the write fails.
        with open(path, "wb") as fd:
            fd.write(contents)
        files_info[filename] = {"rev": 1}
        save_config(files_info, filename="files.json")
        return "Ok."
    else:
        abort(400, "File already exists")
def filelist():
    """Return a JSON object mapping each stored file to its mtime and rev."""
    folder = {}
    files_info = load_config(filename="files.json")
    new_files = False
    # 'fname' instead of 'file' to avoid shadowing the builtin.
    for fname in os.listdir(cagibi_folder):
        folder[fname] = {}
        folder[fname]["mtime"] = os.path.getmtime(secure_path(cagibi_folder, fname))
        if fname not in files_info:
            # Automatically register newly-appeared files at rev 1.
            files_info[fname] = {"rev": 1}
            new_files = True
        folder[fname]["rev"] = files_info[fname]["rev"]
    # Persist once after the scan instead of once per new file
    # (resolves the old FIXME about redundant saves).
    if new_files:
        save_config(files_info, filename="files.json")
    return json.dumps(folder)
def checkout_upstream_changes():
    """Checkout changes on the server.

    Fetches the server's folder listing, downloads files we don't have
    locally, and patches files whose server revision is newer than ours
    using the rsync delta algorithm. Network and file errors for an
    individual file skip that file; the sync is retried on later calls.
    """
    # Fetch the server-side folder listing; give up silently if the
    # server is unreachable (we will be called again later).
    try:
        fd = urllib2.urlopen(server_url + "/folder")
        server_files = json.load(fd)
        fd.close()
    except URLError:
        return
    local_files = load_config("files.json")
    modified = False
    # NOTE(review): 'modified' is set below but not used in this view —
    # presumably consumed by code past this chunk; confirm.
    for file in server_files:
        if file not in local_files:
            # Get it
            # New file: record the server revision, then download whole.
            local_files[file] = {}
            local_files[file]["rev"] = server_files[file]["rev"]
            url = "%s/files/%s/data" % (server_url, file)
            try:
                urllib.urlretrieve(url, secure_path(cagibi_folder, file))
                print "Retrieved %s" % file
                modified = True
            except URLError, e:
                if hasattr(e, 'reason'):
                    # Unable to reach network
                    # Pass - the file will be downloaded later
                    # as checkout_upstream_changes is called
                    # repeatedly.
                    continue
        else:
            if server_files[file]["rev"] > local_files[file]["rev"]:
                # Get it too, but using the rsync algo
                print "Retrieving file, using the rsync algorithm"
                # Compute local block checksums; skip the file if it is
                # unreadable (e.g. locked or deleted meanwhile).
                try:
                    unpatched = open(secure_path(cagibi_folder, file), "rb")
                    hashes = list(blockchecksums(unpatched))
                except IOError:
                    continue
                # POST our hashes; the server answers with deltas.
                json_hashes = json.dumps(hashes)
                post_data = {}
                post_data["hashes"] = json_hashes
                post_string = urllib.urlencode(post_data)
                url = "%s/files/%s/deltas" % (server_url, file)
                try:
                    fd = urllib2.urlopen(url, post_string)
                except URLError, e:
                    if hasattr(e, 'reason'):
                        # Unable to reach network
                        # Pass - the file will be downloaded later
                        # as checkout_upstream_changes is called
                        # repeatedly.
                        continue
                    # NOTE(review): if a URLError has no 'reason', control
                    # falls through to json.load(fd) with fd unset/stale —
                    # looks like a latent bug; verify.
                json_response = decode_deltas(json.load(fd))
                # Patch the local copy through an anonymous temp file.
                unpatched.seek(0)
                save_to = os.tmpfile()
                patchstream(unpatched, save_to, json_response)
                unpatched.close()
                os.unlink(secure_path(cagibi_folder, file))
                save_to.seek(0)
                # FIXME: rename instead of copying ?
                try:
                    file_copy = open(secure_path(cagibi_folder, file), "w+b")
                    file_copy.write(save_to.read())
                    file_copy.close()
                    local_files[file]["rev"] = server_files[file]["rev"]
                    modified = True
                    mqueue.client_modified.append(file)
                except IOError:
                    # NOTE(review): this 'continue' skips save_to.close(),
                    # leaking the temp file handle — confirm and fix.
                    continue
                save_to.close()
fd = urllib2.urlopen(url) except URLError, e: if hasattr(e, 'reason'): mqueue.postponed_modified.append(file) continue except HTTPError: # The server may return us an error if things have gone south during # the file creation process. # If so, mark the file as added instead of modified. print "mark as added instead" mqueue.added.put(file) continue hashes = json.load(fd) try: patchedfile = open(secure_path(cagibi_folder, file), "rb") deltas = encode_deltas(rsyncdelta(patchedfile, hashes)) patchedfile.close() except IOError: print "IOError - continuing" # The file may be locked by another process # Add it to the postponed list. mqueue.postponed_modified.append(file) continue # Send the deltas to the server. post_data = {} post_data["deltas"] = json.dumps(deltas) post_string = urllib.urlencode(post_data) fd = urllib2.urlopen(url, post_string) results = json.load(fd)
def file_hashes(filename):
    """Return the JSON-encoded rolling block checksums of *filename*."""
    # "with" closes the handle even if blockchecksums raises
    # (the old code leaked the file object on error).
    with open(secure_path(cagibi_folder, filename), "rb") as unpatched:
        hashes = list(blockchecksums(unpatched))
    return json.dumps(hashes)