def fileSync(self, data):
    """Prepare per-file server information for a synchronization pass.

    For every entry in ``data['files_u']`` (a sequence of
    ``(original_name, server_name, file_hash, (new_hash, size))`` records),
    refreshes the stored file record when the hash changed in
    'WSYNC_FILE' mode and collects the target server tuple for the
    client.  Returns the mutated ``data`` dict with 'server',
    'user_id' and 'cmd' ('CSYN') filled in; 'files_u' is removed.
    """
    user_db = Queries.getUser(data['user'])
    user_id = 'u' + str(user_db.id).rjust(14, '0')
    server = []
    # one pass per file the client asked to synchronize
    for entry in data['files_u']:
        original_name, server_name, file_hash = entry[0], entry[1], entry[2]
        file_hash_new, file_size = entry[3][0], entry[3][1]
        fs = Queries.getFileServer(file_hash)
        if data['sync_type'] == 'WSYNC_FILE' and file_hash != file_hash_new:
            # the client reports a new hash -> update the stored record
            Queries.updateFileRecordDataByHash(
                original_name, file_hash,
                {"file_hash": file_hash_new, "chunk_size": file_size})
        # append server location; (None,) marks an unknown file server
        location = fs if fs else (None,)
        server.append((original_name, server_name) + location)
    data['server'] = server
    data['user_id'] = user_id
    data['cmd'] = 'CSYN'
    log.msg('[SYNC] SYNC data for User=%s has complete!' % (data['user']))
    del data['files_u']
    return data
def authorization(self, data):
    """Authenticate a user against the DB.

    The password check is sha256(password + numeric user id) compared
    with the stored hash.  On success sets ``data['auth'] = True``; on
    any failure sets ``data['cmd'] = 'RAUT'`` and appends a readable
    message to ``data['error']``.  Always returns the mutated ``data``.
    """
    log.msg("[AUTH] User=%s trying to auth..." % data['user'])
    result = Queries.getUser(data['user'])
    result_msg = "[AUTH] User=%s successfully logged..." % data['user']
    # user not found at DB
    if result is None:
        data['cmd'] = 'RAUT'
        data['error'].append('\nWARNING: User not found')
        result_msg = "[AUTH] User=%s not found" % data['user']
    elif result['name'] == data['user']:
        # record matches the requested login -> verify the password.
        # salt is the numeric user id, matching how it was stored.
        # NOTE(review): sha256() requires bytes on Python 3; this code
        # assumes Python 2 str inputs -- confirm target interpreter.
        hash_psw = str(sha256(data['password'] + str(result['id'])).hexdigest())
        if result['password'] == str(hash_psw):
            data['auth'] = True
        else:
            # incorrect password --> fake user
            data['cmd'] = 'RAUT'
            data['error'].append('\nERROR: Incorrect password. Try again...')
            result_msg = "[AUTH] Incorrect password for user=%s" % data['user']
    else:
        # BUGFIX: previously a name mismatch fell through with no branch,
        # logging "successfully logged" while data['auth'] stayed unset
        # and the client got no error.  Treat it as a failed lookup.
        data['cmd'] = 'RAUT'
        data['error'].append('\nWARNING: User not found')
        result_msg = "[AUTH] User=%s not found" % data['user']
    log.msg(result_msg)
    return data
def massive_delete_files(self, data):
    """Delete a batch of files from the file storage.

    Delegates record removal to the DB layer and fills ``data`` with
    the confirmation command ('CDLT'), the padded user id, and the list
    of records that were actually deleted.
    """
    log.msg('[DELF] Delete data for User=%s' % (data['user']))
    account = Queries.getUser(data['user'])
    removed = Queries.deleteManyFileRecords(data['deleted_files'],
                                            data['default_dir'])
    data['cmd'] = 'CDLT'
    data['user_id'] = 'u' + str(account.id).rjust(14, '0')
    data['deleted_files'] = removed
    log.msg('[DELF] Delete data for User=%s has complete!' % (data['user']))
    return data
def read_fs_all(self, data):
    """Collect the data needed to read a set of files.

    For every ``(name, path)`` pair in ``data['files_read']`` looks up
    the first matching file record in the user's catalog and gathers
    ``(name, path, server_name, servers)``.  Returns the mutated
    ``data`` dict carrying the 'CREA' reply and the padded user id.
    """
    log.msg('[REAF] Getting data for User=%s' % (data['user']))
    user_db = Queries.getUser(data['user'])
    # the per-user filespace is named "<user>_fs"
    fs_db = Queries.getFileSpaceByName(str(data['user'] + "_fs"))
    catalog = Queries.getUserCatalogOnFilespace(fs_db.id)
    files_lst = []
    for name, path in data['files_read']:
        record, record_servers = Queries.getFirstFileRecord(name, path, catalog.id)
        files_lst.append((name, path, record.server_name, record_servers))
    data['cmd'] = 'CREA'
    data['user_id'] = 'u' + str(user_db.id).rjust(14, '0')
    data['files_read'] = files_lst
    log.msg('[REAF] Getting data for User=%s has complete!' % (data['user']))
    return data
def massive_write_files(self, data):
    """Massive write files to file storage.

    For every ``(name, path, new_hash, size)`` tuple in
    ``data['files_write']``:

    * unknown files get a fresh DB record and a 'WRITE_FILE' task on a
      file server chosen by the balancer;
    * known files whose hash changed get their record updated and a
      'WSYNC_FILE' task against their current server;
    * known files with an unchanged hash are skipped.

    Returns the mutated ``data`` dict carrying the 'CWRT' reply.
    """
    log.msg('[WRTF] Massive write files for User=%s' % (data['user']))
    files_lst = []
    fs_name = str(data['user'] + "_fs")
    user_db = Queries.getUser(data['user'])
    user_id = 'u' + str(user_db.id).rjust(14, '0')
    fs_db = Queries.getFileSpaceByName(fs_name)
    catalog = Queries.getUserCatalogOnFilespace(fs_db.id)
    for name, path, file_hash_new, size in data['files_write']:
        # catalog paths are stored relative to the user's default dir
        file_path = path.replace(data['default_dir'], '')
        user_file, user_file_servers = Queries.getFirstFileRecord(name, file_path,
                                                                  catalog.id)
        if user_file is None:
            # new file: ask the balancer for a target server
            server = self.balancer.getFileServer("WRTE", file_hash_new)
            if server is None:
                # BUGFIX: this case was silently skipped before, losing the
                # file with no trace; at least leave a record in the log.
                log.msg('[WRTF] No file server available to write file for '
                        'User=%s' % (data['user']))
                continue
            server_ip = str(server[0])
            port = int(server[1])
            cnt_files = Queries.getCountFiles() + 1
            # server-side id: zero-padded sequence number + original extension
            type_file = os.path.splitext(name)[1]
            file_id = str(cnt_files).rjust(25 - len(type_file), '0') + type_file
            # write record into DB
            Queries.createFileRecordOneChunk(name, file_id, file_hash_new,
                                             file_path, size, catalog.id,
                                             server_ip, port)
            files_lst.append(('WRITE_FILE', name, path, file_id,
                              [(server_ip, port)]))
        elif user_file.file_hash != file_hash_new:
            # file already exists with a different hash: sync it in place
            Queries.updateFirstFileRecordHashAndSize(user_file.id,
                                                     file_hash_new, size)
            files_lst.append(('WSYNC_FILE', name, path, user_file.server_name,
                              user_file_servers))
    data['cmd'] = 'CWRT'
    data['user_id'] = user_id
    data['files_write'] = files_lst
    log.msg('[WRTF] WRTF for User=%s has complete!' % (data['user']))
    return data
def write_file(self, data):
    """Prepare a single file write: pick a server and register the file.

    On success fills ``data['server']`` / ``data['json']`` with the
    target server and the WRITE_FILE task, records the file in the DB
    and replies 'COWF'.  When no file server is available the reply is
    'AUTH' with an error message appended to ``data['error']``.  In
    both cases 'file_path' / 'file_hash' / 'file_size' are stripped
    from ``data`` before it is returned.
    """
    log.msg("[WRTE] User=%s trying to write file..." % data['user'])
    server = self.balancer.getFileServer(data['cmd'], data['file_hash'])
    if server is None:
        msg = "ERROR: Can't write now your file: servers in offline. Try later..."
        data['cmd'] = 'AUTH'
        data['error'].append(msg)
        # BUGFIX: was log.msg(log.msg(...)) -- log.msg returns None, so the
        # inner call logged the message and the outer call logged None.
        log.msg("[WRTE] %s..." % msg)
    else:
        # get info from DB
        user_db = Queries.getUser(data['user'])
        fs = Queries.getFileSpace(user_db.filespace_id)
        catalog = Queries.getUserCatalogOnFilespace(fs.id)
        server_ip = str(server[0])
        port = int(server[1])
        cnt_files = Queries.getCountFiles() + 1
        # processing data: non-GUI clients store files with an empty path
        user_path, original_filename = os.path.split(data['file_path'])
        if not data['gui']:
            user_path = u''
        # server-side id: zero-padded sequence number + original extension
        type_file = os.path.splitext(original_filename)[1]
        user_id = 'u' + str(user_db.id).rjust(14, '0')
        file_id = str(cnt_files).rjust(25 - len(type_file), '0') + type_file
        data['server'] = server
        data['json'] = ('WRITE_FILE', user_id, file_id, data['file_path'])
        # write record into DB
        Queries.createFileRecordOneChunk(original_filename, file_id,
                                         data['file_hash'], user_path,
                                         data['file_size'], catalog.id,
                                         server_ip, port)
        # BUGFIX: same double log.msg(log.msg(...)) pattern removed here.
        log.msg("[WRTE] Operation with DB and User=%s has complete..." % data['user'])
        # set the success reply only on this branch so the error path
        # above keeps its 'AUTH' command.
        data['cmd'] = 'COWF'
    del data['file_path']
    del data['file_hash']
    del data['file_size']
    return data