Example #1
    def pull_file(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        file_name = file.file_name
        stored_ips = self.factory.tracked_files[file_name][1][0]
        stored_num_chnks = len(stored_ips)
        chnk_indx = 0
        chnks_to_update = ''
        sync_actn = 'pull'

        # Request every chunk the master has stored for this file
        while chnk_indx < stored_num_chnks:
            chnks_to_update += str(chnk_indx) + ' '
            chnk_indx += 1

        # Pull from the peer recorded for the first chunk
        ip = stored_ips[0]
        return {'ips': ip, 'chnks': chnks_to_update, 'actn': sync_actn}
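The dict returned above travels back over the wire as flat strings, so the receiving slave has to split 'chnks' back into integer indices before it can request data. A minimal sketch of that unpacking, with a hypothetical helper name (parse_sync_response does not appear in the original code):

def parse_sync_response(response):
    # Hypothetical helper: unpack the {'ips', 'chnks', 'actn'} dict
    # produced by pull_file/push_file into usable Python values.
    action = response['actn']                      # 'pull' or 'push'
    ips = response['ips'].split()                  # one or more space-separated IPs
    chunk_indices = [int(i) for i in response['chnks'].split()]
    return action, ips, chunk_indices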
Example #2
    def create_file(self, encoded_file):
        file = decode_file(encoded_file)
        root_path = os.path.normpath(os.getcwd() + os.sep + os.pardir)
        path_to_file = os.path.join(root_path, 'src', 'monitored_files', 'ians_share', file.file_name)
        print('in creating slave file', file.file_name)
        if not os.path.exists(path_to_file):
            print('slave missing created file')
            # Create an empty local copy and mark it as updating
            self.file_statuses[file.file_name] = ('updating', time.time())
            self.files.append(file)
            open(path_to_file, 'w').close()
            file.last_mod_time = 0
            # Ask the master how to bring the new empty file up to date
            update = self.callRemote(UpdateFile, encoded_file=file.encode(), sender_ip=self.get_local_ip())
            update.addCallback(self.update_file, file)
        return {}
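Every handler in these examples round-trips file metadata through decode_file and File.encode, because the arguments have to travel as plain strings. The project's File class is not shown here; the following is only a sketch of what such a serializable container could look like, assuming JSON encoding and using the field names the examples rely on:

import json

class File:
    """Hypothetical stand-in for the project's File class (assumed fields)."""
    def __init__(self, file_name, chunk_hashes=None, last_mod_time=0.0,
                 num_chunks=0, chunks_needed='', addresses=None):
        self.file_name = file_name
        self.chunk_hashes = chunk_hashes or []
        self.last_mod_time = last_mod_time
        self.num_chunks = num_chunks
        self.chunks_needed = chunks_needed
        self.addresses = addresses or []

    def encode(self):
        # Serialize to a string so the object can be sent as a remote-call argument
        return json.dumps(self.__dict__)

def decode_file(encoded_file):
    # Rebuild a File from the string produced by File.encode()
    data = json.loads(encoded_file)
    file = File(data['file_name'])
    file.__dict__.update(data)
    return file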
Example #3
    def serve_chunks(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        print('FTP SERVER:', get_local_ip(), 'Serving file', file.file_name)
        chunks_needed = file.chunks_needed.split(' ')
        file.file_path = file.get_file_path()
        chunks = self.get_chunks(file.file_path)

        # Return each chunk with its data
        for i in chunks_needed:
            if i != '':
                chunk = Chunk(int(i), file)
                chunk.data = chunks[int(i)]
                self.callRemote(ReceiveChunk, chunk=chunk.encode())
        return {}
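serve_chunks depends on get_chunks to slice the on-disk file into the same fixed-size pieces the chunk hashes were computed over. That helper is not included in these examples; the sketch below assumes a 1 KiB chunk size, which is a guess rather than the project's real value:

CHUNK_SIZE = 1024  # assumed chunk size; the real value is not shown in the examples

def get_chunks(file_path, chunk_size=CHUNK_SIZE):
    # Read a file and return its contents as a list of fixed-size chunks,
    # so chunks[i] lines up with chunk index i.
    chunks = []
    with open(file_path, 'rb') as f:
        while True:
            data = f.read(chunk_size)
            if not data:
                break
            chunks.append(data)
    return chunks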
Example #4
    def seed_file(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        file_name = file.file_name
        hashes = file.chunk_hashes
        chunk_ips = []
        mod_times = []

        # Record the sender and modification time for every chunk hash
        for _ in hashes:
            chunk_ips.append(sender_ip)
            mod_times.append(file.last_mod_time)

        # Track the file as (chunk hashes, (chunk ips, chunk mod times))
        self.factory.tracked_files[file_name] = (hashes, (chunk_ips,
                                                          mod_times))
        print('MASTER: Tracking', self.factory.tracked_files)
        return {}
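The tracked_files entry built here is what pull_file, push_file, and update_file index into later, so keeping its shape in mind makes the [1][0] and [1][1] lookups easier to follow. A small illustration of the layout for one file (the values are made up):

# tracked_files maps file name -> (chunk_hashes, (chunk_ips, chunk_mod_times))
tracked_files = {
    'notes.txt': (
        ['a94a8fe5', '9d5ed678'],           # [0]: one hash per chunk
        (['10.0.0.5', '10.0.0.5'],          # [1][0]: IP that holds each chunk
         [1690000000.0, 1690000000.0]),     # [1][1]: mod time recorded per chunk
    ),
}

stored_ips = tracked_files['notes.txt'][1][0]   # as used by pull_file and push_file
stored_hashes = tracked_files['notes.txt'][0]   # as used by update_file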
Example #5
    def serve_chunks(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        print('FTP SERVER: Serving file', file.file_name)
        chunks_needed = file.chunks_needed.split(' ')
        file.file_path = file.get_file_path()
        chunks = self.get_chunks(file.file_path)

        # Return each chunk with its data
        for i in chunks_needed:
            if i != '':
                chunk = Chunk(int(i), file)
                chunk.data = chunks[int(i)]
                print('FTP SERVER: Serving', chunk.data)
                self.callRemote(ReceiveChunk, chunk=chunk.encode())

        # After a short delay, have the slave re-check the file's status
        deferLater(reactor, 2, self.factory.slave.update_file_status,
                   file.file_name)
        return {}
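Both serve_chunks variants wrap each piece of data in a Chunk and ship it through ReceiveChunk as an encoded string. The Chunk class itself is not part of these examples; the sketch below is only a guess at its shape, inferred from how the code constructs and encodes it:

import base64
import json

class Chunk:
    """Hypothetical stand-in for the project's Chunk class."""
    def __init__(self, index, file):
        self.index = index
        self.file_name = file.file_name
        self.data = b''

    def encode(self):
        # base64 keeps binary chunk data safe inside a plain string payload
        return json.dumps({
            'index': self.index,
            'file_name': self.file_name,
            'data': base64.b64encode(self.data).decode('ascii'),
        })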
Example #6
    def push_file(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        file_name = file.file_name
        stored_ips = self.factory.tracked_files[file_name][1][0]
        stored_num_chnks = len(stored_ips)
        chnk_indx = 0
        chnks_to_update = ''
        sync_actn = 'push'

        # Mark every stored chunk to be pushed
        while chnk_indx < stored_num_chnks:
            chnks_to_update += str(chnk_indx) + ' '
            chnk_indx += 1
        ip = self.dist_ip

        # If the sender is the distributor itself, push to another endpoint instead
        if sender_ip == self.dist_ip:
            ip = list(self.factory.endpoints.keys())[1]
        return {'ips': ip, 'chnks': chnks_to_update, 'actn': sync_actn}
Example #7
    def initiate_serve(self, encoded_file):
        file = decode_file(encoded_file)
        # Ask the remote peer to start serving this file's chunks
        self.callRemote(ServeChunks,
                        encoded_file=file.encode(),
                        sender_ip=get_local_ip())
        return {}
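get_local_ip appears throughout these handlers but its definition is not included. A common way to implement it, shown here as a sketch rather than the project's actual helper, is to open a UDP socket toward a public address and read back the local address the OS chose for the route:

import socket

def get_local_ip():
    # Best-effort local IP lookup (sketch; not the project's original helper).
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # connect() on a UDP socket sends no packets; it only selects a route
        s.connect(('8.8.8.8', 80))
        return s.getsockname()[0]
    except OSError:
        return '127.0.0.1'
    finally:
        s.close()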
Example #8
    def create_file(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        # Track the new file, then tell every slave to create its local copy
        self.seed_file(encoded_file, sender_ip)
        for slave in self.factory.endpoints.values():
            slave.callRemote(CreateFile, encoded_file=file.encode())
        return {}
Example #9
    def update_file(self, encoded_file, sender_ip):
        file = decode_file(encoded_file)
        file_name = file.file_name
        hashes = file.chunk_hashes
        ips = ''
        chnks_to_update = ''
        chnk_indx = 0
        sync_actn = 'pull'
        mstr_has_file = True
        mstrfile_mtchs_sntfile = False

        # Track any files this master has never seen before
        if file_name not in self.factory.tracked_files:
            self.seed_file(encoded_file, sender_ip)
            mstr_has_file = False

        # Look up the master's stored tracking info for this file
        stored_ips = self.factory.tracked_files[file_name][1][0]
        stored_num_chnks = len(stored_ips)
        stored_timestmp = self.factory.tracked_files[file_name][1][1]
        stored_hashes = self.factory.tracked_files[file_name][0]

        # Check new file's hashes against stored master hashes
        while chnk_indx < stored_num_chnks:
            mstr_file_curr = cmp_floats(stored_timestmp[chnk_indx],
                                        file.last_mod_time)
            # Check if the master file matches the file being updated
            if mstr_has_file:
                mstrfile_mtchs_sntfile = stored_ips[chnk_indx] == sender_ip

            # Choose file data to store
            try:
                if stored_hashes[chnk_indx] != hashes[chnk_indx]:
                    stored_timestmp[chnk_indx] = stored_timestmp[
                        chnk_indx] if mstr_file_curr else file.last_mod_time
                    stored_hashes[chnk_indx] = stored_hashes[
                        chnk_indx] if mstr_file_curr else hashes[chnk_indx]
                    stored_ips[chnk_indx] = stored_ips[
                        chnk_indx] if mstr_file_curr else file.addresses[
                            chnk_indx]
            except IndexError:
                # Sent file has fewer chunks than the stored copy: keep the
                # master's data and mark the remaining stored chunks
                mstr_file_curr = True
                mstrfile_mtchs_sntfile = False
                while chnk_indx < stored_num_chnks - 1:
                    chnks_to_update += str(chnk_indx) + ' '
                    chnk_indx += 1

            # Decide whether the sender should push its copy or pull the master's
            if not mstr_file_curr and not mstrfile_mtchs_sntfile:
                sync_actn = 'push'
            elif mstr_file_curr and not mstrfile_mtchs_sntfile:
                sync_actn = 'pull'

            chnks_to_update += str(chnk_indx) + ' '
            chnk_indx += 1

        print('MASTER: Stored file', file_name, 'is current:', mstr_file_curr)

        # Track any new file chunks appended to end of file
        while chnk_indx < file.num_chunks:
            chnks_to_update += str(chnk_indx) + ' '
            chnk_indx += 1

        # Add ips to push to
        if sync_actn == 'push':
            for ip in self.factory.endpoints.keys():
                ips += str(ip) + ' '
        # Add ips to pull from
        else:
            ips = self.factory.tracked_files[file_name][1][0][0]
        print('MASTER: Awaiting', sync_actn, 'for', file_name, chnks_to_update)
        # Default to the distributor's IP; if the distributor itself is the
        # sender, point it at the master instead
        ip = self.dist_ip
        if sender_ip == self.dist_ip:
            ip = self.factory.ip
        return {'ips': ip, 'chnks': chnks_to_update, 'actn': sync_actn}
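update_file leans on cmp_floats to decide whether the master's stored timestamp is at least as recent as the sender's. The helper is not shown in these examples; a plausible sketch, assuming it simply compares two float timestamps with a small tolerance for round-off:

def cmp_floats(stored_time, sent_time, tolerance=1e-6):
    # Sketch of the timestamp comparison used by update_file: True when the
    # stored modification time is at least as recent as the sent one.
    return stored_time >= sent_time - tolerance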