Example #1
def get_url():
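    """Return the SQLAlchemy URL of the patches database.

    Falls back to patches.db inside the patches directory when no
    filename is configured.
    """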
    filename = config.get_main_option("filename")
    if filename is None:
        filename = ensure_unicode(
            join(get_patches_dir(get_data_dir(), create=True), 'patches.db'))
    url = config.get_main_option("sqlalchemy.url")
    url = ensure_unicode(url)
    url = url.format(filename=FilePath(filename))
    return url
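All of these examples call get_patches_dir, whose implementation is not shown. Here is a minimal sketch of what it presumably does, inferred from the calls above and from Example #2 (which removes an old .pvtbox directory and hides the patches directory); the exact directory name and layout are assumptions:

import os

def get_patches_dir(root, create=False):
    # Assumed layout: a hidden application directory under the given root;
    # the real project may nest it differently.
    path = os.path.join(root, '.pvtbox')
    if create:
        os.makedirs(path, exist_ok=True)
    return path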
Example #2
            def on_finished():
                logger.info("Migration dialog closed")
                if not self._migration_cancelled:
                    logger.debug("Setting new location")
                    self._ui.location_edit.setText(FilePath(selected_folder))

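                    # Pause file logging while the old hidden application
                    # directory is removed and the sync root is switched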
                    disable_file_logging(logger)
                    shutil.rmtree(op.join(old_dir, '.pvtbox'),
                                  ignore_errors=True)
                    set_root_directory(FilePath(selected_folder))
                    enable_file_logging(logger)

                    make_dir_hidden(get_patches_dir(selected_folder))

                self._start_service()
Example #3
    def _on_upload_added(self, upload_info):
        """
        Slot to be called on new upload notification

        @param upload_info Value of 'upload_add' protocol message 'data' field
        """

        upload_id = upload_info['upload_id']

        # Check whether this download is already being processed
        if upload_id in self.download_tasks_info:
            logger.warning("Upload ID '%s' is being downloaded already",
                           upload_id)
            return

        # Save upload data
        upload_info['loaded'] = 0
        upload_info['size'] = 0
        upload_info['state'] = 'running'
        upload_info['elapsed'] = 0.0
        self.download_tasks_info[upload_id] = upload_info

        # Skip if the upload path is not ready, is excluded from sync,
        # or has been deleted
        path = self._check_upload_path(upload_id)
        if path is None:
            return
        if not self._check_upload_path_timer.isActive():
            self._check_upload_path_timer.start()

        added_info, changed_info = self._get_download_info(
            upload_id, is_first_report=True)
        self.download_status.emit(*self._empty_progress,
                                  [added_info, changed_info, []], {})

        self.working.emit()

        # Generate filename to save file into
        tmp_fn = self.download_tasks_info[upload_id]['tmp_fn'] = op.join(
            get_patches_dir(self._cfg.sync_directory),
            '.upload_' + str(upload_id))

        self._download(upload_id, tmp_fn)
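Note the temporary target name: incoming data is written as .upload_<id> inside the patches directory, presumably to keep partial transfers out of the visible sync tree until they complete.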
Example #4
    def get_patch_path(self, diff_file_uuid):
        return os.path.join(get_patches_dir(self._root), diff_file_uuid)
Example #5
def get_patch_filename(suffix):
    return os.path.join(
        get_patches_dir(root), 'patches',
        str(old_file_hash) + str(new_file_hash) + suffix)
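This is the same get_patch_filename helper that appears nested inside create_patch in Example #6 below.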
Example #6
    def create_patch(cls,
                     modify_file,
                     root,
                     old_blocks_hashes=None,
                     new_blocks_hashes=None,
                     old_file_hash=None,
                     new_file_hash=None,
                     uuid=None,
                     blocksize=SIGNATURE_BLOCK_SIZE):
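        """Build a patch that transforms the old file into modify_file.

        The patch is written as a tar archive with 'info' (JSON metadata)
        and 'data' (raw new blocks) members; returns the patch metadata
        dict, including 'archive_file' and 'archive_size'.
        """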
        def get_patch_filename(suffix):
            return os.path.join(
                get_patches_dir(root), 'patches',
                str(old_file_hash) + str(new_file_hash) + suffix)

        patch_data_file = get_patch_filename('.patch_data')

        # Create directory structure to store patch file
        make_dirs(patch_data_file)

        with open(modify_file, 'rb') as handle_file, \
                open(patch_data_file, 'wb') as data_file:
            blocks = SortedDict()
            patch = dict()
            new_blocks_hashes_search = dict()
            if old_blocks_hashes:
                old_blocks_hashes_search = \
                    dict((value, key) for key, value in
                         old_blocks_hashes.items())
            else:
                old_blocks_hashes_search = dict()
            if new_blocks_hashes is None:
                new_blocks_hashes = cls.block_checksum(filepath=modify_file,
                                                       blocksize=blocksize)
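            # For each block of the new file, reuse an identical block
            # already written to this patch ('from_patch') or one found in
            # the old file; otherwise append the raw block data to the patch.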
            for new_offset, new_hash in new_blocks_hashes.items():
                clone_block_offset = new_blocks_hashes_search.get(
                    new_hash, None)
                from_patch = clone_block_offset is not None
                clone_block_offset = clone_block_offset if from_patch \
                    else old_blocks_hashes_search.get(new_hash, None)
                if clone_block_offset is None:
                    data_file_offset = data_file.tell()
                    data = cls.get_data(handle=handle_file,
                                        size=blocksize,
                                        offset=new_offset)
                    data_file.write(data)
                    data_size = data_file.tell() - data_file_offset
                    blocks[new_offset] = dict(
                        new=True,
                        hash=new_hash,
                        offset=data_file_offset,
                        data_size=data_size,
                    )
                    new_blocks_hashes_search[new_hash] = new_offset
                else:
                    blocks[new_offset] = dict(new=False,
                                              hash=new_hash,
                                              offset=clone_block_offset,
                                              from_patch=from_patch)

        patch['old_hash'] = old_file_hash
        if new_file_hash is None:
            new_file_hash = Rsync.hash_from_block_checksum(new_blocks_hashes)
        patch['new_hash'] = new_file_hash

        info = cls.getfileinfo(modify_file)
        patch['blocks'] = blocks
        patch['time_modify'] = info.st_mtime
        patch['size'] = info.st_size
        patch['blocksize'] = blocksize

        patch_info_file = get_patch_filename('.patch_info')

        with open(patch_info_file, 'w') as info_file:
            json.dump(patch, info_file)

        if uuid is not None:
            patch_archive_file = op.join(get_patches_dir(root, create=True),
                                         uuid)
        else:
            patch_archive_file = get_patch_filename('.patch')

        with tarfile.open(patch_archive_file, 'w') as archive:
            archive.add(patch_info_file, arcname='info')
            archive.add(patch_data_file, arcname='data')
        remove_file(patch_info_file)
        remove_file(patch_data_file)

        patch['archive_file'] = patch_archive_file
        patch['archive_size'] = os.stat(patch_archive_file).st_size
        return patch
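A hypothetical invocation sketch; it assumes the enclosing class is Rsync (as the Rsync.hash_from_block_checksum call above suggests) and uses made-up paths:

old_hashes = Rsync.block_checksum(filepath='/sync/old_version.bin')
patch = Rsync.create_patch(modify_file='/sync/new_version.bin',
                           root='/sync',
                           old_blocks_hashes=old_hashes)
print(patch['archive_file'], patch['archive_size'])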
Example #7
    def _accept_patch(patch_info, patch_data, unpatched_file, root):
        file_blocks_hashes = SortedDict()
        blocksize = patch_info['blocksize']
        temp_name = os.path.join(get_patches_dir(root),
                                 '.patching_' + generate_uuid())

        blocks = SortedDict(
            (int(k), v) for k, v in patch_info['blocks'].items())

        source_file = None
        if op.exists(unpatched_file):
            source_file = open(unpatched_file, "rb")
        with open(temp_name, "wb") as temp_file:
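            # Each target block comes from one of three sources: raw data
            # stored in the patch ('new'), an earlier block of the patch
            # itself ('from_patch'), or the matching block of the source file.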
            for offset, block in blocks.items():
                block_offset = int(block['offset'])
                if block['new']:
                    patch_data.seek(block_offset)
                    data_size = block['data_size']
                    data = patch_data.read(data_size)
                else:
                    if block['from_patch']:
                        patch_offset = blocks[block_offset]['offset']
                        data_size = blocks[block_offset].get(
                            'data_size', blocksize)
                        patch_data.seek(patch_offset)
                        data = patch_data.read(data_size)
                    else:
                        if source_file is None:
                            raise IOError("Source file not found")
                        source_file.seek(block_offset)
                        data = source_file.read(blocksize)
                temp_file.seek(offset)
                temp_file.write(data)
                file_blocks_hashes[offset] = block['hash']
        if source_file:
            source_file.close()
        logger.debug('calculating patched file signature')
        file_signature = Rsync.block_checksum(temp_name, blocksize=blocksize)
        logger.debug('calculated patched file signature')
        if file_signature != file_blocks_hashes:
            remove_file(temp_name)
            raise IOError(
                "Invalid patch result, expected signature: {}, actual: {}".
                format(file_blocks_hashes, file_signature))

        new_hash = patch_info['new_hash']
        logger.debug('moving patched file')
        copy = join(get_copies_dir(root), new_hash)
        if not exists(copy):
            copy_file(temp_name, copy)
        shutil.move(temp_name, unpatched_file)
        logger.debug('moved patched file')

        return new_hash, file_blocks_hashes, patch_info['old_hash']
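Note the safety order here: the patched result is verified against the expected block signature before it is moved into place, and a copy keyed by the new hash is stored in the copies directory first, so a corrupted patch never overwrites the target file.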
Example #8
File: d.py Project: pvtbox/pvtbox-desktop
        args = vars(parser.parse_args(new_argv))
    else:
        args = vars(namespace)

    return args


if __name__ == "__main__":
    # Required for multiprocessing in a PyInstaller-frozen build
    multiprocessing.freeze_support()

    from common import utils

    utils.is_daemon = True
    utils.get_cfg_dir(create=True)
    utils.get_patches_dir(utils.get_data_dir(create=True), create=True)
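    # Ensure the config and patches directories exist before the
    # application modules are imported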

    from common.application import Application
    from daemon.application_impl import ApplicationImpl

    args = sys.argv[1:]
    # Parse command line arguments
    args = parseArgs(args)

    # Allow terminating from the console with Ctrl+C
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    Application.set_instance_class(ApplicationImpl)
    Application.start(args)

    print('Exited')