Example #1
    def update_metadata_and_save(self, bundle, new_bundle):
        """
        Updates the metadata about the contents of the bundle, including
        data_size as well as the total amount of disk used by the user.

        If |new_bundle| is True, saves the bundle as a new bundle. Otherwise,
        updates it.
        """
        bundle_path = self._bundle_store.get_bundle_location(bundle.uuid)

        if os.path.isdir(bundle_path):
            dirs_and_files = path_util.recursive_ls(bundle_path)
        else:
            dirs_and_files = ([], [bundle_path])

        data_hash = '0x%s' % (path_util.hash_directory(bundle_path, dirs_and_files))
        data_size = path_util.get_size(bundle_path, dirs_and_files)

        if new_bundle:
            bundle.data_hash = data_hash
            bundle.metadata.set_metadata_key('data_size', data_size)
            self._bundle_model.save_bundle(bundle)
        else:
            bundle_update = {
                'data_hash': data_hash,
                'metadata': {
                    'data_size': data_size,
                },
            }
            self._bundle_model.update_bundle(bundle, bundle_update)

        self._bundle_model.update_user_disk_used(bundle.owner_id)
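
A minimal usage sketch of the two code paths above, assuming a caller that owns the _bundle_store and _bundle_model this method uses; `manager` and `bundle` are hypothetical stand-ins, not names from the original code.

    # First save of a freshly uploaded bundle: data_hash and data_size are set
    # directly on the bundle object and the whole bundle is saved.
    manager.update_metadata_and_save(bundle, new_bundle=True)

    # Re-scanning an existing bundle: only a partial update is issued instead.
    manager.update_metadata_and_save(bundle, new_bundle=False)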
Example #2
    def update_metadata_and_save(self, bundle, enforce_disk_quota=False):
        """
        Updates the metadata about the contents of the bundle, including
        data_size as well as the total amount of disk used by the user.

        If |enforce_disk_quota| is True, raises a UsageError when the bundle's
        data_size exceeds the user's remaining disk quota.
        """
        bundle_path = self._bundle_store.get_bundle_location(bundle.uuid)

        if os.path.isdir(bundle_path):
            dirs_and_files = path_util.recursive_ls(bundle_path)
        else:
            dirs_and_files = ([], [bundle_path])

        data_hash = '0x%s' % (path_util.hash_directory(bundle_path,
                                                       dirs_and_files))
        data_size = path_util.get_size(bundle_path, dirs_and_files)
        if enforce_disk_quota:
            disk_left = self._bundle_model.get_user_disk_quota_left(
                bundle.owner_id)
            if data_size > disk_left:
                raise UsageError(
                    "Can't save bundle: size %s exceeds user's remaining disk quota of %s"
                    % (data_size, disk_left))

        bundle_update = {
            'data_hash': data_hash,
            'metadata': {
                'data_size': data_size,
            },
        }
        self._bundle_model.update_bundle(bundle, bundle_update)
        self._bundle_model.update_user_disk_used(bundle.owner_id)
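
A hedged sketch of how the quota-enforcing variant above might be called; `manager`, `bundle`, and the surrounding error handling are assumptions, not part of the original code. Since the quota check runs before update_bundle, a rejected bundle leaves the database untouched.

    try:
        manager.update_metadata_and_save(bundle, enforce_disk_quota=True)
    except UsageError as e:
        # data_size exceeded the user's remaining quota; no metadata was written.
        print >>sys.stderr, 'Bundle rejected: %s' % e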
Example #3
    def update_metadata_and_save(self, bundle, new_bundle):
        """
        Updates the metadata about the contents of the bundle, including
        data_size as well as the total amount of disk used by the user.

        If |new_bundle| is True, saves the bundle as a new bundle. Otherwise,
        updates it.
        """
        bundle_path = self._bundle_store.get_bundle_location(bundle.uuid)

        if os.path.isdir(bundle_path):
            dirs_and_files = path_util.recursive_ls(bundle_path)
        else:
            dirs_and_files = ([], [bundle_path])

        data_hash = '0x%s' % (path_util.hash_directory(bundle_path, dirs_and_files))
        data_size = path_util.get_size(bundle_path, dirs_and_files)

        if new_bundle:
            bundle.data_hash = data_hash
            bundle.metadata.set_metadata_key('data_size', data_size)
            self._bundle_model.save_bundle(bundle)
        else:
            bundle_update = {
                'data_hash': data_hash,
                'metadata': {
                    'data_size': data_size,
                },
            }
            self._bundle_model.update_bundle(bundle, bundle_update)

        self._bundle_model.update_user_disk_used(bundle.owner_id)
Example #4
    def upload(self, path, allow_symlinks=False):
        '''
        Copy the contents of the directory at path into the data subdirectory,
        in a subfolder named by a hash of the contents of the new data directory.

        Return a (data_hash, metadata) pair, where the metadata is a dict mapping
        keys to precomputed statistics about the new data directory.
        '''
        absolute_path = path_util.normalize(path)
        path_util.check_isvalid(absolute_path, 'upload')
        # Recursively copy the directory into a new BundleStore temp directory.
        temp_directory = uuid.uuid4().hex
        temp_path = os.path.join(self.temp, temp_directory)
        path_util.copy(absolute_path, temp_path)
        # Multiplex between uploading a directory and uploading a file here.
        # All other path_util calls will use these lists of directories and files.
        if os.path.isdir(temp_path):
            dirs_and_files = path_util.recursive_ls(temp_path)
        else:
            dirs_and_files = ([], [temp_path])
        if not allow_symlinks:
            path_util.check_for_symlinks(temp_path, dirs_and_files)
        path_util.set_permissions(temp_path, 0o755, dirs_and_files)
        # Hash the contents of the temporary directory, and then if there is no
        # data with this hash value, move this directory into the data directory.
        data_hash = '0x%s' % (path_util.hash_directory(temp_path, dirs_and_files),)
        data_size = path_util.get_size(temp_path, dirs_and_files)
        final_path = os.path.join(self.data, data_hash)
        final_path_exists = False
        try:
            os.utime(final_path, None)
            final_path_exists = True
        except OSError as e:
            if e.errno == errno.ENOENT:
                os.rename(temp_path, final_path)
            else:
                raise
Example #5
    def update_running_bundle(self, status):
        '''
        Update the database with information about the bundle given by |status|.
        If the bundle is completed, then we need to install the bundle and clean up.
        '''
        status['last_updated'] = int(time.time())

        # Update the bundle's data with status (which is the new information).
        bundle = status['bundle']

        # Update to the database
        db_update = {}

        # Update state
        if 'state' in status and status['state']:
            db_update['state'] = status['state']

        # Add metadata from the machine
        db_update['metadata'] = metadata = {}
        bundle_subclass = type(bundle)
        for spec in bundle_subclass.METADATA_SPECS:
            value = status.get(spec.key)
            if value is not None:
                metadata[spec.key] = value

        #print 'update_running_bundle', status

        # See if the bundle is completed.
        success = status.get('success')
        if success is not None:
            # Re-install dependencies.
            # - For RunBundle, remove the dependencies.
            # - For MakeBundle, copy.  This way, we maintain the invariant that
            # we always only need to look back one-level at the dependencies,
            # not recurse.
            try:
                temp_dir = status.get('temp_dir')
                if not temp_dir:
                    temp_dir = bundle.metadata.temp_dir
                if isinstance(bundle, RunBundle):
                    print >>sys.stderr, 'Worker.finalize_bundle: removing dependencies from %s (RunBundle)' % temp_dir
                    bundle.remove_dependencies(self.bundle_store, self.get_parent_dict(bundle), temp_dir)
                else:
                    print >>sys.stderr, 'Worker.finalize_bundle: installing (copying) dependencies to %s (MakeBundle)' % temp_dir
                    bundle.install_dependencies(self.bundle_store, self.get_parent_dict(bundle), temp_dir, copy=True)

                db_update['data_hash'] = '0x%s' % path_util.hash_path(temp_dir)
                metadata.update(data_size=path_util.get_size(temp_dir))
            except Exception as e:
                print '=== INTERNAL ERROR: %s' % e
                traceback.print_exc()
                success = False
                metadata['failure_message'] = 'Internal error: ' + e.message

            # Clean up any state for RunBundles.
            if isinstance(bundle, RunBundle):
                try:
                    self.machine.finalize_bundle(bundle)
                except Exception as e:
                    success = False
                    if 'failure_message' not in metadata:
                        metadata['failure_message'] = e.message
                    else:
                        metadata['failure_message'] += '\n' + e.message

            state = State.READY if success else State.FAILED
            db_update['state'] = state
            print '-- END BUNDLE: %s [%s]' % (bundle, state)
            print ''

            self._update_events_log('finalize_bundle', bundle, (bundle.uuid, state, metadata))

            # Update user statistics
            self.model.increment_user_time_used(bundle.owner_id, getattr(bundle.metadata, 'time', 0))
            self.model.update_user_disk_used(bundle.owner_id)

        # Update database!
        self.model.update_bundle(bundle, db_update)
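
The shape of the |status| dict this method consumes can be read off the code above. The sketch below is illustrative only: `worker`, `bundle`, and the temp_dir value are placeholders, and State.RUNNING is assumed to exist alongside the READY and FAILED states used in the method.

    status = {
        'bundle': bundle,                    # the bundle object being tracked
        'state': State.RUNNING,              # optional; copied straight into db_update
        'temp_dir': '/tmp/bundle-workdir',   # placeholder; hashed once the run finishes
        'success': True,                     # present only once the run has finished
        'time': 42.0,                        # keys listed in METADATA_SPECS land in metadata
    }
    worker.update_running_bundle(status)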
Example #6
    def upload(self, sources, follow_symlinks, exclude_patterns, git, unpack, remove_sources):
        '''
        |sources|: specifies the locations of the contents to upload.  Each element is either a URL or a local path.
        |follow_symlinks|: for local path(s), whether to follow (resolve) symlinks
        |exclude_patterns|: for local path(s), don't upload these patterns (e.g., *.o)
        |git|: for URL, whether |source| is a git repo to clone.
        |unpack|: for each source in |sources|, whether to unpack it if it's an archive.
        |remove_sources|: remove |sources|.

        If |sources| contains one source, then the bundle contents will be that source.
        Otherwise, the bundle contents will be a directory with each of the sources.
        Exceptions:
        - If |git|, then each source is replaced with the result of running 'git clone |source|'
        - If |unpack| is True or a source is an archive (zip, tar.gz, etc.), then unpack the source.

        Install the contents of the directory at |source| into
        DATA_SUBDIRECTORY in a subdirectory named by a hash of the contents.

        Return a (data_hash, metadata) pair, where the metadata is a dict mapping
        keys to precomputed statistics about the new data directory.
        '''
        to_delete = []

        # Create temporary directory as a staging area and put everything there.
        temp_path = tempfile.mkdtemp('-bundle_store_upload')
        temp_subpaths = []
        for source in sources:
            # Where to save |source| to (might change this value if we unpack).
            temp_subpath = os.path.join(temp_path, os.path.basename(source))
            if remove_sources:
                to_delete.append(source)
            source_unpack = unpack and zip_util.path_is_archive(source)

            if path_util.path_is_url(source):
                # Download the URL.
                print_util.open_line('BundleStore.upload: downloading %s to %s' % (source, temp_path))
                if git:
                    file_util.git_clone(source, temp_subpath)
                else:
                    file_util.download_url(source, temp_subpath, print_status=True)
                    if source_unpack:
                        zip_util.unpack(temp_subpath, zip_util.strip_archive_ext(temp_subpath))
                        path_util.remove(temp_subpath)
                        temp_subpath = zip_util.strip_archive_ext(temp_subpath)
                print_util.clear_line()
            else:
                # Copy the local path.
                source_path = path_util.normalize(source)
                path_util.check_isvalid(source_path, 'upload')

                # Recursively copy the directory into a new BundleStore temp directory.
                print_util.open_line('BundleStore.upload: %s => %s' % (source_path, temp_subpath))
                if source_unpack:
                    zip_util.unpack(source_path, zip_util.strip_archive_ext(temp_subpath))
                    temp_subpath = zip_util.strip_archive_ext(temp_subpath)
                else:
                    if remove_sources:
                        path_util.rename(source_path, temp_subpath)
                    else:
                        path_util.copy(source_path, temp_subpath, follow_symlinks=follow_symlinks, exclude_patterns=exclude_patterns)
                print_util.clear_line()

            temp_subpaths.append(temp_subpath)

        # If exactly one source, then upload that directly.
        if len(temp_subpaths) == 1:
            to_delete.append(temp_path)
            temp_path = temp_subpaths[0]

        # Multiplex between uploading a directory and uploading a file here.
        # All other path_util calls will use these lists of directories and files.
        if os.path.isdir(temp_path):
            dirs_and_files = path_util.recursive_ls(temp_path)
        else:
            dirs_and_files = ([], [temp_path])

        # Hash the contents of the temporary directory, and then if there is no
        # data with this hash value, move this directory into the data directory.
        print_util.open_line('BundleStore.upload: hashing %s' % temp_path)
        data_hash = '0x%s' % (path_util.hash_directory(temp_path, dirs_and_files),)
        print_util.clear_line()
        print_util.open_line('BundleStore.upload: computing size of %s' % temp_path)
        data_size = path_util.get_size(temp_path, dirs_and_files)
        print_util.clear_line()
        final_path = os.path.join(self.data, data_hash)
        if os.path.exists(final_path):
            # Already exists, just delete it
            path_util.remove(temp_path)
        else:
            print >>sys.stderr, 'BundleStore.upload: moving %s to %s' % (temp_path, final_path)
            path_util.rename(temp_path, final_path)

        # Delete paths.
        for path in to_delete:
            if os.path.exists(path):
                path_util.remove(path)

        # After this operation there should always be a directory at the final path.
        assert os.path.lexists(final_path), 'Upload to %s failed!' % (final_path,)
        return (data_hash, {'data_size': data_size})
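
A usage sketch for the multi-source upload above; `store` stands in for an already-constructed BundleStore, and the source locations are placeholders.

    data_hash, metadata = store.upload(
        sources=['https://example.com/data.tar.gz', '/local/path/notes.txt'],
        follow_symlinks=False,
        exclude_patterns=['*.o'],
        git=False,
        unpack=True,            # the .tar.gz source is unpacked; the .txt is copied as-is
        remove_sources=False)
    # With more than one source, the bundle contents are a directory holding both
    # items; metadata['data_size'] is the total size that was just hashed.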
Example #7
    def update_running_bundle(self, status):
        """
        Update the database with information about the bundle given by |status|.
        If the bundle is completed, then we need to install the bundle and clean up.
        """
        status["last_updated"] = int(time.time())

        # Update the bundle's data with status (which is the new information).
        bundle = status["bundle"]

        # Update to the database
        db_update = {}

        # Update state
        if "state" in status and status["state"]:
            db_update["state"] = status["state"]

        # Add metadata from the machine
        db_update["metadata"] = metadata = {}
        bundle_subclass = type(bundle)
        for spec in bundle_subclass.METADATA_SPECS:
            value = status.get(spec.key)
            if value is not None:
                metadata[spec.key] = value

        # print 'update_running_bundle', status

        # See if the bundle is completed.
        success = status.get("success")
        if success is not None:
            # Re-install dependencies.
            # - For RunBundle, remove the dependencies.
            # - For MakeBundle, copy.  This way, we maintain the invariant that
            # we always only need to look back one-level at the dependencies,
            # not recurse.
            try:
                temp_dir = status.get("temp_dir")
                if not temp_dir:
                    temp_dir = bundle.metadata.temp_dir
                if isinstance(bundle, RunBundle):
                    print >> sys.stderr, "Worker.finalize_bundle: removing dependencies from %s (RunBundle)" % temp_dir
                    bundle.remove_dependencies(self.bundle_store, self.get_parent_dict(bundle), temp_dir)
                else:
                    print >> sys.stderr, "Worker.finalize_bundle: installing (copying) dependencies to %s (MakeBundle)" % temp_dir
                    bundle.install_dependencies(self.bundle_store, self.get_parent_dict(bundle), temp_dir, copy=True)

                db_update["data_hash"] = "0x%s" % path_util.hash_path(temp_dir)
                metadata.update(data_size=path_util.get_size(temp_dir))
            except Exception as e:
                print "=== INTERNAL ERROR: %s" % e
                traceback.print_exc()
                success = False
                metadata["failure_message"] = "Internal error: " + e.message

            # Clean up any state for RunBundles.
            if isinstance(bundle, RunBundle):
                try:
                    self.machine.finalize_bundle(bundle)
                except Exception as e:
                    success = False
                    if "failure_message" not in metadata:
                        metadata["failure_message"] = e.message
                    else:
                        metadata["failure_message"] += "\n" + e.message

            state = State.READY if success else State.FAILED
            db_update["state"] = state
            print "-- END BUNDLE: %s [%s]" % (bundle, state)
            print ""

            self._update_events_log("finalize_bundle", bundle, (bundle.uuid, state, metadata))

            # Update user statistics
            self.model.increment_user_time_used(bundle.owner_id, getattr(bundle.metadata, "time", 0))
            self.model.update_user_disk_used(bundle.owner_id)

        # Update database!
        self.model.update_bundle(bundle, db_update)
Example #8
    def upload(self, sources, follow_symlinks, exclude_patterns, git, unpack, remove_sources, uuid):
        """
        |sources|: specifies the locations of the contents to upload.  Each element is either a URL or a local path.
        |follow_symlinks|: for local path(s), whether to follow (resolve) symlinks
        |exclude_patterns|: for local path(s), don't upload these patterns (e.g., *.o)
        |git|: for URL, whether |source| is a git repo to clone.
        |unpack|: for each source in |sources|, whether to unpack it if it's an archive.
        |remove_sources|: remove |sources|.
        |uuid|: uuid of the bundle; determines which partition the contents are
        stored on and the name of the final directory.

        If |sources| contains one source, then the bundle contents will be that source.
        Otherwise, the bundle contents will be a directory with each of the sources.
        Exceptions:
        - If |git|, then each source is replaced with the result of running 'git clone |source|'
        - If |unpack| is True or a source is an archive (zip, tar.gz, etc.), then unpack the source.

        Install the contents of the directory at |source| into
        DATA_SUBDIRECTORY in a subdirectory named by the bundle's |uuid|.

        Return a (data_hash, metadata) pair, where the metadata is a dict mapping
        keys to precomputed statistics about the new data directory.
        """
        to_delete = []

        # If just a single file, set the final path to be equal to that file
        single_path = len(sources) == 1

        # Determine which disk this will go on
        disk_choice = self.ring.get_node(uuid)

        final_path = os.path.join(self.partitions, disk_choice, self.DATA_SUBDIRECTORY, uuid)
        if os.path.exists(final_path):
            raise UsageError('Path %s already present in bundle store' % final_path)
        # Only make if not there
        elif not single_path:
            path_util.make_directory(final_path)

        # Paths to resources
        subpaths = []

        for source in sources:
            # Where to save |source| to (might change this value if we unpack).
            if not single_path:
                subpath = os.path.join(final_path, os.path.basename(source))
            else:
                subpath = final_path

            if remove_sources:
                to_delete.append(source)
            source_unpack = unpack and zip_util.path_is_archive(source)

            if source_unpack and single_path:
                # Load the file into the bundle store under the given path
                subpath += zip_util.get_archive_ext(source)

            if path_util.path_is_url(source):
                # Download the URL.
                print_util.open_line('BundleStore.upload: downloading %s to %s' % (source, subpath))
                if git:
                    file_util.git_clone(source, subpath)
                else:
                    file_util.download_url(source, subpath, print_status=True)
                    if source_unpack:
                        zip_util.unpack(subpath, zip_util.strip_archive_ext(subpath))
                        path_util.remove(subpath)
                        subpath = zip_util.strip_archive_ext(subpath)
                print_util.clear_line()
            else:
                # Copy the local path.
                source_path = path_util.normalize(source)
                path_util.check_isvalid(source_path, 'upload')

                # Recursively copy the directory into the BundleStore
                print_util.open_line('BundleStore.upload: %s => %s' % (source_path, subpath))
                if source_unpack:
                    zip_util.unpack(source_path, zip_util.strip_archive_ext(subpath))
                    subpath = zip_util.strip_archive_ext(subpath)
                else:
                    if remove_sources:
                        path_util.rename(source_path, subpath)
                    else:
                        path_util.copy(source_path, subpath, follow_symlinks=follow_symlinks, exclude_patterns=exclude_patterns)
                print_util.clear_line()

            subpaths.append(subpath)

        if os.path.isdir(final_path):
            dirs_and_files = path_util.recursive_ls(final_path)
        else:
            dirs_and_files = ([], [final_path])

        # Hash the contents of the bundle directory. Update the data_hash attribute
        # for the bundle
        print_util.open_line('BundleStore.upload: hashing %s' % final_path)
        data_hash = '0x%s' % (path_util.hash_directory(final_path, dirs_and_files))
        print_util.clear_line()
        print_util.open_line('BundleStore.upload: computing size of %s' % final_path)
        data_size = path_util.get_size(final_path, dirs_and_files)
        print_util.clear_line()

        # Delete paths.
        for path in to_delete:
            if os.path.exists(path):
                path_util.remove(path)

        # After this operation there should always be a directory at the final path.
        assert os.path.lexists(final_path), 'Upload to %s failed!' % (final_path,)
        return (data_hash, {'data_size': data_size})
Example #9
    def upload(self, path, follow_symlinks, exclude_patterns):
        '''
        Copy the contents of the directory at |path| into the data subdirectory,
        in a subfolder named by a hash of the contents of the new data directory.
        If |path| is in a temporary directory, then we just move it.

        Return a (data_hash, metadata) pair, where the metadata is a dict mapping
        keys to precomputed statistics about the new data directory.
        '''
        # Create temporary directory as a staging area.
        # If |path| is already temporary, then we use that directly
        # (with the understanding that |path| will be moved)
        if not isinstance(path, list) and os.path.realpath(path).startswith(
                os.path.realpath(self.temp)):
            temp_path = path
        else:
            temp_path = os.path.join(self.temp, uuid.uuid4().hex)

        if not isinstance(path, list) and path_util.path_is_url(path):
            # Have to be careful.  Want to make sure if we're fetching a URL
            # that points to a file, we are allowing this.
            if path.startswith('file://'):
                path_suffix = path[7:]
                if os.path.islink(path_suffix):
                    raise UsageError('Not allowed to upload symlink %s' %
                                     path_suffix)
                if not any(
                        path_suffix.startswith(f)
                        for f in self.direct_upload_paths):
                    raise UsageError(
                        'Not allowed to upload %s (only %s allowed)' %
                        (path_suffix, self.direct_upload_paths))

            # Download |path| if it is a URL.
            print >> sys.stderr, 'BundleStore.upload: downloading %s to %s' % (
                path, temp_path)
            file_util.download_url(path, temp_path, print_status=True)
        elif path != temp_path:
            # Copy |path| into the temp_path.
            if isinstance(path, list):
                absolute_path = [path_util.normalize(p) for p in path]
                for p in absolute_path:
                    path_util.check_isvalid(p, 'upload')
            else:
                absolute_path = path_util.normalize(path)
                path_util.check_isvalid(absolute_path, 'upload')

            # Recursively copy the directory into a new BundleStore temp directory.
            print_util.open_line('BundleStore.upload: copying %s to %s' %
                                 (absolute_path, temp_path))
            path_util.copy(absolute_path,
                           temp_path,
                           follow_symlinks=follow_symlinks,
                           exclude_patterns=exclude_patterns)
            print_util.clear_line()

        # Multiplex between uploading a directory and uploading a file here.
        # All other path_util calls will use these lists of directories and files.
        if os.path.isdir(temp_path):
            dirs_and_files = path_util.recursive_ls(temp_path)
        else:
            dirs_and_files = ([], [temp_path])

        # Hash the contents of the temporary directory, and then if there is no
        # data with this hash value, move this directory into the data directory.
        print_util.open_line('BundleStore.upload: hashing %s' % temp_path)
        data_hash = '0x%s' % (path_util.hash_directory(temp_path,
                                                       dirs_and_files), )
        print_util.clear_line()
        print_util.open_line('BundleStore.upload: computing size of %s' %
                             temp_path)
        data_size = path_util.get_size(temp_path, dirs_and_files)
        print_util.clear_line()
        final_path = os.path.join(self.data, data_hash)
        final_path_exists = False
        try:
            # If data_hash already exists, then we don't need to move it over.
            os.utime(final_path, None)
            final_path_exists = True
        except OSError as e:
            if e.errno == errno.ENOENT:
                print >> sys.stderr, 'BundleStore.upload: moving %s to %s' % (
                    temp_path, final_path)
                path_util.rename(temp_path, final_path)
            else:
                raise
Example #10
    def upload(self, path, follow_symlinks):
        """
        Copy the contents of the directory at |path| into the data subdirectory,
        in a subfolder named by a hash of the contents of the new data directory.
        If |path| is in a temporary directory, then we just move it.

        Return a (data_hash, metadata) pair, where the metadata is a dict mapping
        keys to precomputed statistics about the new data directory.
        """
        # Create temporary directory as a staging area.
        # If |path| is already temporary, then we use that directly
        # (with the understanding that |path| will be moved)
        if not isinstance(path, list) and os.path.realpath(path).startswith(os.path.realpath(self.temp)):
            temp_path = path
        else:
            temp_path = os.path.join(self.temp, uuid.uuid4().hex)

        if not isinstance(path, list) and path_util.path_is_url(path):
            # Have to be careful.  Want to make sure if we're fetching a URL
            # that points to a file, we are allowing this.
            if path.startswith("file://"):
                path_suffix = path[7:]
                if os.path.islink(path_suffix):
                    raise UsageError("Not allowed to upload symlink %s" % path_suffix)
                if not any(path_suffix.startswith(f) for f in self.direct_upload_paths):
                    raise UsageError(
                        "Not allowed to upload %s (only %s allowed)" % (path_suffix, self.direct_upload_paths)
                    )

            # Download |path| if it is a URL.
            print >>sys.stderr, "BundleStore.upload: downloading %s to %s" % (path, temp_path)
            file_util.download_url(path, temp_path, print_status=True)
        elif path != temp_path:
            # Copy |path| into the temp_path.
            if isinstance(path, list):
                absolute_path = [path_util.normalize(p) for p in path]
                for p in absolute_path:
                    path_util.check_isvalid(p, "upload")
            else:
                absolute_path = path_util.normalize(path)
                path_util.check_isvalid(absolute_path, "upload")

            # Recursively copy the directory into a new BundleStore temp directory.
            print >>sys.stderr, "BundleStore.upload: copying %s to %s" % (absolute_path, temp_path)
            path_util.copy(absolute_path, temp_path, follow_symlinks=follow_symlinks)

        # Multiplex between uploading a directory and uploading a file here.
        # All other path_util calls will use these lists of directories and files.
        if os.path.isdir(temp_path):
            dirs_and_files = path_util.recursive_ls(temp_path)
        else:
            dirs_and_files = ([], [temp_path])

        # Hash the contents of the temporary directory, and then if there is no
        # data with this hash value, move this directory into the data directory.
        print >>sys.stderr, "BundleStore.upload: hashing %s" % (temp_path)
        data_hash = "0x%s" % (path_util.hash_directory(temp_path, dirs_and_files),)
        data_size = path_util.get_size(temp_path, dirs_and_files)
        final_path = os.path.join(self.data, data_hash)
        final_path_exists = False
        try:
            # If data_hash already exists, then we don't need to move it over.
            os.utime(final_path, None)
            final_path_exists = True
        except OSError as e:
            if e.errno == errno.ENOENT:
                print >>sys.stderr, "BundleStore.upload: moving %s to %s" % (temp_path, final_path)
                path_util.rename(temp_path, final_path)
            else:
                raise