예제 #1
0
def _get_tmp_dir():
    """Return this session's temporary output directory.

    The directory is created lazily on first call and cached as a
    function attribute, so every later call returns the same path.  On
    creation the path is also appended to a JSON registry file in the
    cache directory so the *next* start-up can delete it.
    """
    if not hasattr(_get_tmp_dir, 'directory'):
        _get_tmp_dir.directory = tempfile.mkdtemp()

        # register directory to be deleted on next start-up
        # unfortunately, there is no reliable way to do clean-up on exit
        # see https://github.com/SublimeTextIssues/Core/issues/10
        cache_dir = get_cache_directory()
        make_dirs(cache_dir)

        registry_path = os.path.join(cache_dir, 'temporary_output_dirs')

        # load the existing registry, or start a fresh one
        data = {'directories': []}
        if os.path.exists(registry_path):
            with open(registry_path, 'r') as fp:
                data = json.load(fp)

        data['directories'].append(_get_tmp_dir.directory)

        with open(registry_path, 'w') as fp:
            json.dump(data, fp)

    return _get_tmp_dir.directory
예제 #2
0
def _get_tmp_dir():
    """Lazily create and return the session-wide temporary directory.

    The first call creates the directory with ``tempfile.mkdtemp`` and
    records it in the ``temporary_output_dirs`` JSON file inside the
    cache directory so it can be removed at the next start-up; all
    subsequent calls return the cached path.
    """
    if hasattr(_get_tmp_dir, 'directory'):
        return _get_tmp_dir.directory

    new_dir = tempfile.mkdtemp()
    _get_tmp_dir.directory = new_dir

    # register directory to be deleted on next start-up
    # unfortunately, there is no reliable way to do clean-up on exit
    # see https://github.com/SublimeTextIssues/Core/issues/10
    cache_dir = get_cache_directory()
    make_dirs(cache_dir)

    registry = os.path.join(cache_dir, 'temporary_output_dirs')

    if not os.path.exists(registry):
        data = {'directories': []}
    else:
        with open(registry, 'r') as handle:
            data = json.load(handle)

    data['directories'].append(new_dir)

    with open(registry, 'w') as handle:
        json.dump(data, handle)

    return new_dir
예제 #3
0
    def save(self, key=None):
        '''
        saves the cache entry specified to disk

        :param key:
            the entry to flush to disk; if None, all entries in the cache will
            be written to disk
        '''
        if not self._dirty:
            return

        # lock is acquired here so that all keys being flushed reflect the
        # same state; note that this blocks disk reads, but not cache reads
        with self._disk_lock:
            # operate on a stable copy of the object
            with self._write_lock:
                _objs = pickle.loads(pickle.dumps(self._objects, protocol=-1))
                self._dirty = False

            if key is None:
                # remove all InvalidObjects
                delete_keys = [
                    k for k in _objs if _objs[k] == _invalid_object
                ]

                for k in delete_keys:
                    del _objs[k]
                    # BUGFIX: previously joined with `key`, which is None in
                    # this branch; the loop variable `k` is the entry to drop
                    file_path = os.path.join(self.cache_path, k)
                    try:
                        # BUGFIX: os.path.remove does not exist; os.remove
                        # is the correct call
                        os.remove(file_path)
                    except OSError:
                        # best-effort: the file may never have been written
                        pass

                if _objs:
                    make_dirs(self.cache_path)
                    for k in _objs.keys():
                        try:
                            self._write(k, _objs)
                        except Exception:
                            # keep flushing the remaining keys
                            traceback.print_exc()
                else:
                    # cache has been emptied, so remove it
                    try:
                        shutil.rmtree(self.cache_path)
                    except Exception:
                        print(
                            'error while deleting {0}'.format(self.cache_path))
                        traceback.print_exc()
            elif key in _objs:
                if _objs[key] == _invalid_object:
                    file_path = os.path.join(self.cache_path, key)
                    try:
                        # BUGFIX: os.path.remove does not exist
                        os.remove(file_path)
                    except Exception:
                        print('error while deleting {0}'.format(file_path))
                        traceback.print_exc()
                else:
                    make_dirs(self.cache_path)
                    self._write(key, _objs)
예제 #4
0
    def save(self, key=None):
        '''
        saves the cache entry specified to disk

        :param key:
            the entry to flush to disk; if None, all entries in the cache will
            be written to disk
        '''
        if not self._dirty:
            return

        # lock is acquired here so that all keys being flushed reflect the
        # same state; note that this blocks disk reads, but not cache reads
        with self._disk_lock:
            # operate on a stable copy of the object
            with self._write_lock:
                _objs = pickle.loads(pickle.dumps(self._objects, protocol=-1))
                self._dirty = False

            if key is None:
                # remove all InvalidObjects
                delete_keys = [k for k in _objs if _objs[k] == _invalid_object]

                for k in delete_keys:
                    del _objs[k]
                    # BUGFIX: was os.path.join(self.cache_path, key); `key`
                    # is None here — the loop variable `k` is intended
                    file_path = os.path.join(self.cache_path, k)
                    try:
                        # BUGFIX: os.path.remove does not exist; use
                        # os.remove
                        os.remove(file_path)
                    except OSError:
                        # best-effort: the file may never have been written
                        pass

                if _objs:
                    make_dirs(self.cache_path)
                    for k in _objs.keys():
                        try:
                            self._write(k, _objs)
                        except Exception:
                            # keep flushing the remaining keys
                            traceback.print_exc()
                else:
                    # cache has been emptied, so remove it
                    try:
                        shutil.rmtree(self.cache_path)
                    except Exception:
                        print('error while deleting {0}'.format(
                            self.cache_path))
                        traceback.print_exc()
            elif key in _objs:
                if _objs[key] == _invalid_object:
                    file_path = os.path.join(self.cache_path, key)
                    try:
                        # BUGFIX: os.path.remove does not exist
                        os.remove(file_path)
                    except Exception:
                        print('error while deleting {0}'.format(file_path))
                        traceback.print_exc()
                else:
                    make_dirs(self.cache_path)
                    self._write(key, _objs)
예제 #5
0
 def _write_bib_cache():
     """Persist ``bib_entries`` (from the enclosing scope) to the cache.

     The initial ``dumps`` call is only a pickleability probe; its result
     is discarded.  If the probe fails, the error is reported and nothing
     is written.
     """
     try:
         cache.pickle.dumps(bib_entries, protocol=-1)
     except cache.pickle.PicklingError:
         print('bib_entries must be pickleable')
         # report the full traceback, matching the error handling of the
         # other implementation of this function in this file
         traceback.print_exc()
     else:
         with self._disk_lock:
             make_dirs(self.cache_path)
             self._write(self.cache_name,
                         {self.cache_name: bib_entries})
예제 #6
0
def resolve_to_absolute_path(root, value, root_path):
    """Resolve a configured output-directory *value* to an absolute path.

    Values of the form ``<<...>>`` are special tokens (``<<temp>>``,
    ``<<project>>``, ``<<cache>>``) that map to a base directory joined
    with a hash of *root*; anything else is expanded, made absolute
    relative to *root_path*, and normalized.  Returns ``None`` for an
    unrecognized special token.
    """
    # special values are wrapped in '<<' and '>>'
    if len(value) > 4 and value.startswith('<<') and value.endswith('>>'):
        root_hash = _get_root_hash(root)
        if root_hash is None:
            raise UnsavedFileException()

        # lazy dispatch: each base directory is only computed if selected
        base_for = {
            '<<temp>>': _get_tmp_dir,
            '<<project>>': lambda: root_path,
            '<<cache>>': get_cache_directory,
        }

        if value not in base_for:
            print(u'unrecognized special value: {0}'.format(value))

            # NOTE this assumes that the value provided is a typo, etc.
            # and tries not to do anything harmful. This may not be the
            # best assumption
            return None

        result = os.path.join(base_for[value](), root_hash)

        # create the directory
        make_dirs(result)

        return result

    expanded = os.path.expandvars(os.path.expanduser(value))

    if not os.path.isabs(expanded):
        expanded = os.path.join(root_path, expanded)
    expanded = os.path.normpath(expanded)
    if os.path.exists(expanded):
        # resolve symlinks only for paths that actually exist
        expanded = os.path.realpath(expanded)

    return expanded
예제 #7
0
 def _write_bib_cache():
     """Write ``bib_entries`` (from the enclosing scope) to the cache.

     First verifies the entries can be pickled; on failure the error is
     reported with a traceback and nothing is written.
     """
     try:
         # pickleability probe — the serialized result is discarded
         cache.pickle.dumps(bib_entries, protocol=-1)
     except cache.pickle.PicklingError:
         print('bib_entries must be pickleable')
         traceback.print_exc()
     else:
         with self._disk_lock:
             make_dirs(self.cache_path)
             payload = {self.cache_name: bib_entries}
             self._write(self.cache_name, payload)
예제 #8
0
def resolve_to_absolute_path(root, value, root_path):
    """Turn a configured path *value* into an absolute path.

    ``<<temp>>``, ``<<project>>`` and ``<<cache>>`` select a base
    directory which is joined with a hash of *root* and created on
    disk; any other ``<<...>>`` token yields ``None``.  Plain values
    are user/var-expanded, anchored at *root_path* when relative, and
    normalized.
    """
    # a special value is at least 5 chars and wrapped in '<<' '>>'
    if len(value) > 4 and value[:2] == '<<' and value[-2:] == '>>':
        root_hash = _get_root_hash(root)
        if root_hash is None:
            raise UnsavedFileException()

        if value == '<<temp>>':
            base = _get_tmp_dir()
        elif value == '<<project>>':
            base = root_path
        elif value == '<<cache>>':
            base = get_cache_directory()
        else:
            print(u'unrecognized special value: {0}'.format(value))

            # NOTE this assumes that the value provided is a typo, etc.
            # and tries not to do anything harmful. This may not be the
            # best assumption
            return None

        target = os.path.join(base, root_hash)

        # create the directory
        make_dirs(target)

        return target

    resolved = os.path.expandvars(os.path.expanduser(value))
    if not os.path.isabs(resolved):
        resolved = os.path.join(root_path, resolved)
    resolved = os.path.normpath(resolved)
    if os.path.exists(resolved):
        # only existing paths can have symlinks resolved
        resolved = os.path.realpath(resolved)

    return resolved