def test_unpickle():
    """unpickle() round-trips a pickled object and caches the result."""
    fd, path = mkstemp()
    original = {'a': 1}
    # Write a pickle of the object to the temporary file.
    with os.fdopen(fd, 'wb') as handle:
        pickle.dump(original, handle)
    # Round-trip: equal content, but a distinct object in memory.
    restored = unpickle(path)
    assert restored == original
    assert restored is not original
    # A second call must hit the cache and hand back the very same object.
    assert unpickle(path) is restored
    # Clean up the temporary file.
    os.remove(path)
def _frequency(cls, word=None):
    """frequency"""
    # NOTE(review): the one-word docstring above is kept verbatim — it looks
    # like a feature label that may be read via __doc__ at runtime.
    # Load the cached word -> frequency mapping from disk.
    mapping = unpickle(settings.FREQUENCY)
    if word is not None:
        # Unknown words yield NaN instead of raising KeyError.
        return mapping.get(word, np.nan)
    # No word given: expose the full set of known words.
    return mapping.keys()
def _clustering(cls, word=None):
    """clustering"""
    # NOTE(review): docstring kept verbatim — likely a runtime feature label.
    # Load the cached word -> clustering-coefficient mapping from disk.
    mapping = unpickle(settings.CLUSTERING)
    if word is not None:
        # Unknown words yield NaN instead of raising KeyError.
        return mapping.get(word, np.nan)
    # No word given: expose the full set of known words.
    return mapping.keys()
def _betweenness(cls, word=None):
    """betweenness"""
    # NOTE(review): docstring kept verbatim — likely a runtime feature label.
    # Load the cached word -> betweenness-centrality mapping from disk.
    mapping = unpickle(settings.BETWEENNESS)
    if word is not None:
        # Unknown words yield NaN instead of raising KeyError.
        return mapping.get(word, np.nan)
    # No word given: expose the full set of known words.
    return mapping.keys()
def _pagerank(cls, word=None):
    """pagerank"""
    # NOTE(review): docstring kept verbatim — likely a runtime feature label.
    # Load the cached word -> pagerank mapping from disk.
    mapping = unpickle(settings.PAGERANK)
    if word is not None:
        # Unknown words yield NaN instead of raising KeyError.
        return mapping.get(word, np.nan)
    # No word given: expose the full set of known words.
    return mapping.keys()
def _degree(cls, word=None):
    """degree"""
    # NOTE(review): docstring kept verbatim — likely a runtime feature label.
    # Load the cached word -> degree mapping from disk.
    mapping = unpickle(settings.DEGREE)
    if word is not None:
        # Unknown words yield NaN instead of raising KeyError.
        return mapping.get(word, np.nan)
    # No word given: expose the full set of known words.
    return mapping.keys()
def _letters_count(cls, word=None): """#letters""" if word is None: return unpickle(settings.TOKENS) return len(word)