Example No. 1
from typing import List, Tuple

from toolz import cons, nth


# Context (a 3-tuple of disc stacks), index_to_name and where() are assumed to
# be defined elsewhere in the original module.
def move(n: int, target_stack_index: int, context: Context,
         logs: List[str]) -> Tuple[Context, List[str]]:
    current_stack_index = where(n, context)
    current_stack = nth(current_stack_index, context)
    target_stack = nth(target_stack_index, context)
    untouched_stack_index = nth(
        0,
        set(range(3)) - {target_stack_index, current_stack_index})
    untouched_stack = nth(untouched_stack_index, context)

    if n == min(current_stack):
        if len(target_stack) == 0 or n < min(target_stack):
            _, *new_current_stack = current_stack
            new_target_stack = list(cons(n, target_stack))

            new_context = tuple(
                map(
                    lambda tup: nth(1, tup),
                    sorted(((target_stack_index, new_target_stack),
                            (current_stack_index, new_current_stack),
                            (untouched_stack_index, untouched_stack)))))

            log = (f"{index_to_name[current_stack_index]}" +
                   f" to {index_to_name[target_stack_index]}")
            new_logs = logs + [log]

            return new_context, new_logs
        else:
            new_context, new_logs = move(min(target_stack),
                                         untouched_stack_index, context, logs)
            return move(n, target_stack_index, new_context, new_logs)
    else:
        new_context, new_logs = move(min(current_stack), untouched_stack_index,
                                     context, logs)
        return move(n, target_stack_index, new_context, new_logs)
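
A minimal driver sketch for the function above (solve and the index_to_name mapping are my assumptions, not part of the snippet), taking Context to be a 3-tuple of lists with the smallest disc first:

index_to_name = {0: "A", 1: "B", 2: "C"}   # assumed label mapping

def solve(context, target_stack_index=2):
    logs = []
    # Settle the discs on the target stack from the largest down to the smallest;
    # move() recursively clears whatever is in the way.
    for disc in sorted(set().union(*context), reverse=True):
        context, logs = move(disc, target_stack_index, context, logs)
    return context, logs

# solve(([1, 2, 3], [], []))  ->  (([], [], [1, 2, 3]), ['A to B', 'B to C', ...])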
Example No. 2
def where(n: int, context: Context) -> int:
    return nth(0,
               nth(0,
                   filter(lambda result: result[1],
                          list(enumerate(
                              map(lambda stack: n in stack, context))))))
Example No. 3
def where(n: int, context: Context) -> int:
    return nth(
        0,
        nth(
            0,
            filter(lambda result: result[1],
                   list(enumerate(map(lambda stack: n in stack, context))))))

    # Equivalent, simpler alternative:
    # return [(n in stack) for stack in context].index(True)
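
For example, with the three stacks represented as a tuple of lists (my assumed layout, consistent with Example No. 1):

where(2, ([1, 2], [3], []))   # -> 0
where(3, ([1, 2], [3], []))   # -> 1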
Example No. 4
import heapq

import toolz


def bottom(iterable, key=toolz.identity):
    """Generates elements from min to max."""
    h = []
    for i, value in enumerate(iterable):
        # Use the index as a tie breaker.
        heapq.heappush(h, (key(value), i, value))
    while h:
        yield toolz.nth(2, heapq.heappop(h))
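
A quick usage sketch (the example values are my own, not from the source):

list(bottom([3, 1, 2]))                    # -> [1, 2, 3]
list(bottom(["bb", "a", "ccc"], key=len))  # -> ['a', 'bb', 'ccc']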
Example No. 5
    def __getitem__(self, idx_or_slice):
        if isinstance(idx_or_slice, str): # one category only
            result = self._corpora[idx_or_slice]
        elif isinstance(idx_or_slice, tuple): # category and indices
            cat, corpus_idx_or_slice = idx_or_slice
            result = self._corpora[cat][corpus_idx_or_slice]
        elif isinstance(idx_or_slice, int): # single item
            if idx_or_slice < 0:
                seq = reversed(self)
                idx_or_slice = -idx_or_slice - 1
            else:
                seq = iter(self)
            result = tlz.nth(idx_or_slice, seq)
        elif isinstance(idx_or_slice, slice):
            result = list(islice(iter(self),
                                 idx_or_slice.start,
                                 idx_or_slice.stop,
                                 idx_or_slice.step))
        else:
            raise KeyError(f'invalid index {idx_or_slice}')

        return result
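
The method dispatches on the type of the key: a string selects a whole category, a (category, index-or-slice) tuple drills into one category, an integer is resolved against the flattened iteration with tlz.nth (negative indices are handled by iterating in reverse), and a slice is materialised into a list with itertools.islice; any other key type raises KeyError.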
Example No. 6
import matplotlib.pyplot as plt
import numpy as np
from toolz import iterate, nth


# 20 samples from a correlated 2-D Gaussian.
X = np.random.multivariate_normal(np.array([0, 0]), np.array([[5, 1.5], [1.5, 2]]), 20)
# Power iteration: repeatedly apply X.T @ X and renormalise to approximate its dominant eigenvector.
r = nth(1000, iterate(lambda r: X.T @ X @ r / np.linalg.norm(X.T @ X @ r), np.random.rand(2)))
plt.plot(X[:, 0], X[:, 1], "ro")
# Line through the origin along the estimated principal direction.
plt.plot(np.linspace(-8, 8, 200), np.linspace(-8, 8, 200) * r[1] / r[0], "g")
plt.show()
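
After 1000 steps, r should be (up to numerical error) the dominant eigenvector of X.T @ X, i.e. the first principal direction of the data; a quick cross-check (my addition, reusing X and r from above):

w, V = np.linalg.eigh(X.T @ X)                   # eigenvalues in ascending order
print(np.allclose(np.abs(V[:, -1]), np.abs(r)))  # r is already unit-norm; expected True (equal up to sign)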
Example No. 7
from toolz import iterate, nth


def newton_f(n, guess=2, step=10):
    # Newton's method for sqrt(n): repeatedly average a and n/a.
    next_step = lambda a: (a + n/a)/2
    return nth(step, iterate(next_step, guess))
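
For instance (my own example values, with the defaults guess=2 and step=10):

print(newton_f(2))   # ≈ 1.4142135623730951
print(newton_f(9))   # ≈ 3.0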
Example No. 8
import matplotlib.pyplot as plt
import numpy as np
from math import factorial
from toolz import iterate, nth

np.set_printoptions(precision=4, suppress=True)
f = lambda x, w : x@w
E = lambda X, W, T : 0.5 * np.sum((f(X, W)-T).T @ (f(X, W)-T))
model = lambda x : -(2.3*x-2)**2 -10

def gradient_descent(W, X, T, e):
    d = (X.T @ (X@W-T))
    return W - d*e / np.linalg.norm(d)   # normalized-gradient step with per-parameter rates e

if __name__ == "__main__":
    data_sigma = 0.1
    data_num = 4
    M = 6   # degree + 1 (e.g. enter 4 for a cubic)
    e = np.array([[0.8], [0.8], [0.3], [0.3], [0.1], [0.5]]) * 0.001
    w_init = np.random.normal(-5, 10, M).reshape(M, 1)
    X = np.array([[i**j for j in range(M)] for i in np.linspace(0.1, 2*np.pi, data_num)])
    T = (model(X.T[1]) + np.random.normal(0, data_sigma, data_num)).reshape(data_num, 1)
    
    w_ans = nth(5000000, iterate(lambda w:gradient_descent(w, X, T, e), w_init))

    print("\nerror ", E(X, w_ans, T))    # value of the loss function
    print("w_ans ", (w_ans.T[0]))        # estimated parameters
    print("Taylor", [(-1)**int(i%4/2)/factorial(i)*(i%2) for i in range(M)])  # Taylor-series coefficients of sin
    
    plotX = np.array([[i**j for j in range(M)] for i in np.linspace(0, 2*np.pi, 100)])
    plt.plot(X.T[1], T, "bo")                       # observed data
    plt.plot(plotX.T[1], f(plotX, w_ans), "g")      # fitted model
    plt.plot(plotX.T[1], model(plotX.T[1]))         # ground-truth curve

    plt.show()
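
As a sanity check on the long gradient-descent run (my addition, continuing inside the __main__ block): np.linalg.pinv gives a closed-form least-squares optimum of E, which serves as a baseline for the error printed above.

    w_exact = np.linalg.pinv(X) @ T            # minimum-norm least-squares solution
    print("error (pinv)", E(X, w_exact, T))    # near zero here, since M > data_num allows an exact fit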
Example No. 9
    def nth(self, n):
        return tz.nth(n, self)
Example No. 10
    def kth_last(self, k):
        try:
            return tz.nth(len(self) - k - 1, self)
        except (ValueError, StopIteration):
            return
Example No. 11
import toolz


def nth(n, seq):
    return toolz.nth(n, seq)
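
For instance (my own example values):

print(nth(2, [10, 20, 30, 40]))   # -> 30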
Example No. 12
# Assumed imports: etree is taken to be the standard-library ElementTree and the
# corpus base class to be gensim's TextCorpus (inferred from the usage below).
import os

import numpy
from gensim import corpora, models, similarities
from nltk import tokenize
from toolz import nth
from xml.etree import ElementTree as etree


class ArxivAbstracts(corpora.TextCorpus):
    def __init__(self, path):
        self.path = path
        super(ArxivAbstracts, self).__init__(input=True)

    def get_texts(self):
        for article_file in os.listdir(self.path):
            with open(os.path.join(self.path, article_file)) as article:
                tree = etree.parse(article)
                # ElementTree needs the namespace map passed explicitly
                description_element = tree.find(
                    './/dc:description',
                    {'dc': 'http://purl.org/dc/elements/1.1/'}
                )
                yield tokenize.word_tokenize(description_element.text.lower())

    def __len__(self):
        return len(os.listdir(self.path))

# Perform LSI/LSA on the TF-IDF representation of the abstracts
corpus = ArxivAbstracts('../harvest/data/')
corpus.dictionary.filter_extremes()
tfidf = models.tfidfmodel.TfidfModel(corpus, dictionary=corpus.dictionary)
lsi = models.lsimodel.LsiModel(tfidf[corpus], id2word=corpus.dictionary)

# Example of similarity query
index = similarities.MatrixSimilarity(lsi[tfidf[corpus]])
first_abstract = next(iter(corpus))
vec_lsi = lsi[first_abstract]
most_similar = numpy.argpartition(index[vec_lsi], -2)[-2]  # second-highest similarity (the highest is the query itself)
print(' '.join(nth(most_similar, corpus.get_texts())))