Example 1
def act():
    """Scrape the URL held in the ``svalue`` Tk variable and run the analyser.

    On success sets the ``my_msg`` Tk variable to a done message for the UI;
    on any failure sets it to an error prompt instead.
    """
    try:
        print("EXECUTING>>>>")
        webscraper.webscr(str(svalue.get()))
        analyser.analyse()
        my_msg.set("Scraping done!")
    except Exception:
        # The original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; ``Exception`` keeps Ctrl-C and interpreter
        # shutdown working while still reporting scrape/analyse failures.
        my_msg.set("Enter valid URL!")
Example 2
def launch_analyses(source_path, start_date, end_date, symbol, out_root, gen_title):
	"""Load price columns from *source_path*, clamp the requested date window
	to the range actually present in the data, and run ``analyse`` on it.

	Args:
		source_path: path of the source data file passed to ``load_columns``.
		start_date, end_date: requested window; clamped to the data's bounds.
		symbol: instrument name forwarded to ``analyse``.
		out_root: output directory root forwarded to ``analyse``.
		gen_title: title flag/value forwarded to ``analyse``.

	Returns:
		Whatever ``analyse`` returns.
	"""
	(dates, high, low, openn, close, volume) = load_columns(source_path, start_date, end_date)

	# Clamp the requested window to the dates actually present in the data.
	# (min/max replaces the original if-based clamping; same result.)
	end_date = min(end_date, max(dates))
	start_date = max(start_date, min(dates))

	print('source data @ {0}'.format(source_path))
	print('time sample bounds : {0} - {1}'.format(format_dt(start_date), format_dt(end_date)))

	# NOTE(review): the original also computed an unused local
	# ``file_name = symbol.replace(' ', '') + '.png'`` — removed as dead code.
	return analyse(symbol, start_date, end_date, dates, openn, high, low, close, volume, out_root, gen_title)
Example 3
from connector import fetch
from analyser import Normalizer
from analyser import analyse
from indexor import Index
from searcher import Searcher

# Build an index over the 20-newsgroups corpus: fetch the raw documents,
# normalize/analyse them, then build and persist the index to disk.
proc = Normalizer()
myDocs = analyse(fetch('../20news-bydate'), proc)
myIndex = Index()
myIndex.build(myDocs)
myIndex.saveIndex("index")

# Load the saved index back into a searcher for querying.
mySearcher = Searcher()
mySearcher.index.loadIndex("index")
# Example queries (Python-2 print syntax, kept for reference):
#words = ["do", "we", "fall", "to", "understand", "love"]
#print mySearcher.search(words)
#words = ["do", "we"]
#print mySearcher.search(words)
Example 4
                    data = conn.recv(BUFFER_SIZE)

                    if not data:
                        break

                    f.write(data)
                    print("writing...")
    return filename


# Serve forever: receive a file from the host, analyse it, send results back.
while True:
    # Keep retrying until a filename is successfully received from the host.
    filename = False
    while not filename:
        # The original wrapped this call in ``try: ... except: raise`` —
        # a no-op that re-raised everything unchanged, so it is dropped;
        # any receive error still propagates exactly as before.
        filename = receive()

    # Hand the file to the analyser (results are written to pe-sieve.json).
    analyser.analyse(filename)

    # An empty pe-sieve.json means pe-sieve failed, so write an explicit
    # "failed" marker instead of returning an empty result.
    if os.stat("pe-sieve.json").st_size == 0:
        print("pe-sieve failed.")
        with open("pe-sieve.json", "w") as f:
            f.write("failed")
    # Otherwise return the pe-sieve results to the host.
    else:
        send("pe-sieve.json")
Example 5
    def load_mnist_dataset(self):
        """Load the pickled MNIST dataset from ``./mnist.pkl.gz``.

        Returns:
            ``(train_set, valid_set, test_set)`` where each is a list of
            ``(image, label)`` pairs.
        """
        with gzip.open('./mnist.pkl.gz', 'rb') as f:
            train_set, valid_set, test_set = pickle.load(f)
        # Materialize the pairs as lists: under Python 3 ``zip()`` returns a
        # one-shot iterator, which breaks downstream indexing (e.g.
        # ``show_mnist_grid`` does ``test_set[0]``). Under Python 2 this is
        # identical to the original, since ``zip`` already returned a list.
        train_set = list(zip(train_set[0], train_set[1]))
        valid_set = list(zip(valid_set[0], valid_set[1]))
        test_set = list(zip(test_set[0], test_set[1]))
        return train_set, valid_set, test_set

    def show_mnist_grid(self, model, test_set, n=10):
        """Plot an n-by-n grid of MNIST images, each titled with the model's prediction.

        Args:
            model: object with a ``forward(image)`` method returning class scores.
            test_set: indexable collection; element 0 holds the image data
                (presumably the images half of an (images, labels) pair —
                TODO(review): confirm against the caller).
            n: grid side length; the first ``n**2`` images are shown.
        """
        test_data = test_set[0]
        fig, ax = plt.subplots(nrows=n, ncols=n, sharex=True, sharey=True)
        ax = ax.flatten()
        # range() instead of the original Python-2-only xrange(): identical
        # behavior for this loop and it also runs under Python 3.
        for i in range(n**2):
            y = model.forward(test_data[i])
            output = np.argmax(y)  # predicted class = index of the max score
            img = test_data[i].reshape(28, 28)
            ax[i].imshow(img, cmap='Greys', interpolation='nearest')
            ax[i].set_title('%d' % output)
        # Shared axes: clearing ticks on one subplot clears them on all.
        ax[0].set_xticks([])
        ax[0].set_yticks([])
        plt.tight_layout()


# Run the MNIST experiment over the given hyper-parameter grid and plot the
# analyser's results.
mnist_experiment = MnistExperiment()
analysis = analyser.analyse(
    mnist_experiment.run,
    learning_rate=[0.01],
    batch_size=60,
    epochs=[5],
    train_set_percentage=[1],
)
analyser.plot_analyser_results(analysis)
Example 6
def render_result():
    """Scrape data for the requested stock, analyse it, and render the results page."""
    ticker = request.form['stock']
    verdict = analyse(scrape(ticker))
    return render_template('results.html', suggestion=verdict)
def post_method():
	"""Run the analyser on the submitted complaint text and return its output."""
	return analyse(request.form['data'])