def convert_stock_data_to_stock_files():
    """Create one empty per-stock csv file for every distinct stock found.

    Scans every daily data file in ``temporary_zipping_location_path``,
    derives each stock's purged name via ``stock.get_purged_name``, and
    creates (truncates) an empty ``<purged_name>.csv`` under
    ``all_stocks_path`` the first time that stock is seen.  Prints each
    processed file name, then every distinct leading symbol of the raw
    stock names, then the elapsed time.
    """
    timer = Timer()
    timer.start_timer()
    only_files = [f for f in listdir(temporary_zipping_location_path)
                  if isfile(join(temporary_zipping_location_path, f))]
    start_symbol = set()
    # NOTE(review): the original also kept a `bad_stocks` set, but nothing
    # ever added to it, so its membership test was always False — removed
    # as dead code.  `seen_stocks` tracks which per-stock files exist.
    seen_stocks = set()
    file_index = 0
    for file_name in only_files:
        print("File name: " + file_name + "\t" + str(file_index))
        file_index += 1
        with open(temporary_zipping_location_path + file_name, "r") as csv_file:
            data_reader = csv.reader(csv_file)
            for row in data_reader:
                purged_stock_name = stock.get_purged_name(row[0])
                if purged_stock_name not in seen_stocks:
                    seen_stocks.add(purged_stock_name)
                    # `with` guarantees the handle is closed even on error;
                    # opening in 'w' creates/truncates the empty stock file.
                    with open(all_stocks_path + purged_stock_name + '.csv', 'w'):
                        pass
                    start_symbol.add(row[0][0])
    for start in start_symbol:
        print(start)
    timer.end_timer()
    timer.print_time()
def _dump_stock_buffer(buffered_rows):
    """Append each buffered DayData list to its per-stock csv file.

    ``buffered_rows`` maps a csv file path to the list of DayData objects
    collected for that stock since the last dump.  The mapping is cleared
    after writing.
    """
    for key in buffered_rows:
        with open(key, 'a') as apple:
            writer = csv.writer(apple)
            for r in buffered_rows[key]:
                writer.writerow([r.get_date(), r.get_open(), r.get_high(),
                                 r.get_low(), r.get_close(), r.get_volume()])
    buffered_rows.clear()


def fill_stock_files_with_information():
    """Distribute the daily market files into the per-stock csv files.

    Daily files are named ``sMMDDYY.csv``; their dates are parsed, sorted,
    and processed newest-first.  Each row is appended (via an in-memory
    buffer, flushed every 200 files) to ``<purged_name>.csv`` under
    ``all_stocks_path``, skipping stocks in the ignore list.

    BUG FIX: the original only flushed the buffer when ``file_index >= 200``,
    so rows buffered after the last flush (up to 199 files' worth) were
    silently lost.  A final flush after the loop now writes them out.
    """
    timer = Timer()
    timer.start_timer()
    only_files = [f for f in listdir(temporary_zipping_location_path)
                  if isfile(join(temporary_zipping_location_path, f))]
    # Parse 'sMMDDYY.csv' -> [year, month, day] so a plain sort orders by date.
    # NOTE(review): two-digit years sort lexically, so this assumes all data
    # falls within one century — confirm against the data set.
    date_list = []
    for file_name in only_files:
        digits = file_name[1:].replace('.csv', '')
        month = digits[0:2]
        day = digits[2:4]
        year = digits[4:6]
        date_list.append([year, month, day])
    file_index = 0
    total_file_index = 0
    all_stock_information_as_dictionary = {}
    ignore_list = get_stock_ignore_list()
    # Newest file first.
    for date in reversed(sorted(date_list)):
        temp_file_name = 's' + date[1] + date[2] + date[0] + '.csv'
        print("Opening: " + temp_file_name + "\tthis is file #: " + str(file_index))
        file_index += 1
        total_file_index += 1
        with open(temporary_zipping_location_path + temp_file_name, "r") as csv_file:
            data_reader = csv.reader(csv_file)
            for row in data_reader:
                purged_stock_name = stock.get_purged_name(row[0])
                if purged_stock_name not in ignore_list:
                    file_name_to_append_to = all_stocks_path + purged_stock_name + '.csv'
                    day_data = DayData(row[1], row[2], row[3], row[4], row[5], row[6])
                    all_stock_information_as_dictionary.setdefault(
                        file_name_to_append_to, []).append(day_data)
        # Flush the buffer to disk every 200 files to bound memory use.
        if file_index >= 200:
            print('Dumping Information')
            _dump_stock_buffer(all_stock_information_as_dictionary)
            print('Dumping finished.\ton file ' + str(total_file_index))
            file_index = 0
    # Final flush: write whatever is still buffered after the last file.
    if all_stock_information_as_dictionary:
        print('Dumping Information')
        _dump_stock_buffer(all_stock_information_as_dictionary)
        print('Dumping finished.\ton file ' + str(total_file_index))
    timer.end_timer()
    timer.print_time()
def simulation_for_single_stock(all_simulations, stock_file_path, stock_name_as_a_file):
    """Load one stock's csv history and, if it passes the filters, run it.

    The stock qualifies when it has at least one 2015 trading day, its
    opens are mostly at or above $5.00, and it carries at least 90 days
    of data.  Qualifying stocks are simulated in both 'LONG' and 'SHORT'
    directions via ``do_math_for_single_stock``, with the run timed.
    """
    single_stock = Stock(stock_name_as_a_file.replace('.csv', ''))
    with open(stock_file_path, "r") as csv_file:
        for row in csv.reader(csv_file):
            if row != []:
                single_stock.add_data(
                    DayData(row[0], row[1], row[2], row[3], row[4], row[5]))

    # Score +1 per day opening at/above $5.00, -1 otherwise, and note
    # whether any trading day falls in 2015 (date format 'M/D/YYYY').
    price_score = 0
    traded_in_2015 = False
    for day in single_stock.get_data():
        price_score += 1 if day.get_open() >= 5.0 else -1
        if day.get_date().split('/')[2] == '2015':
            traded_in_2015 = True

    # Filters: all three must hold before any simulation work is done.
    if traded_in_2015 and price_score > 0 and len(single_stock.get_data()) >= 90:
        my_timer = Timer()
        my_timer.start_timer()
        do_math_for_single_stock(all_simulations, 5, 247, single_stock, 'LONG')
        do_math_for_single_stock(all_simulations, 5, 247, single_stock, 'SHORT')
        my_timer.end_timer()
        my_timer.print_time()