Example #1
def create_data_json_pcr_79():
    # Assumes module-level imports: os, xlrd, OrderedDict (collections),
    # datetime (datetime), and a BASE_DIR constant defined elsewhere.
    wb = xlrd.open_workbook(
        os.path.join(BASE_DIR, 'PCR_CheckList_Sprint_6.xlsx'))
    sheet = wb.sheet_by_name('Schedulers Create')
    payload = []
    Expect = list()

    for rownum in range(1, 69):
        user = OrderedDict()
        row_value = sheet.row_values(rownum)
        des = row_value[0]
        expect = row_value[10]
        code = row_value[11]
        user['name'] = row_value[1]
        user['taskId'] = row_value[2]
        user['sellerId'] = row_value[3]
        user['active'] = row_value[4]
        user['settings'] = row_value[5]
        user['scheduleType'] = row_value[6]
        # user['startDate'] = row_value[7]
        date = int(row_value[7]) if isinstance(
            row_value[7], float) else datetime.now().strftime('%d-%m-%Y')
        if isinstance(date, str):
            user['startDate'] = date
        else:
            user['startDate'] = datetime.fromordinal(
                datetime(1900, 1, 1).toordinal() + date -
                2).strftime('%d-%m-%Y')
        user['scheduleTime'] = row_value[8]
        user['variables'] = row_value[9]
        payload.append(user)
        Expect.append((des, user, int(expect), code))

    return Expect
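The `- 2` in the ordinal arithmetic above offsets Excel's 1-based serial dates and the spreadsheet's phantom 1900-02-29 leap day. A minimal standalone sketch of the same conversion; the xlrd helper shown in the comment is the library's own equivalent:

from datetime import datetime, timedelta

def excel_serial_to_datetime(serial):
    # Excel day 1 is 1900-01-01, and Excel wrongly treats 1900 as a leap year,
    # hence the 2-day offset used in the examples above.
    return datetime(1900, 1, 1) + timedelta(days=int(serial) - 2)

# xlrd ships an equivalent helper (wb is an open xlrd workbook):
# from xlrd.xldate import xldate_as_datetime
# dt = xldate_as_datetime(serial, wb.datemode)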
Example #2
def convert_to_datetime(input):
    """
    Converts the given object to a datetime object, if possible.
    If an actual datetime object is passed, it is returned unmodified.
    If the input is a string, it is parsed as a datetime.

    Date strings are accepted in three different forms: date only (Y-m-d),
    date with time (Y-m-d H:M:S) or with date+time with microseconds
    (Y-m-d H:M:S.micro).

    :rtype: datetime
    """
    from datetime import date, datetime, timedelta
    if isinstance(input, datetime):
        return input
    elif isinstance(input, date):
        return datetime.fromordinal(input.toordinal())
    elif isinstance(input, basestring):  # Python 2; use str on Python 3
        m = _DATE_REGEX.match(input)  # _DATE_REGEX is defined at module level
        if not m:
            raise ValueError('Invalid date string')
        values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
        values = dict(values)
        return datetime(**values)
    raise TypeError('Unsupported input type: %s' % type(input))
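`_DATE_REGEX` and `basestring` come from the surrounding (Python 2) module and are not shown. A plausible definition of the regex, matching the three accepted string forms, might be:

import re

# Named groups feed straight into datetime(**values) above; the time and
# microsecond parts are optional, covering the three accepted forms.
_DATE_REGEX = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
    r'(?: (?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})'
    r'(?:\.(?P<microsecond>\d{1,6}))?)?')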
Example #3
def create_data_json_pcr_39_2():
    # Assumes module-level imports: os, xlrd, random, OrderedDict (collections),
    # datetime (datetime), and a BASE_DIR constant defined elsewhere.
    wb = xlrd.open_workbook(
        os.path.join(BASE_DIR, 'PCR_CheckList_Sprint_5.xlsx'))
    sheet = wb.sheet_by_name('PCR-39 API Create schedule')
    payload = []

    for rownum in range(21, 58):
        user = OrderedDict()
        row_value = sheet.row_values(rownum)
        user['crawlType'] = row_value[5]
        user['scheduleName'] = row_value[6]
        user['scheduleSettings'] = row_value[7]
        user['scheduleType'] = row_value[8]
        date = int(row_value[9]) if isinstance(
            row_value[9], float) else datetime.now().strftime('%Y-%m-%d')
        if isinstance(date, str):
            user['startDate'] = date
        else:
            user['startDate'] = datetime.fromordinal(
                datetime(1900, 1, 1).toordinal() + date -
                2).strftime('%Y-%m-%d')
        user['scheduleTime'] = row_value[10]
        user['activate'] = random.choice([True, False])
        payload.append(user)

    return payload
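The returned payload is a list of OrderedDicts, ready to serialize. A quick usage sketch, assuming the workbook is present:

import json

payload = create_data_json_pcr_39_2()
print(json.dumps(payload[0], indent=2))  # first schedule as JSON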
Example #4
    def basicCalculationTimeOnly(self, excel_date):
        if excel_date == 0:
            return "null"
        dt = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + int(excel_date) - 2)
        hour, minute, second = self.floatHourToTime(excel_date % 1)
        dt = dt.replace(hour=hour, minute=minute, second=second)
        Created_On = "' %s'" % (dt.strftime("%m/%d/%Y"))
        return Created_On
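`floatHourToTime` is not shown; a sketch consistent with its use above (fraction of a day in, (hour, minute, second) out) could be:

    def floatHourToTime(self, fh):
        # fh is the fractional part of an Excel serial date, i.e. a fraction of a day
        hours, hourSeconds = divmod(fh * 24, 1)
        minutes, seconds = divmod(hourSeconds * 60, 1)
        return int(hours), int(minutes), int(seconds * 60)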
Example #6
    def __init__(self, split, data_dir=None):
        super(ExampleDataset, self).__init__()

        if split == 'train':
            time_start = 0
            time_end = datetime(2013, 8, 31, tzinfo=self.TZ).toordinal()
        elif split == 'test':
            time_start = datetime(2013, 9, 1, tzinfo=self.TZ).toordinal()
            time_end = datetime(2014, 1, 1, tzinfo=self.TZ).toordinal()
        else:
            raise ValueError('invalid split', split)

        self.FIRST_DATE = datetime(2012, 12, 28, tzinfo=self.TZ)

        self.TEST_TIMESLOTS = [datetime(2013, 9, 1, tzinfo=self.TZ),
                               datetime(2013, 9, 25, tzinfo=self.TZ),
                               datetime(2013, 10, 20, tzinfo=self.TZ),
                               datetime(2013, 11, 15, tzinfo=self.TZ),
                               datetime(2013, 12, 10, tzinfo=self.TZ),
                               datetime(2014, 1, 1, tzinfo=self.TZ)]



        self.N_nodes = 100

        self.A_initial = np.random.randint(0, 2, size=(self.N_nodes, self.N_nodes))
        self.A_last = np.random.randint(0, 2, size=(self.N_nodes, self.N_nodes))

        print('\nA_initial', np.sum(self.A_initial))
        print('A_last', np.sum(self.A_last), '\n')

        self.n_events = 10000
        all_events = []
        for i in range(self.n_events):
            user_id1 = np.random.randint(0, self.N_nodes)
            user_id2 = np.random.choice(np.delete(np.arange(self.N_nodes), user_id1))
            ts = max((time_start, self.FIRST_DATE.toordinal()))
            event_time = datetime.fromordinal(ts + np.random.randint(0, time_end - ts) )
            assert event_time.timestamp() >= self.FIRST_DATE.timestamp(), (event_time, self.FIRST_DATE)
            all_events.append((user_id1, user_id2, np.random.choice(['communication event',
                                                                     'association event']), event_time))

        self.event_types = ['communication event']

        self.all_events = sorted(all_events, key=lambda t: t[3].timestamp())
        print('\n%s' % split.upper())
        print('%d events between %d users loaded' % (len(self.all_events), self.N_nodes))
        print('%d communication events' % (len([t for t in self.all_events if t[2] == 'communication event'])))
        print('%d association events' % (len([t for t in self.all_events if t[2] == 'association event'])))

        self.event_types_num = {'association event': 0}
        k = 1  # k >= 1 for communication events
        for t in self.event_types:
            self.event_types_num[t] = k
            k += 1

        self.n_events = len(self.all_events)
Example #7
def ordinal_to_date_1(date_data, base_time='2018-01-01'):
    from datetime import datetime
    d = datetime.strptime(base_time, '%Y-%m-%d').date()
    start_0 = d.toordinal() - 1
    all_dates = date_data.copy()
    modified_all_dates = []
    for ind in all_dates.index:
        dt = datetime.fromordinal(start_0 + all_dates.loc[ind])
        new_date = '%s/%s/%s' % (dt.month, dt.day, dt.year)
        modified_all_dates.append(new_date)

    return modified_all_dates
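A quick usage sketch, assuming date_data is a pandas Series of 1-based day offsets relative to base_time:

import pandas as pd

offsets = pd.Series([1, 32, 60])   # days since 2018-01-01, 1-based
print(ordinal_to_date_1(offsets))  # ['1/1/2018', '2/1/2018', '3/1/2018']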
Example #8
def convertSerialDate(sh_cell_val):
    try:
        dt = datetime.fromordinal(
            datetime(1900, 1, 1).toordinal() + int(sh_cell_val) - 2)
        dt_str = str(dt)
        year = dt_str.split('-')[0][-2:]
        month = dt_str.split('-')[1]
        day = dt_str.split('-')[2].split(' ')[0]
        CSV_Date = month + '/' + day + '/' + year
        return CSV_Date  # return the formatted date, not the raw serial
    except Exception:
        return False
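With the fix above (returning CSV_Date rather than the raw serial), Excel serial 43831, which is 2020-01-01, converts as:

print(convertSerialDate(43831))  # '01/01/20'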
Example #9
def get_year(mat_date):
    """
    Calculate the year from Matlab's date format.

    Parameters
    ----------
    mat_date : float
        Matlab datenum to be converted.

    Returns
    -------
    year : int
        Year from the Matlab datenum.
    """
    temp = int(mat_date)
    year = datetime.fromordinal(max(temp - 366, 1)).year
    return year
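Matlab's datenum 1 is 1-Jan-0000 while Python's ordinal 1 is 1-Jan-0001, and year 0 is treated as a leap year, hence the 366-day shift. For example, datenum 737791 is 2020-01-01:

from datetime import datetime

print(get_year(737791))                    # 2020
print(datetime.fromordinal(737791 - 366))  # 2020-01-01 00:00:00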
Example #10
def datenum_to_datetime(datenum):
    """
    Convert Matlab datenum into Python datetime.
    :param datenum: Date in datenum format
    :return:        Datetime object corresponding to datenum.
    """
    days = datenum % 1          # fractional part of the datenum (a fraction of a day)
    hours = days % 1 * 24       # days < 1, so days % 1 == days
    minutes = hours % 1 * 60
    seconds = minutes % 1 * 60
    return datetime.fromordinal(int(datenum)) \
           + timedelta(days=int(days)) \
           + timedelta(hours=int(hours)) \
           + timedelta(minutes=int(minutes)) \
           + timedelta(seconds=round(seconds)) \
           - timedelta(days=366)
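For instance, a datenum with a fractional part converts as follows (737791.5 is noon on 2020-01-01):

print(datenum_to_datetime(737791.5))  # 2020-01-01 12:00:00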
Example #11
def draw_graph(dates, counts):
    ###########################################################
    # Drawing takes place here.
    figure(1)

    ax = subplot(111)
    plot_date(dates,
              counts,
              color='r',
              linestyle='-',
              marker='o',
              markersize=3)

    ax.xaxis.set_major_formatter(DateFormatter('%Y'))
    ax.xaxis.set_major_locator(YearLocator())
    ax.xaxis.set_minor_locator(MonthLocator())
    ax.set_xlim((dates[0] - 92, dates[len(dates) - 1] + 92))

    ax.yaxis.set_major_formatter(FormatStrFormatter('%d'))

    ylabel('Total # of Public DAV Servers')

    lastdate = datetime.fromordinal(dates[len(dates) - 1]).strftime("%B %Y")
    xlabel("Data as of " + lastdate)
    title('Security Space Survey of\nPublic Subversion DAV Servers')
    # End drawing
    ###########################################################
    png = open(OUTPUT_FILE, 'wb')  # binary mode for PNG output
    savefig(png)
    png.close()
    os.rename(OUTPUT_FILE, OUTPUT_FILE + ".tmp.png")
    try:
        im = Image.open(OUTPUT_FILE + ".tmp.png", 'r')
        (width, height) = im.size
        print("Original size: %d x %d pixels" % (width, height))
        scale = float(OUTPUT_IMAGE_WIDTH) / float(width)
        width = OUTPUT_IMAGE_WIDTH
        height = int(float(height) * scale)
        print("Final size: %d x %d pixels" % (width, height))
        im = im.resize((width, height), Image.ANTIALIAS)
        im.save(OUTPUT_FILE, im.format)
        os.unlink(OUTPUT_FILE + ".tmp.png")
    except Exception as e:
        sys.stderr.write("Error attempting to resize the graphic: %s\n" %
                         (str(e)))
        os.rename(OUTPUT_FILE + ".tmp.png", OUTPUT_FILE)
        raise
Example #12
    def get_cpr_for_cusip(self, cusip, fix_lag=True):
        """BBG Mortgage data is specified a month forward of the GNM files'
        'as_of_date'. Probably b/c it's really as-of the middle of the
        specified month.
        
        Calling fix_lag will put it on the same footing as the GNM data, i.e.,
        it'll still need to be lagged another month for prediction."""
        #Check if we already have it
        cusip = "_" + cusip
        address = self.p_root + cusip[3:5]
        toc = self.hdfstore.get_node(address)
        address = address + "/" + cusip

        if toc is not None and cusip in toc:
            data = self.hdfstore[address]
            if data.name == 'blank':
                logger.error("No {} data for {}".format(
                    self.data_name, cusip[1:]))
                data = None
            elif data.index.dtype == 'int64':  #legacy conversion
                logger.info("Legacy conversion of stored {} data index".format(
                    self.data_name))
                data.index = [
                    datetime.fromordinal(x).date() for x in data.index
                ]
                self.hdfstore[address] = data
        else:
            data = self.bbg.get_hist_pool_data(cusip[1:],
                                               field_code=self.field)
            if len(data) > 0:
                data.name = cusip
                self.hdfstore.append(address, data)
            else:  #No data available
                logger.error("No {} data for {}".format(
                    self.data_name, cusip[1:]))
                data = pd.Series({0: 0})
                data.name = 'blank'
                self.hdfstore.append(address, data)
                data = None
            #put in a blank entry if there's no data.
            self.hdfstore.flush(fsync=True)

        if fix_lag and data is not None:
            data = data.shift(-1).dropna()
        return data
Example #13
def draw_graph(dates, counts):
    ###########################################################
    # Drawing takes place here.
    ax = subplot(111)
    ax.xaxis.set_major_formatter(DateFormatter('%b,%y'))
    ax.yaxis.set_major_formatter(FormatStrFormatter('%d'))

    line = bar(dates, counts, color='r', width=24)

    ylabel('Total # of Public DAV Servers')

    lastdate = datetime.fromordinal(dates[len(dates) - 1]).strftime("%B %Y")
    xlabel("Data as of " + lastdate)
    title('Security Space Survey of Public Subversion DAV Servers')
    # End drawing
    ###########################################################
    png = open(OUTPUT_FILE, 'wb')  # binary mode for PNG output
    savefig(png)
    png.close()
    close()
Example #14
def draw_graph(dates, counts):
  ###########################################################
  # Drawing takes place here.
  figure(1)

  ax = subplot(111)
  plot_date(dates, counts, color='r', linestyle='-', marker='o', markersize=3)

  ax.xaxis.set_major_formatter( DateFormatter('%Y') )
  ax.xaxis.set_major_locator( YearLocator() )
  ax.xaxis.set_minor_locator( MonthLocator() )
  ax.set_xlim( (dates[0] - 92, dates[len(dates) - 1] + 92) )

  ax.yaxis.set_major_formatter( FormatStrFormatter('%d') )

  ylabel('Total # of Public DAV Servers')

  lastdate = datetime.fromordinal(dates[len(dates) - 1]).strftime("%B %Y")
  xlabel("Data as of " + lastdate)
  title('Security Space Survey of\nPublic Subversion DAV Servers')
  # End drawing
  ###########################################################
  png = open(OUTPUT_FILE, 'wb')  # binary mode for PNG output
  savefig(png)
  png.close()
  os.rename(OUTPUT_FILE, OUTPUT_FILE + ".tmp.png")
  try:
    im = Image.open(OUTPUT_FILE + ".tmp.png", 'r')
    (width, height) = im.size
    print("Original size: %d x %d pixels" % (width, height))
    scale = float(OUTPUT_IMAGE_WIDTH) / float(width)
    width = OUTPUT_IMAGE_WIDTH
    height = int(float(height) * scale)
    print("Final size: %d x %d pixels" % (width, height))
    im = im.resize((width, height), Image.ANTIALIAS)
    im.save(OUTPUT_FILE, im.format)
    os.unlink(OUTPUT_FILE + ".tmp.png")
  except Exception as e:
    sys.stderr.write("Error attempting to resize the graphic: %s\n" % (str(e)))
    os.rename(OUTPUT_FILE + ".tmp.png", OUTPUT_FILE)
    raise
Example #15
def dia_util():
    cal = Brazil()
    cal.holidays(2019)

    # today's date
    hoje = datetime.now()

    ano = hoje.year
    mes = hoje.month
    dia = hoje.day

    # build and parse the date
    data = ('{0}/{1}/{2}').format(ano, mes, dia)
    data = datetime.strptime(data, '%Y/%m/%d').date()

    varind = (cal.is_working_day((data)))  # is it a working day?

    # record whether the current day is a working day
    verifica_dia = varind

    # check, and advance the date while it is not a working day

    while not varind:
        data = datetime.fromordinal(data.toordinal() + 1)
        varind = (cal.is_working_day((data)))  # is it a working day?

    # extract the YEAR, MONTH and DAY of the final date to insert into the code
    ano = data.year
    mes = int(data.month)
    dia = int(data.day)

    if mes < 10:
        mes = '0{0}'.format(mes)

    if dia < 10:
        dia = '0{0}'.format(dia)

    datafinal = '{0}/{1}/{2}'.format(dia, mes, ano)

    return datafinal, verifica_dia
Example #16
	def get_cal_menu(self, qpoint):
		table = self.calendar._table
		item = table.itemAt(qpoint)

		day = item.data(QtCore.Qt.UserRole)
		date2 = None
		date3 = None
		tzone = clnxcfg.observer.timezone
		date = datetime.fromordinal(day.toordinal())
		date = date.replace(hour=12, minute=0, second=0, tzinfo=tzone)

		if self.calendar.lunarReturn:
			idx = self.calendar.fetchLunarReturn(day)
			if idx >= 0:
				date2 = self.calendar.lunarReturns[idx]
		if self.calendar.solarReturn and day == self.calendar.solarReturnTime.date():
			date3 = self.calendar.solarReturnTime

		#self.calendar.setGridVisible(True)
		menu = QtGui.QMenu(self.calendar)
		if date2:
			lritem = menu.addAction("Lunar Return for %s" %(date.strftime("%m/%d/%Y")))
			lritem.triggered.connect(lambda: self.get_info_for_date(date2))
			lritem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))
		if date3:
			sritem = menu.addAction("Solar Return for %s" %(date.strftime("%m/%d/%Y")))
			sritem.triggered.connect(lambda: self.get_info_for_date(date3))
			sritem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))

		infoitem = menu.addAction("Info for %s" %(date.strftime("%m/%d/%Y")))
		infoitem.triggered.connect(lambda: self.get_info_for_date(date))
		infoitem.setIcon(QtGui.QIcon.fromTheme("dialog-information"))

		copymenu = menu.addMenu("Copy")
		copymenu.setIcon(QtGui.QIcon.fromTheme("edit-copy"))
		copyall = copymenu.addAction("All")
		copydate = copymenu.addAction("Date")
		copyplanetdata = copymenu.addAction("Planetary Hours")
		copymoonphasedata = copymenu.addAction("Moon Phases")
		copysignsdata = copymenu.addAction("Signs for this date")
		copyeventdata = copymenu.addAction("Events")

		copyall.triggered.connect(lambda: self.copy_to_clipboard("All", date))
		copydate.triggered.connect(lambda: app.clipboard().setText(date.strftime("%m/%d/%Y")))
		copyplanetdata.triggered.connect(lambda: self.copy_to_clipboard("Planetary Hours", date))
		copymoonphasedata.triggered.connect(lambda: self.copy_to_clipboard("Moon Phase", date))
		copysignsdata.triggered.connect(lambda: self.copy_to_clipboard("Planetary Signs", date))
		copyeventdata.triggered.connect(lambda: self.copy_to_clipboard("Events", date))

		savemenu = menu.addMenu("Save to File")
		savemenu.setIcon(QtGui.QIcon.fromTheme("document-save-as"))
		saveall = savemenu.addAction("All")
		saveplanetdata = savemenu.addAction("Planetary Hours")
		savemoonphasedata = savemenu.addAction("Moon Phases")
		savesignsdata = savemenu.addAction("Signs for this date")
		saveeventdata = savemenu.addAction("Events")

		saveall.triggered.connect(lambda: self.print_to_file("All", date))
		saveplanetdata.triggered.connect(lambda: self.print_to_file("Planetary Hours", date))
		savemoonphasedata.triggered.connect(lambda: self.print_to_file("Moon Phase", date))
		savesignsdata.triggered.connect(lambda: self.print_to_file("Planetary Signs", date))
		saveeventdata.triggered.connect(lambda: self.print_to_file("Events", date))

		menu.exec_(self.calendar.mapToGlobal(qpoint))
Example #17
def _plot_res(results=None, original_pixel=None, band=None, file_name=None):
    """Plots CCD residual for a given band. Accepts a 1x1xt xarray if a scatter-plot overlay of original acquisitions over the ccd results is needed."""

    fig = plt.figure(1, figsize=(20, 5))
    fig.suptitle(band.title() + " Residual ",
                 fontsize=18,
                 verticalalignment='bottom')

    lastdt = None

    dateLabels = []

    for change_model in results["change_models"]:
        ax1 = fig.add_subplot(111)
        model = getattr(change_model, band)
        time = original_pixel.sel(
            time=slice(datetime.fromordinal(change_model.start_day),
                       datetime.fromordinal(change_model.end_day))).time.values
        ordinal_time = list(map(n64_to_ordinal, time))
        actual = original_pixel[band].values

        predicted = list(
            map(
                partial(ccd._lasso_eval,
                        bias=model.intercept,
                        weights=model.coefficients), ordinal_time))
        residual = list(
            map(lambda x: euclidean(x[0], x[1]), zip(actual, predicted)))

        x = time
        y = residual

        ax1.plot(x, y, label=model.coefficients)

        # Curve Fit Code
        polycoeffs = np.polyfit(ordinal_time, residual, 5)

        f = np.poly1d(polycoeffs)

        x_new = np.linspace(ordinal_time[0], ordinal_time[-1], 50)
        y_new = f(x_new)
        ax1.plot(x_new, y_new, c="#333333")

        # Draw Vertical Lines
        dt = datetime.fromordinal(change_model.start_day)
        dateLabels.append(dt)

        if lastdt is not None:
            ax1.axvspan(lastdt, dt, color=(0, 0, 0, 0.1))

        dt = datetime.fromordinal(change_model.end_day)
        dateLabels.append(dt)

        lastdt = dt

    ymin, ymax = ax1.get_ylim()
    for idx, dt in enumerate(dateLabels):
        plt.axvline(x=dt, linestyle='dotted', color=(0, 0, 0, 0.5))
        # Top, inside
        plt.text(
            dt,
            ymax,
            "\n" +  # HACK TO FIX SPACING
            dt.strftime('%b %d') + "  \n"  # HACK TO FIX SPACING
            ,
            rotation=90,
            horizontalalignment='right' if (idx % 2) else 'left',
            verticalalignment='top')

    plt.tight_layout()

    if file_name is not None:
        _save_plot_to_file(plot=plt, file=file_name, band_name=band)
    plt.show()
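`n64_to_ordinal` is not shown; a sketch consistent with its use above, mapping numpy datetime64 values to the proleptic ordinals the CCD models are evaluated on, could be:

import pandas as pd

def n64_to_ordinal(n64):
    # convert a numpy datetime64 to a proleptic Gregorian ordinal
    return pd.Timestamp(n64).to_pydatetime().toordinal()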
Example #18
from datetime import date
d = date(2014, 3, 11)        # assumed starting value, consistent with the comment below
print(d.replace(year=2005))  # new object: datetime.date(2005, 3, 11)
print(d)                     # unchanged

######### class datetime is a subclass of date, adding time and timezone (UTC) support
# class datetime.datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0)
from time import time  # import the function time()
from datetime import datetime  # import the class; otherwise you must write datetime.datetime()

print(datetime.today())   # localtime
print(datetime.utcnow())  # utc time

print(datetime.fromtimestamp(time()))  # time() -> timestamp as float

print(datetime.utcfromtimestamp(time()))  # utc

print(datetime.fromordinal(100))  # arg int

# classmethod datetime.combine(date, time, tzinfo=self.tzinfo)
print(d, datetime.combine(d, datetime(2014, 12, 1, 8, 15, 20).time()))
# YYYY-MM-DD[*HH[:MM[:SS[.fff[fff]]]][+HH:MM[:SS[.ffffff]]]]
# (before Python 3.11, fromisoformat accepts any single character as the date/time separator)
print(datetime.fromisoformat('2014-12-21*12:05:45'))

print(datetime.fromisoformat('2014-12-21+11:05:08'))

# example
print('Examples 2')
d = datetime.fromisoformat('2011-11-04')
print(d)
d = datetime.fromisoformat('2011-11-04T00:05:23')
print(d)
d = datetime.fromisoformat('2011-11-04 00:05:23.283')
Example #19
    def get_total_days(self):
        now = datetime.utcnow()
        ordinal = datetime.fromordinal(1)  # 0001-01-01 00:00:00, i.e. datetime.min
        totalNumDays = (now - ordinal).days
        return totalNumDays
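Since datetime.fromordinal(1) is datetime.min (0001-01-01), the same count can be read directly off the ordinal:

from datetime import datetime

print(datetime.utcnow().toordinal() - 1)  # same value as get_total_days()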
Example #20
def random_date():
    start_date = datetime.now().replace(day=1, month=1).toordinal()
    end_date = datetime.now().toordinal()
    random_day = datetime.fromordinal(random.randint(start_date, end_date))
    return random_day
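Seeding the random module makes the pick reproducible:

import random

random.seed(42)
print(random_date())  # a midnight datetime between Jan 1 of the current year and today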
Example #21
Xtrain['Date'] = Xtrain['Date']  # no-op

# In[370]:

scaler = StandardScaler()

xtrain = scaler.fit_transform(Xtrain.values)
xtest = scaler.transform(Xtest.values)

# In[376]:

from datetime import datetime as dt

traindateseries = pd.Series(
    Xtrain['Date'].apply(lambda x: dt.fromordinal(x)).dt.strftime('%Y-%m-%d'))
testdateseries = pd.Series(
    Xtest['Date'].apply(lambda x: dt.fromordinal(x)).dt.strftime('%Y-%m-%d'))  # Xtest, not Xtrain
traindateseries

# In[377]:

Xtest.info()

Example #22
def datenum_to_datetime(datenum):
    datenum = float(datenum)
    days = datenum % 1
    return datetime.fromordinal(int(datenum)) \
           + timedelta(days=days) \
           - timedelta(days=366)
Example #23
def datenum_mat_2_python_datetime(datenum_vec):

    Pydater = datetime.fromordinal(int(datenum_vec)) + timedelta(
        days=datenum_vec % 1) - timedelta(days=366)

    return Pydater
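Despite its name, the function above takes a scalar; a vectorized wrapper (a sketch using numpy) could be:

import numpy as np

def datenum_mat_2_python_datetime_vec(datenums):
    # apply the scalar conversion element-wise over an array of datenums
    return np.array([datenum_mat_2_python_datetime(d) for d in np.atleast_1d(datenums)])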
Example #24
    def __init__(self, split, data_dir=None):
        super(ExampleDataset, self).__init__()

        if split == 'train':
            time_start = 0
            # TODO: set the end date of the training data
            time_end = datetime(2013, 8, 31, tzinfo=self.TZ).toordinal()
        elif split == 'test':
            # TODO: set the start/end dates of the test data
            time_start = datetime(2013, 9, 1, tzinfo=self.TZ).toordinal()
            time_end = datetime(2014, 1, 1, tzinfo=self.TZ).toordinal()
        else:
            raise ValueError('invalid split', split)

        # TODO: set the first date of the data
        self.FIRST_DATE = datetime(2012, 12, 28, tzinfo=self.TZ)

        # TODO: split the test data by time into n slots (n=6)
        self.TEST_TIMESLOTS = [
            datetime(2013, 9, 1, tzinfo=self.TZ),
            datetime(2013, 9, 25, tzinfo=self.TZ),
            datetime(2013, 10, 20, tzinfo=self.TZ),
            datetime(2013, 11, 15, tzinfo=self.TZ),
            datetime(2013, 12, 10, tzinfo=self.TZ),
            datetime(2014, 1, 1, tzinfo=self.TZ)
        ]

        self.N_nodes = 100

        self.A_initial = np.random.randint(0,
                                           2,
                                           size=(self.N_nodes, self.N_nodes))
        self.A_last = np.random.randint(0,
                                        2,
                                        size=(self.N_nodes, self.N_nodes))

        print('\nA_initial', np.sum(self.A_initial))
        print('A_last', np.sum(self.A_last), '\n')

        # TODO: set the number of events
        self.n_events = 10000
        all_events = []
        for i in range(self.n_events):
            user_id1 = np.random.randint(0, self.N_nodes)
            user_id2 = np.random.choice(
                np.delete(np.arange(self.N_nodes),
                          user_id1))  # someone other than user_id1
            ts = max((time_start, self.FIRST_DATE.toordinal()))  # pick the start time
            # draw the event time between the start and end times
            event_time = datetime.fromordinal(
                ts + np.random.randint(0, time_end - ts))
            # the event time must not precede the first date!
            assert event_time.timestamp() >= self.FIRST_DATE.timestamp(), (
                event_time, self.FIRST_DATE)
            all_events.append(
                (user_id1, user_id2,
                 np.random.choice(['communication event',
                                   'association event']), event_time))

        # TODO: add every event type here!
        self.event_types = ['communication event']

        self.all_events = sorted(all_events, key=lambda t: t[3].timestamp())
        print('\n%s' % split.upper())
        print('%d events between %d users loaded' %
              (len(self.all_events), self.N_nodes))
        print('%d communication events' %
              (len([t for t in self.all_events if t[2] == 'communication event'])))
        print('%d association events' %
              (len([t for t in self.all_events if t[2] == 'association event'])))

        # 'association event' is fixed at 0; other event types are indexed from 1 upward
        self.event_types_num = {'association event': 0}
        k = 1  # k >= 1 for communication events
        for t in self.event_types:
            self.event_types_num[t] = k
            k += 1

        self.n_events = len(self.all_events)
Example #25
def main_page(request):
    if request.user.is_authenticated:
        incomes = FlowOfFunds.objects.filter(family_id=request.user)
        sum = 0
        # sum = FlowOfFunds.objects.filter(family_id=request.user).aggregate(Sum('sum'))  # how does this work?
        for inc in incomes:
            sum = sum + inc.sum
        if SavingMoney.objects.filter(user_id=request.user):
            today = datetime.today()
            '''
            year = int(today[0:4])
            month = int(today[5:7])
            day = int(today[8:10])
            '''
            sal_day = SavingMoney.objects.filter(
                user_id=request.user).first().salary_day
            # start_salary_date = datetime(year, month, sal_day)
            today_date = datetime.fromordinal(today.toordinal())  # today at midnight
            if sal_day < today.day:
                if today.month < 12:
                    finish_salary_date = datetime(today.year, today.month + 1,
                                                  sal_day)
                else:
                    finish_salary_date = datetime(today.year + 1, 1, sal_day)
            else:
                n = sal_day - today.day
                finish_salary_date = today_date + timedelta(days=n)
            delta = (finish_salary_date - today_date).days

            plans = ExpensesPlan.objects.filter(user_id=request.user)
            types = Categories.objects.filter(family_id=request.user,
                                              is_it_expense=True)
            i = 0
            '''
            names = []
            while i < len(types):
                names.append(types.filter(type_name=types[i].type_name).first().type_name)
                i = i+1
            '''
            names = [type.type_name for type in types]
            all_plan_sum = []
            all_money_spent = []
            rest_of_money = []
            n = 0

            for plan in plans:
                all_plan_sum.append(plan.sum_plan)
                money_spent = FlowOfFunds.objects.filter(
                    family_id=request.user, type_id=plan.type_id)
                mon_sum = 0
                for mon in money_spent:
                    mon_sum = mon_sum + mon.sum
                all_money_spent.append(abs(mon_sum))
                rest_of_money.append(plan.sum_plan - abs(mon_sum))
            final_list = []
            fl = []
            i = 0
            for name in names:
                fl = []
                fl.append(name)
                fl.append(all_plan_sum[i])
                fl.append(all_money_spent[i])
                fl.append(rest_of_money[i])
                final_list.append(fl)
                i = i + 1
        # days = SavingMoney.objects.filter(user_id=request.user).first().salary_day


            return render_to_response('index.html', {
                'name': request.user.username, 'flag': True,\
                'flows': FlowOfFunds.objects.filter(family_id=request.user).order_by('date'),\
                'money': sum, 'delta': delta, 'plans': final_list,\
                'sum_euro': SavingMoney.objects.filter(user_id=request.user).first().euro,\
                'sum_dollars': SavingMoney.objects.filter(user_id=request.user).first().dollars,\
                'categories': Categories.objects.filter(family_id=request.user)}
                                      )
        else:
            return render_to_response('index.html', {
                'name': request.user.username, 'flag': True,\
                'flows': FlowOfFunds.objects.filter(family_id=request.user),\
                'money': sum,\
                'categories': Categories.objects.filter(family_id=request.user)}
                                      )

    else:
        return render_to_response('index.html', {'flag': False})
Example #26
def makeForecast(inputList):
    import urllib2  # Python 2 only; use urllib.request on Python 3
    import urllib
    import datetime
    import time
    import os
    import dateutil
    # numpy (np), pandas (pd), matplotlib.dates (mdates) and sklearn's joblib
    # are assumed to be imported at module level.

###-----------------------------------------------------------------------------
### START part One "DATA COLLECTION" - getting fresh intraday data from the YahooFinance API to work with

    #### Get specified content for the instrument list, from file in Python root dir. 
    yahoo_ticker_list = []
    readThisFile = r'lista_' + inputList +'.txt'
    TickerFile = open(readThisFile)
    fleraTickers = TickerFile.read()
    yahoo_ticker_list = fleraTickers.split('\n')
    TickerFile.close()

    yahoo_RealNames_list = []
    readThisFile = r'lista_' + inputList +'_RealNames.txt'
    TickerFile = open(readThisFile)
    fleraTickers = TickerFile.read()
    yahoo_RealNames_list = fleraTickers.split('\n')
    TickerFile.close()

    #### Get content for the FEATURE list, from file in Python root dir.
    FEATURES = []
    readThisFile = r'FEATURES03.txt'
    featuresFile = open(readThisFile)
    fleraFeatures = featuresFile.read()
    FEATURES = fleraFeatures.split('\n')
    featuresFile.close()
    printToFile = ('DONE: reading instrument and features')####
    LocationToSave = r"loooooooooooooooooooooogFile.txt"
    saveFile = open(LocationToSave,'a')
    saveFile.write(printToFile)
    saveFile.close()

    #### Remove, possible old files in Python root dir.
    for eachTicker in yahoo_ticker_list:
        try:        
            os.remove(r'for'+eachTicker+'_excel.xlsx')
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:                     
            os.remove(r'EOD'+eachTicker+'.txt')###
        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt','a')
            logFile.write("\n"+printToFile)
            logFile.close()


    print('DONE: deleting old files')####
    time.sleep(4)


    #### Parse EOD data for every instrument from finance.yahoo and save it line by line in a 1st .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            urlToVisit =  'http://chartapi.finance.yahoo.com/instrument/1.0/'+eachTicker+'/chartdata;type=quote;range=2y/csv'
            sourceCode = urllib2.urlopen(urlToVisit).read()
            splitSource = sourceCode.split('\n')
            LocationToSave = r'EOD'+eachTicker+'.txt'

            for eachLine in splitSource:
                splitLine = eachLine.split(',')
                if len(splitLine)==6:
                    if 'values' not in eachLine:
                        saveFile = open(LocationToSave,'a')
                        lineToWrite = eachLine+'\n'
                        saveFile.write(lineToWrite)
            saveFile.close()
            time.sleep(1) ### in respect to yahoo.finance 
        except Exception as e:
            print(str(e))
            #pass
    print('DONE: parsing EOD data and save as EOD_ticker_txt')####


    #### Parse 5min data for every instrument from yahoo AND save in 2nd .txt file for each ticker
    for eachTicker in yahoo_ticker_list:
        try:
            urlToVisit =  'http://chartapi.finance.yahoo.com/instrument/1.0/'+eachTicker+'/chartdata;type=quote;range=40d/csv'
            sourceCode = urllib2.urlopen(urlToVisit).read()
            splitSource = sourceCode.split('\n')
            LocationToSave = r'5min'+eachTicker+'.txt'

            for eachLine in splitSource:
                splitLine = eachLine.split(',')
                if len(splitLine)==6:
                    if 'values' not in eachLine:
                        saveFile = open(LocationToSave,'a')
                        lineToWrite = eachLine+'\n'
                        saveFile.write(lineToWrite)

            saveFile.close()
            time.sleep(1) ### in respect to yahoo.finance 
        except Exception as e:
            print(str(e))
            #pass
    print('DONE: parsing 5min data and save as 5min_ticker_txt')####


    #### Sort out only today's 5min data from the 2nd .txt file AND save today's data in a 3rd .txt file for each ticker
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'5min'+eachTicker+'.txt'
            Unix,Open,High,Low,Close,Volume = np.loadtxt(FileLocation, unpack=True, delimiter=',')
            today = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d')
            UnixToday = datetime.datetime.fromtimestamp(Unix[-2]).strftime('%Y-%m-%d')
            if today != UnixToday:
                try:                     
                    os.remove(r'EOD'+eachTicker+'.txt')###
                except Exception as e:
                    print(str(e))
            else:
                for x in range(1, 400):#############
                    UnixToday = datetime.datetime.fromtimestamp(Unix[-x]).strftime('%Y-%m-%d')
                    if today == UnixToday:
                        forUnix = Unix[-x]
                        forOpen = Open[-x]
                        forHigh = High[-x]
                        forLow = Low[-x]
                        forClose = Close[-x]
                        LocationToSave = r'todayEOD'+eachTicker+'.txt'
                        saveFile = open(LocationToSave,'a')
                        lineToWrite = str(forUnix)+','+str(forOpen)+','+str(forHigh)+','+str(forLow)+','+str(forClose)+'\n'
                        saveFile.write(lineToWrite)
                        saveFile.close()

        except Exception as e:
            #pass
            print(str(e))
    e = str('DONE: sort out today\'s 5min data')####
    printToFile = (str(e))
    logFile = open('lo0gFile.txt','a')
    logFile.write(printToFile)
    logFile.close()
    

    #### Read the 3rd .txt file with only today's 5min data AND convert it to EOD format in a 4th .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'todayEOD'+eachTicker+'.txt'
            Unix,Open,High,Low,Close = np.loadtxt(FileLocation, unpack=True, delimiter=',')
            NoLen = len(Unix)

            forUnix = datetime.datetime.fromtimestamp(Unix[-2]).strftime('%Y%m%d')
            forOpen = Open[-2]
            forHigh = np.amax(High[0:NoLen])
            forLow = np.amin(Low[0:NoLen])
            forClose = Close[0]
    #        print(str(forUnix)+str(eachTicker)+str(UnixTodayInLoop))
            LocationToSave = r'EODtoday'+eachTicker+'.txt'
            saveFile = open(LocationToSave,'w')
            lineToWrite = str(forUnix)+','+str(forOpen)+','+str(forHigh)+','+str(forLow)+','+str(forClose)+',1\n'
            saveFile.write(lineToWrite)
            saveFile.close()

        except Exception as e:
            #pass
            print(str(e))
    print('DONE: convert 5min data to EOD')####
    printToFile = (str('DONE: convert 5min data to EOD'))
    logFile = open('lo0gFile.txt','a')
    logFile.write("\n"+printToFile)
    logFile.close()


    #### Append today's EOD from the 4th .txt file to the list of all EOD data in the 1st .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            EODFile = open(r'EODtoday'+eachTicker+'.txt')
            EODline = EODFile.readlines()
            EODtoday = EODline[0]
            EODFile.close()
            LocationToSave = r'EOD'+eachTicker+'.txt'
            saveFile = open(LocationToSave,'a')
            lineToWrite = EODtoday
            saveFile.write(lineToWrite)
            saveFile.close()
        except Exception as e:
            print(str(e))
            #pass

### END part One - getting fresh intraday data to work with
###-----------------------------------------------------------------------------
### START part Two "PREPROCESS" - Load fresh data and create all additional Features AND
### save all data to one .xlsx file for each ticker


    from datetime import datetime       
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'EOD'+eachTicker+'.txt'
            Date, Open, High, Low, Close, Volume = np.loadtxt(FileLocation, delimiter=',', unpack=True,converters={ 0: mdates.strpdate2num('%Y%m%d')})
            Zeros = [1]*len(Date)
            date = int(Date[-1])
            dt = datetime.fromordinal(date)
            ### create the individual Features
### START calculation of chosen list of FEATURES for the MACHINE LEARNING process ###
            _DayOfYear = float(dt.strftime('%j')) # part with calendar based FEATURES
            _DayOfMonth = float(dt.strftime('%d'))
            _DayOfWeek = float(dt.strftime('%w'))
            # part with FEATURES based on % relative from last Close,
            _Diff_CtoH = round((Close[-1]-High[-1])/High[-1],3)  
            _Diff_CtoH1 = round((Close[-1]-High[-2])/High[-2],3)
            _Diff_CtoH2 = round((Close[-1]-High[-3])/High[-3],3)
            _Diff_CtoH3 = round((Close[-1]-High[-4])/High[-4],3)
            _Diff_CtoH4 = round((Close[-1]-High[-5])/High[-5],3)
            _Diff_CtoH5 = round((Close[-1]-High[-6])/High[-6],3)
            _Diff_CtoH6 = round((Close[-1]-High[-7])/High[-7],3)
            _Diff_CtoH7 = round((Close[-1]-High[-8])/High[-8],3)
            _Diff_CtoH8 = round((Close[-1]-High[-9])/High[-9],3)
            _Diff_CtoH9 = round((Close[-1]-High[-10])/High[-10],3)
            _Diff_CtoH10 = round((Close[-1]-High[-11])/High[-11],3)
            _Diff_CtoH11 = round((Close[-1]-High[-12])/High[-12],3)
            _Diff_CtoH12 = round((Close[-1]-High[-13])/High[-13],3)
            _Diff_CtoH13 = round((Close[-1]-High[-14])/High[-14],3)
            _Diff_CtoH14 = round((Close[-1]-High[-15])/High[-15],3)
            _Diff_CtoH15 = round((Close[-1]-High[-16])/High[-16],3)
            _Diff_CtoH16 = round((Close[-1]-High[-17])/High[-17],3)
            _Diff_CtoH17 = round((Close[-1]-High[-18])/High[-18],3)
            _Diff_CtoH18 = round((Close[-1]-High[-19])/High[-19],3)
            _Diff_CtoH19 = round((Close[-1]-High[-20])/High[-20],3)
            _Diff_CtoH20 = round((Close[-1]-High[-21])/High[-21],3)
            _Diff_CtoH21 = round((Close[-1]-High[-22])/High[-22],3)
            _Diff_CtoH22 = round((Close[-1]-High[-23])/High[-23],3)
            _Diff_CtoH23 = round((Close[-1]-High[-24])/High[-24],3)
            _Diff_CtoH24 = round((Close[-1]-High[-25])/High[-25],3)
            _Diff_CtoH25 = round((Close[-1]-High[-26])/High[-26],3)

            _Diff_CtoL = round((Close[-1]-Low[-1])/Low[-1],3)
            _Diff_CtoL1 = round((Close[-1]-Low[-2])/Low[-2],3)
            _Diff_CtoL2 = round((Close[-1]-Low[-3])/Low[-3],3)
            _Diff_CtoL3 = round((Close[-1]-Low[-4])/Low[-4],3)
            _Diff_CtoL4 = round((Close[-1]-Low[-5])/Low[-5],3)
            _Diff_CtoL5 = round((Close[-1]-Low[-6])/Low[-6],3)
            _Diff_CtoL6 = round((Close[-1]-Low[-7])/Low[-7],3)
            _Diff_CtoL7 = round((Close[-1]-Low[-8])/Low[-8],3)
            _Diff_CtoL8 = round((Close[-1]-Low[-9])/Low[-9],3)
            _Diff_CtoL9 = round((Close[-1]-Low[-10])/Low[-10],3)
            _Diff_CtoL10 = round((Close[-1]-Low[-11])/Low[-11],3)
            _Diff_CtoL11 = round((Close[-1]-Low[-12])/Low[-12],3)
            _Diff_CtoL12 = round((Close[-1]-Low[-13])/Low[-13],3)
            _Diff_CtoL13 = round((Close[-1]-Low[-14])/Low[-14],3)
            _Diff_CtoL14 = round((Close[-1]-Low[-15])/Low[-15],3)
            _Diff_CtoL15 = round((Close[-1]-Low[-16])/Low[-16],3)
            _Diff_CtoL16 = round((Close[-1]-Low[-17])/Low[-17],3)
            _Diff_CtoL17 = round((Close[-1]-Low[-18])/Low[-18],3)
            _Diff_CtoL18 = round((Close[-1]-Low[-19])/Low[-19],3)
            _Diff_CtoL19 = round((Close[-1]-Low[-20])/Low[-20],3)
            _Diff_CtoL20 = round((Close[-1]-Low[-21])/Low[-21],3)
            _Diff_CtoL21 = round((Close[-1]-Low[-22])/Low[-22],3)
            _Diff_CtoL22 = round((Close[-1]-Low[-23])/Low[-23],3)
            _Diff_CtoL23 = round((Close[-1]-Low[-24])/Low[-24],3)
            _Diff_CtoL24 = round((Close[-1]-Low[-25])/Low[-25],3)
            _Diff_CtoL25 = round((Close[-1]-Low[-26])/Low[-26],3)

            _Diff_CtoO = round((Close[-1]-Open[-1])/Open[-1],3)
            _Diff_CtoO1 = round((Close[-1]-Open[-2])/Open[-2],3)
            _Diff_CtoO2 = round((Close[-1]-Open[-3])/Open[-3],3)
            _Diff_CtoO3 = round((Close[-1]-Open[-4])/Open[-4],3)
            _Diff_CtoO4 = round((Close[-1]-Open[-5])/Open[-5],3)
            _Diff_CtoO5 = round((Close[-1]-Open[-6])/Open[-6],3)
            _Diff_CtoO6 = round((Close[-1]-Open[-7])/Open[-7],3)
            _Diff_CtoO7 = round((Close[-1]-Open[-8])/Open[-8],3)
            _Diff_CtoO8 = round((Close[-1]-Open[-9])/Open[-9],3)
            _Diff_CtoO9 = round((Close[-1]-Open[-10])/Open[-10],3)

            _Diff_CtoC1 = round((Close[-1]-Close[-2])/Close[-2],3)  # vs the previous close; Close[-1]-Close[-1] is always 0
            _Diff_CtoC2 = round((Close[-1]-Close[-3])/Close[-3],3)
            _Diff_CtoC3 = round((Close[-1]-Close[-4])/Close[-4],3)
            _Diff_CtoC4 = round((Close[-1]-Close[-5])/Close[-5],3)
            _Diff_CtoC5 = round((Close[-1]-Close[-6])/Close[-6],3)
            _Diff_CtoC6 = round((Close[-1]-Close[-7])/Close[-7],3)
            _Diff_CtoC7 = round((Close[-1]-Close[-8])/Close[-8],3)
            _Diff_CtoC8 = round((Close[-1]-Close[-9])/Close[-9],3)
            _Diff_CtoC9 = round((Close[-1]-Close[-10])/Close[-10],3)
            ### PART with only basic HLOC data
            _justOpen = Open[-1] 
            _justHigh = High[-1]
            _justLow = Low[-1]
            _justClose = Close[-1]
            ### PART with FEATURES based on % relation Close or Sub-indicator to upper/lower BollingerBand
            _SMA_H3 = float(round(np.sum(High[:-4:-1])/5,2)) # short moving average based on H & L
            _SMA_L3 = float(round(np.sum(Low[:-4:-1])/5,2))  # this two are sub-indicators

            _BBU5 = round(np.sum(Close[:-4:-1])/5,3)+(round(np.std(Close[-4:-1])*2,3)) # Upper BollingerBand
            _BBD5 = round(np.sum(Close[:-4:-1])/5,3)-(round(np.std(Close[-4:-1])*2,3)) # Lower BollingerBand
            _DiffU5_C = round((Close[-1]-_BBU5)/_BBU5,3) 
            _DiffD5_C = round((Close[-1]-_BBD5)/_BBD5,3)

            _BBU13 = round(np.sum(Close[:-12:-1])/13,3)+(round(np.std(Close[:-12:-1])*2,3))
            _BBD13 = round(np.sum(Close[:-12:-1])/13,3)-(round(np.std(Close[:-12:-1])*2,3))
            _DiffU13_L3 = round((_SMA_L3-_BBU13)/_BBU13,3)

            _BBU21 = round(np.sum(Close[:-20:-1])/21,3)+(round(np.std(Close[:-20:-1])*2,3))
            _BBD21 = round(np.sum(Close[:-20:-1])/21,3)-(round(np.std(Close[:-20:-1])*2,3))
            _DiffD21_C = round((Close[-1]-_BBD21)/_BBD21,3)
            _DiffD21_H3 = round((_SMA_H3-_BBD21)/_BBD21,3)  

            _BBU34 = round(np.sum(Close[:-33:-1])/34,3)+(round(np.std(Close[:-33:-1])*2,3))
            _BBD34 = round(np.sum(Close[:-33:-1])/34,3)-(round(np.std(Close[:-33:-1])*2,3))
            _DiffU34_C = round((Close[-1]-_BBU34)/_BBU34,3)
            _DiffD34_H3 = round((_SMA_H3-_BBD34)/_BBD34,3)   

            _BBU55 = round(np.sum(Close[:-54:-1])/55,3)+(round(np.std(Close[:-54:-1])*2,3))
            _BBD55 = round(np.sum(Close[:-54:-1])/55,3)-(round(np.std(Close[:-54:-1])*2,3))
            _DiffU55_L3 = round((_SMA_L3-_BBU55)/_BBU55,3)

            _BBU89 = round(np.sum(Close[:-88:-1])/89,3)+(round(np.std(Close[:-88:-1])*2,3))
            _BBD89 = round(np.sum(Close[:-88:-1])/89,3)-(round(np.std(Close[:-88:-1])*2,3))
            _DiffU89_C = round((Close[-1]-_BBU89)/_BBU89,3)
            _DiffU89_L3 = round((_SMA_L3-_BBU89)/_BBU89,3)

            _BBU144 = round(np.sum(Close[:-143:-1])/144,3)+(round(np.std(Close[:-143:-1])*2,3))
            _BBD144 = round(np.sum(Close[:-143:-1])/144,3)-(round(np.std(Close[:-143:-1])*2,3))
            _DiffU144_L3 = round((_SMA_L3-_BBU144)/_BBU144,3)

            _BBU233 = round(np.sum(Close[:-232:-1])/233,3)+(round(np.std(Close[:-232:-1])*2,3))
            _BBD233 = round(np.sum(Close[:-232:-1])/233,3)-(round(np.std(Close[:-232:-1])*2,3))
            _DiffU233_C = round((Close[-1]-_BBU233)/_BBU233,3)

            _BBU300 = round(np.sum(Close[:299:-1])/300,3)+(round(np.std(Close[:299:-1])*2,3))
            _BBD300 = round(np.sum(Close[:299:-1])/300,3)-(round(np.std(Close[:299:-1])*2,3))
            _DiffU300_C = round((Close[-1]-_BBU300)/_BBU300,3)
            _DiffD300_C = round((Close[-1]-_BBD300)/_BBD300,3)
            ### PART with % relation, Close to Maximum High or Low from various days in history
            _High3_H = round((Close[-1]-np.amax(High[:-2:-1]))/np.amax(High[:-2:-1]),3)
            _High5_H = round((Close[-1]-np.amax(High[:-4:-1]))/np.amax(High[:-4:-1]),3)
            _High100_H = round((Close[-1]-np.amax(High[:-99:-1]))/np.amax(High[:-99:-1]),3)
            _High377_H = round((Close[-1]-np.amax(High[:-376:-1]))/np.amax(High[:-376:-1]),3)
            _Low3_L = round((Close[-1]-np.amin(Low[:-2:-1]))/np.amin(Low[:-2:-1]),3)
            _Low34_L = round((Close[-1]-np.amin(Low[:-33:-1]))/np.amin(Low[:-33:-1]),3)
            _Low55_L = round((Close[-1]-np.amin(Low[:-54:-1]))/np.amin(Low[:-54:-1]),3)
            _Hi3to5 = round((Close[-1]-np.amax(High[:-4:-2]))/np.amax(High[:-4:-2]),3)
            _Hi5to8 = round((Close[-1]-np.amax(High[:-7:-4]))/np.amax(High[:-7:-4]),3)
            _Hi8to13 = round((Close[-1]-np.amax(High[:-12:-7]))/np.amax(High[:-12:-7]),3)
            _Hi34to55 = round((Close[-1]-np.amax(High[:-54:-33]))/np.amax(High[:-54:-33]),3)
            _Hi233to377 = round((Close[-1]-np.amax(High[:-376:-232]))/np.amax(High[:-376:-232]),3)
            _Lo3to5 = round((Close[-1]-np.amin(Low[:-4:-2]))/np.amin(Low[:-4:-2]),3)
            _Lo233to377 = round((Close[-1]-np.amin(Low[:-376:-232]))/np.amin(Low[:-376:-232]),3)        
            ### PART with simple arithmetic Feature
            _EvNo5 = round((Close[-1]-5)/5,2) 
### END calculation of chosen list of FEATURES for the MACHINE LEARNING process ###
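            ### NOTE (sketch): the repetitive _Diff_Cto* blocks above follow one
            ### pattern and could be generated in a loop with the same rounding
            ### and indexing, e.g. for the H and L variants:
            ###     diffs = {}
            ###     for i in range(26):
            ###         suffix = '' if i == 0 else str(i)
            ###         diffs['_Diff_CtoH' + suffix] = round((Close[-1]-High[-(i+1)])/High[-(i+1)], 3)
            ###         diffs['_Diff_CtoL' + suffix] = round((Close[-1]-Low[-(i+1)])/Low[-(i+1)], 3)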

            ### append the individual Features to Pandas Dataframe
            df = pd.DataFrame(columns = FEATURES)
            df = df.append({
                        '_DayOfYear':_DayOfYear,
                        '_DayOfMonth':_DayOfMonth,
                        '_DayOfWeek':_DayOfWeek,
                        '_Diff_CtoH':_Diff_CtoH,
                        '_Diff_CtoH1':_Diff_CtoH1,
                        '_Diff_CtoH2':_Diff_CtoH2,
                        '_Diff_CtoH3':_Diff_CtoH3,
                        '_Diff_CtoH4':_Diff_CtoH4,
                        '_Diff_CtoH5':_Diff_CtoH5,
                        '_Diff_CtoH6':_Diff_CtoH6,
                        '_Diff_CtoH7':_Diff_CtoH7,
                        '_Diff_CtoH8':_Diff_CtoH8,
                        '_Diff_CtoH9':_Diff_CtoH9,
                        '_Diff_CtoH10':_Diff_CtoH10,
                        '_Diff_CtoH11':_Diff_CtoH11,
                        '_Diff_CtoH12':_Diff_CtoH12,
                        '_Diff_CtoH13':_Diff_CtoH13,
                        '_Diff_CtoH14':_Diff_CtoH14,
                        '_Diff_CtoH15':_Diff_CtoH15,
                        '_Diff_CtoH16':_Diff_CtoH16,
                        '_Diff_CtoH17':_Diff_CtoH17,
                        '_Diff_CtoH18':_Diff_CtoH18,
                        '_Diff_CtoH19':_Diff_CtoH19,
                        '_Diff_CtoH20':_Diff_CtoH20,
                        '_Diff_CtoH21':_Diff_CtoH21,
                        '_Diff_CtoH22':_Diff_CtoH22,
                        '_Diff_CtoH23':_Diff_CtoH23,
                        '_Diff_CtoH24':_Diff_CtoH24,
                        '_Diff_CtoH25':_Diff_CtoH25,
                        '_Diff_CtoL':_Diff_CtoL,
                        '_Diff_CtoL1':_Diff_CtoL1,
                        '_Diff_CtoL2':_Diff_CtoL2,
                        '_Diff_CtoL3':_Diff_CtoL3,
                        '_Diff_CtoL4':_Diff_CtoL4,
                        '_Diff_CtoL5':_Diff_CtoL5,
                        '_Diff_CtoL6':_Diff_CtoL6,
                        '_Diff_CtoL7':_Diff_CtoL7,
                        '_Diff_CtoL8':_Diff_CtoL8,
                        '_Diff_CtoL9':_Diff_CtoL9,
                        '_Diff_CtoL10':_Diff_CtoL10,
                        '_Diff_CtoL11':_Diff_CtoL11,
                        '_Diff_CtoL12':_Diff_CtoL12,
                        '_Diff_CtoL13':_Diff_CtoL13,
                        '_Diff_CtoL14':_Diff_CtoL14,
                        '_Diff_CtoL15':_Diff_CtoL15,
                        '_Diff_CtoL16':_Diff_CtoL16,
                        '_Diff_CtoL17':_Diff_CtoL17,
                        '_Diff_CtoL18':_Diff_CtoL18,
                        '_Diff_CtoL19':_Diff_CtoL19,
                        '_Diff_CtoL20':_Diff_CtoL20,
                        '_Diff_CtoL21':_Diff_CtoL21,
                        '_Diff_CtoL22':_Diff_CtoL22,
                        '_Diff_CtoL23':_Diff_CtoL23,
                        '_Diff_CtoL24':_Diff_CtoL24,
                        '_Diff_CtoL25':_Diff_CtoL25,
                        '_Diff_CtoO':_Diff_CtoO,
                        '_Diff_CtoO1':_Diff_CtoO1,
                        '_Diff_CtoO2':_Diff_CtoO2,
                        '_Diff_CtoO3':_Diff_CtoO3,
                        '_Diff_CtoO4':_Diff_CtoO4,
                        '_Diff_CtoO5':_Diff_CtoO5,
                        '_Diff_CtoO6':_Diff_CtoO6,
                        '_Diff_CtoO7':_Diff_CtoO7,
                        '_Diff_CtoO8':_Diff_CtoO8,
                        '_Diff_CtoO9':_Diff_CtoO9,
                        '_Diff_CtoC1':_Diff_CtoC1,
                        '_Diff_CtoC2':_Diff_CtoC2,
                        '_Diff_CtoC3':_Diff_CtoC3,
                        '_Diff_CtoC4':_Diff_CtoC4,
                        '_Diff_CtoC5':_Diff_CtoC5,
                        '_Diff_CtoC6':_Diff_CtoC6,
                        '_Diff_CtoC7':_Diff_CtoC7,
                        '_Diff_CtoC8':_Diff_CtoC8,
                        '_Diff_CtoC9':_Diff_CtoC9,
                        '_justOpen':_justOpen,
                        '_justHigh':_justHigh,
                        '_justLow':_justLow,
                        '_justClose':_justClose,
                        '_DiffU5_C':_DiffU5_C,
                        '_DiffD5_C':_DiffD5_C,
                        '_DiffU13_L3':_DiffU13_L3,
                        '_DiffD21_C':_DiffD21_C,
                        '_DiffD21_H3':_DiffD21_H3,
                        '_DiffU34_C':_DiffU34_C,
                        '_DiffD34_H3':_DiffD34_H3,
                        '_DiffU55_L3':_DiffU55_L3,
                        '_DiffU89_C':_DiffU89_C,
                        '_DiffU89_L3':_DiffU89_L3,
                        '_DiffU144_L3':_DiffU144_L3,
                        '_DiffU233_C':_DiffU233_C,
                        '_DiffU300_C':_DiffU300_C,
                        '_DiffD300_C':_DiffD300_C,
                        '_High3_H':_High3_H,
                        '_High5_H':_High5_H,
                        '_High100_H':_High100_H,
                        '_High377_H':_High377_H,
                        '_Low3_L':_Low3_L,
                        '_Low34_L':_Low34_L,
                        '_Low55_L':_Low55_L,
                        '_Hi3to5':_Hi3to5,
                        '_Hi5to8':_Hi5to8,
                        '_Hi8to13':_Hi8to13,
                        '_Hi34to55':_Hi34to55,
                        '_Hi233to377':_Hi233to377,
                        '_Lo3to5':_Lo3to5,
                        '_Lo233to377':_Lo233to377,
                        '_EvNo5':_EvNo5,
                        }, ignore_index = True)
            ### Write Pandas DataFrame to .xlsx file
            FileLocation4excel = r'for'+eachTicker+'_excel.xlsx'
            df.to_excel(FileLocation4excel, index=False)
        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt','a')
            logFile.write(printToFile)
            logFile.close()

### END part Two - Load fresh data and create all additional Features AND
### save all data to one .xlsx file for each ticker
###-----------------------------------------------------------------------------
### START part Three "PREDICTION PROCESS" - Load fresh data from the .xlsx file and make Predictions from today's intraday data


    ### Cleaning up unnecessary files
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r''+eachTicker+'.txt')###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'EODtoday'+eachTicker+'.txt')###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:                
            os.remove(r'todayEOD'+eachTicker+'.txt')###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:         
            os.remove(r'5min'+eachTicker+'.txt')###        
        except Exception as e:
            print(str(e))    
    ### END Cleaning up unnecessary files

### START part with letting the algo make prediction/Outlook from the fresh data from the .xlsx file
            
    printToFile = (str("Start makeing Forecasts"))
    logFile = open('lo0gFile.txt','a')
    logFile.write("\n" + printToFile)
    logFile.close()

    barChartName = [] #making some empty datalists
    barChartForecast = []         

    Ultimate_df2 = pd.DataFrame(columns = ['[Name]', '[Forecast]']) # creating empty Pandas dataframe

    for eachTicker, eachRealNames in zip(yahoo_ticker_list, yahoo_RealNames_list):
        try:
            global GLBeachName 
            GLBeachName = eachRealNames            
            
            Location = r'for'+eachTicker+'_excel.xlsx'
            data = pd.read_excel(Location)
            X = np.array(data[FEATURES].values) # making a NumPy array from the Pandas dataset
            y1 = data['_justClose'].values # saves the intraday close value 

    ### START loading the saved .pkl files with trained algo information and creating final Prediction/Outlook
    ### Every joblib.load indicates one .pkl file
    ### The file name indicates for how many future days the algo is trained to predict the outcome for (1d = One day)
    ### The first 7 algos score how high the Risk/Reward ratio was for the future move;
    ### the second 7 algos score how big the future move was, heavily averaged out to smooth the result.
    ### Every trained algo gives 5 predictions: StrongSell, Sell, Neutral, Buy and StrongBuy (5cat = five categories)
    ### Every prediction contains a percentage chance for each of the 5 possible outcomes
    ### Each of the five probabilities is stored in a variable, for every trained algo
            logreg1 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat1d.pkl')
            Value1D = logreg1.predict_proba(X)
            Sell1D = round(Value1D[0][0],4)
            Under1D = round(Value1D[0][1],4)
            Hold1D = round(Value1D[0][2],4)
            Over1D = round(Value1D[0][3],4)
            Buy1D = round(Value1D[0][4],4) 
    
            logreg2 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat2d.pkl')
            Value2D = logreg2.predict_proba(X)
            Sell2D = round(Value2D[0][0],4)
            Under2D = round(Value2D[0][1],4)
            Hold2D = round(Value2D[0][2],4)
            Over2D = round(Value2D[0][3],4)
            Buy2D = round(Value2D[0][4],4)          
    
            logreg3 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat3d.pkl')
            Value3D = logreg3.predict_proba(X)
            Sell3D = round(Value3D[0][0],4)
            Under3D = round(Value3D[0][1],4)
            Hold3D = round(Value3D[0][2],4)
            Over3D = round(Value3D[0][3],4)
            Buy3D = round(Value3D[0][4],4)           
    
            logreg5 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat5d.pkl')
            Value5D = logreg5.predict_proba(X)
            Sell5D = round(Value5D[0][0],4)
            Under5D = round(Value5D[0][1],4)
            Hold5D = round(Value5D[0][2],4)
            Over5D = round(Value5D[0][3],4)
            Buy5D = round(Value5D[0][4],4) 
    
            logreg8 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat8d.pkl')
            Value8D = logreg8.predict_proba(X)
            Sell8D = round(Value8D[0][0],4)
            Under8D = round(Value8D[0][1],4)
            Hold8D = round(Value8D[0][2],4)
            Over8D = round(Value8D[0][3],4)
            Buy8D = round(Value8D[0][4],4) 
    
            logreg13 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat13d.pkl')
            Value13D = logreg13.predict_proba(X)
            Sell13D = round(Value13D[0][0],4)
            Under13D = round(Value13D[0][1],4)
            Hold13D = round(Value13D[0][2],4)
            Over13D = round(Value13D[0][3],4)
            Buy13D = round(Value13D[0][4],4) 
            
            logreg21 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat21d.pkl')
            Value21D = logreg21.predict_proba(X)
            Sell21D = round(Value21D[0][0],4)
            Under21D = round(Value21D[0][1],4)
            Hold21D = round(Value21D[0][2],4)
            Over21D = round(Value21D[0][3],4)
            Buy21D = round(Value21D[0][4],4) 

            ### Part with algos based on calculating how Big the future move was

            roc_logreg1 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat1dROC.pkl')
            roc_Value1D = roc_logreg1.predict_proba(X)
            roc_Sell1D = round(roc_Value1D[0][0],4)
            roc_Under1D = round(roc_Value1D[0][1],4)
            roc_Hold1D = round(roc_Value1D[0][2],4)
            roc_Over1D = round(roc_Value1D[0][3],4)
            roc_Buy1D = round(roc_Value1D[0][4],4) 
    
            roc_logreg2 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat2dROC.pkl')
            roc_Value2D = roc_logreg2.predict_proba(X)
            roc_Sell2D = round(roc_Value2D[0][0],4)
            roc_Under2D = round(roc_Value2D[0][1],4)
            roc_Hold2D = round(roc_Value2D[0][2],4)
            roc_Over2D = round(roc_Value2D[0][3],4)
            roc_Buy2D = round(roc_Value2D[0][4],4)          
    
            roc_logreg3 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat3dROC.pkl')
            roc_Value3D = roc_logreg3.predict_proba(X)
            roc_Sell3D = round(roc_Value3D[0][0],4)
            roc_Under3D = round(roc_Value3D[0][1],4)
            roc_Hold3D = round(roc_Value3D[0][2],4)
            roc_Over3D = round(roc_Value3D[0][3],4)
            roc_Buy3D = round(roc_Value3D[0][4],4)           
    
            roc_logreg5 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat5dROC.pkl')
            roc_Value5D = roc_logreg5.predict_proba(X)
            roc_Sell5D = round(roc_Value5D[0][0],4)
            roc_Under5D = round(roc_Value5D[0][1],4)
            roc_Hold5D = round(roc_Value5D[0][2],4)
            roc_Over5D = round(roc_Value5D[0][3],4)
            roc_Buy5D = round(roc_Value5D[0][4],4) 
    
            roc_logreg8 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat8dROC.pkl')
            roc_Value8D = roc_logreg8.predict_proba(X)
            roc_Sell8D = round(roc_Value8D[0][0],4)
            roc_Under8D = round(roc_Value8D[0][1],4)
            roc_Hold8D = round(roc_Value8D[0][2],4)
            roc_Over8D = round(roc_Value8D[0][3],4)
            roc_Buy8D = round(roc_Value8D[0][4],4) 
    
            roc_logreg13 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat13dROC.pkl')
            roc_Value13D = roc_logreg13.predict_proba(X)
            roc_Sell13D = round(roc_Value13D[0][0],4)
            roc_Under13D = round(roc_Value13D[0][1],4)
            roc_Hold13D = round(roc_Value13D[0][2],4)
            roc_Over13D = round(roc_Value13D[0][3],4)
            roc_Buy13D = round(roc_Value13D[0][4],4) 
            
            roc_logreg21 = joblib.load(r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat21dROC.pkl')
            roc_Value21D = roc_logreg21.predict_proba(X)
            roc_Sell21D = round(roc_Value21D[0][0],4)
            roc_Under21D = round(roc_Value21D[0][1],4)
            roc_Hold21D = round(roc_Value21D[0][2],4)
            roc_Over21D = round(roc_Value21D[0][3],4)
            roc_Buy21D = round(roc_Value21D[0][4],4)

    ### END loading the saved .pkl files with trained algo information

    ### START building own indicators for final prediction values
    
            ### 1st stage, adding all StrongBuy percentage vs all StrongSell percentage,
            ### continuing with comparing all Buy percentage vs all Sell percentage
            ### Doing this for the 7 algos based on the future Pattern first
            Buy2stdav =  Buy1D+Buy2D+Buy3D+Buy5D+Buy8D+Buy13D+Buy21D
            Sell2stdav = Sell1D+Sell2D+Sell3D+Sell5D+Sell8D+Sell13D+Sell21D
            Over1stdav = Over1D+Over2D+Over3D+Over5D+Over8D+Over13D+Over21D
            Under1stdav = Under1D+Under2D+Under3D+Under5D+Under8D+Under13D+Under21D
            ### 2nd stage, Comparing all positive percentage vs all negative percentage
            BuyVsSell = Buy2stdav-Sell2stdav 
            OverVsUnder = (Over1stdav-Under1stdav)/27.0 ### using divide by 27 to normalise Strong vs Normal Buy/Sell
            preOutlook = BuyVsSell + OverVsUnder
                        
            ### Only use the value if it clears the threshold in either direction, else it stays 0
            pat_Outlook = float(0.0) 
            if  BuyVsSell > 0.010 and OverVsUnder > 0.010: 
                pat_Outlook = round(preOutlook*100,2)
            elif BuyVsSell < -0.009 and OverVsUnder < -0.009:
                pat_Outlook = round(preOutlook*100,2)
            else:
                pass
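            ### Worked mini-example (illustrative numbers): if the seven Buy
            ### probabilities sum to 1.40 and the Sells to 1.20, BuyVsSell = 0.20;
            ### if Over1stdav - Under1stdav = 0.54, then OverVsUnder = 0.54/27.0 = 0.02;
            ### both clear the 0.010 gate, so pat_Outlook = round((0.20 + 0.02)*100, 2) = 22.0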

            ### Doing this one more time, for the 7 algos based on the future Big move
            roc_Buy2stdav =  roc_Buy1D+roc_Buy2D+roc_Buy3D+roc_Buy5D+roc_Buy8D+roc_Buy13D+roc_Buy21D
            roc_Sell2stdav = roc_Sell1D+roc_Sell2D+roc_Sell3D+roc_Sell5D+roc_Sell8D+roc_Sell13D+roc_Sell21D
            roc_Over1stdav = roc_Over1D+roc_Over2D+roc_Over3D+roc_Over5D+roc_Over8D+roc_Over13D+roc_Over21D
            roc_Under1stdav = roc_Under1D+roc_Under2D+roc_Under3D+roc_Under5D+roc_Under8D+roc_Under13D+roc_Under21D
            roc_BuyVsSell = roc_Buy2stdav-roc_Sell2stdav
            roc_OverVsUnder = (roc_Over1stdav-roc_Under1stdav)/2.2
            roc_preOutlook = (roc_BuyVsSell + roc_OverVsUnder)/6.1
            roc_Outlook = float(0.0)

            if  roc_BuyVsSell > 0.017 and roc_OverVsUnder > 0.017:  
                roc_Outlook = round(roc_preOutlook*100,2)
            elif roc_BuyVsSell < -0.014 and roc_OverVsUnder < -0.014:
                roc_Outlook = round(roc_preOutlook*100,2)
            else:
                pass
            ### This value is the Final Outlook for the individual Instrument
            Outlook = round(pat_Outlook + (roc_Outlook),2)
            global GLBoutlook             
            GLBoutlook = Outlook
            print(roc_Outlook)
            print(pat_Outlook)
            print(Outlook)             

            ### Attach a label to the Outlook value; if the signal is not strong
            ### enough, the label stays at the default (note: Trade is unused below)
            Trade = '--On Hold--'
            ### parentheses added around the 'or' terms (presumed grouping):
            ### 'and' binds tighter than 'or', so the unparenthesised original
            ### skipped the Outlook check whenever the first comparison was true
            if roc_Outlook > 0.017 and pat_Outlook > 0.01 and Outlook > 0.04:
                Trade = 'BUY'
            elif (roc_Outlook > 0.017 or pat_Outlook > 0.01) and Outlook > 0.04:
                Trade = 'Strong'
            elif roc_Outlook < -0.014 and pat_Outlook < -0.009 and Outlook < -0.009:
                Trade = 'SHORT'
            elif (roc_Outlook < -0.014 or pat_Outlook < -0.009) and Outlook < -0.009:
                Trade = 'Weak'
            else:
                pass                
 
            barChartName.append(GLBeachName)
            barChartForecast.append(GLBoutlook)

            Ultimate_df2 = Ultimate_df2.append({'[Name]':eachRealNames,
                                                '[Forecast]':Outlook,
                                                }, ignore_index = True) 
        
        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt','a')
            logFile.write("\n" + printToFile)
            logFile.close()

    printToFile = (str("Done makeing Forecasts"))
    logFile = open('lo0gFile.txt','a')
    logFile.write("\n" + printToFile)
    logFile.close()            
    
    ### Saving the results to .xlsx file
    File2Location4excel = r'TEST_DB_excel.xlsx'
    import datetime
    import time
    unixTimestamp = int(time.time())
    timestamp = str(datetime.datetime.fromtimestamp(int(unixTimestamp)).strftime('%Y-%m-%d %H_%M'))
    print(timestamp)
    Ultimate_df = Ultimate_df2.sort_values('[Forecast]', ascending=False)  # DataFrame.sort was removed from pandas; sort_values is the current API
    Ultimate_df.to_excel(File2Location4excel, index=False)  # write added: the path above was assigned but never written (presumed intent of the comment)
    print(Ultimate_df)
    global theList
    theList = Ultimate_df
    time.sleep(3)

### END part where the algo makes a prediction/Outlook from the fresh data in the .xlsx file


### START - Part that creates a bar chart of all predictions in the instrument list

    import matplotlib
    matplotlib.style.use('ggplot')
    import matplotlib.pyplot as plt
    import datetime
    import time
    unixTimestamp = int(time.time())
    timestamp = str(datetime.datetime.fromtimestamp(int(unixTimestamp)).strftime('%Y-%m-%d %H_%M'))
    
    pltTitle = str(timestamp + ' CET  predictive algo IQ1.4: ' + inputList)    
    
    Ultimate_df.set_index(["[Name]"],inplace=True)
    Ultimate_df.plot(kind='bar',alpha=0.75, rot=75, title="", legend=False)
    plt.xlabel("")
    fig1 = plt.gcf()
    fig1.set_size_inches(16, 9)
    plt.title(pltTitle,fontsize=26, fontweight='bold', color='#7f7f7f',family='Courier New')
    plt.show()
    plt.draw()
    fig1.savefig(r'C:\Users\UserTrader\Documents\_Image\\'+inputList+'.png', dpi=72)
Example no. 27
0
from datetime import datetime  # imports added; the original snippet assumed both
import random

def random_date():
    """Return a datetime at midnight on a random day of the current year."""
    start_date = datetime.now().replace(day=1, month=1).toordinal()
    end_date = datetime.now().toordinal()
    random_day = datetime.fromordinal(random.randint(start_date, end_date))
    return random_day
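
# A minimal usage sketch; the date drawn differs on every call:
print(random_date())  # e.g. 2024-03-09 00:00:00 (hypothetical output)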
Example no. 28
0
import datetime
print(datetime.datetime.today())
print(datetime.datetime.today().weekday())

# convert an integer ordinal to a date
from datetime import datetime
dt = datetime.fromordinal(733828)
print(dt)
print(dt.weekday())

integer_date = datetime(2012, 10, 1, 0, 0)
print(integer_date.toordinal())
print(integer_date.month)

integer_date1 = datetime(2012, 10, 1, 0, 0)
integer_date2 = datetime(2012, 12, 31, 0, 0)
integer_date3 = datetime(2013, 12, 31, 0, 0)
print(integer_date1.toordinal())
print(integer_date2.toordinal())
print(integer_date3.toordinal())
print(integer_date2.toordinal() - integer_date1.toordinal())
print(integer_date3.toordinal() - integer_date2.toordinal())
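
# The same day arithmetic is usually written with timedelta instead of raw
# ordinals; an equivalent sketch (datetime is already imported above):
from datetime import timedelta
d1 = datetime(2012, 10, 1)
d2 = datetime(2012, 12, 31)
print((d2 - d1).days)            # 91, matching the ordinal subtraction above
print(d1 + timedelta(days=91))   # 2012-12-31 00:00:00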
Example no. 29
0
                _Avg_CtoC21 = round(
                    (np.average(Close[x + 1:x + 22]) - Close[x]) / Close[x], 4)
                _Men_CtoC21 = round(
                    (np.median(Close[x + 1:x + 22]) - Close[x]) / Close[x], 4)
                _end21 = round(
                    (_Avg_OtoO21 + _Men_OtoO21 + _Avg_HtoH21 + _Men_HtoH21 +
                     _Avg_LtoL21 + _Men_LtoL21 + _Avg_CtoC21 + _Men_CtoC21) /
                    8, 4)
            except Exception as e:
                print('part14: ' + str(e))  # report the actual error, not just the location marker
### END Second part - calculate how big the future move was, heavily averaged out to smooth the result.

### START calculation of chosen list of FEATURES for the MACHINE LEARNING process ###
# Get Date info from .txt file and convert it to string format
            date = int(Date[x])
            dt = datetime.fromordinal(date)
            _DateStamp = str(dt.strftime('%Y-%m-%d'))
            #part with date related Features
            _DayOfYear = float(dt.strftime('%j'))
            _DayOfMonth = float(dt.strftime('%d'))
            _DayOfWeek = float(dt.strftime('%w'))
            #part with percentual relations with past price levels
            _Diff_CtoH = round((Close[x] - High[x]) / High[x], 3)
            _Diff_CtoH1 = round((Close[x] - High[x - 1]) / High[x - 1], 3)
            _Diff_CtoH2 = round((Close[x] - High[x - 2]) / High[x - 2], 3)
            _Diff_CtoH3 = round((Close[x] - High[x - 3]) / High[x - 3], 3)
            _Diff_CtoH4 = round((Close[x] - High[x - 4]) / High[x - 4], 3)
            _Diff_CtoH5 = round((Close[x] - High[x - 5]) / High[x - 5], 3)
            _Diff_CtoH6 = round((Close[x] - High[x - 6]) / High[x - 6], 3)
            _Diff_CtoH7 = round((Close[x] - High[x - 7]) / High[x - 7], 3)
Example no. 30
0
def md2date(md, instr):
    return dt.fromordinal(md + dateOffset(instr).toordinal())
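
# md2date depends on a dateOffset helper defined elsewhere in that codebase; a
# self-contained sketch under assumed stand-ins (dt aliased to the datetime
# class, and a hypothetical fixed epoch per instrument):
from datetime import datetime as dt

def dateOffset(instr):
    return dt(2000, 1, 1)  # hypothetical: every instrument counts days from 2000-01-01

def md2date(md, instr):
    return dt.fromordinal(md + dateOffset(instr).toordinal())

print(md2date(100, 'AAPL'))  # 2000-04-10 00:00:00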
Example no. 31
0
def makeForecast(inputList):
    import urllib2
    import urllib
    import datetime
    import time
    import os
    import dateutil

    ###-----------------------------------------------------------------------------
    ### START part One "DATA COLLECTION" - getting fresch intraday data from YahooFinance API, to work with

    #### Get specified content for the instrument list, from file in Python root dir.
    yahoo_ticker_list = []
    readThisFile = r'lista_' + inputList + '.txt'
    TickerFile = open(readThisFile)
    fleraTickers = TickerFile.read()
    yahoo_ticker_list = fleraTickers.split('\n')
    TickerFile.close()

    yahoo_RealNames_list = []
    readThisFile = r'lista_' + inputList + '_RealNames.txt'
    TickerFile = open(readThisFile)
    fleraTickers = TickerFile.read()
    yahoo_RealNames_list = fleraTickers.split('\n')
    TickerFile.close()

    #### Get content for the FEATURE list, from file in Python root dir.
    FEATURES = []
    readThisFile = r'FEATURES03.txt'
    featuresFile = open(readThisFile)
    fleraFeatures = featuresFile.read()
    FEATURES = fleraFeatures.split('\n')
    featuresFile.close()
    printToFile = ('DONE: reading instrument and features')  ####
    LocationToSave = r"loooooooooooooooooooooogFile.txt"
    saveFile = open(LocationToSave, 'a')
    saveFile.write(printToFile)
    saveFile.close()  # close added: the handle was left open in the original

    #### Remove, possible old files in Python root dir.
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'for' + eachTicker + '_excel.xlsx')
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'EOD' + eachTicker + '.txt')  ###
        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt', 'a')
            logFile.write("\n" + printToFile)
            logFile.close()

    print('DONE: deleting old files')  ####
    time.sleep(4)

    #### Parse EOD data for every instrument from finance.yahoo and save it line by line in a 1st .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            urlToVisit = 'http://chartapi.finance.yahoo.com/instrument/1.0/' + eachTicker + '/chartdata;type=quote;range=2y/csv'
            sourceCode = urllib2.urlopen(urlToVisit).read()
            splitSource = sourceCode.split('\n')
            LocationToSave = r'EOD' + eachTicker + '.txt'

            for eachLine in splitSource:
                splitLine = eachLine.split(',')
                if len(splitLine) == 6:
                    if 'values' not in eachLine:
                        saveFile = open(LocationToSave, 'a')
                        lineToWrite = eachLine + '\n'
                        saveFile.write(lineToWrite)
            saveFile.close()
            time.sleep(1)  ### rate-limit out of respect for yahoo.finance
        except Exception as e:
            print(str(e))
            #pass
    print('DONE: parsing EOD data and save as EOD_ticker_txt')  ####
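    # Each kept line is one raw CSV row from the chartapi endpoint (long since
    # retired); the np.loadtxt call in part Two unpacks its six columns as
    # Date, Open, High, Low, Close, Volume.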

    #### Parse 5min data for every instrument from yahoo AND save in 2nd .txt file for each ticker
    for eachTicker in yahoo_ticker_list:
        try:
            urlToVisit = 'http://chartapi.finance.yahoo.com/instrument/1.0/' + eachTicker + '/chartdata;type=quote;range=40d/csv'
            sourceCode = urllib2.urlopen(urlToVisit).read()
            splitSource = sourceCode.split('\n')
            LocationToSave = r'5min' + eachTicker + '.txt'

            for eachLine in splitSource:
                splitLine = eachLine.split(',')
                if len(splitLine) == 6:
                    if 'values' not in eachLine:
                        saveFile = open(LocationToSave, 'a')
                        lineToWrite = eachLine + '\n'
                        saveFile.write(lineToWrite)

            saveFile.close()
            time.sleep(1)  ### rate-limit out of respect for yahoo.finance
        except Exception as e:
            print(str(e))
            #pass
    print('DONE: parsing 5min data and save as 5min_ticker_txt')  ####

    #### Sort out only today's 5min data from the 2nd .txt file AND save today's data in a 3rd .txt file for each ticker
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'5min' + eachTicker + '.txt'
            Unix, Open, High, Low, Close, Volume = np.loadtxt(FileLocation,
                                                              unpack=True,
                                                              delimiter=',')
            today = datetime.datetime.fromtimestamp(
                time.time()).strftime('%Y-%m-%d')
            UnixToday = datetime.datetime.fromtimestamp(
                Unix[-2]).strftime('%Y-%m-%d')
            if today != UnixToday:
                try:
                    os.remove(r'EOD' + eachTicker + '.txt')  ###
                except Exception as e:
                    print(str(e))
            else:
                for x in range(1, 400):  #############
                    UnixToday = datetime.datetime.fromtimestamp(
                        Unix[-x]).strftime('%Y-%m-%d')
                    if today == UnixToday:
                        forUnix = Unix[-x]
                        forOpen = Open[-x]
                        forHigh = High[-x]
                        forLow = Low[-x]
                        forClose = Close[-x]
                        LocationToSave = r'todayEOD' + eachTicker + '.txt'
                        saveFile = open(LocationToSave, 'a')
                        lineToWrite = str(forUnix) + ',' + str(
                            forOpen) + ',' + str(forHigh) + ',' + str(
                                forLow) + ',' + str(forClose) + '\n'
                        saveFile.write(lineToWrite)
                        saveFile.close()

        except Exception as e:
            #pass
            print(str(e))
    printToFile = str("DONE: sort out today's 5min data")  ####
    logFile = open('lo0gFile.txt', 'a')
    logFile.write(printToFile)
    logFile.close()

    #### Read the 3rd .txt file with only today's 5min data AND convert it to EOD format in a 4th .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'todayEOD' + eachTicker + '.txt'
            Unix, Open, High, Low, Close = np.loadtxt(FileLocation,
                                                      unpack=True,
                                                      delimiter=',')
            NoLen = len(Unix)

            forUnix = datetime.datetime.fromtimestamp(
                Unix[-2]).strftime('%Y%m%d')
            forOpen = Open[-2]
            forHigh = np.amax(High[0:NoLen])
            forLow = np.amin(Low[0:NoLen])
            forClose = Close[0]
            #        print(str(forUnix)+str(eachTicker)+str(UnixTodayInLoop))
            LocationToSave = r'EODtoday' + eachTicker + '.txt'
            saveFile = open(LocationToSave, 'w')
            lineToWrite = str(forUnix) + ',' + str(forOpen) + ',' + str(
                forHigh) + ',' + str(forLow) + ',' + str(forClose) + ',1\n'
            saveFile.write(lineToWrite)
            saveFile.close()

        except Exception as e:
            #pass
            print(str(e))
    print('DONE: convert 5min data to EOD')  ####
    printToFile = (str('DONE: convert 5min data to EOD'))
    logFile = open('lo0gFile.txt', 'a')
    logFile.write("\n" + printToFile)
    logFile.close()

    #### append today's EOD from the 4th .txt file to the list of all EOD data in the 1st .txt file
    for eachTicker in yahoo_ticker_list:
        try:
            EODFile = open(r'EODtoday' + eachTicker + '.txt')
            EODline = EODFile.readlines()
            EODtoday = EODline[0]
            EODFile.close()
            LocationToSave = r'EOD' + eachTicker + '.txt'
            saveFile = open(LocationToSave, 'a')
            lineToWrite = EODtoday
            saveFile.write(lineToWrite)
            saveFile.close()
        except Exception as e:
            print(str(e))
            #pass

### END part One - getting fresh intraday data to work with
###-----------------------------------------------------------------------------
### START part Two "PREPROCESS" - Load fresh data and create all additional Features AND
### save all data to one .xlsx file for each ticker

    from datetime import datetime
    for eachTicker in yahoo_ticker_list:
        try:
            FileLocation = r'EOD' + eachTicker + '.txt'
            Date, Open, High, Low, Close, Volume = np.loadtxt(
                FileLocation,
                delimiter=',',
                unpack=True,
                converters={0: mdates.strpdate2num('%Y%m%d')})
            Zeros = [1] * len(Date)
            date = int(Date[-1])
            dt = datetime.fromordinal(date)
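            # with the classic matplotlib date epoch (pre-3.3), strpdate2num
            # values count days from 0001-01-01 exactly like Python ordinals,
            # which is why fromordinal(int(...)) recovers the calendar date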
            ### create the individual Features
            ### START calculation of chosen list of FEATURES for the MACHINE LEARNING process ###
            _DayOfYear = float(
                dt.strftime('%j'))  # part with calendar based FEATURES
            _DayOfMonth = float(dt.strftime('%d'))
            _DayOfWeek = float(dt.strftime('%w'))
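            # strftime codes used above: %j = day of year, %d = day of month,
            # %w = weekday as 0 (Sunday) through 6 (Saturday)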
            # part with FEATURES based on % relative to the last Close,
            _Diff_CtoH = round((Close[-1] - High[-1]) / High[-1], 3)
            _Diff_CtoH1 = round((Close[-1] - High[-2]) / High[-2], 3)
            _Diff_CtoH2 = round((Close[-1] - High[-3]) / High[-3], 3)
            _Diff_CtoH3 = round((Close[-1] - High[-4]) / High[-4], 3)
            _Diff_CtoH4 = round((Close[-1] - High[-5]) / High[-5], 3)
            _Diff_CtoH5 = round((Close[-1] - High[-6]) / High[-6], 3)
            _Diff_CtoH6 = round((Close[-1] - High[-7]) / High[-7], 3)
            _Diff_CtoH7 = round((Close[-1] - High[-8]) / High[-8], 3)
            _Diff_CtoH8 = round((Close[-1] - High[-9]) / High[-9], 3)
            _Diff_CtoH9 = round((Close[-1] - High[-10]) / High[-10], 3)
            _Diff_CtoH10 = round((Close[-1] - High[-11]) / High[-11], 3)
            _Diff_CtoH11 = round((Close[-1] - High[-12]) / High[-12], 3)
            _Diff_CtoH12 = round((Close[-1] - High[-13]) / High[-13], 3)
            _Diff_CtoH13 = round((Close[-1] - High[-14]) / High[-14], 3)
            _Diff_CtoH14 = round((Close[-1] - High[-15]) / High[-15], 3)
            _Diff_CtoH15 = round((Close[-1] - High[-16]) / High[-16], 3)
            _Diff_CtoH16 = round((Close[-1] - High[-17]) / High[-17], 3)
            _Diff_CtoH17 = round((Close[-1] - High[-18]) / High[-18], 3)
            _Diff_CtoH18 = round((Close[-1] - High[-19]) / High[-19], 3)
            _Diff_CtoH19 = round((Close[-1] - High[-20]) / High[-20], 3)
            _Diff_CtoH20 = round((Close[-1] - High[-21]) / High[-21], 3)
            _Diff_CtoH21 = round((Close[-1] - High[-22]) / High[-22], 3)
            _Diff_CtoH22 = round((Close[-1] - High[-23]) / High[-23], 3)
            _Diff_CtoH23 = round((Close[-1] - High[-24]) / High[-24], 3)
            _Diff_CtoH24 = round((Close[-1] - High[-25]) / High[-25], 3)
            _Diff_CtoH25 = round((Close[-1] - High[-26]) / High[-26], 3)

            _Diff_CtoL = round((Close[-1] - Low[-1]) / Low[-1], 3)
            _Diff_CtoL1 = round((Close[-1] - Low[-2]) / Low[-2], 3)
            _Diff_CtoL2 = round((Close[-1] - Low[-3]) / Low[-3], 3)
            _Diff_CtoL3 = round((Close[-1] - Low[-4]) / Low[-4], 3)
            _Diff_CtoL4 = round((Close[-1] - Low[-5]) / Low[-5], 3)
            _Diff_CtoL5 = round((Close[-1] - Low[-6]) / Low[-6], 3)
            _Diff_CtoL6 = round((Close[-1] - Low[-7]) / Low[-7], 3)
            _Diff_CtoL7 = round((Close[-1] - Low[-8]) / Low[-8], 3)
            _Diff_CtoL8 = round((Close[-1] - Low[-9]) / Low[-9], 3)
            _Diff_CtoL9 = round((Close[-1] - Low[-10]) / Low[-10], 3)
            _Diff_CtoL10 = round((Close[-1] - Low[-11]) / Low[-11], 3)
            _Diff_CtoL11 = round((Close[-1] - Low[-12]) / Low[-12], 3)
            _Diff_CtoL12 = round((Close[-1] - Low[-13]) / Low[-13], 3)
            _Diff_CtoL13 = round((Close[-1] - Low[-14]) / Low[-14], 3)
            _Diff_CtoL14 = round((Close[-1] - Low[-15]) / Low[-15], 3)
            _Diff_CtoL15 = round((Close[-1] - Low[-16]) / Low[-16], 3)
            _Diff_CtoL16 = round((Close[-1] - Low[-17]) / Low[-17], 3)
            _Diff_CtoL17 = round((Close[-1] - Low[-18]) / Low[-18], 3)
            _Diff_CtoL18 = round((Close[-1] - Low[-19]) / Low[-19], 3)
            _Diff_CtoL19 = round((Close[-1] - Low[-20]) / Low[-20], 3)
            _Diff_CtoL20 = round((Close[-1] - Low[-21]) / Low[-21], 3)
            _Diff_CtoL21 = round((Close[-1] - Low[-22]) / Low[-22], 3)
            _Diff_CtoL22 = round((Close[-1] - Low[-23]) / Low[-23], 3)
            _Diff_CtoL23 = round((Close[-1] - Low[-24]) / Low[-24], 3)
            _Diff_CtoL24 = round((Close[-1] - Low[-25]) / Low[-25], 3)
            _Diff_CtoL25 = round((Close[-1] - Low[-26]) / Low[-26], 3)

            _Diff_CtoO = round((Close[-1] - Open[-1]) / Open[-1], 3)
            _Diff_CtoO1 = round((Close[-1] - Open[-2]) / Open[-2], 3)
            _Diff_CtoO2 = round((Close[-1] - Open[-3]) / Open[-3], 3)
            _Diff_CtoO3 = round((Close[-1] - Open[-4]) / Open[-4], 3)
            _Diff_CtoO4 = round((Close[-1] - Open[-5]) / Open[-5], 3)
            _Diff_CtoO5 = round((Close[-1] - Open[-6]) / Open[-6], 3)
            _Diff_CtoO6 = round((Close[-1] - Open[-7]) / Open[-7], 3)
            _Diff_CtoO7 = round((Close[-1] - Open[-8]) / Open[-8], 3)
            _Diff_CtoO8 = round((Close[-1] - Open[-9]) / Open[-9], 3)
            _Diff_CtoO9 = round((Close[-1] - Open[-10]) / Open[-10], 3)

            _Diff_CtoC1 = round((Close[-1] - Close[-2]) / Close[-2], 3)  # fixed: the original compared Close[-1] with itself (always 0); Close[-2] matches the CtoC2..CtoC9 pattern
            _Diff_CtoC2 = round((Close[-1] - Close[-3]) / Close[-3], 3)
            _Diff_CtoC3 = round((Close[-1] - Close[-4]) / Close[-4], 3)
            _Diff_CtoC4 = round((Close[-1] - Close[-5]) / Close[-5], 3)
            _Diff_CtoC5 = round((Close[-1] - Close[-6]) / Close[-6], 3)
            _Diff_CtoC6 = round((Close[-1] - Close[-7]) / Close[-7], 3)
            _Diff_CtoC7 = round((Close[-1] - Close[-8]) / Close[-8], 3)
            _Diff_CtoC8 = round((Close[-1] - Close[-9]) / Close[-9], 3)
            _Diff_CtoC9 = round((Close[-1] - Close[-10]) / Close[-10], 3)
            ### PART with only basic HLOC data
            _justOpen = Open[-1]
            _justHigh = High[-1]
            _justLow = Low[-1]
            _justClose = Close[-1]
            ### PART with FEATURES based on % relation Close or Sub-indicator to upper/lower BollingerBand
            _SMA_H3 = float(round(np.sum(High[:-4:-1]) / 5,
                                  2))  # short moving average based on H & L
            _SMA_L3 = float(round(np.sum(Low[:-4:-1]) / 5,
                                  2))  # these two are sub-indicators

            _BBU5 = round(np.sum(Close[:-4:-1]) / 5, 3) + (round(
                np.std(Close[-4:-1]) * 2, 3))  # Upper BollingerBand
            _BBD5 = round(np.sum(Close[:-4:-1]) / 5, 3) - (round(
                np.std(Close[-4:-1]) * 2, 3))  # Lower BollingerBand
            _DiffU5_C = round((Close[-1] - _BBU5) / _BBU5, 3)
            _DiffD5_C = round((Close[-1] - _BBD5) / _BBD5, 3)
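            # note: Close[:-4:-1] yields only the last 3 closes, yet the sum is
            # divided by 5 (a true 5-bar window would be Close[:-6:-1]); kept
            # as-is since the saved .pkl models were presumably trained on
            # features computed exactly this way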

            _BBU13 = round(np.sum(Close[:-12:-1]) / 13, 3) + (round(
                np.std(Close[:-12:-1]) * 2, 3))
            _BBD13 = round(np.sum(Close[:-12:-1]) / 13, 3) - (round(
                np.std(Close[:-12:-1]) * 2, 3))
            _DiffU13_L3 = round((_SMA_L3 - _BBU13) / _BBU13, 3)

            _BBU21 = round(np.sum(Close[:-20:-1]) / 21, 3) + (round(
                np.std(Close[:-20:-1]) * 2, 3))
            _BBD21 = round(np.sum(Close[:-20:-1]) / 21, 3) - (round(
                np.std(Close[:-20:-1]) * 2, 3))
            _DiffD21_C = round((Close[-1] - _BBD21) / _BBD21, 3)
            _DiffD21_H3 = round((_SMA_H3 - _BBD21) / _BBD21, 3)

            _BBU34 = round(np.sum(Close[:-33:-1]) / 34, 3) + (round(
                np.std(Close[:-33:-1]) * 2, 3))
            _BBD34 = round(np.sum(Close[:-33:-1]) / 34, 3) - (round(
                np.std(Close[:-33:-1]) * 2, 3))
            _DiffU34_C = round((Close[-1] - _BBU34) / _BBU34, 3)
            _DiffD34_H3 = round((_SMA_H3 - _BBD34) / _BBD34, 3)

            _BBU55 = round(np.sum(Close[:-54:-1]) / 55, 3) + (round(
                np.std(Close[:-54:-1]) * 2, 3))
            _BBD55 = round(np.sum(Close[:-54:-1]) / 55, 3) - (round(
                np.std(Close[:-54:-1]) * 2, 3))
            _DiffU55_L3 = round((_SMA_L3 - _BBU55) / _BBU55, 3)

            _BBU89 = round(np.sum(Close[:-88:-1]) / 89, 3) + (round(
                np.std(Close[:-88:-1]) * 2, 3))
            _BBD89 = round(np.sum(Close[:-88:-1]) / 89, 3) - (round(
                np.std(Close[:-88:-1]) * 2, 3))
            _DiffU89_C = round((Close[-1] - _BBU89) / _BBU89, 3)
            _DiffU89_L3 = round((_SMA_L3 - _BBU89) / _BBU89, 3)

            _BBU144 = round(np.sum(Close[:-143:-1]) / 144, 3) + (round(
                np.std(Close[:-143:-1]) * 2, 3))
            _BBD144 = round(np.sum(Close[:-143:-1]) / 144, 3) - (round(
                np.std(Close[:-143:-1]) * 2, 3))
            _DiffU144_L3 = round((_SMA_L3 - _BBU144) / _BBU144, 3)

            _BBU233 = round(np.sum(Close[:-232:-1]) / 233, 3) + (round(
                np.std(Close[:-232:-1]) * 2, 3))
            _BBD233 = round(np.sum(Close[:-232:-1]) / 233, 3) - (round(
                np.std(Close[:-232:-1]) * 2, 3))
            _DiffU233_C = round((Close[-1] - _BBU233) / _BBU233, 3)

            _BBU300 = round(np.sum(Close[:299:-1]) / 300, 3) + (round(
                np.std(Close[:299:-1]) * 2, 3))
            _BBD300 = round(np.sum(Close[:299:-1]) / 300, 3) - (round(
                np.std(Close[:299:-1]) * 2, 3))
            _DiffU300_C = round((Close[-1] - _BBU300) / _BBU300, 3)
            _DiffD300_C = round((Close[-1] - _BBD300) / _BBD300, 3)
            ### PART with % relation, Close to Maximum High or Low from various days in history
            _High3_H = round(
                (Close[-1] - np.amax(High[:-2:-1])) / np.amax(High[:-2:-1]), 3)
            _High5_H = round(
                (Close[-1] - np.amax(High[:-4:-1])) / np.amax(High[:-4:-1]), 3)
            _High100_H = round(
                (Close[-1] - np.amax(High[:-99:-1])) / np.amax(High[:-99:-1]),
                3)
            _High377_H = round((Close[-1] - np.amax(High[:-376:-1])) /
                               np.amax(High[:-376:-1]), 3)
            _Low3_L = round(
                (Close[-1] - np.amin(Low[:-2:-1])) / np.amin(Low[:-2:-1]), 3)
            _Low34_L = round(
                (Close[-1] - np.amin(Low[:-33:-1])) / np.amin(Low[:-33:-1]), 3)
            _Low55_L = round(
                (Close[-1] - np.amin(Low[:-54:-1])) / np.amin(Low[:-54:-1]), 3)
            _Hi3to5 = round(
                (Close[-1] - np.amax(High[:-4:-2])) / np.amax(High[:-4:-2]), 3)
            _Hi5to8 = round(
                (Close[-1] - np.amax(High[:-7:-4])) / np.amax(High[:-7:-4]), 3)
            _Hi8to13 = round(
                (Close[-1] - np.amax(High[:-12:-7])) / np.amax(High[:-12:-7]),
                3)
            _Hi34to55 = round((Close[-1] - np.amax(High[:-54:-33])) /
                              np.amax(High[:-54:-33]), 3)
            _Hi233to377 = round((Close[-1] - np.amax(High[:-376:-232])) /
                                np.amax(High[:-376:-232]), 3)
            _Lo3to5 = round(
                (Close[-1] - np.amin(Low[:-4:-2])) / np.amin(Low[:-4:-2]), 3)
            _Lo233to377 = round((Close[-1] - np.amin(Low[:-376:-232])) /
                                np.amin(Low[:-376:-232]), 3)
            ### PART with a simple arithmetic Feature
            _EvNo5 = round((Close[-1] - 5) / 5, 2)
            ### END calculation of chosen list of FEATURES for the MACHINE LEARNING process ###

            ### append the individual Features to a Pandas DataFrame
            df = pd.DataFrame(columns=FEATURES)
            df = df.append(
                {
                    '_DayOfYear': _DayOfYear,
                    '_DayOfMonth': _DayOfMonth,
                    '_DayOfWeek': _DayOfWeek,
                    '_Diff_CtoH': _Diff_CtoH,
                    '_Diff_CtoH1': _Diff_CtoH1,
                    '_Diff_CtoH2': _Diff_CtoH2,
                    '_Diff_CtoH3': _Diff_CtoH3,
                    '_Diff_CtoH4': _Diff_CtoH4,
                    '_Diff_CtoH5': _Diff_CtoH5,
                    '_Diff_CtoH6': _Diff_CtoH6,
                    '_Diff_CtoH7': _Diff_CtoH7,
                    '_Diff_CtoH8': _Diff_CtoH8,
                    '_Diff_CtoH9': _Diff_CtoH9,
                    '_Diff_CtoH10': _Diff_CtoH10,
                    '_Diff_CtoH11': _Diff_CtoH11,
                    '_Diff_CtoH12': _Diff_CtoH12,
                    '_Diff_CtoH13': _Diff_CtoH13,
                    '_Diff_CtoH14': _Diff_CtoH14,
                    '_Diff_CtoH15': _Diff_CtoH15,
                    '_Diff_CtoH16': _Diff_CtoH16,
                    '_Diff_CtoH17': _Diff_CtoH17,
                    '_Diff_CtoH18': _Diff_CtoH18,
                    '_Diff_CtoH19': _Diff_CtoH19,
                    '_Diff_CtoH20': _Diff_CtoH20,
                    '_Diff_CtoH21': _Diff_CtoH21,
                    '_Diff_CtoH22': _Diff_CtoH22,
                    '_Diff_CtoH23': _Diff_CtoH23,
                    '_Diff_CtoH24': _Diff_CtoH24,
                    '_Diff_CtoH25': _Diff_CtoH25,
                    '_Diff_CtoL': _Diff_CtoL,
                    '_Diff_CtoL1': _Diff_CtoL1,
                    '_Diff_CtoL2': _Diff_CtoL2,
                    '_Diff_CtoL3': _Diff_CtoL3,
                    '_Diff_CtoL4': _Diff_CtoL4,
                    '_Diff_CtoL5': _Diff_CtoL5,
                    '_Diff_CtoL6': _Diff_CtoL6,
                    '_Diff_CtoL7': _Diff_CtoL7,
                    '_Diff_CtoL8': _Diff_CtoL8,
                    '_Diff_CtoL9': _Diff_CtoL9,
                    '_Diff_CtoL10': _Diff_CtoL10,
                    '_Diff_CtoL11': _Diff_CtoL11,
                    '_Diff_CtoL12': _Diff_CtoL12,
                    '_Diff_CtoL13': _Diff_CtoL13,
                    '_Diff_CtoL14': _Diff_CtoL14,
                    '_Diff_CtoL15': _Diff_CtoL15,
                    '_Diff_CtoL16': _Diff_CtoL16,
                    '_Diff_CtoL17': _Diff_CtoL17,
                    '_Diff_CtoL18': _Diff_CtoL18,
                    '_Diff_CtoL19': _Diff_CtoL19,
                    '_Diff_CtoL20': _Diff_CtoL20,
                    '_Diff_CtoL21': _Diff_CtoL21,
                    '_Diff_CtoL22': _Diff_CtoL22,
                    '_Diff_CtoL23': _Diff_CtoL23,
                    '_Diff_CtoL24': _Diff_CtoL24,
                    '_Diff_CtoL25': _Diff_CtoL25,
                    '_Diff_CtoO': _Diff_CtoO,
                    '_Diff_CtoO1': _Diff_CtoO1,
                    '_Diff_CtoO2': _Diff_CtoO2,
                    '_Diff_CtoO3': _Diff_CtoO3,
                    '_Diff_CtoO4': _Diff_CtoO4,
                    '_Diff_CtoO5': _Diff_CtoO5,
                    '_Diff_CtoO6': _Diff_CtoO6,
                    '_Diff_CtoO7': _Diff_CtoO7,
                    '_Diff_CtoO8': _Diff_CtoO8,
                    '_Diff_CtoO9': _Diff_CtoO9,
                    '_Diff_CtoC1': _Diff_CtoC1,
                    '_Diff_CtoC2': _Diff_CtoC2,
                    '_Diff_CtoC3': _Diff_CtoC3,
                    '_Diff_CtoC4': _Diff_CtoC4,
                    '_Diff_CtoC5': _Diff_CtoC5,
                    '_Diff_CtoC6': _Diff_CtoC6,
                    '_Diff_CtoC7': _Diff_CtoC7,
                    '_Diff_CtoC8': _Diff_CtoC8,
                    '_Diff_CtoC9': _Diff_CtoC9,
                    '_justOpen': _justOpen,
                    '_justHigh': _justHigh,
                    '_justLow': _justLow,
                    '_justClose': _justClose,
                    '_DiffU5_C': _DiffU5_C,
                    '_DiffD5_C': _DiffD5_C,
                    '_DiffU13_L3': _DiffU13_L3,
                    '_DiffD21_C': _DiffD21_C,
                    '_DiffD21_H3': _DiffD21_H3,
                    '_DiffU34_C': _DiffU34_C,
                    '_DiffD34_H3': _DiffD34_H3,
                    '_DiffU55_L3': _DiffU55_L3,
                    '_DiffU89_C': _DiffU89_C,
                    '_DiffU89_L3': _DiffU89_L3,
                    '_DiffU144_L3': _DiffU144_L3,
                    '_DiffU233_C': _DiffU233_C,
                    '_DiffU300_C': _DiffU300_C,
                    '_DiffD300_C': _DiffD300_C,
                    '_High3_H': _High3_H,
                    '_High5_H': _High5_H,
                    '_High100_H': _High100_H,
                    '_High377_H': _High377_H,
                    '_Low3_L': _Low3_L,
                    '_Low34_L': _Low34_L,
                    '_Low55_L': _Low55_L,
                    '_Hi3to5': _Hi3to5,
                    '_Hi5to8': _Hi5to8,
                    '_Hi8to13': _Hi8to13,
                    '_Hi34to55': _Hi34to55,
                    '_Hi233to377': _Hi233to377,
                    '_Lo3to5': _Lo3to5,
                    '_Lo233to377': _Lo233to377,
                    '_EvNo5': _EvNo5,
                },
                ignore_index=True)
            ### Write the Pandas DataFrame to an .xlsx file
            FileLocation4excel = r'for' + eachTicker + '_excel.xlsx'
            df.to_excel(FileLocation4excel, index=False)
        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt', 'a')
            logFile.write(printToFile)
            logFile.close()

### END part Two - Load fresh data and create all additional Features AND
### save all data to one .xlsx file for each ticker
###-----------------------------------------------------------------------------
### START part Three "PREDICTION PROCESS" - Load fresh data from .xlsx file and make Predictions from today's intraday data

### Cleaning up unnecessary files
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'' + eachTicker + '.txt')  ###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'EODtoday' + eachTicker + '.txt')  ###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'todayEOD' + eachTicker + '.txt')  ###
        except Exception as e:
            print(str(e))
    for eachTicker in yahoo_ticker_list:
        try:
            os.remove(r'5min' + eachTicker + '.txt')  ###
        except Exception as e:
            print(str(e))
    ### END Cleaning up unnecessary files

### START part where the algo makes a prediction/Outlook from the fresh data in the .xlsx file

    printToFile = (str("Start makeing Forecasts"))
    logFile = open('lo0gFile.txt', 'a')
    logFile.write("\n" + printToFile)
    logFile.close()

    barChartName = []  #making some empty datalists
    barChartForecast = []

    Ultimate_df2 = pd.DataFrame(columns=['[Name]', '[Forecast]'
                                         ])  # creating empty Pandas dataframe

    for eachTicker, eachRealNames in zip(yahoo_ticker_list,
                                         yahoo_RealNames_list):
        try:
            global GLBeachName
            GLBeachName = eachRealNames

            Location = r'for' + eachTicker + '_excel.xlsx'
            data = pd.read_excel(Location)
            X = np.array(data[FEATURES].values
                         )  # making a NumPy array from the Pandas dataset
            y1 = data['_justClose'].values  # saves the intraday close value

            ### START loading the saved .pkl files with trained algo information and creating final Prediction/Outlook
            ### Every joblib.load indicates one .pkl file
            ### The file name indicates for how many future days the algo is trained to predict the outcome for (1d = One day)
            ### The first 7 algos score how high the Risk/Reward ratio was for the future move;
            ### the second 7 algos score how big the future move was, heavily averaged out to smooth the result.
            ### Every trained algo gives 5 predictions: StrongSell, Sell, Neutral, Buy and StrongBuy (5cat = five categories)
            ### Every prediction contains a percentage chance for each of the 5 possible outcomes
            ### Each of the five probabilities is stored in a variable, for every trained algo
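            ### Hedged refactoring sketch (not part of the original flow): the
            ### 14 near-identical load/predict/round blocks below could collapse
            ### into one helper; the name load_proba is hypothetical.
            def load_proba(horizon, suffix=''):
                model = joblib.load(
                    r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat' +
                    str(horizon) + 'd' + suffix + '.pkl')
                # predict_proba returns one row of 5 class probabilities,
                # ordered [Sell, Under, Hold, Over, Buy]
                return [round(p, 4) for p in model.predict_proba(X)[0]]
            # e.g. Sell1D, Under1D, Hold1D, Over1D, Buy1D = load_proba(1)
            # and the ROC variants via load_proba(1, 'ROC'), etc.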
            logreg1 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat1d.pkl')
            Value1D = logreg1.predict_proba(X)
            Sell1D = round(Value1D[0][0], 4)
            Under1D = round(Value1D[0][1], 4)
            Hold1D = round(Value1D[0][2], 4)
            Over1D = round(Value1D[0][3], 4)
            Buy1D = round(Value1D[0][4], 4)

            logreg2 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat2d.pkl')
            Value2D = logreg2.predict_proba(X)
            Sell2D = round(Value2D[0][0], 4)
            Under2D = round(Value2D[0][1], 4)
            Hold2D = round(Value2D[0][2], 4)
            Over2D = round(Value2D[0][3], 4)
            Buy2D = round(Value2D[0][4], 4)

            logreg3 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat3d.pkl')
            Value3D = logreg3.predict_proba(X)
            Sell3D = round(Value3D[0][0], 4)
            Under3D = round(Value3D[0][1], 4)
            Hold3D = round(Value3D[0][2], 4)
            Over3D = round(Value3D[0][3], 4)
            Buy3D = round(Value3D[0][4], 4)

            logreg5 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat5d.pkl')
            Value5D = logreg5.predict_proba(X)
            Sell5D = round(Value5D[0][0], 4)
            Under5D = round(Value5D[0][1], 4)
            Hold5D = round(Value5D[0][2], 4)
            Over5D = round(Value5D[0][3], 4)
            Buy5D = round(Value5D[0][4], 4)

            logreg8 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat8d.pkl')
            Value8D = logreg8.predict_proba(X)
            Sell8D = round(Value8D[0][0], 4)
            Under8D = round(Value8D[0][1], 4)
            Hold8D = round(Value8D[0][2], 4)
            Over8D = round(Value8D[0][3], 4)
            Buy8D = round(Value8D[0][4], 4)

            logreg13 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat13d.pkl')
            Value13D = logreg13.predict_proba(X)
            Sell13D = round(Value13D[0][0], 4)
            Under13D = round(Value13D[0][1], 4)
            Hold13D = round(Value13D[0][2], 4)
            Over13D = round(Value13D[0][3], 4)
            Buy13D = round(Value13D[0][4], 4)

            logreg21 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat21d.pkl')
            Value21D = logreg21.predict_proba(X)
            Sell21D = round(Value21D[0][0], 4)
            Under21D = round(Value21D[0][1], 4)
            Hold21D = round(Value21D[0][2], 4)
            Over21D = round(Value21D[0][3], 4)
            Buy21D = round(Value21D[0][4], 4)

            ### Part with algos based on calculating how Big the future move was

            roc_logreg1 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat1dROC.pkl'
            )
            roc_Value1D = roc_logreg1.predict_proba(X)
            roc_Sell1D = round(roc_Value1D[0][0], 4)
            roc_Under1D = round(roc_Value1D[0][1], 4)
            roc_Hold1D = round(roc_Value1D[0][2], 4)
            roc_Over1D = round(roc_Value1D[0][3], 4)
            roc_Buy1D = round(roc_Value1D[0][4], 4)

            roc_logreg2 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat2dROC.pkl'
            )
            roc_Value2D = roc_logreg2.predict_proba(X)
            roc_Sell2D = round(roc_Value2D[0][0], 4)
            roc_Under2D = round(roc_Value2D[0][1], 4)
            roc_Hold2D = round(roc_Value2D[0][2], 4)
            roc_Over2D = round(roc_Value2D[0][3], 4)
            roc_Buy2D = round(roc_Value2D[0][4], 4)

            roc_logreg3 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat3dROC.pkl'
            )
            roc_Value3D = roc_logreg3.predict_proba(X)
            roc_Sell3D = round(roc_Value3D[0][0], 4)
            roc_Under3D = round(roc_Value3D[0][1], 4)
            roc_Hold3D = round(roc_Value3D[0][2], 4)
            roc_Over3D = round(roc_Value3D[0][3], 4)
            roc_Buy3D = round(roc_Value3D[0][4], 4)

            roc_logreg5 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat5dROC.pkl'
            )
            roc_Value5D = roc_logreg5.predict_proba(X)
            roc_Sell5D = round(roc_Value5D[0][0], 4)
            roc_Under5D = round(roc_Value5D[0][1], 4)
            roc_Hold5D = round(roc_Value5D[0][2], 4)
            roc_Over5D = round(roc_Value5D[0][3], 4)
            roc_Buy5D = round(roc_Value5D[0][4], 4)

            roc_logreg8 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat8dROC.pkl'
            )
            roc_Value8D = roc_logreg8.predict_proba(X)
            roc_Sell8D = round(roc_Value8D[0][0], 4)
            roc_Under8D = round(roc_Value8D[0][1], 4)
            roc_Hold8D = round(roc_Value8D[0][2], 4)
            roc_Over8D = round(roc_Value8D[0][3], 4)
            roc_Buy8D = round(roc_Value8D[0][4], 4)

            roc_logreg13 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat13dROC.pkl'
            )
            roc_Value13D = roc_logreg13.predict_proba(X)
            roc_Sell13D = round(roc_Value13D[0][0], 4)
            roc_Under13D = round(roc_Value13D[0][1], 4)
            roc_Hold13D = round(roc_Value13D[0][2], 4)
            roc_Over13D = round(roc_Value13D[0][3], 4)
            roc_Buy13D = round(roc_Value13D[0][4], 4)

            roc_logreg21 = joblib.load(
                r'C:\Users\UserTrader\Documents\_PKLfiles_02\IQ14_5cat21dROC.pkl'
            )
            roc_Value21D = roc_logreg21.predict_proba(X)
            roc_Sell21D = round(roc_Value21D[0][0], 4)
            roc_Under21D = round(roc_Value21D[0][1], 4)
            roc_Hold21D = round(roc_Value21D[0][2], 4)
            roc_Over21D = round(roc_Value21D[0][3], 4)
            roc_Buy21D = round(roc_Value21D[0][4], 4)

            ### END loading the saved .pkl files with trained algo information

            ### START building own indicators for final prediction values

            ### 1st stage, adding all StrongBuy percentage vs all StrongSell percentage,
            ### continuing with comparing all Buy percentage vs all Sell percentage
            ### Doing this for the 7 algos based on the future Pattern first
            Buy2stdav = Buy1D + Buy2D + Buy3D + Buy5D + Buy8D + Buy13D + Buy21D
            Sell2stdav = Sell1D + Sell2D + Sell3D + Sell5D + Sell8D + Sell13D + Sell21D
            Over1stdav = Over1D + Over2D + Over3D + Over5D + Over8D + Over13D + Over21D
            Under1stdav = Under1D + Under2D + Under3D + Under5D + Under8D + Under13D + Under21D
            ### 2nd stage, Comparing all positive percentage vs all negative percentage
            BuyVsSell = Buy2stdav - Sell2stdav
            OverVsUnder = (
                Over1stdav - Under1stdav
            ) / 27.0  ### using divide by 27 to normalise Strong vs Normal Buy/Sell
            preOutlook = BuyVsSell + OverVsUnder

            ### Only use the value if it clears the threshold in either direction, else it stays 0
            pat_Outlook = float(0.0)
            if BuyVsSell > 0.010 and OverVsUnder > 0.010:
                pat_Outlook = round(preOutlook * 100, 2)
            elif BuyVsSell < -0.009 and OverVsUnder < -0.009:
                pat_Outlook = round(preOutlook * 100, 2)
            else:
                pass

            ### Doing this one more time, for the 7 algos based on the future Big move
            roc_Buy2stdav = roc_Buy1D + roc_Buy2D + roc_Buy3D + roc_Buy5D + roc_Buy8D + roc_Buy13D + roc_Buy21D
            roc_Sell2stdav = roc_Sell1D + roc_Sell2D + roc_Sell3D + roc_Sell5D + roc_Sell8D + roc_Sell13D + roc_Sell21D
            roc_Over1stdav = roc_Over1D + roc_Over2D + roc_Over3D + roc_Over5D + roc_Over8D + roc_Over13D + roc_Over21D
            roc_Under1stdav = roc_Under1D + roc_Under2D + roc_Under3D + roc_Under5D + roc_Under8D + roc_Under13D + roc_Under21D
            roc_BuyVsSell = roc_Buy2stdav - roc_Sell2stdav
            roc_OverVsUnder = (roc_Over1stdav - roc_Under1stdav) / 2.2
            roc_preOutlook = (roc_BuyVsSell + roc_OverVsUnder) / 6.1
            roc_Outlook = float(0.0)

            if roc_BuyVsSell > 0.017 and roc_OverVsUnder > 0.017:
                roc_Outlook = round(roc_preOutlook * 100, 2)
            elif roc_BuyVsSell < -0.014 and roc_OverVsUnder < -0.014:
                roc_Outlook = round(roc_preOutlook * 100, 2)
            else:
                pass
            ### This value is the Final Outlook for the individual Instrument
            Outlook = round(pat_Outlook + (roc_Outlook), 2)
            global GLBoutlook
            GLBoutlook = Outlook
            print(roc_Outlook)
            print(pat_Outlook)
            print(Outlook)

            ### Attach a label to the Outlook value; if the signal is not strong
            ### enough, the label stays at the default (note: Trade is unused below)
            Trade = '--On Hold--'
            ### parentheses added around the 'or' terms (presumed grouping):
            ### 'and' binds tighter than 'or', so the unparenthesised original
            ### skipped the Outlook check whenever the first comparison was true
            if roc_Outlook > 0.017 and pat_Outlook > 0.01 and Outlook > 0.04:
                Trade = 'BUY'
            elif (roc_Outlook > 0.017 or pat_Outlook > 0.01) and Outlook > 0.04:
                Trade = 'Strong'
            elif roc_Outlook < -0.014 and pat_Outlook < -0.009 and Outlook < -0.009:
                Trade = 'SHORT'
            elif (roc_Outlook < -0.014 or pat_Outlook < -0.009) and Outlook < -0.009:
                Trade = 'Weak'
                Trade = 'Weak'
            else:
                pass

            barChartName.append(GLBeachName)
            barChartForecast.append(GLBoutlook)

            Ultimate_df2 = Ultimate_df2.append(
                {
                    '[Name]': eachRealNames,
                    '[Forecast]': Outlook,
                },
                ignore_index=True)

        except Exception as e:
            printToFile = (str(e))
            logFile = open('lo0gFile.txt', 'a')
            logFile.write("\n" + printToFile)
            logFile.close()

    printToFile = (str("Done makeing Forecasts"))
    logFile = open('lo0gFile.txt', 'a')
    logFile.write("\n" + printToFile)
    logFile.close()

    ### Saving the results to .xlsx file
    File2Location4excel = r'TEST_DB_excel.xlsx'
    import datetime
    import time
    unixTimestamp = int(time.time())
    timestamp = str(
        datetime.datetime.fromtimestamp(
            int(unixTimestamp)).strftime('%Y-%m-%d %H_%M'))
    print(timestamp)
    Ultimate_df = Ultimate_df2.sort_values('[Forecast]', ascending=False)  # DataFrame.sort was removed from pandas; sort_values is the current API
    Ultimate_df.to_excel(File2Location4excel, index=False)  # write added: the path above was assigned but never written (presumed intent)
    print(Ultimate_df)
    global theList
    theList = Ultimate_df
    time.sleep(3)

    ### END part where the algo makes a prediction/Outlook from the fresh data in the .xlsx file

    ### START - Part that creates a bar chart of all predictions in the instrument list

    import matplotlib
    matplotlib.style.use('ggplot')
    import matplotlib.pyplot as plt
    import datetime
    import time
    unixTimestamp = int(time.time())
    timestamp = str(
        datetime.datetime.fromtimestamp(
            int(unixTimestamp)).strftime('%Y-%m-%d %H_%M'))

    pltTitle = str(timestamp + ' CET  predictive algo IQ1.4: ' + inputList)

    Ultimate_df.set_index(["[Name]"], inplace=True)
    Ultimate_df.plot(kind='bar', alpha=0.75, rot=75, title="", legend=False)
    plt.xlabel("")
    fig1 = plt.gcf()
    fig1.set_size_inches(16, 9)
    plt.title(pltTitle,
              fontsize=26,
              fontweight='bold',
              color='#7f7f7f',
              family='Courier New')
    plt.show()
    plt.draw()
    fig1.savefig(r'C:\Users\UserTrader\Documents\_Image\\' + inputList +
                 '.png',
                 dpi=72)