Example #1
def AllUser(*_):
    """Run realtime monitoring for every user in configJson, then keep retrying
    the upload of the monitoring/transaction CSVs to Google Sheets until one
    pass succeeds; the upload step is skipped on Windows (dev) machines."""
    os.system('cls||clear')
    global mornitorFilePath
    global transacFilePath
    for user in configJson:
        if os.name == 'nt':
            print('[For Dev Testing...]')
            Realtime(user, sendNotify=False)
        else:
            try:
                Realtime(user)
            except Exception as e:
                print('\nError To Record ! : {}  then skip\n'.format(e))
                print('!!!! ==========================')
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                print('Error Type {}\nFile {}\n Line {}'.format(
                    exc_type, fname, exc_tb.tb_lineno))
                print('!!!! ==========================')
    # retry the Google Sheets upload every 10 s until one pass succeeds (non-Windows only)
    while isInternetConnect and not os.name == 'nt':
        try:
            if os.path.exists(mornitorFilePath):
                gSheet.updateFromCSV(mornitorFilePath, 'Mornitor')
            if os.path.exists(transacFilePath):
                gSheet.updateFromCSV(transacFilePath, 'Transaction')
        except:
            pass
        else:
            break
        time.sleep(10)
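The except branch above rebuilds the error location by hand with sys.exc_info(). Below is a self-contained sketch of that traceback-reporting pattern, stripped of the project-specific names (Realtime, gSheet, configJson); the failing division simply stands in for a monitoring call that raises.

import os
import sys

def report_error():
    try:
        1 / 0                       # any failing call, e.g. Realtime(user)
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print('Error Type {}\nFile {}\nLine {}'.format(exc_type, fname, exc_tb.tb_lineno))

report_error()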
Example #2
def AllUser(*_):
    """Run MornitoringUser for every user in configJson (skipping users whose
    run raises), then keep retrying the upload of the monitoring/transaction
    CSVs to Google Sheets until one pass succeeds; skipped on Windows."""
    os.system('cls||clear')
    global mornitorFilePath
    global transacFilePath
    for user in configJson:
        if os.name == 'nt':
            print('[For Dev Testing...]')
            MornitoringUser(user, sendNotify=False)
        else:
            try:
                MornitoringUser(user)
            except Exception as e:
                print('\nError To Record ! : {}  then skip\n'.format(e))
                continue
    # retry the Google Sheets upload every 10 s until one pass succeeds (non-Windows only)
    while isInternetConnect and not os.name == 'nt':
        try:
            #print('Uploading mornitoring data...')
            if os.path.exists(mornitorFilePath):
                gSheet.updateFromCSV(mornitorFilePath, 'Mornitor')
            #print('Upload mornitoring data finish')
            #print('Uploading Transaction data...')
            if os.path.exists(transacFilePath):
                gSheet.updateFromCSV(transacFilePath, 'Transaction')
            #print('Upload Transaction data finish')
        except:
            pass
        else:
            break
        time.sleep(10)
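The upload section in both versions of AllUser follows the same retry pattern: attempt the upload, swallow any error, and only leave the loop once a pass completes cleanly. A minimal, runnable sketch of that pattern, with a stand-in upload_csv() in place of the project-specific gSheet.updateFromCSV():

import time

def upload_csv(path, sheet_name):
    # stand-in for gSheet.updateFromCSV(path, sheet_name)
    print('uploading {} to {}'.format(path, sheet_name))

while True:
    try:
        upload_csv('mornitor.csv', 'Mornitor')
        upload_csv('transaction.csv', 'Transaction')
    except Exception:
        pass            # swallow the error and try again on the next pass
    else:
        break           # every upload succeeded, stop retrying
    time.sleep(10)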
Example #3
def getHistDataframe(*_):
    """Load the 'History' worksheet into a DataFrame; if the sheet is empty,
    seed it from the local cryptoHist.csv and read that file instead."""
    print('load history data from google sheet...')
    sheetData = gSheet.getAllDataS('History')
    print('row count {}'.format(len(sheetData)))
    if sheetData == []:
        allHistPath = dataPath + '/cryptoHist.csv'
        gSheet.updateFromCSV(allHistPath, 'History')
        df = pd.read_csv(allHistPath)
    else:
        df = pd.DataFrame.from_records(sheetData)
    return df
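gSheet.getAllDataS('History') is assumed to return the sheet rows as a list of record dicts, which pd.DataFrame.from_records turns into a frame directly. A small runnable illustration with made-up rows:

import pandas as pd

sheetData = [
    {'symbol': 'BTC_THB', 'last': 900000.0},
    {'symbol': 'ETH_THB', 'last': 60000.0},
]
df = pd.DataFrame.from_records(sheetData)
print(df)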
Example #4
def updateGSheetHistory(limit=45000):
    """Append the current ticker snapshot to the price history, rotate local
    CSV backups, dedupe and trim the frame to `limit` rows, then re-upload it
    to the 'History' worksheet (the upload runs on non-Windows hosts only)."""
    ticker = kbApi.getTicker()
    symbols = kbApi.getSymbol()

    df = pd.DataFrame()
    df = df.append(getHistDataframe())

    date = dt.now().strftime('%Y-%m-%d')
    hour = int(dt.now().strftime('%H'))
    epoch = float(time.time())
    minute = int(dt.now().strftime('%M'))
    second = int(dt.now().strftime('%S'))
    date_time = str(dt.now().strftime('%Y-%m-%d %H:%M:%S'))

    #backup hist
    backupPath = dataPath + '/hist_backup/cryptoHist_{}_{}.csv'.format(
        date.replace('-', '_'), 0)
    if hour >= 8 and hour <= 16:
        backupPath = dataPath + '/hist_backup/cryptoHist_{}_{}.csv'.format(
            date.replace('-', '_'), 1)
    elif hour > 16:
        backupPath = dataPath + '/hist_backup/cryptoHist_{}_{}.csv'.format(
            date.replace('-', '_'), 2)
    df.to_csv(backupPath, index=False)
    #spare backup path
    backupPath = dataPath + '/hist_backup/cryptoHist_{}_{}.csv'.format(
        date.replace('-', '_'), 3)

    # append backup
    backupList = os.listdir(dataPath + '/hist_backup')
    backupList.sort()
    if len(backupList) > 10:
        backupList = backupList[-10:]
    for f in backupList:
        filePath = dataPath + '/hist_backup/{}'.format(f)
        print('Read [ {} ]'.format(filePath))
        try:
            df = df.append(pd.read_csv(filePath).sort_values(
                ['dateTime'], ascending=[True]).tail(5000),
                           ignore_index=True)
        except:
            print('Can\'t Read {}   Column DateTime..'.format(filePath))

    os.system('cls||clear')
    for data in symbols:
        sym = data['symbol']
        if sym not in ticker:
            continue

        print('{}   {} Baht'.format(sym, ticker[sym]['last']))

        rowData = {
            'epoch': epoch,
            'date': date,
            'hour': hour,
            'minute': minute,
            'second': second,
            'symbol': sym,
            'dateTime': date_time
        }

        for colName in ticker[sym]:
            rowData[colName] = [ticker[sym][colName]]
        df = df.append(pd.DataFrame(rowData), ignore_index=True)

    # delete duplicate
    df.drop_duplicates(['symbol', 'date', 'hour', 'minute'],
                       keep='last',
                       inplace=True)
    #cleanup & sort
    epoch_limit = time.time() - (((5 * 24) * 60) * 60)
    df.dropna(subset=['epoch', 'dateTime'], inplace=True)
    df['epoch'] = pd.to_numeric(df['epoch'], errors='coerce')
    df['dateTime'] = df['dateTime'].astype(str)
    df = df[df['dateTime'] != 'nan']
    # sort chronologically; sort_values returns a new frame, so reassign it
    df = df.sort_values(['epoch', 'dateTime'], ascending=[True, True])
    df = df.drop(df[(df['date'].str.isdigit() == True)].index)
    df = df.drop(df[(df['dateTime'].str.isdigit() == True)].index)
    df = df.drop(df[(df['epoch'] < epoch_limit)].index)
    #limit row
    df = df.tail(limit)

    print('Save Historical Data...')
    allHistPath = dataPath + '/cryptoHist.csv'
    df = df[list(rowData)]
    df.to_csv(allHistPath, index=False)
    df.tail(5000).to_csv(backupPath, index=False)

    # retry the upload every 10 s until the 'History' sheet reports data
    while isInternetConnect():
        try:
            if not os.name == 'nt':  #for raspi
                print('uploading history data...')
                gSheet.updateFromCSV(allHistPath, 'History')
                print('upload history data finish')
        except:
            pass
        time.sleep(10)
        if gSheet.getAllDataS('History') != []:
            break
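The cleanup step above keeps only the newest row per (symbol, date, hour, minute) and discards anything older than roughly five days. A runnable sketch of just that dedup/age-limit logic on toy data (the column values are made up):

import time
import pandas as pd

now = time.time()
df = pd.DataFrame({
    'symbol': ['BTC_THB', 'BTC_THB', 'ETH_THB'],
    'date':   ['2023-01-01', '2023-01-01', '2023-01-01'],
    'hour':   [10, 10, 10],
    'minute': [5, 5, 5],
    'last':   [899000.0, 900000.0, 60000.0],
    'epoch':  [now, now, now - 10 * 24 * 60 * 60],   # last row is 10 days old
})

# keep the newest duplicate per (symbol, date, hour, minute)
df.drop_duplicates(['symbol', 'date', 'hour', 'minute'], keep='last', inplace=True)

# drop rows older than the 5-day window
epoch_limit = now - 5 * 24 * 60 * 60
df = df[df['epoch'] >= epoch_limit]
print(df)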
Example #5
def getSignalAllPreset():
    """Evaluate every preset against every symbol's history file, tag Entry/Exit
    signals (saving chart images for signalled symbols), merge the results into
    signal.csv, and push the record to the 'SignalRecord' worksheet."""
    rec_date = dt.datetime.now().strftime('%Y-%m-%d %H:00:00')
    minute = int(dt.datetime.now().strftime('%M'))
    if minute >= 30 and minute < 60:
        rec_date = dt.datetime.now().strftime('%Y-%m-%d %H:30:00')
    """
    elif minute >= 30 and minute < 45:
        rec_date = dt.datetime.now().strftime('%Y-%m-%d %H:30:00')
    elif minute >= 45 and minute < 59:
        rec_date = dt.datetime.now().strftime('%Y-%m-%d %H:45:00')
    """

    signal_df = pd.DataFrame()
    # Clear Directory
    imgPath = dataPath + '/analysis_img/'
    oldImgFiles = os.listdir(imgPath)
    for f in oldImgFiles:
        os.remove(imgPath + f)

    count = 0
    for file in histFileList:
        quote = file.split('.')[0]
        count += 1
        os.system('cls||clear')
        print('{}/{}  {}'.format(count, len(histFileList), quote))
        for ps in presetJson:
            try:
                df = getAnalysis(histPath + os.sep + file,
                                 ps,
                                 saveImage=False,
                                 showImage=False)
                df['Preset'] = ps
                df['Symbol'] = quote
                df['Rec_Date'] = rec_date

                # Condition List
                entry_condition_list = [
                    df['SMA_S'][0] > df['SMA_L'][0],  #0
                    df['%K'][0] > df['%D'][0],  #1
                    df['GL_Ratio'][0] > df['GL_Ratio_Slow'][0]  #2
                ]

                exit_condition_list = [
                    df['SMA_S'][0] < df['SMA_L'][0],
                    df['GL_Ratio'][0] < df['GL_Ratio_Slow'][0]
                ]

                # Condition Setting
                filter_condition = (
                    #df['SMA_L'][0] >= df['Close'].mean() and
                    df['Volume'][0] >= df['Volume'][1]
                    and df['GL_Ratio_Slow'][0] > 1
                    #True
                )
                entry_condition = (entry_condition_list[0]
                                   and entry_condition_list[1]
                                   and entry_condition_list[2])
                exit_condition = (exit_condition_list[0]
                                  and exit_condition_list[1])

                # Trade Entry
                if filter_condition and entry_condition:
                    print('Preset : {} | Entry : {}'.format(ps, file))
                    df['Signal'] = 'Entry'
                    signal_df = signal_df.append(df.iloc[0])
                    getAnalysis(histPath + os.sep + file,
                                ps,
                                saveImage=True,
                                showImage=False)
                # Trade Exit
                elif filter_condition and exit_condition:
                    print('Preset : {} | Exit : {}'.format(ps, file))
                    df['Signal'] = 'Exit'
                    signal_df = signal_df.append(df.iloc[0])
                    getAnalysis(histPath + os.sep + file,
                                ps,
                                saveImage=True,
                                showImage=False)
                else:
                    df['Signal'] = ''
                    signal_df = signal_df.append(df.iloc[0])
            except:
                pass

    signal_df = signal_df.sort_values(
        ['Signal', 'Preset', 'Value_M', 'GL_Ratio'],
        ascending=[True, True, False, False])
    csvPath = dataPath + os.sep + 'signal.csv'
    if not os.path.exists(csvPath):
        signal_df.to_csv(csvPath, index=False)

    # New Signal DataFrame (All Signal Record)
    new_signal_df = pd.read_csv(csvPath)
    new_signal_df = new_signal_df[new_signal_df['Rec_Date'] != rec_date]
    new_signal_df = new_signal_df.append(signal_df)
    new_signal_df = new_signal_df.sort_values(['Date', 'Rec_Date', 'Signal'],
                                              ascending=[True, True, True])
    new_signal_df.drop_duplicates(['Rec_Date', 'Preset', 'Symbol'],
                                  keep='last',
                                  inplace=True,
                                  ignore_index=False)
    new_signal_df = new_signal_df.tail(5000)
    new_signal_df.to_csv(csvPath, index=False)

    if not os.name == 'nt':
        # Update G Sheet
        gsheet_csvPath = dataPath + os.sep + 'signal_gsheet.csv'
        new_signal_df.to_csv(gsheet_csvPath, index=False)
        gSheet.updateFromCSV(gsheet_csvPath, 'SignalRecord')
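The signal logic boils down to three entry checks and two exit checks on the first row of the analysis frame. A self-contained sketch of that decision with made-up values, assuming SMA_S/SMA_L are the short and long moving averages and GL_Ratio/GL_Ratio_Slow the fast and slow gain/loss ratios:

row = {
    'SMA_S': 105.0, 'SMA_L': 100.0,          # short vs long moving average
    '%K': 70.0, '%D': 55.0,                  # stochastic oscillator lines
    'GL_Ratio': 1.4, 'GL_Ratio_Slow': 1.1,   # fast vs slow gain/loss ratio
}

entry = (row['SMA_S'] > row['SMA_L']
         and row['%K'] > row['%D']
         and row['GL_Ratio'] > row['GL_Ratio_Slow'])
exit_ = (row['SMA_S'] < row['SMA_L']
         and row['GL_Ratio'] < row['GL_Ratio_Slow'])

print('Signal:', 'Entry' if entry else ('Exit' if exit_ else ''))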