'https://www.googleapis.com/auth/drive'
]
creds = ServiceAccountCredentials.from_json_keyfile_name(cred_file, scope)
client = gspread.authorize(creds)

# In[49]:

sheet = client.open('Social Distancing Form URLs')

# In[50]:

ws = sheet.worksheet('Sheet1')

# In[51]:

urls = get_as_dataframe(ws, evaluate_formulas=True,
                        header=0)[['Form Type', 'Borough', 'URL']].iloc[0:20]

# In[36]:

#urls['URL'] = urls.apply(lambda x: x['URL'] + '/edit?usp=drive_web', axis = 1)

# In[65]:


def get_urls(urls):
    final_sites = []
    for i, v in urls.iterrows():
        #print(v['URL'])
        #Make a GET request to fetch the raw HTML content
        html_content = requests.get(v['URL']).text
Example #2
gc = gspread.authorize(creds)

#subAsset = ["BNB","XRP"]

subAsset = ["BTC"]

#Update while Loop
while True:
    #try:
    timeBegin = time.time()
    print(datetime.datetime.now().strftime('%H:%M'))
    for i in range(len(subAsset)):

        ws = gc.open("Data").worksheet(subAsset[i])  # open the per-asset worksheet (e.g. BNB, XRP)
        df = get_as_dataframe(ws).set_index(
            'indexAround'
        )  # load the Google Sheet data and set the indexAround column as the pandas index
        Around = df.loc['Around']['Balance']  # round counter

        df = callFuntion.updatee(df, Around, subAsset[i])
        df = df.loc[:, ~df.columns.str.contains(
            '^Unnamed')]  # drop unwanted columns

        #print("Round " + str(Around) + ' of ' + str(subAsset[i]) + ' has amount ' + df.loc[Around]['Asset'] + ' Balance = ' + df.loc[Around]['Balance'] + ' ' + str(callFuntion.MainAsset))
        print(df.loc[Around].to_frame().T)
        set_with_dataframe(
            gc.open("Data").worksheet(subAsset[i]),
            df.reset_index())  # write back to the worksheet

    timeEnd = time.time()
    timeElapsed = timeEnd - timeBegin
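    # Hedged sketch -- not in the original, which is truncated here: the elapsed
    # time is presumably used to pace the loop, e.g. by sleeping out the rest of
    # an assumed 60-second update cycle before the next pass:
    # time.sleep(max(0, 60 - timeElapsed))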
Example #3
client = gspread.authorize(creds)

# =============================================================================
# Load incremental_id to for primary key
# =============================================================================

print('\n' + 'Loading Incremental ID from Google Sheets...')
gspread_incremental_id = '11o64o422YMR6-wkYDQgrrjN60nc0gvzks2aZ306Yt2c'
gsheet_name_incremental_id = 'Incremental ID'
incremental_id_sheet = client.open_by_key(gspread_incremental_id)
incremental_id_worksheet = incremental_id_sheet.worksheet(
    gsheet_name_incremental_id)

## Load the sheet into a DataFrame with index set at col 0 and header set as row 0
incremental_id = get_as_dataframe(incremental_id_worksheet,
                                  header=0,
                                  index_col=0)
## Remove NaN to save memory
incremental_id = incremental_id.dropna(how='all')
incremental_id = incremental_id.dropna(axis=1)

pk_id = int(incremental_id.loc['keyword_id', 'Id'])
print('\n' + f'Incremental ID is: {pk_id}')
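## Hedged sketch (not part of the original script): once new records are assigned
## keys, the counter would typically be bumped and written back, e.g. assuming
## `n_new_rows` records were created:
## incremental_id.loc['keyword_id', 'Id'] = pk_id + n_new_rows
## set_with_dataframe(incremental_id_worksheet, incremental_id, include_index=True)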

# =============================================================================
# Load client list from google sheets
# =============================================================================

print('\n' + 'Loading Client list from Google Sheets')

## Load tech_seo_client_list from Google Sheets
Example #4
import requests
import numpy as np
import pandas as pd
from pandas.io.json import json_normalize
import ccxt  # needed below for the FTX exchange client
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from gspread_dataframe import get_as_dataframe, set_with_dataframe

scope = ["https://spreadsheets.google.com/feeds",'https://www.googleapis.com/auth/spreadsheets',"https://www.googleapis.com/auth/drive.file","https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name("API.json", scope)
gc = gspread.authorize(creds)

sheetname = 'Data2'

# load the Google Sheet data and set the Product column as the pandas index
df = get_as_dataframe(gc.open(sheetname).worksheet('Monitor') ).set_index('Product')
dfMap = get_as_dataframe(gc.open(sheetname).worksheet('Map'))


#### Pre-trade details --------------------------------------------------------
tradeFuntion = 'RSI'
Balance = 'USD'
whatsymbol = "XRP-PERP"
###########  API settings -----------------------------------------------------
subaccount = 'Benz-Test-Bot'  # FTX subaccount name, if any
exchange = ccxt.ftx({
        'apiKey': '***********',
        'secret': '************',
        'enableRateLimit': True,
    })
if subaccount == "":
Example #5
tweets.drop(labels=['Extended_Entities'], axis=1, inplace=True)

tweets['Date'] = tweets['Date'].map(lambda x: tz_convert(x))
tweets['Date'] = tweets['Date'].astype(
    str)  #write PST datetime to string so it can be appended to Google Sheets

scope = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive'
]

credentials = ServiceAccountCredentials.from_json_keyfile_name(
    './PyFilter-34d3cda723bf.json', scope)

gc = gspread.authorize(credentials)

ws = gc.open("PyFilter").worksheet(
    "Twitter_Data")  #open google sheet and worksheet
existing = gd.get_as_dataframe(worksheet=ws)  #get worksheet as dataframe
updated = existing.append(tweets, ignore_index=False, sort=False)

gd.set_with_dataframe(ws, updated, resize=True)
print('appended to google sheet')
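# Note: DataFrame.append was removed in pandas 2.0; on a current pandas the update
# above would be written with pd.concat (assuming pandas is imported as pd), e.g.:
# updated = pd.concat([existing, tweets], sort=False)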

# delete photos that have been downloaded
for file in os.listdir("../images"):
    if file[-4:] == '.jpg':
        os.remove("../images/" + file)

print('Photos Removed from Folder')
print('Executed Successfully')
 def test_parse_dates_true(self):
     df = get_as_dataframe(self.sheet, index_col=4, parse_dates=True)
     self.assertEqual(df.index[0], pd.Timestamp("2017-03-04 00:00:00"))
Example #7
def webhook():

    GHkit_ID = ['p27bw7yga4', '52sbt8fd8b']
    now = datetime.datetime.now()
    event_date = str(now.year) + "年" + str(now.month) + "月" + str(
        now.day) + "日"
    speak_date = "今日"
    scope = ['https://www.googleapis.com/auth/drive']

    # place the downloaded JSON key file in the same folder and reference it here
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'My First Project-fc3744a8d618.json', scope)
    gc = gspread.authorize(credentials)

    # specify the name of the spreadsheet shared with the service account
    worksheet = gc.open("Event_Info").sheet1

    # load the worksheet into a DataFrame
    df = get_as_dataframe(worksheet, parse_dates=False, index=None)

    # TODO: everything below operates on the DataFrame
    text = ""

    df_filtered = df[df['日付'].isin([event_date])]
    length = len(df_filtered.index)

    # if one or more events fall exactly on the specified date
    if length > 0:
        titles = df_filtered['イベント名'].values.tolist()
        places = df_filtered['場所'].values.tolist()
        timestamps = df_filtered['時間'].values.tolist()
        regions = df_filtered['地区'].values.tolist()
        text = 'おはようございます。' + speak_date + 'は、'

        for i in range(length):
            if i > 0:
                text = text + 'また、'
            if timestamps[i] == '-':
                text = text + places[i] + "で、" + titles[i] + "があります。"
            else:
                text = text + places[i] + "で、" + timestamps[i] + "から" + titles[
                    i] + "があります。"

    # otherwise, fetch the nearest upcoming date
    else:
        Founded = False
        date_list = df['日付'].values.tolist()
        dt_format_query = datetime.datetime.strptime(event_date, '%Y年%m月%d日')

        for j in range(1, len(date_list)):
            # convert to datetime
            dt_format = datetime.datetime.strptime(date_list[j], '%Y年%m月%d日')
            if dt_format_query < dt_format:
                df_filtered = df[df['日付'].isin([date_list[j]])]
                Founded = True
                break
        if Founded:
            length = len(df_filtered.index)
            titles = df_filtered['イベント名'].values.tolist()
            places = df_filtered['場所'].values.tolist()
            timestamps = df_filtered['時間'].values.tolist()
            regions = df_filtered['地区'].values.tolist()
            text = 'おはようございます。今日はイベントはありません。近い日にちだと、' + str(
                date_list[j]).replace('2018年', '') + 'に、'

            for i in range(length):
                if i > 0:
                    text = text + 'また、'
                if timestamps[i] == '-':
                    text = text + places[i] + "で、" + titles[i] + "があります。"
                else:
                    text = text + places[i] + "で、" + timestamps[
                        i] + "から" + titles[i] + "があります。"

        else:
            text = 'おはようございます。近くでイベントは特にありませんが、'

    url = 'http://ifttt.ghkit.jp/'
    headers = {"Content-Type": "application/json"}
    text = text + '外に出かけてみては、いかがでしょうか。'

    r = []

    for j in range(len(GHkit_ID)):
        # POST to each GHkit device

        text_post = GHkit_ID[j] + text
        text_post = '"' + text_post + '"'
        obj = {"message": text_post}
        json_data = json.dumps(obj).encode("utf-8")

        # prepare the HTTP request and POST it
        r.append(requests.post(url, data=json_data, headers=headers))

    return r
 def test_evaluate_formulas_true(self):
     df = get_as_dataframe(self.sheet, evaluate_formulas=True)
     self.assertEqual(list(df.columns.values), COLUMN_NAMES)
     self.assertEqual(df["Formula Column"][0], 2.226)
 def test_evaluate_formulas_false(self):
     df = get_as_dataframe(self.sheet)
     self.assertEqual(list(df.columns.values), COLUMN_NAMES)
     self.assertEqual(df["Formula Column"][0], "=R[0]C[-1]*2")
fp_folder = [file['id'] for file in file_list if file['title'] == 'redakcja FP'][0]
file_list = drive.ListFile({'q': f"'{fp_folder}' in parents and trashed=false"}).GetList() 
last_number = max(file_list, key=lambda x: x['createdDate'])
print(f"{last_number['title']}  |  {last_number['id']}")

pd.options.display.max_colwidth = 10000

now = datetime.datetime.now()
year = now.year
month = '{:02d}'.format(now.month)
day = '{:02d}'.format(now.day)

table_id = input('Enter the spreadsheet ID of the current issue: ')
aktualny_numer_sheet = gc.open_by_key(table_id)

aktualny_numer = get_as_dataframe(aktualny_numer_sheet.worksheet('artykuły'), evaluate_formulas=True).dropna(how='all').dropna(how='all', axis=1)
kategorie_wpisow = get_as_dataframe(gc.open_by_key('1hPVa854YZ4DIajzVoWshXFotd3A7lHHz_d-GqEgn4BI').worksheet('Arkusz1')).dropna(how='all').dropna(how='all', axis=1)

foldery_lokalne = aktualny_numer['folder lokalny'].drop_duplicates().to_list()

for folder in foldery_lokalne:
    if 'pl' in folder.lower():
        folder_pl = folder + '/'
    else:
        folder_eng = folder + '/'
        
pdf_pl = glob.glob(folder_pl + '*.pdf', recursive=True)
jpg_pl = glob.glob(folder_pl + '*.jpg', recursive=True)

pdf_eng = glob.glob(folder_eng + '*.pdf', recursive=True)
jpg_eng = glob.glob(folder_eng + '*.jpg', recursive=True)
Example #11
File: test.py  Project: tech-deb/covidoff
         'https://www.googleapis.com/auth/drive']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    'covidoff-ecef33b9fe0b.json', scope)
gc = gspread.authorize(credentials)
st.write("# **CovidOff View Data Portal**")
st.write("This is the Portal for all the Data Resources. These resources are filled by general people. Verification at utmostlevel is not guaranteed. Select your required service from the below dropdown")
selected = st.selectbox(
    "Select Service", ("Choose from the DropDown", "View Plasma Requirements", "View Plasma Donors", "View Oxygen Requirements", "View Oxygen Suppliers/Availability", "Search for Bed Availability", "Search for Remdesivr Distributors", "Other Resources"))
if selected == "Choose from the DropDown":
    pass
if selected == "View Plasma Requirements":

    sht2 = gc.open_by_url(
        'https://docs.google.com/spreadsheets/d/1kcj_u4j9269CcdQGXuLzdU-WzrtWdR5vdOEwN7-46JM/edit#gid=0')
    worksheet = sht2.sheet1
    df2 = get_as_dataframe(worksheet)
    df2 = df2[df2['phone'].notna()]
    df2.rename(columns={'timestamp': 'Timestamp', 'name': 'Name / Contact Person', 'phone': 'Contact Number', 'bloodGroup': 'Blood Group', 'age': 'Age',
                        'gender': 'Gender', 'Cities': 'City / Area / Location', 'Category': 'State / Territory', 'selectDistrict': 'District', 'email': 'Email Id'}, inplace=True)
    df2 = df2.fillna(0)  # assign back; fillna returns a new DataFrame
    st.write("## **Showing Plasma Requirements**")
    page_name = ['Choose Option', 'View Full Data',
                 'View Filtered Data']
    page = st.radio('Choose your preferred type', page_name)
    if page == "Choose Option":
        st.info("👆 You have to choose any of the above two Options.")
    elif page == "View Full Data":
        df2 = df2.iloc[::-1]
        st.write(df2.iloc[:, 1:11])
    elif page == "View Filtered Data":
        st.write(
import pandas as pd
import gspread
from gspread_dataframe import get_as_dataframe, set_with_dataframe
from oauth2client.service_account import ServiceAccountCredentials

scope = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive'
]

# Create a new project and service account on Google API backend.
creds = ServiceAccountCredentials.from_json_keyfile_name(
    'auth/fieyo_gsuite_service.json', scope)

gsheets = gspread.authorize(creds)

# Need to use the API console and activate Google Sheets API and Google Drive API.
# You must give the service key email permissions via Spreadsheet Share.
wb = gsheets.open_by_url(
    'https://docs.google.com/spreadsheets/d/1UPx6fJCqY2KYXf3_S_QKe-RBLD0zuzOZTAgekRA_mxo/edit?usp=sharing&fbclid=IwAR3BJRJFBVZLz_MViROz5L4hj3Fe1bG-U-WekIisEECZuSqV-nLSlkQMX_Y'
)
ws = wb.get_worksheet(index=0)

df = get_as_dataframe(ws)
df = df.dropna(subset=['contact_email', 'agent_email']).dropna(axis='columns',
                                                               how='all')

import ipdb
ipdb.set_trace()
    "'103h2kWIAKDBG2D6KydU4NjTyldBfxBAK' in parents and trashed=false"
}).GetList()
#[print(e['title'], e['id']) for e in file_list]
nstl_folder = [file['id'] for file in file_list if file['title'] == 'STL'][0]
file_list = drive.ListFile({
    'q': f"'{nstl_folder}' in parents and trashed=false"
}).GetList()
#[print(e['title'], e['id']) for e in file_list]
nstl_sheet = [
    file['id'] for file in file_list
    if file['title'] == 'biblioteki cyfrowe i repozytoria PL'
][0]
s_journals = gc.open_by_key(nstl_sheet)
s_journals.worksheets()
journals_df = get_as_dataframe(
    s_journals.worksheet('Czasopisma')).dropna(how='all').dropna(how='all',
                                                                 axis=1)
journals_df = journals_df[journals_df['OAI-PMH'] != ''].reset_index(drop=True)

list_of_dicts = []
for i, row in journals_df.iterrows():
    print(f"{i+1}/{len(journals_df)}")
    oai_url = row['OAI-PMH']
    tytul_czasopisma = row['Czasopisma'].replace('"', '')
    sickle = Sickle(oai_url)
    if tytul_czasopisma == 'Czytanie Literatury':
        records = sickle.ListRecords(metadataPrefix='oai_dc',
                                     set='com_11089_5783')
    else:
        records = sickle.ListRecords(metadataPrefix='oai_dc')
Example #14
def webhook():

    import sys
    print('Received instruction', file=sys.stderr)

    # If update from cloud each time, then do it
    if EL.continuous_cloud_update:
        EL.df = gsd.get_as_dataframe(EL.raw_worksheet, include_index=True,
                                     parse_dates=True, userows=[0,1])

    # Obtain request object and parse JSON
    req = request.get_json(force=True)
    
    # Decode the intent
    intent = EL.get_intent(req)
    print(f'---------------', file=sys.stderr)
    print(f'Intent={intent}', file=sys.stderr)
    print(f'---------------', file=sys.stderr)

    # Default text to respond with unless modified
    fulfillmentText = req.get('queryResult').get('fulfillmentText')

    ## ADD TO TABLE
    intent_recognized = True
    if intent == "adjust-tetrode":
        # Acquire endogenous fulfillment text and return
        fulfillmentAddon = EL.entry_adjust_tetrode(
            *[Elogger.get_parameter(req, x) for x in
                                  ('direction','tetrode','turns')])
        fulfillmentText += fulfillmentAddon
    elif intent == "ripples":
        fulfillmentText  = EL.entry_ripples(Elogger.get_parameter(req, 'magnitude'))
    elif intent == "theta":
        fulfillmentText  = EL.entry_theta(Elogger.get_parameter(req,   'magnitude'))
    elif intent == "delta":
        fulfillmentText  = EL.entry_delta(Elogger.get_parameter(req,   'magnitude'))
    elif intent == "cells":
        fulfillmentText  = EL.entry_cells(Elogger.get_parameter(req,   'magnitude'))
    elif intent == "backup":
        fulfillmentText  = EL.backup()
    elif intent == "undo":
        fulfillmentText = EL.entry_undo_entry()
        # Acquire endogenous fulfillment text and return
        #fulfillmentText = req.get('queryResult').get('fulfillmentText')
    elif intent == "notes":
        fulfillmentText = EL.entry_notes(Elogger.get_parameter(req, 'note'))
    elif intent == "marker":
        fulfillmentText = EL.entry_marker(Elogger.get_parameter(req, 'marker'))
    elif intent == "raw2pretty":
        EL.raw2pretty()
    elif intent == "pretty2raw":
        EL.pretty2raw()
    elif intent == "change-tetrode":
        EL.entry_adjust_tetrode('down', Elogger.get_parameter(req, 'tetrode'), 0)
    elif intent == "get-depth":
        fulfillmentText = EL.get_depth(*Elogger.get_parameters(req, ['tetrode', 'depth_type']))
    elif intent == "set-area":
        fulfillmentText = EL.entry_set_area(*Elogger.get_parameters(req, ['area', 'tetrode']))
    elif intent == "set-time":
        fulfillmentText = EL.entry_set_time(*Elogger.get_parameters(req, ['mode', 'time']))
    elif intent == "dead":
        fulfillmentText = EL.entry_dead(*Elogger.get_parameters(req, ['tetrode', 'channel']))
    else:
        intent_recognized = False
        fulfillmentText = "Intent not recognized!"

    # Upload new table
    # ----------------
    gsd.set_with_dataframe(EL.raw_worksheet, EL.df, include_index=True, resize=True, allow_formulas=False)

    return jsonify({"fulfillmentText":fulfillmentText})
 def test_no_nafilter(self):
     df = get_as_dataframe(self.sheet, na_filter=False)
     self.assertEqual(df["Dialect-specific implementations"][7], "")
 def test_usecols(self):
     df = get_as_dataframe(self.sheet, usecols=USECOLS_COLUMN_NAMES)
     self.assertEqual(list(df.columns.values), USECOLS_COLUMN_NAMES)
 def test_nafilter(self):
     df = get_as_dataframe(self.sheet, na_filter=True)
     self.assertTrue(np.isnan(df["Dialect-specific implementations"][7]))
 def test_indexcol(self):
     df = get_as_dataframe(self.sheet, index_col=4)
     self.assertEqual(len(df.columns), 9)
     self.assertEqual(df.index.name, "Date Column")
     self.assertEqual(type(df.index).__name__, "Index")
     self.assertEqual(df.index.values[0], "2017-03-04")
Example #19
import random
import numpy as np
import gspread
import gspread_dataframe as gd
from oauth2client.service_account import ServiceAccountCredentials

# use creds to create a client to interact with the Google Drive API
scope = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive'
]
creds = ServiceAccountCredentials.from_json_keyfile_name(
    'jsonFileFromGoogle.json', scope)
client = gspread.authorize(creds)

# Find a workbook by name and open the first sheet
# Make sure you use the right name here.
sheet = client.open("ROK data").sheet1

existing = gd.get_as_dataframe(sheet)

kds = []
# Extract and print all of the values
list_of_hashes = sheet.get_all_records()
for i in list_of_hashes:
    kds.append(i['server'])

total_kd_list = np.arange(1000, 1700)

cou = set(kds)
total_set = np.arange(1, 700)
missing = set(total_set) - cou
missing = list(missing)
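# Note: total_kd_list (np.arange(1000, 1700)) defined above is not used below;
# the missing servers are computed against total_set (1-699) instead.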

total_kds_pulling = 1
 def test_indexcol_none(self):
     df = get_as_dataframe(self.sheet, index_col=False)
     self.assertEqual(len(df.columns), 10)
     self.assertEqual(df.index.name, None)
     self.assertEqual(type(df.index).__name__, "RangeIndex")
     self.assertEqual(list(df.index.values), list(range(9)))
Example #21
]
df4['Title'] = np.select(conditions, choices, default='none')
df4.head()

# In[130]:

import gspread_dataframe as gd
import gspread
import google.cloud as gc
from oauth2client.service_account import ServiceAccountCredentials

# In[131]:

scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    'C:\\Users\\HassSarw\\gcs_service_account_file\\itv-ds-dev-6cd968b7542d.json',
    scope)
gc = gspread.authorize(credentials)
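# Note: this rebinds gc, which above was the alias for the google.cloud import;
# from here on, gc is the gspread client.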

# In[141]:

ws2 = gc.open("LoveIsland Facebook Video Views").worksheet('Sheet4')
existing2 = gd.get_as_dataframe(ws2)
updated2 = existing2.append(df4)
updated2 = updated2.drop_duplicates(['Date', 'Title'], keep='last')
cell_list2 = ws2.range('A2:D500')
for cell in cell_list2:
    cell.value = ''
ws2.update_cells(cell_list2)
gd.set_with_dataframe(ws2, updated2)
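# Hedged alternative (not in the original): instead of blanking A2:D500 by hand,
# gspread-dataframe can resize the sheet to fit the frame in one call, e.g.:
# gd.set_with_dataframe(ws2, updated2, resize=True)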
 def test_header_false(self):
     df = get_as_dataframe(self.sheet, header=None)
     self.assertEqual(len(df), 10)
Example #23
def Trigger_trade():
    difZone = df.loc[whatsymbol]['DifZone']
    for i, row in dfMap.iterrows():
        if pd.notna(row['IDorderBuy']):
            idOrderbuy = row['IDorderBuy']
            orderMatchedBUY = checkByIDoder(idOrderbuy)
            if orderMatchedBUY['filled'] == 0:
                # if the cooldown has expired and the buy still hasn't filled, cancel the limit buy order
                if pd.notna(row['timecancelbuy']):
                    # has 10 minutes passed? if so, cancel the order
                    first_time = row['timecancelbuy']
                    start_time = first_time + 600  # 10-minute countdown before cancelling the order
                    target_time = time.time()
                    timeElapsed = target_time - start_time
                    if timeElapsed > 0:
                        cancelOrder(idOrderbuy)
                        # clear this bullet's data once the order is cancelled
                        # after a cancel, all old values must be cleared, otherwise ccxt raises ccxt.base.errors.InvalidOrder: order_not_exist_or_not_allow_to_cancel
                        row['IDorderBuy'] = np.nan
                        row['OpenPrice'] = np.nan
                        row['AmountBuy'] = np.nan
                        row['FilledBuy'] = np.nan
                        row['ExposureBuy'] = np.nan
                        row['timecancelbuy'] = np.nan

            # a Sell order can only be opened once there is a Buy position size
            elif orderMatchedBUY['filled'] == orderMatchedBUY['amount']:
                row['timecancelbuy'] = np.nan
                if pd.isna(row['FilledBuy']):
                    row['FilledBuy'] = orderMatchedBUY['filled']
                    row['feeBuy'] = Getfee_ByIDoderinMyTrades(idOrderbuy, orderMatchedBUY['side']) #fee
                    # record the trade in TradeLog
                    # everything must be converted to strings, otherwise it cannot be saved
                    # build the DataFrame first
                    print('OpenOrder Price : '+str(orderMatchedBUY['price']))
                    print('Amount : '+str(orderMatchedBUY['filled']))

                if pd.notna(row['IDorderSell']):
                    idOrdersell = row['IDorderSell']
                    orderMatchedSELL = checkByIDoder(idOrdersell)
                    # sell fully filled, so the position closed with a profit
                    if orderMatchedSELL['filled'] == orderMatchedSELL['amount']:
                        row['LastClosePrice'] = orderMatchedSELL['price']
                        row['feeSell'] = Getfee_ByIDoderinMyTrades(idOrdersell, orderMatchedSELL['side'])  # fee
                        ExposureBuy = row['ExposureBuy']
                        ExposureSell = orderMatchedSELL['filled'] * orderMatchedSELL['price']

                        feesell = row['feeSell']
                        feebuy = row['feeBuy']
                        if pd.isna(feesell):
                            feesell = 0
                        if pd.isna(feebuy):
                            feebuy = 0

                        profitshow = (ExposureSell - ExposureBuy) - (feesell + feebuy)

                        if pd.isna(row['Profit']):
                            row['Profit'] = profitshow
                        elif pd.notna(row['Profit']):
                            row['Profit'] = row['Profit'] + profitshow

                        if pd.isna(row['round']):
                            row['round'] = 1
                        elif pd.notna(row['round']):
                            row['round'] = row['round'] + 1

                        print('Sell price : ' + str(orderMatchedSELL['price']))
                        print('Profit : ' + str(profitshow))
                        profitshowLine = round(profitshow, 4)
                        LineNotify('\n' + 'Sell price : ' + str(orderMatchedSELL['price']) + '\n' + 'Profit : ' + str(profitshowLine) + ' usd', 'change')
                        if pd.isna(profitshow):
                            LineNotify(
                                'nan bug ExposureSell : ' + str(ExposureSell) + '\n' +
                                'nan bug ExposureBuy : ' + str(ExposureBuy) + '\n' +
                                'nan bug feeSell : ' + str(row['feeSell']) + '\n' +
                                'nan bug feeBuy : ' + str(row['feeBuy'])
                                ,'change')

                        dfTradeLog = get_as_dataframe(gc.open(sheetname).worksheet('TradeLog'))
                        # record the trade in TradeLog
                        # everything must be converted to strings, otherwise it cannot be saved
                        # build the DataFrame first
                        dfTradeLog3 = pd.DataFrame({'IDorderOrderBuy': [str(idOrderbuy)]
                                                       , 'IDorderOrderSell': [str(idOrdersell)]
                                                       , 'Open': [str(orderMatchedBUY['price'])]
                                                       , 'Close': [str(orderMatchedSELL['price'])]
                                                       , 'Amount': [str(orderMatchedSELL['filled'])]
                                                       , 'TradeTrigger': [str(row['TradeTrigger'])]
                                                       , 'Zone': [str(row['Zone'])]
                                                       , 'OpenTime': [str(orderMatchedBUY['datetime'])]
                                                       , 'CloseTime': [str(orderMatchedSELL['datetime'])]
                                                       , 'Profit': [str(profitshow)]
                                                       , 'feeBuy': [str(row['feeBuy'])]
                                                       , 'feeSell': [str(row['feeSell'])]

                                                    })
                        dfTradeLog = dfTradeLog.append(dfTradeLog3, ignore_index=True)
                        dfTradeLogg = dfTradeLog.drop(columns=[c for c in dfTradeLog.columns if "Unnamed" in c]).dropna(how="all")
                        set_with_dataframe(gc.open(sheetname).worksheet('TradeLog'), dfTradeLogg)  # write back to the TradeLog sheet

                        # clear the bullet's data when the round completes, freeing the bullet
                        # buy bullet data
                        row['IDorderBuy'] = np.nan
                        row['OpenPrice'] = np.nan
                        row['AmountBuy'] = np.nan
                        row['FilledBuy'] = np.nan
                        row['timecancelsell'] = np.nan
                        row['ExposureBuy'] = np.nan
                        row['NAV'] = np.nan
                        row['feeBuy'] = np.nan

                        # reset the trade-pattern state so it can be randomized again
                        row['TradeTrigger'] = np.nan

                        # sell bullet data
                        row['IDorderSell'] = np.nan
                        row['ClosePrice'] = np.nan
                        row['AmountSell'] = np.nan
                        row['feeSell'] = np.nan


                    elif orderMatchedSELL['filled'] == 0:
                        # if the cooldown has expired and the sell never fills, cancel the limit Sell order
                        if pd.notna(row['timecancelsell']):
                            # has 10 minutes passed? if so, cancel the order
                            first_time = row['timecancelsell']
                            start_time = first_time + 600  # 10-minute countdown before cancelling the order
                            target_time = time.time()
                            timeElapsed = target_time - start_time
                            if timeElapsed > 0:
                                cancelOrder(idOrdersell)
                                # clear this bullet's data after cancelling the order
                                # after a cancel, all old values must be cleared, otherwise ccxt raises ccxt.base.errors.InvalidOrder: order_not_exist_or_not_allow_to_cancel
                                row['IDorderSell'] = np.nan
                                row['ClosePrice'] = np.nan
                                row['AmountSell'] = np.nan
                                row['timecancelsell'] = np.nan


                # conditions for firing a sell bullet
                if pd.isna(row['IDorderSell']):
                    NowPrice = getPrice(whatsymbol)
                    if pd.notna(row['OpenPrice']):
                        if NowPrice > (row['OpenPrice'] + (difZone*2)):  # must be at least 2 zones higher before taking profit
                            # MapTrigger = -1 marks an area where holdings are reduced: trim the Buy Hold by opening a Sell equal to that bullet's position size
                            if row['MapTrigger'] == -1 and row['Zone'] > 0:
                                checktradesell = False
                                if tradeFuntion == 'RSI':
                                    if row['TradeTrigger'] >= 1 and row['TradeTrigger'] <= 25:
                                        getRSIvalue = RSI('5m')
                                        if getRSIvalue > 70:
                                            print(getRSIvalue)
                                            checktradesell = True

                                    if row['TradeTrigger'] >= 26 and row['TradeTrigger'] <= 50:
                                        getRSIvalue = RSI('15m')
                                        if getRSIvalue > 70:
                                            print(getRSIvalue)
                                            checktradesell = True

                                    if row['TradeTrigger'] >= 51 and row['TradeTrigger'] <= 75:
                                        getRSIvalue = RSI('1h')
                                        if getRSIvalue > 70:
                                            print(getRSIvalue)
                                            checktradesell = True

                                    if row['TradeTrigger'] >= 76:
                                        getRSIvalue = RSI('4h')
                                        if getRSIvalue > 70:
                                            print(getRSIvalue)
                                            checktradesell = True
                                if tradeFuntion == 'percent':
                                    pricenow = getPrice(whatsymbol)
                                    Openprice_ = row['OpenPrice']
                                    minpercenttore = Openprice_ / 100
                                    Closeprice_ = Openprice_ + minpercenttore
                                    if pricenow > Closeprice_:
                                        checktradesell = True

                                if checktradesell == True:
                                    positionSizeClose = orderMatchedBUY['filled']

                                    # open a Sell order to close out the Buy order
                                    orderSell = re(whatsymbol, 'limit', 'sell', positionSizeClose)

                                    row['IDorderSell'] = orderSell['id']
                                    row['ClosePrice'] = orderSell['price']
                                    row['AmountSell'] = orderSell['amount']
                                    row['timecancelsell'] = time.time()

        # conditions for firing a buy bullet (put this bullet to use)
        if pd.isna(row['IDorderBuy']):
            if row['MapTrigger'] == 1 and row['Zone'] > 0 and row['Exposure'] > 0 and row['UseZone'] == 1:  # MapTrigger = 1 marks an area that should hold a bullet
                checktradebuy = False

                if tradeFuntion == 'RSI':
                    if row['TradeTrigger'] >= 1 and row['TradeTrigger'] <= 25:
                        getRSIvalue = RSI('5m')
                        if getRSIvalue < 30:
                            checktradebuy = True

                    if row['TradeTrigger'] >= 26 and row['TradeTrigger'] <= 50:
                        getRSIvalue = RSI('15m')
                        if getRSIvalue < 30:
                            checktradebuy = True

                    if row['TradeTrigger'] >= 51 and row['TradeTrigger'] <= 75:
                        getRSIvalue = RSI('1h')
                        if getRSIvalue < 30:
                            checktradebuy = True

                    if row['TradeTrigger'] >= 76:
                        getRSIvalue = RSI('4h')
                        if getRSIvalue < 30:
                            checktradebuy = True
                            # delay entry while volume is coming in
                            # df._set_value(whatsymbol, 'TimerDelay', time.time())
                            # df._set_value(whatsymbol, 'Stat', 'Cooldown')

                if tradeFuntion == 'percent':
                    pricenow = getPrice(whatsymbol)
                    if pricenow < row['Zone']:
                        checktradebuy = True

                if checktradebuy == True :
                    # cost per bullet
                    expousre = row['Exposure']
                    # order quantity per single bullet
                    amount = abs(expousre) / float(getPrice(whatsymbol))

                    orderBuy = re(whatsymbol, 'limit', 'buy', amount)

                    row['IDorderBuy'] = orderBuy['id']
                    row['OpenPrice'] = orderBuy['price']
                    row['AmountBuy'] = orderBuy['amount']
                    row['ExposureBuy'] = orderBuy['amount'] * orderBuy['price']
                    row['timecancelbuy'] = time.time()
 def test_header_first_row(self):
     df = get_as_dataframe(self.sheet, header=0)
     self.assertEqual(len(df), 9)
            df4['Support Scores'] = df4['Support Scores'].astype(str)
            df4['Phone'] = df4['Preferred Phone'].astype(str)
            df4['VANID'] = df4['VANID'].astype(str).replace(r'\.0', '', regex=True)
            df4 = df4[[
                'id', 'VANID', 'Full Name', 'Address', 'Phone',
                'Support Scores'
            ]]

            print('Airtable df created')
            print(df4.head())

            sh = client.open_by_key(new_spreadsheet_id)
            worksheet = sh.get_worksheet(0)
            df5 = get_as_dataframe(worksheet)
            df5 = df5[[
                'id', 'VANID', 'Full Name', 'Address', 'Phone',
                'Support Scores'
            ]]
            df5['VANID'] = df5['VANID'].astype(str).replace(r'\.0', '', regex=True)
            print(df5.head())
            df6 = pd.concat([df5, df4])
            print(df6.head())
            df6 = df6.drop_duplicates(subset='VANID', keep="first")
            print(df6.head())
            df6 = df6.dropna()
            print(df6.head())
 def test_skiprows(self):
     df = get_as_dataframe(self.sheet, skiprows=range(1, 4))
     self.assertEqual(len(df), 6)
Example #27
# get client list

gspread_id = '1ckZh9TSaSp1Ucu2HKIA7TELPdd24oylFDXVEx6dtdSw'
gsheet_name = 'Client List'

scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
google_auth = r'C:\Users\JLee35\OneDrive - Dentsu Aegis Network\PROJECTS\Python\APIs\keys\iprospectseonorth\google_auth.json'
creds = ServiceAccountCredentials.from_json_keyfile_name(google_auth, scope)
client = gspread.authorize(creds)

sheet = client.open_by_key(gspread_id)
worksheet = sheet.worksheet(gsheet_name)

# load data to dataframe from gsheet
print('Retrieving client list from Google Sheets...')
client_list = get_as_dataframe(sheet.worksheet(gsheet_name), parse_dates=True)  # usecols=range(0, NUMBERCOLS)

# loading gsheets automatically loads 25 cols x 1k rows, so we trim it:
client_list = client_list.loc[:,~client_list.columns.str.contains('unnamed', case=False)] # remove columns containing 'unnamed' in label
client_list = client_list.dropna(axis=0, how='all') # remove rows that are empty
client_list['STAT ID'] = client_list['STAT ID'].astype(int) # ensure that STAT ID is not a float
client_list['STAT ID'] = client_list['STAT ID'].astype(str) # ensure that STAT ID is str (for some reason, 'int' on its own didn't work...)

print('Done!')
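# Hedged note (not in the original): the column comes back as float because
# get_as_dataframe pads the unused sheet grid with NaN, so casting through int
# strips the trailing '.0' before the str cast; on pandas >= 0.24 the nullable
# integer dtype does the same in one step, e.g.:
# client_list['STAT ID'] = client_list['STAT ID'].astype('Int64').astype(str)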

#%%

client = 'Jacamo'
client_list = client_list[client_list['Client Name'] == client].reset_index(drop=True)

 def test_squeeze(self):
     df = get_as_dataframe(self.sheet, usecols=[0], squeeze=True)
     self.assertTrue(isinstance(df, pd.Series))
     self.assertEqual(len(df), 9)
Example #29
][0]
mapping_files_650 = [
    file['id'] for file in file_list
    if file['title'].startswith('mapowanie BN-Oracle')
    if file['id'] != mapping_files_655
]

#%% BN descriptors for harvesting
# list of descriptors to take - the narrow list (from Karolina's selection)
deskryptory_do_filtrowania = [
    file['id'] for file in file_list
    if file['title'] == 'deskryptory_do_filtrowania'
][0]
deskryptory_do_filtrowania = gc.open_by_key(deskryptory_do_filtrowania)
deskryptory_do_filtrowania = get_as_dataframe(
    deskryptory_do_filtrowania.worksheet('deskryptory_do_filtrowania'),
    evaluate_formulas=True).dropna(how='all').dropna(how='all', axis=1)
BN_descriptors = deskryptory_do_filtrowania[
    deskryptory_do_filtrowania['deskryptor do filtrowania'] ==
    'tak']['deskryptory'].to_list()


def uproszczenie_nazw(x):
    try:
        if x.index('$') == 0:
            return x[2:]
        elif x.index('$') == 1:
            return x[4:]
    except ValueError:
        return x
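# Hedged usage sketch (not in the original): the helper strips a leading MARC
# subfield marker from a descriptor string, e.g.
#   uproszczenie_nazw('$aHistoria')  ->  'Historia'
# while values without a '$' are returned unchanged.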
Example #30
import numpy as np
import ccxt  # needed below for the FTX exchange client
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from gspread_dataframe import get_as_dataframe, set_with_dataframe

scope = [
    "https://spreadsheets.google.com/feeds",
    'https://www.googleapis.com/auth/spreadsheets',
    "https://www.googleapis.com/auth/drive.file",
    "https://www.googleapis.com/auth/drive"
]
creds = ServiceAccountCredentials.from_json_keyfile_name("API.json", scope)
gc = gspread.authorize(creds)

wsTradeLog = gc.open("Data").worksheet('TradeLog')  # open the TradeLog sheet
dfTradeLog = get_as_dataframe(wsTradeLog)

#### Pre-trade details --------------------------------------------------------
Balance = 'USD'
whatsymbol = "XRP-PERP"
###########  API settings -----------------------------------------------------
subaccount = '-------'  # FTX subaccount name, if any
exchange = ccxt.ftx({
    'apiKey': '-------------------',
    'secret': '--------------------',
    'enableRateLimit': True,
})
if subaccount == "":
    print("This is Main Account")
else:
    exchange.headers = {