def elo_gain(id):
    """Apply a win's Elo change to the stored rating for *id*.

    Looks up the player's row in the sheet-backed database, computes the
    rating delta for a win via ``elo_change``, and writes the clamped
    (non-negative) new rating back.

    Returns True when a matching entry was found and updated; otherwise
    returns None (falls through).
    """
    # NOTE: parameter name shadows the builtin `id`; kept unchanged for
    # interface compatibility with existing callers.
    gc = gspread.service_account(filename='client_secret.json')
    entries = database.get_rows([str(id)], gc)
    if entries:
        entry = entries[0]
        # entry[3] feeds the Elo formula; entry[4] is the current rating.
        elochange = elo_change(entry[3], True, False, False)
        newelo = int(entry[4]) + elochange
        # Bug fix: original referenced the undefined name `newlo` here,
        # raising NameError at runtime. Clamp the new rating at zero.
        entry[4] = str(newelo if newelo > 0 else 0)
        database.update_entries([entry], gc)
        return True
def update(ip, port):
    """Write the current ip/port pair into the first sheet of the tracker doc."""
    # Authenticate with the service-account credentials file.
    client = gspread.service_account(filename='scripts//credentials.json')
    # Open the spreadsheet by its fixed key and take the first worksheet.
    spreadsheet = client.open_by_key("1zf5ov6wUMEzfYPRukwDjKY7qafM6bkP37nl1U02De94")
    first_sheet = spreadsheet.sheet1
    # Store the ip in A2 and the port in B2.
    first_sheet.update("A2", ip)
    first_sheet.update("B2", port)
async def main():
    """Load configuration, build the Pokemon lookup dict, then run image generation."""
    await load_configs()
    client = gspread.service_account()
    spreadsheet = client.open(config.SPREAD)
    # Build the lookup mapping from the 'Pokemon' tab first.
    poke_ws = spreadsheet.worksheet('Pokemon')
    pkmn_dict = await get_poke_dict(poke_ws)
    # Then drive the image generator against the configured sheet.
    target_ws = spreadsheet.worksheet(config.SHEET)
    await asyncio.gather(img_gen_main(pkmn_dict, target_ws))
def upload_parsed_file_to_gdrive(self, sheets_id=None):
    """Persist the parsed dataframe locally, push it to Drive, and add a 'dates' tab."""
    # Write the parsed dataframe to CSV alongside the other working files.
    self.SGD.parsed_df.to_csv(f'{self.file_loc}/{self.parsed_fname}')
    # Upload the CSV into the target spreadsheet via the Drive helper.
    self.SGD.upload_parsed_file(self.file_loc, self.parsed_fname, sheets_id=sheets_id)
    # Open the same spreadsheet directly and append a minimal 'dates' worksheet.
    client = gspread.service_account(filename=self.SGD.credentials_file)
    spreadsheet = client.open_by_key(sheets_id)
    dates_ws = spreadsheet.add_worksheet(title="dates", rows='1', cols='1')
    date_df = pd.DataFrame({'Date': self.date_range.date})
    set_with_dataframe(dates_ws, date_df)
def __init__(self, client_secret: str, spreadsheet: str, worksheet: str = 'Sheet1'):
    """Authenticate and bind to a single worksheet, caching its header row."""
    self.gc = gspread.service_account(client_secret)
    self.spreadsheet = spreadsheet
    self.worksheet = worksheet
    book = self.gc.open(self.spreadsheet)
    self.sheet = book.worksheet(self.worksheet)
    # Sheets are 1-indexed, so row 1 is the header holding the parameter names.
    self.param_lst = self.sheet.row_values(1)
def __init__(self, sheet, worksheet=None):
    """Materialize the service-account JSON from the environment and connect."""
    # The credentials JSON lives in an env var; write it to disk because
    # gspread.service_account only accepts a filename.
    creds_json = os.getenv("SISCOIN_SERVICE_ACCOUNT")
    with open("siscoin-service-account.json", "w") as creds_file:
        creds_file.write(creds_json)
    self.client = gspread.service_account(filename='siscoin-service-account.json')
    self.sheet = self.client.open(sheet)
    # Optionally bind to a specific worksheet inside the spreadsheet.
    if worksheet:
        self.worksheet = self.sheet.worksheet(worksheet)
def programs_weekly():
    """Return every row (as lists of strings) of the weekly-programs sheet.

    The spreadsheet key is read from a local text file; the first worksheet
    is fetched in full.
    """
    gc = gspread.service_account(filename="credentials/bd-sheet.json")
    # Fix: the original leaked the file handle from a bare open().read();
    # use a context manager so the key file is always closed.
    with open("credentials/key_drive_google.txt") as key_file:
        key = key_file.read()
    sh = gc.open_by_key(key)
    worksheet = sh.sheet1
    # NOTE: renamed the local from `re` (which shadowed the regex module
    # name) to `rows`.
    rows = worksheet.get_all_values()
    return rows
def users_db_creation():
    """Mirror the 'UNITED USERS' worksheet into the local SQL users table."""
    db = SQL(db_path)
    # Pull every row of the environment-selected worksheet.
    spreadsheet = gspread.service_account('google.json').open('UNITED USERS')
    users = spreadsheet.worksheet(os.environ['folder']).get(
        'A1:Z50000', major_dimension='ROWS')
    # First row is the header; use it to create the table schema.
    header = users.pop(0)
    raw_columns = db.create_table('users', header, additional=True)
    users_ids, columns = db.upload('users', raw_columns, users)
    # User 0 is fetched before closing the connection.
    _zero_user = db.get_user(0)
    db.close()
    return _zero_user, ['id', *users_ids], columns
def update_google_sheet(file, **kwargs):
    """Replace the contents of the 'MySheet' spreadsheet with a local CSV file.

    The whole spreadsheet is overwritten via gspread's ``import_csv``, which
    replaces all existing worksheets with the CSV content.
    """
    scope = [
        "https://spreadsheets.google.com/feeds",
        "https://www.googleapis.com/auth/drive",
    ]
    gc = gspread.service_account(filename="./client_secret.json", scopes=scope)
    sheet = gc.open("MySheet")
    # Fix: close the CSV file deterministically instead of leaking the
    # handle from a bare open().read().
    with open(file, "r") as csv_file:
        content = csv_file.read()
    gc.import_csv(sheet.id, content)
def gspread_factory():
    """Return an authenticated gspread client for the current environment.

    In development the key is read from a local file; otherwise it is
    decoded from a base64-encoded JSON blob held in settings.
    """
    if Settings.DEV_ENVIRONMENT:
        client = gspread.service_account('gspread_key.json')
    else:
        decoded = base64.b64decode(Settings.GSPREAD_B64_KEY).decode()
        key_dict = json.loads(decoded)
        client = gspread.service_account_from_dict(key_dict)
    logger.info('Gspread have connected')
    return client
def __init__(
    self,
    worksheet_name: str,
    spreadsheet_name: str = "Нейрософия.Выгрузка",
):
    """Store target sheet names and authenticate the service account.

    The spreadsheet/worksheet handles are resolved lazily and start as None.
    """
    self.spreadsheet_name = spreadsheet_name
    self.worksheet_name = worksheet_name
    self.spreadsheet = None
    self.worksheet = None
    self.ga = g.service_account("../access/google.json")
def read_gsheet(sheet_name):
    """Open the worksheet registered under *sheet_name* in google_sheets.json.

    google_sheets.json maps sheet names to spreadsheet URLs; returns the
    first worksheet of the matching spreadsheet.
    """
    gc = gspread.service_account(filename="cred/credentials.json")
    # Idiom: parse the mapping file directly with json.load instead of
    # read() + json.loads.
    with open("google_sheets.json", 'r') as google_sheets:
        url = json.load(google_sheets)[sheet_name]
    sh = gc.open_by_url(url)
    worksheet = sh.get_worksheet(0)
    return worksheet
def main():
    # Entry point (Python 2): wire up GPIO, compute today's row from a
    # Google Sheet schedule, then connect to a Wii Balance Board and stream
    # weight events into the sheet.
    #Disable warnings (optional)
    GPIO.setwarnings(False)
    #Select GPIO mode
    GPIO.setmode(GPIO.BCM)
    #Set buzzer - pin 23 as output
    GPIO.setup(BUZZER_GPIO, GPIO.OUT)
    GPIO.setup(SYNC_GPIO, GPIO.OUT)
    buzzer_off()
    gc = gspread.service_account("/home/pi/wii-alarm-clock/client_secret.json")
    sh = gc.open_by_key("<sheet key>")
    ws = sh.get_worksheet(0)
    # Get row required
    # B1 holds the sheet's start date (ISO format); the row for "today" is
    # offset by the number of whole days elapsed since then.
    startdate_raw = sh.sheet1.get('B1')[0][0]
    startdate = datetime.datetime.strptime(startdate_raw, "%Y-%m-%dT%H:%M:%S")
    days = (datetime.datetime.now() - startdate).days
    # 4 here is the size of the headers on the sheet
    yvalue = 4 + days
    start_sync()
    processor = EventProcessor(ws, yvalue)
    board = Wiiboard(processor)
    # Board address comes from argv when given, otherwise discovery.
    if len(sys.argv) == 1:
        print "Discovering board..."
        address = board.discover()
    else:
        address = sys.argv[1]
    try:
        # Disconnect already-connected devices.
        # This is basically Linux black magic just to get the thing to work.
        subprocess.check_output(["bluez-test-input", "disconnect", address], stderr=subprocess.STDOUT)
        subprocess.check_output(["bluez-test-input", "disconnect", address], stderr=subprocess.STDOUT)
    except:
        # Best-effort: ignore failures when nothing was connected.
        pass
    print "Trying to connect..."
    board.connect(address)
    # The wii board must be in sync mode at this time
    board.wait(200)
    # Flash the LED so we know we can step on.
    board.setLight(False)
    board.wait(500)
    board.setLight(True)
    buzzer_pulse()
    board.wait(5000)
    board.receive()
def update_master_sheet_results(date, gpd):
    """
    Both params `date` and `gpd` should be strings.
    """
    # Import gspread lazily; on AttributeError (an old gspread without
    # service_account) upgrade the package in place and retry once.
    try:
        import gspread
        gc = gspread.service_account(filename=CREDS_FILE)
    except AttributeError:
        os.system('pip3 install --upgrade gspread')
        import gspread
        gc = gspread.service_account(filename=CREDS_FILE)
    sh = gc.open(MASTER_SPREADSHEET)
    ws = sh.worksheet(MASTER_SHEET_NAME)
    # ws.append_row((date, gpd), value_input_option='USER_ENTERED')
    # Insert just above the sheet's last row; if the cell above the end is
    # blank, step back one row so no gap is left.
    insert_row = ws.row_count
    if ws.acell('A{}'.format(insert_row-1)).value == '':
        insert_row -= 1
    if NO_WEEKEND_PRODUCTION:
        # split_date = date.split('/')
        try:
            # month = int(split_date[0])
            # day = int(split_date[1])
            # year = int(split_date[2])
            # Dates arrive as MM/DD/YYYY; weekend days get a fixed marker
            # instead of a production number.
            month, day, year = [int(d) for d in date.split('/')]
            day_of_week = calendar.weekday(year, month, day)
            is_weekend = 5 <= day_of_week <= 6
            if is_weekend:
                gpd = 'no production'
        except ValueError:
            # Malformed date string: leave gpd untouched.
            pass
    ws.insert_row([date, gpd], index=insert_row, value_input_option='USER_ENTERED')
    # Move the column-C marker down one row to follow the newest entry.
    ws.update_acell('C{}'.format(insert_row), ws.acell('C{}'.format(insert_row-1)).value)
    ws.update_acell('C{}'.format(insert_row-1), '')
    # Refresh the running-total formula in column D.
    ws.update_acell('D{}'.format(insert_row), '=SUM(B2:B{})'.format(insert_row))
    # Center-align and size the newly inserted row.
    new_row_range = "A{0}:B{0}".format(insert_row)
    ws.format(new_row_range, {
        "horizontalAlignment": "CENTER",
        "textFormat": {
            "fontSize": 11,
        },
    }
    )
    print("[update_master_sheet_results] {} updated for {}".format(MASTER_SPREADSHEET, date))
def export_gsheet():
    """Push the downloaded Qualtrics CSV into the first worksheet of the target sheet."""
    client = gspread.service_account(filename='./client_secret.json')
    book = client.open_by_key(SPREADSHEET_ID)
    target = book.get_worksheet(0)
    frame = pd.read_csv("MyQualtricsDownload/download.csv")
    # Blank out NaNs so empty cells upload as empty strings.
    frame.fillna('', inplace=True)
    print(frame.values.tolist())
    print(frame.columns.values.tolist())
    # Header row first, then every data row.
    payload = [frame.columns.values.tolist()] + frame.values.tolist()
    target.update(payload)
def find_worksheet(google_creds_file, spreadsheet_url, worksheet_id):
    """ Authenticate to Google and return the matching worksheet. """
    client = gspread.service_account(filename=google_creds_file)
    all_worksheets = client.open_by_url(spreadsheet_url).worksheets()
    # Scan for worksheets whose numeric ID matches.
    matching = [w for w in all_worksheets if w.id == worksheet_id]
    if not matching:
        raise KnownError(f"Cannot find a worksheet with ID {worksheet_id}")
    return matching[0]
def get_data():
    """Read the 'test_data' sheet and return its rows as a list of dicts."""
    # Service-account setup — see https://gspread.readthedocs.io/en/latest/index.html
    client = gspread.service_account()
    book = client.open("test_data")  # spreadsheet name
    rows = book.sheet1.get_all_values()  # all rows and columns
    # Split the header row off from the data rows.
    header = rows[0]
    body = rows[1:]
    frame = pd.DataFrame(body, columns=header)
    # Round-trip through JSON to normalize values into plain Python objects.
    parsed = json.loads(frame.to_json(orient="records"))
    return parsed
def pull_data_frame_from_google():
    """Download the responses worksheet, index it by timestamp, and cache to CSV."""
    client = gspread.service_account()
    book = client.open_by_key(GSKEY)
    responses = book.get_worksheet(0)
    frame = pd.DataFrame(responses.get_all_records())
    # Parse the "Timestamp" column (DD/MM/YYYY HH:MM:SS) and promote it to
    # the index before writing out.
    col = "Timestamp"
    frame[col] = pd.to_datetime(frame[col], format="%d/%m/%Y %H:%M:%S")
    frame.set_index(col, inplace=True)
    frame.to_csv(PATH_TO_DATA)
def EXAMPLE(year):
    """Open the worksheet for the given school year (9-12).

    Each year uses its own service-account credentials file and a
    correspondingly named worksheet tab. Years outside 9-12 are a no-op,
    matching the original branch structure (which also returned nothing).
    """
    # Table-driven replacement for four copy-pasted if/elif branches.
    year_config = {
        9: (service_account_9_filepath, "YEAR9"),
        10: (service_account_10_filepath, "YEAR10"),
        11: (service_account_11_filepath, "YEAR11"),
        12: (service_account_12_filepath, "YEAR12"),
    }
    if year in year_config:
        creds_path, tab_name = year_config[year]
        gc = gspread.service_account(filename=creds_path)
        sh = gc.open_by_key(spreadsheet_key)
        worksheet = sh.worksheet(tab_name)
def getGspSpreadsheetObj(spreadsheetName):
    """Return the gspread spreadsheet object for *spreadsheetName*.

    Credentials are resolved relative to the 'repos' folder inside the
    private-data tree.
    """
    pathToRepos = _myPyFunc.getPathUpFolderTree(pathToThisPythonFile, 'repos')
    credentialParts = [
        'privateData', 'python', 'googleCredentials',
        'usingServiceAccount', 'jsonWithAPIKey.json',
    ]
    credentialsPath = _myPyFunc.addToPath(pathToRepos, credentialParts)
    client = gspread.service_account(filename=credentialsPath)
    return client.open(spreadsheetName)
def brno_part_budget():
    # Pull Brno participatory-budget projects from the city GIS API, enrich
    # them with vote counts scraped from the public results pages, clean the
    # district names, and publish the merged table to a Google Sheet.
    # Import data from API
    response = rq.get(
        'https://gis.brno.cz/ags1/rest/services/Hosted/ProjektyPARO/FeatureServer/0/query?where=1%3D1&outFields=*&outSR=4326&f=json'
    )
    data = response.json()
    proj_data = [i['attributes'] for i in data['features']]
    api_data = pd.DataFrame(proj_data)
    # Scrape web page (property IDs and votes data)
    pids_data = []
    votes_data = []
    for year in ['2017', '2018', '2019', '2020']:
        page_res = rq.get('https://damenavas.brno.cz/vysledky-hlasovani/?y=' + year)
        soup = bs4.BeautifulSoup(page_res.content, 'html.parser')
        # Project IDs are embedded in each project link's "id=" query param.
        for projects in soup.find_all(
                'div', attrs={re.compile('col-xs-12 vap-project-name')}):
            pids = int(re.compile(r'id=(\d{1,})').findall(str(projects.a))[0])
            pids_data.append(pids)
        # Vote balances use spaces as thousands separators; strip them.
        for votes in soup.find_all(
                'span', attrs={'class': 'vap-project-balance-number'}):
            votes = int(votes.text.replace(' ', ''))
            votes_data.append(votes)
    # NOTE(review): assumes IDs and vote counts appear in the same order on
    # the page so zip pairs them correctly — verify against the live markup.
    wp_data = pd.DataFrame(list(zip(pids_data, votes_data)),
                           columns=['properties_id', 'votes'])
    # Join data together and clean
    full_data = api_data.join(wp_data.set_index('properties_id'),
                              on='properties_id')
    full_data = full_data.fillna('').sort_values('properties_id')
    # Clean district names and add column with their shorted version
    full_data['properties_district'] = full_data['properties_district'].apply(
        lambda x: 'Brno' if x in ('Brno', ' - ')
        else x.replace(' - ', '-').replace('A', 'a'))
    full_data.insert(
        7, 'properties_district_short',
        full_data['properties_district'].apply(
            lambda x: 'Brno' if x == 'Brno' else x.split('-')[1]))
    # Push to GSheet
    gc = gs.service_account(
        filename=os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
    sh = gc.open_by_key(os.environ['GOOGLE_SPREADSHEET_ID'])
    ws = sh.get_worksheet(0)
    ws.update([full_data.columns.values.tolist()] + full_data.values.tolist())
    # Return message if successful
    return print('Data successfully updated.')
def write_lineup_data(self):
    """Fetch current lineups, sort them, and overwrite the 'Data' tab of the sheet."""
    lineups = self.get_lineup_data()
    # Order by team, then starters first, then by player chance descending.
    lineups = lineups.sort_values(
        by=['TEAM', 'START', 'PLAYERCHANCE'],
        ascending=[True, False, False])
    print('writing currnet lineup data to excel...')
    client = gspread.service_account()
    book = client.open('Current Lineups')
    # Clear the existing data region before re-writing it.
    book.values_clear("Data!A1:Z9999")
    data_ws = book.worksheet("Data")
    data_ws.update([lineups.columns.values.tolist()] + lineups.values.tolist())
    book.share('*****@*****.**', perm_type='user', role='writer')
def authenticate(self) -> None:
    """Create the gspread client via service-account or end-user OAuth flow."""
    if not self.is_service_account:
        logger.debug("Using END_USER auth")
        # ! This override should be temporary ideally we'll have a more long term solution in:
        # ! https://github.com/burnash/gspread/issues/826
        self._override_gspread_default_creds()
        self.google_client = gspread.oauth()
    else:
        logger.debug("Using SERVICE_ACCOUNT auth")
        self.google_client = gspread.service_account(self.creds_path)
    self.is_authenticated = True
def record_grid_edits(token, layer, gsheet, sheet_name):
    # Append newly edited survey-grid features from an ArcGIS layer to a
    # Google Sheet, skipping feature edits that were already recorded.
    # Returns the count of appended rows, or an (error, status) tuple.
    gsheets_vals = []
    layer_features = query_layer(token, layer, "1=1")
    if not layer_features['features']:
        return ({404: "No survey features found"}, 404)
    else:
        layer_features = layer_features['features']
    try:
        gc = gspread.service_account(filename = './config/tnc205-auto-upload-a97fe4207469.json')
        workbook = gc.open(gsheet)
        sh = workbook.worksheet(sheet_name)
    except Exception as e:
        return ({500: e}, 500)
    # Next free row (column-A length + 1) and the header row for lookups.
    num_rows = len(sh.col_values(1))+1
    headers = sh.row_values(1)
    oid_list = sh.col_values(headers.index('Object ID')+1)
    edit_date_list = sh.col_values(headers.index('EpochEditDate')+1)
    oid_dict = {}
    if not (len(oid_list) == len(edit_date_list)):
        return "Error: Object ID and EditDate lists are not the same length"
    # Build OBJECTID -> list of already-recorded edit timestamps, with the
    # sub-second remainder of the epoch-ms value zeroed for comparison.
    for i in range(1,len(oid_list)):
        if oid_list[i] in oid_dict:
            oid_dict[oid_list[i]].append(int(edit_date_list[i])-int(edit_date_list[i])%1000)
        else:
            oid_dict[oid_list[i]] = [int(edit_date_list[i])-int(edit_date_list[i])%1000]
    # Numeric status codes from the layer mapped to human-readable labels.
    status = {
        -2: "Mowed",
        -1: "High Priority",
        0: "Untreated",
        1: "Partially Treated",
        2: "Fully Treated",
        3: "Needs Future Retreatment"
    }
    for feature in layer_features:
        attr = feature['attributes']
        geo = feature['geometry']
        # Skip edits whose truncated timestamp is already on the sheet.
        if (str(attr['OBJECTID']) in oid_dict):
            if (attr['EditDate']-attr['EditDate']%1000) in oid_dict[str(attr['OBJECTID'])]:
                continue
        vals = []
        vals.append(attr['OBJECTID'])
        vals.append(attr['TILE_ID'])
        vals.append(status[attr['Status']])
        # EditDate is epoch ms; /1000 to seconds, minus 7h offset.
        # NOTE(review): trailing comma makes this line a throwaway tuple —
        # harmless, but looks unintentional.
        vals.append(time.ctime(attr['EditDate']/1000 - (7*3600))),
        vals.append(attr['Editor'])
        vals.append(attr['AREA_GEO'])
        vals.append(attr['EditDate'])
        vals.append(str(geo))
        gsheets_vals.append(vals)
    if gsheets_vals:
        sh.update(f"A{num_rows}:AA{num_rows+len(gsheets_vals)}", gsheets_vals)
    return (len(gsheets_vals))
def get_data(self):
    """Load the whole Promised_Neverland_Data sheet into a DataFrame."""
    # Authenticate with the stored credentials path.
    client = gspread.service_account(self.cred)
    # Open the spreadsheet and read every cell of its first worksheet.
    sheet = client.open("Promised_Neverland_Data").sheet1
    rows = sheet.get_all_values()
    # First row holds the column headers.
    header = rows.pop(0)
    return pd.DataFrame(rows, columns=header)
def __init__(self):
    """Connect to the sheets backend and load the cached 'following' snapshot."""
    self.gc = gspread.service_account(filename='sheetsData.json')
    # Worksheet handles are resolved lazily elsewhere.
    self.sheetFollowing = None
    self.sheetFollowers = None
    self.sheetDict = None
    self.toFollowSheet = None
    self.date = datetime.date.today().isoformat()
    self.unfollow = {}
    # Previously saved following data, loaded from the local JSON snapshot.
    with open('curiawesityFollowing.json') as snapshot:
        self.d = json.load(snapshot)
def custom_form_view(request, spreadsheet_id=None):
    # Render a custom form for a stored Spreadsheet and, on POST, append the
    # submitted values (optionally with a submission timestamp) as a new row
    # of the linked Google Sheet, writing the header row on first use.
    spreadsheet = Spreadsheet.objects.get(id=spreadsheet_id)
    if request.method == "GET":
        form = CustomForm(context=request, spreadsheet=spreadsheet)
    else:
        # Authenticate with the owning user's stored credential file.
        cred = Credential.objects.get(user=spreadsheet.user)
        gs_client = gspread.service_account(filename=cred.file.path)
        google_spreadsheet = gs_client.open_by_url(spreadsheet.url)
        worksheet = google_spreadsheet.sheet1
        row = next_available_row(worksheet)
        try:
            # if the first row set titles of columns
            if row == 1:
                # Skip the CSRF token; bump the index so columns start at 1.
                for index, field in enumerate(request.POST):
                    if index == 0:
                        index += 1
                    if not field == "csrfmiddlewaretoken":
                        worksheet.update_cell(row, index, field)
                if spreadsheet.track_sub_times:
                    index += 1
                    worksheet.update_cell(row, index, "Time Submitted (DD/MM/YYYY HH:MM)")
                row += 1
            # Write the submitted values on the next free row.
            for index, field in enumerate(request.POST):
                if index == 0:
                    index += 1
                if not field == "csrfmiddlewaretoken":
                    worksheet.update_cell(row, index, request.POST[field])
            if spreadsheet.track_sub_times:
                index += 1
                worksheet.update_cell(
                    row, index,
                    timezone.now().strftime("%d/%m/%Y %H:%M"))
        except:
            # NOTE(review): bare except hides all Sheets API errors, not just
            # header problems — consider narrowing.
            return HttpResponse("Invalid header found.")
        form = CustomForm(context=request, spreadsheet=spreadsheet)
        return render(
            request,
            "form.html",
            {
                "form": form,
                "title": spreadsheet.title,
                "success_msg": "Success! Thanks for submitting!",
            },
        )
    return render(request, "form.html", {
        "form": form,
        "title": spreadsheet.title
    })
def load_worksheet(ss_name='tcc', path_json_credent='credentials.json',
                   ws_name: Optional[str] = None) -> Worksheet:
    """Open spreadsheet *ss_name* and return the named worksheet (or the first)."""
    scopes = [
        "https://spreadsheets.google.com/feeds",
        'https://www.googleapis.com/auth/spreadsheets',
        "https://www.googleapis.com/auth/drive.file",
        "https://www.googleapis.com/auth/drive"
    ]
    client = gspread.service_account(path_json_credent, scopes)
    book = client.open(ss_name)
    # Fall back to the first sheet when no worksheet name was given.
    if ws_name:
        return book.worksheet(ws_name)
    return book.sheet1
def private_data_init():
    """Read bot keys and the Google API key from the private info file and
    pre-open the runner leaderboard worksheet."""
    global testing_bot_key
    global bot_key
    # The file holds one secret per line: test key, live key, Google API key.
    with open(private_info_file, "r") as secrets:
        testing_bot_key = secrets.readline().strip("\n")
        bot_key = secrets.readline().strip("\n")
        Shared.google_api_key = secrets.readline().strip("\n")
    Shared.google_sheet_gid_url = (
        Shared.google_sheets_url_base + Shared.google_sheet_id
        + "/values:batchGet?" + "key=" + Shared.google_api_key)
    # Cache a handle to the leaderboard worksheet for later reads.
    client = gspread.service_account(filename='credentials.json')
    Shared.gc = client.open_by_key(Shared.google_sheet_id).worksheet(
        Shared.runner_leaderboard_name)
def inseason_standings_sos():
    """Refresh the 'Standings' tab of the BB 2021 InSeason sheet from Postgres."""
    client = gspread.service_account(filename='./bb-2021-2b810d2e3d25.json')
    book = client.open("BB 2021 InSeason")
    bbdb = postgres.connect_to_bbdb()
    # Pull the current standings/strength-of-schedule tracking table.
    standings = pd.read_sql_query(
        'SELECT * FROM tracking.standings_sos', con=bbdb, parse_dates=['date'])
    sheettitle = "Standings"
    # Clear the tab before writing the fresh frame.
    book.values_clear(sheettitle + "!A:Z")
    gsdf.set_with_dataframe(book.worksheet(sheettitle), standings)