def get_gspread_connection(
    credentials: service_account.Credentials,
    max_retries=7,
    backoff_factor=0.5,
    status_forcelist=frozenset([403, 413, 429, 503]),
) -> Client:
    scoped_credentials = credentials.with_scopes([
        "https://spreadsheets.google.com/feeds",
        "https://www.googleapis.com/auth/drive",
    ])
    retry = Retry(
        total=max_retries,
        read=max_retries,
        connect=max_retries,
        status=max_retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session = Session()
    session.mount("https://", adapter)
    gc = Client(auth=scoped_credentials, session=session)
    gc.session = AuthorizedSession(scoped_credentials)
    return gc
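# A minimal usage sketch for get_gspread_connection() above, assuming the same
# imports the helper relies on and a hypothetical service-account key at
# "service-account.json"; "my-spreadsheet" is a placeholder title shared with
# that service account.
from google.oauth2 import service_account

creds = service_account.Credentials.from_service_account_file("service-account.json")
gc = get_gspread_connection(creds, max_retries=3)
print(gc.open("my-spreadsheet").sheet1.get_all_records())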
def get_all_weight_data():
    scopes = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive',
    ]
    service_account_credentials = get_service_account_credentials(scopes)
    gc = Client(service_account_credentials)
    weight_track_spreadsheet = gc.open("weight-track").sheet1
    return weight_track_spreadsheet.get_all_records()
def get_or_create(gc: gspread.Client, name, url=None, key=None) -> gspread.Spreadsheet:
    if url is not None:
        return gc.open_by_url(url)
    elif key is not None:
        return gc.open_by_key(key)
    else:
        try:
            return gc.open(name)
        except gspread.SpreadsheetNotFound:
            logger.info("spreadsheet %r is not found, creating it", name)
            return gc.create(name)
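# A short usage sketch for get_or_create() above, assuming an authorized
# gspread client; the key file path, spreadsheet name, and key are placeholders.
import gspread

gc = gspread.service_account(filename="service-account.json")  # hypothetical key path
report = get_or_create(gc, "daily-report")                      # opens by name, creates if missing
by_key = get_or_create(gc, "ignored", key="PLACEHOLDER_KEY")    # key takes precedence over name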
def configure_client(credentials_path):
    credentials = service_account.Credentials.from_service_account_file(
        credentials_path)
    scoped_credentials = credentials.with_scopes(SCOPES)
    session = AuthorizedSession(scoped_credentials)
    gsheet_client = Client(scoped_credentials, session)
    return gsheet_client
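# A minimal sketch of calling configure_client() above, assuming SCOPES is
# defined at module level and "credentials.json" (hypothetical path) holds a
# service-account key; the spreadsheet key is a placeholder.
SCOPES = [
    'https://www.googleapis.com/auth/spreadsheets',
    'https://www.googleapis.com/auth/drive',
]
client = configure_client("credentials.json")
sheet = client.open_by_key("PLACEHOLDER_SPREADSHEET_ID")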
def get_log_worksheet(phone: str, g_client: Client) -> Worksheet:
    '''Return the log worksheet for the given phone number, creating it if it
    does not exist. If several matching spreadsheets exist, they are deleted
    and a fresh one is created.'''
    file_list = g_client.list_spreadsheet_files()
    file_list = list(filter(lambda file: phone == file['name'], file_list))
    worksheet = None
    if len(file_list) == 0:
        worksheet = create_log(phone, g_client)
    elif len(file_list) == 1:
        f_name = file_list[0]['name']
        spreadsheet = g_client.open(f_name)
        worksheet = spreadsheet.sheet1
    else:
        for f in file_list:
            g_client.del_spreadsheet(f['id'])
        worksheet = create_log(phone, g_client)
    return worksheet
def create_log(phone: str, g_client: Client) -> Worksheet:
    '''Create a new log spreadsheet for the given phone number and return its
    first worksheet with the header row appended.'''
    data_f = DATA_FORMATS['logs']
    spreadsheet = g_client.create(phone, LOGS_FOLDER_ID)
    worksheet = spreadsheet.get_worksheet(0)
    worksheet.append_row(data_f)
    return worksheet
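# A brief usage sketch for the two helpers above, assuming DATA_FORMATS and
# LOGS_FOLDER_ID are defined in the module and g_client is an authorized
# gspread Client; the phone number and row contents are placeholders, not a
# format prescribed by the source.
log_ws = get_log_worksheet("+15550000000", g_client)
log_ws.append_row(["2024-01-01 00:00", "incoming", "hello"])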
def main_handler(event, context):
    scopes = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive'
    ]
    session = create_assertion_session(CREDENTIALS_PATH, scopes)
    gc = Client(None, session)
    spread_sheet = gc.open_by_key(REMINDER_SHEET_ID)
    worksheet = spread_sheet.sheet1
    list_of_lists = worksheet.get_all_values()
    header = list_of_lists[0]
    rows = []
    for row in list_of_lists[1:]:
        data = {key: value for key, value in zip(header, row)}
        rows.append(data)

    undone_tasks = []
    for row in rows:
        if not row['達成']:  # '達成' ("done") column: empty means the task is still open
            task = create_task(row)
            undone_tasks.append(task)

    today = date.today()
    needs_notify_tasks = []
    for task in undone_tasks:
        due_date = task.due_date
        if task.is_important:
            if due_date - today <= timedelta(days=7):
                needs_notify_tasks.append(task)
        else:
            if due_date - today <= timedelta(days=3):
                needs_notify_tasks.append(task)

    # Slack greeting: "Good morning, dear planner"
    message = f'<@{USER_MYSELF}> 仕掛け人さま、おはようございます\n'
    sorted_tasks = sorted(needs_notify_tasks, key=lambda d: d.due_date)
    if not sorted_tasks:
        message += '今日も一日、張り切ってまいりましょう'  # "Let's give it our all again today"
    else:
        message += '本日のやることをお知らせしますね\n\n'  # "Here are today's to-dos"
        for task in sorted_tasks:
            message += (f'[{task.category}]{task.description}'
                        f' ({task.due_date:%m/%d}まで)\n')  # 'まで' = "due by"
    post_slack('general', message)
def get_new_weight_data():
    scopes = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive',
    ]
    number_of_rows = get_db_rows_count()
    service_account_credentials = get_service_account_credentials(scopes)
    gc = Client(service_account_credentials)
    weight_track_spreadsheet = gc.open("weight-track").sheet1
    row_count = number_of_rows + 2
    new_rows = []
    while True:
        logging.info(f"Getting row {row_count}")
        new_row = weight_track_spreadsheet.row_values(row_count)
        if len(new_row) == 0:
            logging.info(f"Row {row_count} not set")
            return new_rows
        else:
            new_rows.append(tuple(new_row))
            row_count += 1
def open_spreadsheet_from_args(google_client: gspread.Client, args):
    """
    Attempt to open the Google Sheets spreadsheet specified by the given
    command line arguments.
    """
    if args.spreadsheet_id:
        logger.info("Opening spreadsheet by ID '{}'".format(
            args.spreadsheet_id))
        return google_client.open_by_key(args.spreadsheet_id)
    elif args.spreadsheet_url:
        logger.info("Opening spreadsheet by URL '{}'".format(
            args.spreadsheet_url))
        return google_client.open_by_url(args.spreadsheet_url)
    elif args.spreadsheet_name:
        logger.info("Opening spreadsheet by name '{}'".format(
            args.spreadsheet_name))
        return google_client.open(args.spreadsheet_name)
    else:
        raise ValueError(
            "Invalid command line arguments - no spreadsheet identifier was provided"
        )
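# A small sketch of the argparse setup this helper expects; the argument names
# are inferred from the attribute accesses above, everything else (key path,
# entry point) is an assumption.
import argparse
import gspread

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--spreadsheet-id")
group.add_argument("--spreadsheet-url")
group.add_argument("--spreadsheet-name")
args = parser.parse_args()

gc = gspread.service_account(filename="service-account.json")  # hypothetical key path
spreadsheet = open_spreadsheet_from_args(gc, args)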
def get_session(url, worksheet):
    """
    Gets an authenticated session and sheet.

    :param url: a sheet URL
    :param worksheet: a worksheet name
    """
    scopes = [
        'https://www.googleapis.com/auth/drive',
        'https://spreadsheets.google.com/feeds'
    ]
    g_credentials = Credentials.from_service_account_file('svc.json', scopes=scopes)
    g = Client(auth=g_credentials)
    g.session = AuthorizedSession(g_credentials)
    try:
        sheet = g.open_by_url(url)
        worksheet = sheet.worksheet(worksheet)
    except APIError:
        raise Exception('Failed to fetch sheet.')
    return sheet, worksheet
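# A short usage sketch for get_session() above; the URL and tab name are
# placeholders, and 'svc.json' must hold the service-account key the function
# hard-codes.
sheet, ws = get_session(
    "https://docs.google.com/spreadsheets/d/PLACEHOLDER_ID",
    "Sheet1",
)
print(ws.get_all_values()[:5])  # first few rows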
def get_dataset(dataset_file: str, g_client: Client) -> dict:
    '''Return the static dict of datasets as pandas.DataFrame objects, one per
    worksheet of the given spreadsheet.'''
    data_f = DATA_FORMATS[dataset_file]
    spreadsheet = g_client.open(dataset_file)
    worksheets = [wk.title for wk in spreadsheet.worksheets()]
    master_dict = dict()
    for wk_name in worksheets:
        wk = spreadsheet.worksheet(wk_name)
        master_dict.update({wk_name: {col: list() for col in data_f}})
        for rec in wk.get_all_records():
            for col in rec.keys():
                master_dict[wk_name][col].append(rec[col])
        del wk
        master_dict[wk_name] = DataFrame(master_dict[wk_name])
    del spreadsheet
    return master_dict
def fetch_sheet(
        connection: gspread.Client,
        document: str,
        tab: str
) -> list:
    """Fetch and clean records from a Google Sheet.

    - document -- the actual Google Doc name
    - tab -- the tab name (or zero-based index) on the Doc
    """
    document_name = document
    document_tab = tab
    if not document_name:
        return []
    workbook = connection.open(document_name)
    sheet = None
    try:
        # A numeric tab is treated as a zero-based worksheet index.
        document_tab = int(document_tab)
        sheet = workbook.get_worksheet(document_tab)
    except ValueError:
        pass
    if sheet is None:
        sheet = workbook.worksheet(document_tab)
    records = sheet.get_all_records()
    clean_records = []
    for record in records:
        clean = _scrub(record)
        if 'account' not in clean or not clean['account']:
            continue
        if 'amount' not in clean or not clean['amount']:
            continue
        clean_records.append(clean)
    return clean_records
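# A quick usage sketch for fetch_sheet() above, assuming an authorized gspread
# client `gc`, a spreadsheet named "Ledger" with a tab "2024" (both
# placeholders), and the module-level _scrub() helper the function relies on.
rows = fetch_sheet(gc, "Ledger", "2024")
for r in rows[:3]:
    print(r["account"], r["amount"])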
def google_sheet_auth(cred_file):
    # Provide the scopes; these should be the only scopes required for
    # reading/writing data frames. Consult this page if issues arise:
    # https://developers.google.com/identity/protocols/oauth2/scopes
    scope = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/spreadsheets'
    ]
    # Get the service account credentials and apply the appropriate scopes.
    creds = service_account.Credentials.from_service_account_file(
        cred_file).with_scopes(scope)
    # Create the session for the Google API calls; without it the call will be
    # rejected and an error will be thrown.
    authed_session = AuthorizedSession(creds)
    # Set up the communication between the client and the Google API.
    client = Client(creds, authed_session)
    return client
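# A brief sketch of using the client returned by google_sheet_auth() to push a
# pandas DataFrame into a sheet, mirroring the values_update pattern used
# elsewhere in this collection; the key file, spreadsheet name, and tab name
# are placeholders.
import pandas as pd

client = google_sheet_auth("service-account.json")
sh = client.open("my-dataframe-sheet")
df = pd.DataFrame({"name": ["a", "b"], "value": [1, 2]})
sh.values_update(
    "Sheet1",
    params={"valueInputOption": "USER_ENTERED"},
    body={"values": [df.columns.tolist()] + df.values.tolist()},
)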
def __init__(self, file):
    session = self.create_assertion_session(file, MyClient.scopes)
    Client.__init__(self, None, session)
        audience=token_url,
        claims=claims,
        subject=subject,
        key=key,
        header=header,
    )


# authenticates session within scopes
scopes = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive',
]
session = create_assertion_session('client_secret.json', scopes)

# gspread simplifies our interactions with the Google Docs/Sheets API
from gspread import Client

index = 2
gc = Client(None, session)
sheet = gc.open('NNDI Tweet Log').sheet1

# Twitter API key goes here:
auth = tweepy.OAuthHandler('your-key-here', 'your-key-here')
auth.set_access_token('your-key-here', 'your-key-here')
api = tweepy.API(auth)


# override tweepy.StreamListener to add logic to on_status
class MyStreamListener(tweepy.StreamListener):

    def on_status(self, status):
        # only interacts with users we follow
        follows = api.show_friendship("your-twitter-id", "your-twitter-username",
                                      status.user.id, status.user.screen_name)
        # if a non-quote-tweet makes it past our filter, the program will crash
        # while we're writing that element to our row
    print get.twitter('statesman')
    print get.insta('statesman')
    print get.youtube('AmericanStatesman')
    print get.googleplus('Statesman')
    print get.tumblr('austinstatesman')
"""

# Get Google Docs info from the config
cfg = ConfigParser()
cfg.readfp(open('config.cfg'))
user = cfg.get('google_docs', 'user_name')
pw = cfg.get('google_docs', 'password')
sheet_title = cfg.get('google_docs', 'spreadsheet_title')

# Login to Google
c = Client(auth=(user, pw))
c.login()

# Get the spreadsheet
s = c.open(sheet_title)


def get_latest_update(network):
    """
    @network: str
    """
    w = s.worksheet(network)
    cell_val = w.acell('A' + str(w.row_count)).value
    return str(parse(cell_val).date())
class Toolbox():

    def __init__(self):
        self.GSHEET_SCOPES = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive',
        ]
        # Get the credentials.json path from the OS environment
        self.GSHEET_CREDS = os.environ['credentials.json']

    def gsheet_upload_csv(self, sh, sheet_name, sheet_id, content):
        self.SESSION = self.create_assertion_session_local()
        self.GCLI = Client(None, self.SESSION)
        self.WRKS = self.GCLI.open(sheet_name).sheet1
        # import_csv deletes all the sheets and rewrites the data in the first sheet
        self.GCLI.import_csv(sheet_id, content)

    def writeToGspreadMulti(self, sheet_title, listXLTabs, gsheet_id=None, sharelist=[]):
        """Create or update the Google spreadsheet."""
        if gsheet_id is not None:
            self.SESSION = self.create_assertion_session_local()
            self.GCLI = Client(None, self.SESSION)
            sh = self.GCLI.open_by_key(gsheet_id)
            old_worksheets = [str(sheet).split("'")[1] for sheet in sh.worksheets()]
            for sheet in listXLTabs:
                df = pd.DataFrame(sheet['report_result'])
                cell_list = df.values.tolist()
                if sheet['report_name'] in old_worksheets:
                    worksheet = sh.worksheet(sheet['report_name'])
                    sh.values_update(
                        sheet['report_name'],
                        params={'valueInputOption': 'USER_ENTERED'},
                        body={'values': cell_list}
                    )
                else:
                    sh.add_worksheet(title=sheet['report_name'], rows="100", cols="20")
                    self.gsheet_upload_and_write_csv(sh, sheet['report_name'], sheet['report_result'])
        else:
            self.SESSION = self.create_assertion_session_local()
            self.GCLI = Client(None, self.SESSION)
            sh = self.GCLI.create(sheet_title)
            for email in sharelist:
                print(email)
                # Give write permission to the email ID.
                r = sh.share(email, perm_type='user', role='writer')
                print(r)
            for sheet in listXLTabs:
                sh.add_worksheet(title=sheet['report_name'], rows="100", cols="20")
                self.gsheet_upload_and_write_csv(sh, sheet['report_name'], sheet['report_result'])
            worksheet = sh.worksheet("Sheet1")
            sh.del_worksheet(worksheet)
        # Spreadsheet URL
        url = "https://docs.google.com/spreadsheets/d/" + str(sh.id)
        return url

    def gsheet_upload_and_write_csv(self, sh, sheetname, datafile):
        """Upload the CSV data as a worksheet in the Google spreadsheet."""
        df = pd.DataFrame(datafile)
        col = df.columns.values.tolist()
        val = df.values.tolist()
        data = [col] + val
        sh.values_update(
            sheetname,
            params={'valueInputOption': 'USER_ENTERED'},
            body={'values': data}
        )

    def create_assertion_session_local(self, subject=None):
        # conf = self.GSHEET_CREDS
        with open('credentials.json') as cred:
            conf = json.load(cred)
        token_url = conf['token_uri']
        issuer = conf['client_email']
        key = conf['private_key']
        key_id = conf.get('private_key_id')
        header = {'alg': 'RS256'}
        if key_id:
            header['kid'] = key_id
        # Google puts scope in the payload
        claims = {'scope': ' '.join(self.GSHEET_SCOPES)}
        return AssertionSession(
            grant_type=AssertionSession.JWT_BEARER_GRANT_TYPE,
            token_url=token_url,
            token_endpoint="https://oauth2.googleapis.com/token",
            issuer=issuer,
            audience=token_url,
            claims=claims,
            subject=subject,
            key=key,
            header=header,
        )
class SpreadsheetBridge(AbstractDatabaseBridge):
    """
    Raises gspread.exceptions.GSpreadException in case of errors
    """
    TYPE_QUALIFIER = "google-spreadsheet"
    MAX_NUMBER_RETRIES_TABLE_UPDATE = 3

    # TODO: SODD - sort out this mess
    @staticmethod
    def __create_assertion_session(conf_file, scopes, subject=None):
        with open(conf_file, 'r') as f:
            conf = json.load(f)
        token_url = conf['token_uri']
        issuer = conf['client_email']
        key = conf['private_key']
        key_id = conf.get('private_key_id')
        header = {'alg': 'RS256'}
        if key_id:
            header['kid'] = key_id
        # Google puts scope in payload
        claims = {'scope': ' '.join(scopes)}
        from authlib.integrations.requests_client import AssertionSession
        return AssertionSession(
            grant_type=AssertionSession.JWT_BEARER_GRANT_TYPE,
            token_url=token_url,
            issuer=issuer,
            audience=token_url,
            claims=claims,
            subject=subject,
            key=key,
            header=header,
            token_endpoint=True)

    # TODO: SODD - sort out this mess (2)
    def _gdrive_reload_creds(self):
        creds = None
        if os.path.exists(GDRIVE_PICKLE):
            with open(GDRIVE_PICKLE, 'rb') as token:
                creds = pickle.load(token)
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    GDRIVE_CRED, self.scopes)
                creds = flow.run_local_server(port=0)
            with open(GDRIVE_PICKLE, 'wb') as token:
                pickle.dump(creds, token)
        return creds

    def _google_drive_init(self):
        creds = self._gdrive_reload_creds()
        return build('drive', 'v3', credentials=creds)

    def __init__(self, _config):
        super().__init__(_config)
        # use creds to create a client to interact with the Google Drive API
        self.scopes = \
            [  # 'https://spreadsheets.google.com/feeds',
                'https://www.googleapis.com/auth/drive',
                'https://www.googleapis.com/auth/spreadsheets',
                'https://www.googleapis.com/auth/drive.metadata.readonly']
        self.session = self.__create_assertion_session(GSS_CRED, self.scopes)
        self.client = Client(None, self.session)

        # Find a workbook by name and open the first sheet.
        # Make sure you use the right name here.
        self.doc = self.client.open_by_url(f'{GSS_BASE_URL}/{GSS_URL}/')
        self.requests = self.doc.get_worksheet(Sheets.TICKETS.value)
        self.phone_db = self.doc.get_worksheet(Sheets.PHONES.value)
        self.pending = self.doc.get_worksheet(Sheets.REGISTRATIONS.value)
        self.faq = self.doc.get_worksheet(Sheets.FAQ.value)
        self.service = self.doc.get_worksheet(Sheets.SERVICE.value)
        self.table_lock = RLock()
        self.gdrive = self._google_drive_init()
        root_candidates = self.gdrive.files().\
            get(fileId=GSS_URL, fields='parents').execute()['parents']
        if len(root_candidates) > 1:
            parent_query = f" '{root_candidates[0]}' in parents "
        else:
            parent_query = ' ('\
                + " or ".join([f"'{r}' in parents" for r in root_candidates])\
                + ') '
        media_dir_id = self.gdrive.\
            files().\
            list(q=(f"name='{GDRIVE_DIR}' and {parent_query} "
                    "and mimeType = 'application/vnd.google-apps.folder' "
                    "and trashed != True"),
                 fields='files/id').\
            execute()['files'][0]['id']
        # Result of this lookup is not stored.
        self.gdrive.files(). \
            list(q=(
                f"name='{GDRIVE_DIR}' and ('1ifY0QIEEbyUhzHwmT3QcXWHtfzKjpl8B' in parents or '1o1x7zd4wYwZldS2rx3ZK-yGbIST1X55osKuiI_aHKFg' in parents)"
                "and mimeType = 'application/vnd.google-apps.folder' "
                "and trashed != True"),
                fields='files/id').execute()
        self.ticket_dir_template = {
            'name': "",
            'parents': [media_dir_id],
            'mimeType': 'application/vnd.google-apps.folder'
        }

    @staticmethod
    def _address_checker(address: Address, r: Dict):
        return address == (r[Columns.HOUSE.value], r[Columns.APT.value])

    @staticmethod
    def _ticket_checker(ticket_id: TicketId, r: Dict):
        return ticket_id == r[Columns.TICKET_ID.value]

    def _apt_data(self, work_sheet: Worksheet, address: Address)\
            -> Iterable[Dict]:
        yield from filter(partial(self._address_checker, address),
                          work_sheet.get_all_records())

    def __ticket(self, ticket_id: TicketId) -> Dict:
        return next(r for r in self.requests.get_all_records()
                    if r[Columns.TICKET_ID.value] == ticket_id)

    def registered_phones(self, address: Union[None, Address])\
            -> Iterable[Tuple[ChatId, Phone, Address]]:
        filtered_source = self._apt_data(self.phone_db, address)\
            if address else self.phone_db.get_all_records()
        yield from ((r[Columns.CHAT_ID.value], r[Columns.PHONE.value],
                     (r[Columns.HOUSE.value], r[Columns.APT.value]))
                    for r in filtered_source)

    def new_ticket(self, ticket: TicketData) -> TicketId:
        retries = self.MAX_NUMBER_RETRIES_TABLE_UPDATE
        current_id: int
        with self.table_lock:
            while retries:
                current_id = int(
                    self.service.cell(*_CURRENT_ID_CELL).numeric_value)
                self.service.update_cell(*_CURRENT_ID_CELL, current_id + 1)
                updated_id = int(
                    self.service.cell(*_CURRENT_ID_CELL).numeric_value)
                if updated_id == current_id + 1:
                    break
                retries = retries - 1
            else:
                raise RuntimeError("Race condition during update")
            self.requests.insert_row([
                current_id,
                ticket.datetime.strftime("%d.%m.%y"),
                ticket.datetime.strftime("%H:%M"),
                ticket.chat_id,
                ticket.phone,
                ticket.address[0],
                ticket.address[1],
                ticket.category,
                ticket.description,
                ticket.media,
                TicketStatesStr.OPENED.value
            ], index=2)
        return current_id

    def update_ticket(self, _id: TicketId, new_description, new_media):
        with self.table_lock:
            # TODO: add update ticket routine
            pass

    @staticmethod
    def __record_to_ticket(record: Dict) -> Tuple[TicketData, Dict]:
        return TicketData(chat_id=record[Columns.CHAT_ID.value],
                          address=(record[Columns.HOUSE.value],
                                   record[Columns.APT.value]),
                          phone=record[Columns.PHONE.value],
                          datetime=datetime.strptime(
                              record[Columns.TICKET_DATE.value] + " " +
                              record[Columns.TICKET_TIME.value],
                              "%d.%m.%y %H:%M"),
                          category=record[Columns.TICKET_CATEGORY.value],
                          description=record[Columns.TICKET_TEXT.value],
                          media=record[Columns.TICKET_MEDIA.value]), {
            'id': record[Columns.TICKET_ID.value],
            'status': record[Columns.TICKET_STATUS.value],
            'comments': record[Columns.TICKET_PUBLIC_COMMENTS.value],
            'private': record[Columns.TICKET_PRIVATE_COMMENTS.value],
            'date_text': record[Columns.TICKET_DATE.value],
            'time_text': record[Columns.TICKET_TIME.value],
        }

    def tickets(self, address: Address) -> List[Tuple[TicketData, Dict]]:
        yield from (self.__record_to_ticket(r)
                    for r in self._apt_data(self.requests, address))

    def get_ticket_details(self, ticket_id: TicketId):
        # all ticket fields
        return self.__record_to_ticket(self.__ticket(ticket_id))

    def new_registration(self, chat_id: ChatId, phone: Phone, address: Address,
                         comment: str):
        self.pending.insert_row(
            [chat_id, address[0], address[1], phone, comment], index=2)

    def peers(self, chat_id: ChatId, address: Address = None)\
            -> Iterable[ChatId]:
        """
        Generates peer chat ids by address.
        Using an address speeds up the process; chat_id is used only if
        address is not supplied.
        """
        if not address:
            address = next((r[Columns.HOUSE.value], r[Columns.APT.value])
                           for r in self.pending.get_all_records()
                           if r[Columns.CHAT_ID.value] == chat_id)
        yield from (r[Columns.CHAT_ID.value]
                    for r in self._apt_data(self.phone_db, address))

    def peer_confirm(self, phone_or_chat_id: Union[Phone, ChatId]):
        from operator import itemgetter
        with self.table_lock:
            registration = dict(
                zip([
                    k for k, _ in sorted(_SCHEMA[Sheets.REGISTRATIONS].items(),
                                         key=itemgetter(1))
                ], self.peer_reject(phone_or_chat_id)))
            self.phone_db.insert_row([
                registration[k]
                for k, _ in sorted(_SCHEMA[Sheets.PHONES].items(),
                                   key=itemgetter(1))
            ], index=2)

    def peer_reject(self, phone_or_chat_id: Union[Phone, ChatId]):
        with self.table_lock:
            reg_found = self.pending.find(str(phone_or_chat_id))
            reg_cells = self.pending.row_values(reg_found.row)
            self.pending.delete_row(reg_found.row)
            return reg_cells

    def is_authorized(self, phone_or_chat_id: Union[Phone, ChatId]) -> bool:
        try:
            self.phone_db.find(str(phone_or_chat_id))
            return True
        except GSpreadException:
            return False

    def is_pending(self, phone_or_chat_id: Union[Phone, ChatId]) -> bool:
        try:
            self.pending.find(str(phone_or_chat_id))
            return True
        except GSpreadException:
            return False

    def update_registered_chat_id(self, phone: Phone, chat_id: ChatId):
        with self.table_lock:
            self.phone_db.update_cell(
                self.phone_db.find(str(phone)).row,
                _SCHEMA[Sheets.REGISTRATIONS][Columns.CHAT_ID], chat_id)

    def snapshot_ticket_statuses(self) -> Dict[TicketId, Tuple[Address, str]]:
        return {
            r[Columns.TICKET_ID.value]:
            ((r[Columns.HOUSE.value], r[Columns.APT.value]),
             r[Columns.TICKET_STATUS.value])
            for r in self.requests.get_all_records()
        }

    def fetch_faq(self) -> Iterable[Tuple[str, str]]:
        yield from ((r[Columns.FAQ_Q.value], r[Columns.FAQ_A.value])
                    for r in self.faq.get_all_records())

    def save_artifacts(self, ticket: TicketId, artifacts: Dict[str, str]):
        ticket_dir = dict(self.ticket_dir_template)
        ticket_dir['name'] = str(ticket)
        try:
            # Create only if necessary
            ticket_dir_id = self.gdrive.files(). \
                create(body=ticket_dir, fields='id').execute()['id']
        finally:
            pass
        return [
            self.gdrive.files().create(body={
                'name': remote,
                'parents': [ticket_dir_id]
            },
                media_body=MediaFileUpload(local),
                fields='id').execute()['id']
            for local, remote in artifacts.items()
        ]
from gspread import Client
import simplejson as json
from ConfigParser import ConfigParser

# Get config from file
cfg = ConfigParser()
cfg.readfp(open('creds.cfg'))

# Get config info
username = cfg.get('google', 'username')
password = cfg.get('google', 'password')

# Login to Google
c = Client(auth=(username, password))
c.login()

# Get our restaurants spreadsheet
s = c.open("Top Restaurants")

# Loop through all worksheets in the spreadsheet, parse them
# and add them to a results list
results = {'picks': {}}
for sheet in s.worksheets():
    if sheet.title == "Top 25":
        results['top'] = sheet.get_all_records()
    else:
        results['picks'][sheet.title.lower().replace(" ", "")] = sheet.get_all_records()

# Write it all out to JSON
def load_remote_account(connection: gspread.Client, errors: list, account: str,
                        options: typing.Dict[str, str]):
    """Try to Load Entries from URL into Account.

    options include:

    - document_name -- the Actual Google Doc name
    - document_tab -- the Tab name on the Doc
    - default_currency -- the entry currency if None is provided
    - reverse_amount -- if true, assume positive entries are credits
    """
    entries = []
    document_name = options['document_name']
    document_tab = options.get('document_tab', 0) or 0
    default_currency = options['default_currency']
    reverse_amount = options.get('reverse_amount', False)
    if not document_name:
        return
    m = -1 if reverse_amount else 1
    logger.info(
        f"Attempting to download entries for {account} from {document_name}.{document_tab}"
    )
    workbook = connection.open(document_name)
    sheet = None
    try:
        document_tab = int(document_tab)
        sheet = workbook.get_worksheet(document_tab)
    except ValueError:
        pass
    if sheet is None:
        sheet = workbook.worksheet(document_tab)
    records = sheet.get_all_records()

    import re
    row = 0
    # logger.info(f"Found {len(records)} entries.")
    for record in records:
        row += 1
        record = clean_record(record)
        if 'date' not in record or not record['date']:
            continue
        if 'amount' not in record or not record['amount']:
            continue
        # if 'account' not in record or not record['account'].strip():
        #     continue
        narration = record.pop('narration', None)
        payee = record.pop('payee', None)
        tagstr = record.pop('tags', '')
        tags = set(re.split(r'\W+', tagstr)) if tagstr else set()
        date = dateparser.parse(record.pop('date'))
        if date:
            date = datetime.date(year=date.year, month=date.month, day=date.day)
        linkstr = record.pop('links', '')
        links = set(re.split(r'\W+', linkstr)) if linkstr else set()
        meta = {
            'filename': str(options['entry_file']),
            'lineno': 0,
            'document-sheet-row': f"{document_name}/{document_tab}/{row+1}"
        }
        amount = decimal.Decimal(record.pop('amount')) * m
        currency = record.pop('currency', default_currency)
        entry_account = record.pop('account')
        for k, v in record.items():
            if v:
                meta[k] = v
        try:
            if not entry_account:
                errors.append(
                    f"Skipping Record with Blank Account: {meta['document-sheet-row']}"
                )
                logger.warning(
                    f"Skipping Record with Blank Account: {meta['document-sheet-row']}"
                )
                continue
            entry = data.Transaction(
                date=date,
                narration=narration,
                payee=payee,
                tags=tags,
                meta=meta,
                links=links,
                flag='*',
                postings=[
                    data.Posting(account=account,
                                 units=data.Amount(amount, currency),
                                 cost=None,
                                 price=None,
                                 flag='*',
                                 meta={}),
                    data.Posting(account=entry_account,
                                 units=data.Amount(-amount, currency),
                                 cost=None,
                                 price=None,
                                 flag='*',
                                 meta={})
                ])
            entries.append(entry)
        except Exception as exc:
            logger.error(f"Error while parsing {record}", exc_info=exc)
            errors.append(str(exc))
    logger.info(
        f"Loaded {len(entries)} entries for {account} from {document_name}.{document_tab}"
    )
    return entries
    return AssertionSession(
        grant_type=AssertionSession.JWT_BEARER_GRANT_TYPE,
        token_endpoint=conf['token_uri'],
        issuer=conf['client_email'],
        audience=conf['token_uri'],
        claims=claims,
        subject=subject,
        key=conf['private_key'],
        header=header,
    )


# Creating an AssertionSession -> requests session.
# It will get a valid OAuth token automatically and prepare a requests session for you to use.
session = create_assertion_session('basicbot-2f1fe390f8cf.json')
gc = Client(None, session)

# Getting sheets
ms_sm_wks = gc.open_by_key('').worksheet("intents")
disease_symptom_overview = gc.open_by_key('').worksheet("new_disease_dataframe")


class ActionResponse(Action):

    def name(self) -> Text:
        return "action_response"

    def run(self, dispatcher: CollectingDispatcher, tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        dict_of_rec = {
            "about_treatment": "Lifestyle and home remedies",
            "overveiw": "Overview",
            "causes": "Causes",
            "prevention": "Prevention",
            "about_symptoms": "Symptoms"
        }
        key=key,
        header=header,
    )


# authenticates session within scopes
scopes = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive',
]
session = create_assertion_session('client_secret.json', scopes)

# gspread simplifies our interactions with the Google Docs/Sheets API
from gspread import Client

index = 2
gc = Client(None, session)
sheet = gc.open('your-google-sheets-name').sheet1

# Twitter API key goes here:
auth = tweepy.OAuthHandler('your-key-here', 'your-key-here')
auth.set_access_token('your-key-here', 'your-key-here')
api = tweepy.API(auth)


# override tweepy.StreamListener to add logic to on_status
class MyStreamListener(tweepy.StreamListener):

    def on_status(self, status):
        # only interacts with users we follow
        follows = api.show_friendship("your-twitter-id", "your-twitter-username",
                                      status.user.id,
def _setup_client(self, credentials):
    client = Client(None)
    credentials.apply(client.session.headers)
    return client
            'count': result.vote_total,
            'percent': "%.2f" % result.vote_total_percent,
            'party': get_party_name(result.candidate.party),
            'shortName': get_abbrev_name(result.candidate.abbrev_name)
        })
    return results


# Get config info
username = cfg.get('google', 'username')
password = cfg.get('google', 'password')
sheet_key = cfg.get('google', 'spreadsheet')

# Login to Google
c = Client(auth=(username, password))
c.login()

# Connect to AP and get the state file
ap_username = cfg.get('ap', 'username')
ap_password = cfg.get('ap', 'password')
ap_state_code = cfg.get('ap', 'state')
client = AP(ap_username, ap_password)
state = client.get_state(ap_state_code)

# Get our election results spreadsheet
s = c.open_by_key(sheet_key)

# Loop through all worksheets in the spreadsheet, parse them
# and add them to a results list
results = []