def deleteListFunc(_SP, _SPL):
    """Truncate a SharePoint list.

    Deletes every item of list ``_SPL`` on site ``_SP`` in chunks of 2000
    (the per-request row limit), pausing between full chunks to avoid
    SharePoint throttling.

    Parameters:
        _SP:  site name (path segment under /sites/).
        _SPL: title of the list to empty.
    """
    print(f'deleting items in the Sharepoint List : {_SPL}....')
    # Connect to the destination site. Re-raise on failure: the original
    # fell through and crashed with a NameError on `site`, hiding the cause.
    try:
        site = Site(
            f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/',
            authcookie=authcookie)
    except Exception as e:
        print(e)
        raise
    mylist1 = site.List(_SPL)
    deleted = 0
    while True:
        # A single read is capped at 2000 rows; loop until the list is empty.
        data1 = mylist1.GetListItems('All Items', rowlimit=2000)
        if not data1:
            break  # nothing left — also skips the pointless empty delete call
        ids = [item['ID'] for item in data1]
        mylist1.UpdateListItems(ids, kind='Delete')
        # Count what was actually deleted instead of a flat +2000 per pass.
        deleted += len(data1)
        if len(data1) == 2000:
            print(f"deleted {deleted} chunk...")
            time.sleep(300)  # back off before the next large batch
def ListsTobePushed(c, _SPL): site1 = Site( f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/bidash', authcookie=authcookie) # Reading The Sharepoiny admin input sp_list = site1.List(_SPL) # Reading the last records in the SQltoshqrepoint list (one sql table at time ) data = sp_list.GetListItems('All Items') # retreiving the header of the list if type(data) is list: return data else: header = sp_list.GetListItems('All Items', rowlimit=1) # getting the list of fields to be inserted to SQL # fields = list(header[0].keys()) # retreiving the first ID i = int(header[0]['ID']) n = i + 5000 m = 5000 data = [] # looping over the 5000 chuncks at time because of the limit of SharePoint while m == 5000: query = { 'Where': ['And', ('Geq', 'ID', str(i)), ('Lt', 'ID', str(n))] } dt = sp_list.GetListItems(viewname='All Items', query=query) df = pd.DataFrame(dt) data_c = df.to_dict('records') # data_c = [{k: v for k, v in mydict.items() if k in (c, 'ID')} for mydict in data_c] print(data_c[:2]) data.extend(data_c) i = i + 5000 n = n + 5000 m = len(dt) return data
def pushToSP(data, mydata):
    """Push a list of record dicts to a SharePoint list.

    Parameters:
        data:   dict with keys 'SP_Site' and 'SP_List' naming the target.
        mydata: list of record dicts to insert.

    Payloads over 5000 records are split into 5000-row batches with a
    pause between them to avoid throttling.
    """
    _SP = data['SP_Site']
    _SPL = data['SP_List']
    print(f'updating Sharepoint List... : {_SPL}....')
    # Connect; re-raise so a failure is not masked by a later NameError.
    try:
        site = Site(
            f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/',
            authcookie=authcookie)
    except Exception as e:
        print(e)
        raise
    mylist1 = site.List(_SPL)
    if len(mydata) > 5000:
        n = 0
        j = 5000
        print("Starting batches ...........")
        # Bug fix: the original looped `while len(mydata) > 0`, which never
        # terminates because mydata is never consumed — loop on the cursor.
        while n < len(mydata):
            chunk = mydata[n:j]
            mylist1.UpdateListItems(data=chunk, kind='New')
            print(f"Completed 1st {j} batch-------------")
            n = n + 5000
            j = j + 5000
            time.sleep(60)  # back off between batches
    else:
        mylist1.UpdateListItems(data=mydata, kind='New')
    print(f'---------------Done -------------')
def SPList(_SP, _SPL):
    """Fetch every item of list ``_SPL`` on site ``_SP``.

    Returns the item list, or None if any step fails (the error is printed).
    """
    site_url = f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/'
    try:
        target_list = Site(site_url, authcookie=authcookie).List(_SPL)
        return target_list.GetListItems('All Items')
    except Exception as e:
        print(e)
def incrementToSP(data, mydata):
    """Insert records into a SharePoint list only if they are new.

    Parameters:
        data:   dict with 'SP_Site', 'SP_List', and 'Identity' (the key
                column used to detect already-present increments).
        mydata: list of record dicts to insert.

    Skips the push entirely when every incoming identity already exists
    in the target list.
    """
    _SP = data['SP_Site']
    _SPL = data['SP_List']
    col = data['Identity']
    print(f'Inserting new Increments to Sharepoint List... : {_SPL}....')
    # Connect to the destination site.
    try:
        site = Site(
            f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/',
            authcookie=authcookie)
    except Exception as e:
        print(e)
    mylist1 = site.List(_SPL)
    # Compare identity sets: incoming vs. what is already in the list.
    incoming = {record[col] for record in mydata}
    existing = {record[col] for record in listObject(mylist1)}
    if incoming <= existing:
        print("No New increment")
    else:
        DatPush(mylist1, mydata, 'New')
def upload_file_sharepoint(self, source_path, sink_path, filename, sharepoint_site):
    """Upload a local file to a SharePoint folder, retrying up to 3 times.

    Parameters:
        source_path = r'/full_sink_path/'            local directory
        sink_path = r'Shared Documents/Shared/<Location>'  SharePoint folder
        filename = 'filename.ext'
        sharepoint_site = 'https://xxx.sharepoint.com/sites/<site_name>'

    Raises the last exception if all three attempts fail.
    """
    site = Site(sharepoint_site, version=Version.v2016, authcookie=self.authcookie)
    # NOTE(review): os.path.join uses the OS separator — on Windows the
    # SharePoint-side path would get backslashes; confirm this runs on POSIX.
    full_source_path = os.path.join(source_path, filename)
    full_sink_path = os.path.join(sink_path, filename)
    print(full_source_path)
    print(full_sink_path)
    folder = site.Folder(sink_path)
    # Read the whole file once, outside the retry loop.
    with open(full_source_path, mode="rb") as file:
        filecontent = file.read()
    for attempt in range(0, 3):
        try:
            # NOTE(review): upload_file is passed sink_path + filename even
            # though `folder` is already rooted at sink_path — confirm the
            # destination is not doubled; presumably only the name is needed.
            folder.upload_file(filecontent, full_sink_path)
            print("Attempt #No:", attempt)
        except Exception as e:
            if attempt < 2:
                print("Trying again!")
                continue  # retry
            # Third failure: report and propagate.
            print("Error", e)
            raise e
        break  # success — stop retrying
def download_dealer_list():
    """Download the current dealer listing from SharePoint and save it as
    ../Customer_Documents.xlsx (the docstring previously named a file,
    CustomerDocuments.xlsx, that the code never wrote).
    """
    creds = Credentials()
    cred = HttpNtlmAuth(creds.username, creds.password)
    site = Site('http://tng/SharedServices/AR', auth=cred)
    rows = site.List('Document_Circulation').get_list_items()
    columns = [
        'Service_Location', 'Dealer_Name', 'Master', 'Chain_Master',
        'Dealer_Number', 'Statements', 'Invoices', 'Credits', 'Fax Number',
        'Attn To:', 'Output', 'Created', 'Modified', 'Item Type', 'Path',
        'Email Address'
    ]
    dealer_list = []
    # Collect the union of keys seen (diagnostic only). A set avoids the
    # quadratic `keys = keys + list(...)` accumulation of the original.
    keys = set()
    for row in rows:
        keys.update(row.keys())
        # Columns that may be absent default to '' ; the rest to None.
        r = [
            row.get('Service_Location'),
            row.get('Dealer_Name'),
            row.get('Master'),
            row.get('Chain_Master'),
            row.get('Dealer_Number'),
            row.get('Statements'),
            row.get('Invoices'),
            row.get('Credits'),
            row.get('Fax Number', ''),
            row.get('Attn To:', ''),
            row.get('Output', ''),
            row.get('Created'),
            row.get('Modified'),
            row.get('Item Type'),
            row.get('Path', ''),
            row.get('Email Address', '')
        ]
        dealer_list.append(r)
    print(keys)
    df = pd.DataFrame(dealer_list, columns=columns)
    # Delete the existing listing, if it exists, before writing the new one.
    if os.path.isfile('../Customer_Documents.xlsx'):
        os.remove('../Customer_Documents.xlsx')
    df.to_excel('../Customer_Documents.xlsx', index=False, header=True)
def get_sp_list(listName):
    """Read a SharePoint list and build English/Russian RCC-name lookups.

    Parameters:
        listName: title of the SharePoint list to read.

    Returns:
        (spRccEng, spRccRus): dicts keyed by 'RCC code' mapping to
        'HFM RCC name' and 'Наименование ЦЗО' respectively. Inactive
        rows and rows without an RCC code are skipped.
    """
    domain = os.environ.get('USERDOMAIN')
    user = os.environ.get('USERNAME')
    password = os.environ.get('PASSWORD')
    auth = HttpNtlmAuth(f'{domain}\\{user}', password)
    site = Site('https://data.erg.net/', auth=auth)
    sp_list = site.List(listName)
    list_data = sp_list.GetListItems()
    spRccEng = {}
    spRccRus = {}
    # Iterate the rows directly: the original walked `list_data[n]` with a
    # hand-maintained counter while ignoring the loop variable — error-prone
    # and easy to desynchronize.
    for rcc in list_data:
        if rcc['Active/Inactive'] == 'Inactive':
            continue
        code = rcc.get('RCC code')
        if code is None:
            continue
        spRccEng[code] = rcc['HFM RCC name']
        spRccRus[code] = rcc['Наименование ЦЗО']
    return (spRccEng, spRccRus)
def Lists(Sp):
    """Survey every list on site ``Sp`` and print which lists contain
    lookup-style columns (names containing one of the dependency markers).
    """
    site = Site(
        f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{Sp}/',
        authcookie=authcookie, huge_tree=True)
    # mylist = site.List(li)
    collection = site.GetListCollection()
    # df = pd.DataFrame(collection)
    # df.to_excel('bidash_lists.xlsx', index=False)
    mylists = [entry['Title'] for entry in collection]
    secur = {entry['Title']: entry['InheritedSecurity'] for entry in collection}
    readSecurity = {entry['Title']: entry['ReadSecurity'] for entry in collection}
    Allowance = {entry['Title']: entry['AllowAnonymousAccess'] for entry in collection}
    depends = {}
    cols = ['Business', 'Operating', 'Deal', 'DataSource']
    # Inspect the first row of each list to see which marker columns appear.
    for title in mylists:
        try:
            first_row = site.List(title).GetListItems('All Items', rowlimit=1)[0]
            matches = [marker for marker in cols
                       if any(marker in field for field in first_row.keys())]
            if matches:
                depends[title] = matches
        except Exception as e:
            print(e)
    print(depends)
def download_file_sharepoint(self, source_path, sink_path, filename, sharepoint_site):
    """Download a file from SharePoint to a local sink path, retrying up
    to 3 times.

    Parameters:
        source_path = r'Shared Documents/Shared/<Location>'  SharePoint folder
        sink_path = r'/full_sink_path/'                      local directory
        filename = 'filename.ext'
        sharepoint_site = 'https://xxx.sharepoint.com/sites/<site_name>'

    Raises the last exception if all three attempts fail.
    """
    site = Site(sharepoint_site, version=Version.v2016, authcookie=self.authcookie)
    full_source_path = os.path.join(source_path, filename)
    full_sink_path = os.path.join(sink_path, filename)
    print(full_source_path)
    print(full_sink_path)
    folder = site.Folder(source_path)
    for attempt in range(0, 3):
        try:
            # `with` guarantees the handle closes even when get_file raises;
            # the original leaked output_file on any exception before close().
            with open(full_sink_path, 'wb') as output_file:
                input_file = folder.get_file(filename)
                output_file.write(bytearray(input_file))
            print("Attempt #No: ", attempt)
            print("Downlowded file size is ",
                  round(os.path.getsize(full_sink_path) / 1024, 2), " KB")
        except Exception as e:
            if (attempt < 2):
                print("Try again!")
                continue
            print("Error", e)
            raise e
        break  # success — stop retrying
def pushToSP(_SP, _SPL, mydata):
    """Push a list of record dicts to list ``_SPL`` on site ``_SP``,
    batching payloads larger than 20000 rows.
    """
    # Connect; re-raise so a failure is not masked by a later NameError.
    try:
        site = Site(
            f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/',
            authcookie=authcookie)
    except Exception as e:
        print(e)
        raise
    mylist1 = site.List(_SPL)
    # data1 = mylist1.GetListItems('All Items')
    # ids = [item['ID'] for item in data1]
    # mylist1.UpdateListItems(ids, kind='Delete')
    if len(mydata) > 20000:
        n = 0
        j = 20000
        print("Starting batches ...........")
        # Bug fix: the original looped `while len(mydata) > 0`, which never
        # terminates because mydata is never consumed — loop on the cursor.
        while n < len(mydata):
            chunk = mydata[n:j]
            mylist1.UpdateListItems(data=chunk, kind='New')
            print(f"Completed 1st {j} batch-------------")
            n = n + 20000
            j = j + 20000
    else:
        mylist1.UpdateListItems(data=mydata, kind='New')
    print(f'---------------Done -------------')
class FolderTestCase(unittest.TestCase):
    """Exercises Folder upload/get/delete, including names with apostrophes."""

    def setUp(self):
        # Modern SharePoint versions share the v2016 API surface.
        modern = TEST_SETTINGS["version"] in ["2014", "2016", "2019", "365"]
        version = Version.v2016 if modern else Version.v2007
        cookies = Office365(TEST_SETTINGS["server_url"],
                            username=TEST_SETTINGS["username"],
                            password=TEST_PASSWORD).GetCookies()
        self.site = Site(TEST_SETTINGS["site_url"], version=version,
                         authcookie=cookies)

    def tearDown(self):
        self.site._session.close()

    def test_folder(self):
        # Run the same upload/read/delete round-trip for a plain folder and
        # for one whose name (and file name) contains an apostrophe.
        cases = (
            ("Testing Folder", "test_folder", "new.txt"),
            ("Testing Folder and files with apostrophes",
             "test_folder_apostrophe", "new'.txt"),
        )
        for label, folder_key, fname in cases:
            print(label)
            self.folder = self.site.Folder(TEST_SETTINGS[folder_key])
            self.folder.upload_file("Hello", fname)
            self.assertEqual(self.folder.get_file(fname), b"Hello")
            self.folder.delete_file(fname)
def conn(SP, li):
    """Return all items of list ``li`` on site ``SP``.

    Bug fix: the URL f-string referenced ``{Sp}`` while the parameter is
    named ``SP``, so every call raised NameError.
    """
    site1 = Site(
        f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{SP}/',
        authcookie=authcookie, huge_tree=True)
    # Reading the SharePoint admin input.
    sp_list = site1.List(li)
    data = sp_list.GetListItems('All Items')
    return data
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function entry point: return every item of the configured
    SharePoint list as a JSON HTTP response (400 on missing configuration).
    """
    # Required settings, in the order they were originally validated.
    # os.environ[...] still raises KeyError when a key is absent entirely,
    # matching the original behavior; the loop below handles empty values.
    settings = {name: os.environ[name] for name in (
        'O365HostUri', 'O365UserName', 'O365UserPassword',
        'O365SiteUri', 'O365ListName')}
    validationError = False
    for name, value in settings.items():
        if not value:
            validationError = True
            # Same message text as the original hand-written checks.
            logging.error('%s configuration missing.', name)
    if validationError:
        return func.HttpResponse("Invalid configuration.", status_code=400)
    authcookie = Office365(settings['O365HostUri'],
                           username=settings['O365UserName'],
                           password=settings['O365UserPassword']).GetCookies()
    site = Site(settings['O365SiteUri'], version=Version.v365,
                authcookie=authcookie)
    sp_list = site.List(settings['O365ListName'])
    data = sp_list.GetListItems('All Items')
    logging.info(data)
    return func.HttpResponse(json.dumps(data), status_code=200,
                             mimetype='application/json')
def login(self):
    """Authenticate against Office 365 using the credentials held in
    StringUtils and cache the resulting Site on ``self.__site``."""
    authcookie = Office365(
        StringUtils.StringUtils.website,
        username=StringUtils.StringUtils.email,
        password=StringUtils.StringUtils.passwort).GetCookies()
    # Name-mangled attribute: private site handle for later API calls.
    self.__site = Site(StringUtils.StringUtils.websiteAPI,
                       version=Version.v365,
                       authcookie=authcookie)
def run():
    """Load 'Raw Data' from data.xlsx into a DataFrame, then query the
    ELT-Test SharePoint list, logging throttling/server errors."""
    wb = load_workbook(filename='data.xlsx', read_only=True)
    ws = wb['Raw Data']
    data = ws.values
    # First row is the header; first column of every row is the index.
    columns = next(data)[1:]
    data = list(data)
    idx = [r[0] for r in data]
    data = (islice(r, 1, None) for r in data)
    df = pd.DataFrame(data, index=idx, columns=columns)
    # print(df.loc[[1130359]])
    authcookie = Office365('https://purdue0.sharepoint.com', username='',
                           password='').GetCookies()
    # NOTE(review): shareplum documents the 365 enum member as Version.v365;
    # confirm `Version.o365` exists in the pinned shareplum release.
    site = Site('https://purdue0.sharepoint.com/sites/HRIS',
                version=Version.o365, authcookie=authcookie)
    test_list = site.List('ELT-Test')
    # while True:
    #     list_items = test_list.GetListItems('All Items', fields=['ID'], row_limit=500)
    #     if len(list_items) == 0:
    #         break
    #     id_list = [x['ID'] for x in list_items]
    #     log.info('Starting deletion of {} records'.format(str(len(list_items))))
    #     test_list.UpdateListItems(data=id_list, kind='Delete')
    #     log.info('Deletion complete.')
    #     print (len(list_items))
    list_items = []
    try:
        list_items = test_list.GetListItems(
            'All Items',
            fields=['ID', 'Employee Name'],
            # query={'Where': ['Or', ('Eq', 'Employee Name', 'Mark Holmes'), ('Eq', 'Employee Name', 'Patricia Prince')]},
            query={'Where': [('Eq', 'Employee Name', 'Mark Holmes')]},
        )
    except shareplum.errors.ShareplumRequestError as err:
        log.error(err)
        # Distinguish throttling (429/503) from server faults (500).
        if err.details and type(
                err.details) == requests.exceptions.HTTPError:
            if err.details.response.status_code in [429, 503]:
                # TODO: Sleep for Retry-After to prevent further throttling
                pass
            elif err.details.response.status_code in [500]:
                log.error(err.details.response.request.body)
                log.error(err.details.response.content)
    for list_item in list_items:
        log.info(list_item)
def o365_login(tenant, username, password, site_o365, shared_folder):
    """Authenticate against Office 365 and return a Folder handle.

    Parameters:
        tenant:        O365 host URI used for cookie authentication.
        username/password: base64-encoded credentials.
        site_o365:     full site URL.
        shared_folder: folder path within the site.
    """
    decoded_user = base64.b64decode(username).decode('utf-8')
    decoded_password = base64.b64decode(password).decode('utf-8')
    cookies = Office365(tenant, username=decoded_user,
                        password=decoded_password).GetCookies()
    site = Site(site_o365, version=Version.v365, authcookie=cookies)
    folder_shared_o365 = site.Folder(shared_folder)
    print(time_now(), f' Login to O365 to {site_o365} is successful!')
    return folder_shared_o365
def download(self) -> list:
    """Download every file matching ``self.filespec`` from the SharePoint
    document folder ``self.docs`` into ``self.savepath``.

    Returns:
        list of local paths written.

    Raises:
        Exception on authentication, site-access, or download failure
        (with the underlying error chained as the cause).
    """
    dl_files = []
    # Parse the site URL; default to https when no scheme was given.
    parsed = urlparse(self.site)
    scheme = 'https' if parsed.scheme == '' else parsed.scheme
    if self.version == '365':
        sp_version = Version.v365
    else:
        sp_version = Version.v2007
    try:
        if sp_version == Version.v365:
            authcookie = Office365(f'{scheme}://{parsed.netloc}',
                                   username=self.username,
                                   password=self.password).GetCookies()
        else:
            cred = HttpNtlmAuth(self.username, self.password)
    except Exception as err:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt /
        # SystemExit and discarded the cause; narrow it and chain.
        raise Exception(
            f'Unable to authenticate using supplied user name and password.'
        ) from err
    else:
        self.display_info('Sucessfully authnticated')
    try:
        if sp_version == Version.v365:
            site = Site(self.site, version=sp_version, authcookie=authcookie)
        else:
            site = Site(self.site, version=sp_version, auth=cred)
    except Exception as err:
        raise Exception(f'{self.site} is not a valid site') from err
    else:
        self.display_info(f'Sucessfully accessed site {self.site}')
    # Build the path to the document folder.
    # NOTE(review): os.path.join uses the OS separator — confirm this runs
    # on POSIX so the SharePoint path keeps forward slashes.
    doc_path = os.path.join(parsed.path, self.docs)
    try:
        folder = site.Folder(doc_path)
        for f in folder.files:
            fname = f['Name']
            if fnmatch.fnmatch(fname, self.filespec):
                dest = os.path.join(self.savepath, fname)
                with open(dest, mode='wb') as file:
                    file.write(folder.get_file(fname))
                dl_files.append(dest)
    except Exception as err:
        raise Exception(f'Unable to download files from {self.docs}') from err
    return dl_files
def list_item_sharepoint(self, source_path, sharepoint_site):
    """List all files in a SharePoint folder.

    Parameters:
        source_path = r'Shared Documents/Shared/<Location>'
        sharepoint_site = 'https://xxx.sharepoint.com/sites/<site_name>'

    Returns:
        DataFrame with FileSize (KB), FullFileUrl, FileVersion,
        MinorVersion, FileName, TimeCreated, TimeLastModified — sorted by
        file name; an empty DataFrame when the folder has no files.
    """
    site = Site(sharepoint_site, version=Version.v2016,
                authcookie=self.authcookie)
    folder_source = site.Folder(source_path)
    # Get the file descriptors for the directory.
    files_item = folder_source.files
    # Build the frame in one shot: DataFrame.append was removed in
    # pandas 2.0 and the per-row append was quadratic anyway.
    items_df = pd.DataFrame(list(files_item))
    if len(items_df) > 0:
        # Subset the columns of interest.
        subset_cols = [
            "Length",
            "LinkingUrl",
            "MajorVersion",
            "MinorVersion",
            "Name",
            "TimeCreated",
            "TimeLastModified",
        ]
        items_df = items_df[subset_cols]
        # Strip query strings (everything after '?') from the URL.
        items_df["LinkingUrl"] = [
            i.split("?")[0] for i in items_df["LinkingUrl"]
        ]
        # Convert bytes to KB.
        items_df["Length"] = [
            round(int(i) / 1000, 2) for i in items_df["Length"]
        ]
        # Sort by file name.
        items_df.sort_values("Name", inplace=True)
        # Rename to friendlier names.
        items_df.columns = [
            "FileSize",
            "FullFileUrl",
            "FileVersion",
            "MinorVersion",
            "FileName",
            "TimeCreated",
            "TimeLastModified",
        ]
        return items_df
    else:
        print(f"No files in {source_path} directory")
        return pd.DataFrame()
def deepLokkup (Sp, df1, l2, col, col1, col2):
    """Rewrite lookup column ``col`` of ``df1`` into SharePoint lookup
    format ("id;#value") by joining against list ``l2`` on site ``Sp``.

    Parameters:
        Sp:   site name.
        df1:  source DataFrame whose column ``col`` is to be rewritten.
        l2:   title of the lookup list to fetch.
        col:  column of df1 joined against the lookup list.
        col1: ID-like column of the lookup list (coerced to string).
        col2: display/value column of the lookup list (join key).

    Returns:
        df1's rows (original columns only) as a list of record dicts.
    """
    site1 = Site(
        f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{Sp}/',
        authcookie=authcookie, huge_tree=True)
    sp_list = site1.List(l2)
    data = sp_list.GetListItems('All Items')
    df2 = pd.DataFrame(data)
    # Deduplicate on the join key so the m:1 merge validation holds.
    df2 = df2.drop_duplicates(subset=col2)
    # Normalize the ID column: numeric -> int64 -> string.
    # NOTE(review): astype(np.int64) raises on NaN/non-numeric values —
    # confirm col1 is always populated and numeric in the lookup list.
    df2[col1] = df2[col1].astype(np.int64).astype(str)
    merge = pd.merge(df1, df2[[col1, col2]], how='left', left_on = col ,
                     right_on= col2, validate='m:1', suffixes=('', '_y'))
    # Where a lookup match was found, replace col with "col1;#col2"
    # (SharePoint's lookup-field wire format); otherwise keep the original.
    merge[col] = np.where(pd.notnull(merge[col1]),
                          merge[col1].astype(str).str.cat(merge[col2], sep=";#"),
                          merge[col])
    # Blank out remaining NaNs so SharePoint receives empty strings.
    merge = merge.replace(np.nan, '', regex=True)
    return merge[list(df1.columns)].to_dict('records')
def MetaData():
    """Fetch the admin metadata rows that map each source SQL table to its
    destination SharePoint list.

    Returns:
        list of record dicts from the 'Adm SqlToSharepoint' list on the
        bidash site.
    """
    admin_site = Site(
        f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/bidash/',
        authcookie=authcookie)
    admin_list = admin_site.List('Adm SqlToSharepoint')
    return admin_list.GetListItems('All Items')
def get_data(team_slug, folder_name, file_name):
    """Fetch the raw contents of a file from a team's Shared Documents
    subfolder on SharePoint.

    Parameters:
        team_slug:   site name under /sites/.
        folder_name: subfolder beneath 'Shared Documents/'.
        file_name:   name of the file to fetch.
    """
    cookies = Office365('https://devpatrol.sharepoint.com',
                        username=SP_USERNAME,
                        password=SP_PASSWORD).GetCookies()
    team_site = Site('https://devpatrol.sharepoint.com/sites/' + team_slug,
                     version=Version.v365, authcookie=cookies,
                     verify_ssl=False)
    documents = team_site.Folder('Shared Documents/' + folder_name)
    return documents.get_file(file_name)
def mySharepoint():
    """Load the live 'Proactive Rehab Tracking List' from SharePoint into a
    pandas DataFrame and return it."""
    credentials = HttpNtlmAuth(config.USERNAME, config.PASSWORD)
    tracking_site = Site(
        'https://applications.level3.com/sites/ProactiveRehabTracking',
        auth=credentials)
    # Get the list by name, then pull the 'All Items' view.
    tracking_list = tracking_site.List('Proactive Rehab Tracking List')
    items = tracking_list.GetListItems('All Items')
    return pd.DataFrame(items[0:])
def setUp(self):
    """Open an authenticated Site for each test, choosing the API version
    from the configured SharePoint version."""
    # Modern SharePoint versions share the v2016 API surface.
    modern = TEST_SETTINGS["version"] in ["2014", "2016", "2019", "365"]
    version = Version.v2016 if modern else Version.v2007
    cookies = Office365(TEST_SETTINGS["server_url"],
                        username=TEST_SETTINGS["username"],
                        password=TEST_PASSWORD).GetCookies()
    self.site = Site(TEST_SETTINGS["site_url"], version=version,
                     authcookie=cookies)
def ListsTobePushed(Sp, li):
    """Return all items of list ``li`` on site ``Sp``.

    Small lists come back directly from GetListItems; anything else falls
    through to the paged largeList() reader.
    """
    source_site = Site(
        f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{Sp}/',
        authcookie=authcookie, huge_tree=True)
    sp_list = source_site.List(li)
    data = sp_list.GetListItems('All Items')
    if type(data) is list:
        print("this is small list...getting data.....")
        return data
    print("large list skipping the try...pushing...")
    return largeList(sp_list)
def sp_conn(data):
    """Connect to the SharePoint list described by ``data``.

    Parameters:
        data: dict with 'SP_Site' (site name) and 'SP_List' (list title).

    Returns:
        the shareplum List object for the target list.
    """
    _SP = data['SP_Site']
    _SPL = data['SP_List']
    try:
        site = Site(
            f'https://foundationriskpartners.sharepoint.com.us3.cas.ms/sites/{_SP}/',
            authcookie=authcookie)
    except Exception as e:
        # Re-raise: the original printed and fell through, crashing with an
        # unrelated NameError on `site` below and hiding the real cause.
        print(e)
        raise
    return site.List(_SPL)
def call_for_content():
    """Read the Teams-tracker CSV from SharePoint and send a newsletter
    reminder message to each listed recipient via the Bot Connector.
    """
    # SECURITY(review): app/bot credentials are hard-coded placeholders —
    # move them to environment variables or a secret store.
    APP_ID = '5995b22a-e094-4d5b-8523-edd7bb089d89'
    APP_PASSWORD = '******'
    CHANNEL_ID = 'msteams'
    BOT_ID = '5995b22a-e094-4d5b-8523-edd7bb089d89'
    authcookie = Office365('https://thespurgroup.sharepoint.com',
                           username='******',
                           password='******').GetCookies()
    site = Site('https://thespurgroup.sharepoint.com/sites/bot_project_test/',
                version=Version.v2016, authcookie=authcookie)
    folder_path = 'Shared Documents/Lono2docs/'
    # Bug fix: `folder` was used below but never created, so this function
    # always crashed with NameError before sending anything.
    folder = site.Folder(folder_path)
    teams_tracker = folder.get_file('testing.csv')
    source_stream = io.BytesIO(teams_tracker)
    df = pd.read_csv(source_stream).drop(columns=['Unnamed: 0'])
    for index, row in df.iterrows():
        SERVICE_URL = row.serviceURL
        recipient_id = row.recipientID
        TENANT_ID = row.tenantID
        NAME = row.Name
        to = ChannelAccount(id=recipient_id)
        bot_channel = ChannelAccount(id=CHANNEL_ID)
        MicrosoftAppCredentials.trust_service_url(SERVICE_URL)
        credentials = MicrosoftAppCredentials(APP_ID, APP_PASSWORD)
        conn_client = ConnectorClient(credentials, SERVICE_URL)
        message_activity = MessageFactory.text(
            f"Hello {NAME}, are you going to submit an article for this month's newsletter?")
        conversation_params = ConversationParameters(
            members=[to],
            channel_data={'tenant': {'id': TENANT_ID}})
        conversation = conn_client.conversations.create_conversation(
            conversation_params)
        conn_client.conversations.send_to_conversation(
            conversation.id, message_activity)
def setUp(self):
    """Load test-server settings from JSON and open an authenticated Site,
    choosing the API version from the configured SharePoint version."""
    with open("test_server.json") as f:
        self.server = json.load(f)
    # Modern SharePoint versions share the v2016 API surface.
    modern = self.server["version"] in ["2014", "2016", "2019", "365"]
    version = Version.v2016 if modern else Version.v2007
    cookies = Office365(self.server["server_url"],
                        username=self.server["username"],
                        password=os.environ.get('TEST_PASSWORD')).GetCookies()
    self.site = Site(self.server["site_url"], version=version,
                     authcookie=cookies)
class FolderTestCase(unittest.TestCase):
    """Round-trip test for Folder upload/read/delete."""

    def setUp(self):
        # Load server settings and open an authenticated Site per test.
        with open("test_server.json") as f:
            self.server = json.load(f)
        modern = self.server["version"] in ["2014", "2016", "2019", "365"]
        version = Version.v2016 if modern else Version.v2007
        cookies = Office365(self.server["server_url"],
                            username=self.server["username"],
                            password=os.environ.get('TEST_PASSWORD')).GetCookies()
        self.site = Site(self.server["site_url"], version=version,
                         authcookie=cookies)

    def tearDown(self):
        self.site._session.close()

    def test_folder(self):
        print("Testing Folder")
        self.folder = self.site.Folder(self.server["test_folder"])
        # Upload, read back as text, then clean up.
        self.folder.upload_file("Hello", "new.txt")
        self.assertEqual(self.folder.read_txt_file("new.txt"), "Hello")
        self.folder.delete_file("new.txt")
def sharepoint_list(Secrets):
    """Connect to SharePoint with password authentication and return every
    item of the 'Risk Registry' list.

    Parameters:
        Secrets: nested dict; Secrets['sharepoint'] supplies user_name,
        password, web_site (target site) and my_site (auth host).
    """
    sp_conf = Secrets['sharepoint']
    cookies = Office365(sp_conf['my_site'],
                        username=sp_conf['user_name'],
                        password=sp_conf['password']).GetCookies()
    registry_site = Site(sp_conf['web_site'], authcookie=cookies)
    return registry_site.List('Risk Registry').GetListItems()