def alterarGeradas(ID, sumGeradas):
    """Update the 'OData__x006d_kv6' field of item *ID* in the
    "Autorizações" SharePoint list with *sumGeradas*.

    NOTE(review): app credentials are hard-coded below; they should be
    moved to configuration/secret storage and the exposed secret rotated.
    """
    # Connection parameters -- client id/secret issued by the SharePoint
    # app registration portal.
    app_settings = {
        'url': 'https://usinacoruripe.sharepoint.com/sites/FaturamentoTorta',
        'client_id': 'c74022f1-d1b5-47e3-913f-84d7a98cf032',
        'client_secret': 'qfHtOWl6YieOhGAAavzuzUDvuf9pl2ZvD/0JSqvZhsQ=',
    }

    # Authenticate against the SharePoint REST API with app-only credentials.
    context_auth = AuthenticationContext(url=app_settings['url'])
    context_auth.acquire_token_for_app(
        client_id=app_settings['client_id'],
        client_secret=app_settings['client_secret'])
    ctx = ClientContext(app_settings['url'], context_auth)

    # Fetch the target item directly by id. (The original also loaded every
    # item of the list first, which was unnecessary network traffic.)
    listaShare = ctx.web.lists.get_by_title("Autorizações")
    item = listaShare.get_item_by_id(ID)
    item.set_property('OData__x006d_kv6', sumGeradas)
    item.update()
    ctx.execute_query()
def connectToSite(site_url):
    """Authenticate to *site_url* with the username/password supplied as
    the first two command-line arguments and return a ready ClientContext."""
    credentials = UserCredential(sys.argv[1], sys.argv[2])
    ctx = ClientContext(site_url).with_credentials(credentials)
    site_web = ctx.web
    ctx.load(site_web)
    ctx.execute_query()
    return ctx
def test6_read_site(self):
    """Smoke test: user-credential authentication, then load the root web."""
    url = "https://mediadev8.sharepoint.com/sites/team"
    auth = AuthenticationContext(url)
    auth.acquire_token_for_user(
        username=settings['user_credentials']['username'],
        password=settings['user_credentials']['password'])
    client = ClientContext(url, auth)
    client.load(client.web)
    client.execute_query()
def test5_get_batch_request(self):
    """Queue two GET queries and submit them as a single batch request."""
    client = ClientContext(settings['url']).with_credentials(user_credentials)
    me = client.web.currentUser
    client.load(me)
    root_web = client.web
    client.load(root_web)
    client.execute_batch()
    self.assertIsNotNone(root_web.url)
    self.assertIsNotNone(me.user_id)
def test8_execute_multiple_queries_sequentially(self):
    """Queue two loads and run them with one execute_query call."""
    client = ClientContext(test_site_url).with_credentials(test_user_credentials)
    me = client.web.current_user
    client.load(me)
    root_web = client.web
    client.load(root_web)
    client.execute_query()
    self.assertIsNotNone(root_web.url)
    self.assertIsNotNone(me.user_id)
def test9_execute_get_batch_request(self):
    """Queue two loads and submit them as one batch request."""
    client = ClientContext(test_site_url).with_credentials(test_user_credentials)
    me = client.web.current_user
    client.load(me)
    root_web = client.web
    client.load(root_web)
    client.execute_batch()
    self.assertIsNotNone(root_web.url)
    self.assertIsNotNone(me.user_id)
def test8_execute_multiple_queries(self):
    """Queue two loads and run them with one execute_query call."""
    client = ClientContext(
        settings['url']).with_credentials(user_credentials)
    me = client.web.current_user
    client.load(me)
    root_web = client.web
    client.load(root_web)
    client.execute_query()
    self.assertIsNotNone(root_web.url)
    self.assertIsNotNone(me.user_id)
def test_11_execute_get_and_update_batch_request(self):
    """Batch an update to a page item's Title, then verify it via a fresh read."""
    client = ClientContext(test_site_url).with_credentials(test_user_credentials)
    page_item = client.web.get_file_by_server_relative_url(
        "/SitePages/Home.aspx").listItemAllFields
    new_title = create_unique_name("Page")
    page_item.set_property("Title", new_title).update()
    client.execute_batch()
    refreshed = client.web.get_file_by_server_relative_url(
        "/SitePages/Home.aspx").listItemAllFields
    client.load(refreshed)
    client.execute_query()
    self.assertEqual(refreshed.properties['Title'], new_title)
def test9_execute_update_batch_request(self):
    """Update the web Title inside a batch, then confirm with a read."""
    client = ClientContext(settings['url']).with_credentials(user_credentials)
    web = client.web
    new_web_title = "Site %s" % random_seed
    web.set_property("Title", new_web_title)
    web.update()
    client.execute_batch()
    refreshed = client.web
    client.load(refreshed)
    client.execute_query()
    self.assertEqual(refreshed.properties['Title'], new_web_title)
def test_10_execute_get_and_update_batch_request(self):
    """Batch-update a page item's Title and verify it round-trips."""
    client = ClientContext(settings['url']).with_credentials(user_credentials)
    page_item = client.web.get_file_by_server_relative_url(
        "/SitePages/Home.aspx").listItemAllFields
    new_title = "Page %s" % random_seed
    page_item.set_property("Title", new_title)
    page_item.update()
    client.execute_batch()
    refreshed = client.web.get_file_by_server_relative_url(
        "/SitePages/Home.aspx").listItemAllFields
    client.load(refreshed)
    client.execute_query()
    self.assertEqual(refreshed.properties['Title'], new_title)
def test_10_execute_update_batch_request(self):
    """Batch-update the web Title and confirm it with a follow-up read."""
    client = ClientContext(test_site_url).with_credentials(
        test_user_credentials)
    web = client.web
    new_web_title = create_unique_name("Site")
    web.set_property("Title", new_web_title)
    web.update()
    client.execute_batch()
    refreshed = client.web
    client.load(refreshed)
    client.execute_query()
    self.assertEqual(refreshed.properties['Title'], new_web_title)
def authSharpoint() -> ClientContext:
    """Authenticate with SharePoint using the credentials held in ``env``.

    Returns the ready ClientContext on success. On failure the last auth
    error is printed and False is returned (kept for backward
    compatibility; callers should treat any falsy result as failure).
    NOTE(review): returning False contradicts the ClientContext annotation.
    """
    ctx_auth = AuthenticationContext(env.sp_url)
    if not ctx_auth.acquire_token_for_user(env.sp_username, env.sp_password):
        print(ctx_auth.get_last_error())
        return False
    ctx = ClientContext(env.sp_url, ctx_auth)
    web = ctx.web
    ctx.load(web)
    ctx.execute_query()
    return ctx
def test_13_get_and_delete_batch_request(self):
    """Upload a file, delete it in a batch, and check the file count
    returns to its previous value.

    Fix: the original used ``assertTrue(a, b)``, which treats *b* as the
    failure message and passes for any truthy *a*; ``assertEqual`` is the
    comparison that was intended.
    """
    file_name = create_unique_file_name("TestFile", "txt")
    client = ClientContext(test_site_url).with_credentials(test_user_credentials)
    list_pages = client.web.lists.get_by_title("Documents")
    files = list_pages.root_folder.files.get().execute_query()
    files_count_before = len(files)
    new_file = list_pages.root_folder.upload_file(
        file_name, "-some content goes here-").execute_query()
    self.assertEqual(new_file.name, file_name)
    new_file.delete_object()
    files_after = list_pages.root_folder.files
    client.load(files_after)
    client.execute_batch()
    self.assertEqual(len(files_after), files_count_before)
def test5_construct_batch_request(self):
    """Build an ODataBatchRequest by hand and execute the queued queries."""
    client = ClientContext(
        settings['url']).with_credentials(user_credentials)
    me = client.web.currentUser
    client.load(me)
    root_web = client.web
    client.load(root_web)
    batch_request = ODataBatchRequest(
        client, JsonLightFormat(ODataMetadataLevel.Verbose))

    def _prepare_request(request):
        # Inject the form digest before the batch goes on the wire.
        client.ensure_form_digest(request)

    batch_request.beforeExecute += _prepare_request
    batch_request.execute_query()
def test_12_get_and_delete_batch_request(self):
    """Upload a file, delete it in a batch, and check the file count
    returns to its previous value.

    Fix: the original used ``assertTrue(a, b)``, which treats *b* as the
    failure message and passes for any truthy *a*; ``assertEqual`` is the
    comparison that was intended.
    """
    file_name = "TestFile{0}.txt".format(random_seed)
    client = ClientContext(settings['url']).with_credentials(user_credentials)
    list_pages = client.web.lists.get_by_title("Documents")
    files = list_pages.rootFolder.files
    client.load(files)
    client.execute_query()
    files_count_before = len(files)
    new_file = list_pages.rootFolder.upload_file(file_name,
                                                 "-some content goes here-")
    client.execute_query()
    self.assertEqual(new_file.name, file_name)
    new_file.delete_object()
    files_after = list_pages.rootFolder.files
    client.load(files_after)
    client.execute_batch()
    self.assertEqual(len(files_after), files_count_before)
def openShiftPlan(creds):
    """Download the shift-plan workbook from SharePoint and return the
    'Daily - Infra' sheet as a DataFrame.

    :param creds: (username, password) pair.
    :raises ValueError: when authentication fails. (The original only
        printed the error and then fell through to use an unbound ``ctx``,
        crashing with a NameError.)
    """
    ctx_auth = AuthenticationContext(siteURL)
    if not ctx_auth.acquire_token_for_user(creds[0], creds[1]):
        raise ValueError(ctx_auth.get_last_error())
    ctx = ClientContext(siteURL, ctx_auth)
    web = ctx.web
    ctx.load(web)
    ctx.execute_query()
    print("Web title: {0}".format(web.properties['Title']))

    response = File.open_binary(ctx, relativeURL)
    # Buffer the download so pandas can seek within it.
    bytes_file_obj = io.BytesIO()
    bytes_file_obj.write(response.content)
    bytes_file_obj.seek(0)  # set file object to start
    return pd.read_excel(bytes_file_obj, sheet_name='Daily - Infra')
def get_site_users():
    """Fetch SharePoint site users as a streamed JSON array.

    :return: Flask Response with the users, or a 401 Response when
        authentication fails. (The original implicitly returned None on
        failure, which Flask rejects as a view result.)
    """

    def generate(entities):
        # Stream a JSON array without materialising it in memory.
        yield "["
        for index, entity in enumerate(entities):
            if index > 0:
                yield ","
            yield json.dumps(entity.properties)
        yield ']'

    ctx_auth = AuthenticationContext(URL)
    if ctx_auth.acquire_token_for_user(USERNAME, PASSWORD):
        ctx = ClientContext(URL, ctx_auth)
        user_col = ctx.web.site_users
        ctx.load(user_col)
        ctx.execute_query()
        return Response(generate(user_col), mimetype='application/json')
    return Response(ctx_auth.get_last_error(), status=401)
def get_from_list(list_name):
    """Fetch all items of the given SharePoint list as a streamed JSON array.

    :param list_name: title of the list to read
    :return: Flask Response with the items, or a 401 Response when
        authentication fails. (The original implicitly returned None on
        failure, which Flask rejects as a view result.)
    """

    def generate(entities):
        # Stream a JSON array without materialising it in memory.
        yield "["
        for index, entity in enumerate(entities):
            if index > 0:
                yield ","
            yield json.dumps(entity.properties)
        yield ']'

    ctx_auth = AuthenticationContext(URL)
    if ctx_auth.acquire_token_for_user(USERNAME, PASSWORD):
        ctx = ClientContext(URL, ctx_auth)
        list_object = ctx.web.lists.get_by_title(list_name)
        items = list_object.get_items()
        ctx.load(items)
        ctx.execute_query()
        return Response(generate(items), mimetype='application/json')
    return Response(ctx_auth.get_last_error(), status=401)
def __create_folder_utility(ctx: ClientContext, folder_url: str, folder_name: str) -> str:
    """Ensure a child folder named *folder_name* (slashes stripped) exists
    under *folder_url*; return the resulting server-relative path.

    Fix: the returned path was joined with a backslash, which is not a
    valid separator in SharePoint server-relative URLs; use '/' instead.
    """
    print("folder_url: " + folder_url)
    print("folder_name: " + folder_name)
    folder_name_new = folder_name.replace("/", "")
    print("New folder name: " + folder_name_new)
    # Look for an existing child folder with the sanitised name.
    result = ctx.web.get_folder_by_server_relative_url(
        folder_url).folders.filter("Name eq '{0}'".format(folder_name_new))
    ctx.load(result)
    ctx.execute_query()
    if len(result) > 0:
        print("Folder has been found: {0}".format(
            result[0].properties["Name"]))
    else:
        ctx.web.get_folder_by_server_relative_url(folder_url).folders.add(
            folder_name_new)
        ctx.execute_query()
        print("Folder created: " + folder_name_new)
    new_path = folder_url + "/" + folder_name_new
    print("New Path: " + new_path)
    return new_path
class Sharepoint_Store:
    """Uploads test-data files into a SharePoint document library and
    stamps unit/version metadata onto the uploaded list items."""

    def __init__(self, unit_info_tuples, authentication_dict, sharepoint_info_dict):
        """
        :param unit_info_tuples: (name, value) pairs from the main tc class
        :param authentication_dict: holds 'username' and 'password'
        :param sharepoint_info_dict: holds 'site_url' and 'list_title'
        """
        self.site_url = sharepoint_info_dict['site_url']
        self.target_list_title = sharepoint_info_dict['list_title']
        self.authentication_dict = authentication_dict
        self.unit_dict = dict(unit_info_tuples)

    def make_context(self):
        """Authenticate and cache a ClientContext; return True on success.

        Fix: the login check used ``is not ''`` (identity comparison with
        a literal, a SyntaxWarning in modern Python and not a reliable
        equality test); it now uses ``!=``/``==``.
        """
        ctx_auth = AuthenticationContext(url=self.site_url)
        ctx_auth.acquire_token_for_user(
            username=self.authentication_dict['username'],
            password=self.authentication_dict['password'])
        self.context = ClientContext(self.site_url, ctx_auth)
        # An empty provider error string means the login worked.
        return self.context.auth_context.provider.error == ''

    def spare_method(self):
        print("hi")

    def upload_file(self, file_path, file_name, data_type, software_version):
        """Upload *file_path* into the 'Solar Test Data' library and tag
        the resulting list item with unit info, data type and version."""
        upload_into_library = True
        with open(file_path, 'rb') as content_file:
            file_content = content_file.read()
        if upload_into_library:
            list_title = "Solar Test Data"
            target_folder = self.context.web.lists.get_by_title(
                list_title).root_folder
            file_extension = os.path.basename(file_path).split(".")[-1]
            split_file_name = file_name.split(".")[0]
            # e.g. "rn_test_1" + "_" + first data-type char + ".csv"
            uploaded_file_name = (split_file_name + "_" + data_type[0] + "."
                                  + file_extension)
            file = self.upload_file_alt(target_folder, uploaded_file_name,
                                        file_content)
            new_file_url = file.properties["ServerRelativeUrl"]
            print("File url: {0}".format(new_file_url))
            update_properties = self.edit_uploaded_properties(
                file, uploaded_file_name, new_file_url, self.unit_dict,
                data_type, software_version)
            print(update_properties)
        else:
            target_url = "/Shared Documents/{0}".format(
                os.path.basename(file_path))
            File.save_binary(self.context, target_url, file_content)

    def upload_file_alt(self, target_folder, name, content):
        """Push *content* into *target_folder* under *name* (overwriting)
        and return the resulting SharePoint file object so it can be
        located again later."""
        context = target_folder.context
        info = FileCreationInformation()
        info.content = content
        info.url = name
        info.overwrite = True
        target_file = target_folder.files.add(info)
        context.execute_query()
        return target_file

    def edit_uploaded_properties(self, file, file_name, new_file_url,
                                 unit_info_dict, data_type, software_version):
        """Write unit metadata plus DataType/ToolVersion onto the list
        item behind *new_file_url*; always returns True."""
        file_info = self.context.web.get_file_by_server_relative_url(
            new_file_url)
        self.context.execute_query()
        list_items = file_info.listitem_allfields
        self.context.load(list_items)
        self.context.execute_query()
        # "GP SN" ships read-only; flip the flag so it can be written.
        field_editor = list_items.parent_list.fields.get_by_internal_name_or_title(
            "GP SN")
        self.context.load(field_editor)
        self.context.execute_query()
        if field_editor.properties['ReadOnlyField']:
            field_editor.set_property('ReadOnlyField', False)
            field_editor.update()
            self.context.execute_query()
        for item in unit_info_dict:
            # SharePoint internal field names drop the underscores.
            sharepoint_name = item.replace("_", "")
            new_property_value = unit_info_dict[item]
            list_items.set_property(sharepoint_name, new_property_value)
            list_items.update()
            self.context.execute_query()
        list_items.set_property("DataType", data_type)
        list_items.update()
        self.context.execute_query()
        list_items.set_property("ToolVersion", software_version)
        list_items.update()
        self.context.execute_query()
        return True
import os
import tempfile

from office365.runtime.auth.clientCredential import ClientCredential
from office365.sharepoint.client_context import ClientContext
from office365.sharepoint.listitems.caml import CamlQuery
from settings import settings

# Download every list-item attachment from "Tasks123" into a temp dir.
# Fix: ``os`` and ``tempfile`` were used without a visible import.
download_path = tempfile.mkdtemp()

client_creds = ClientCredential(
    settings['client_credentials']['client_id'],
    settings['client_credentials']['client_secret'])
ctx = ClientContext(settings['url']).with_credentials(client_creds)

list_obj = ctx.web.lists.get_by_title("Tasks123")
items = list_obj.get_items(CamlQuery.create_all_items_query())
ctx.execute_query()

for item in items:
    # 1. determine whether ListItem contains attachments
    if item.properties['Attachments']:
        # 2. Explicitly load attachments for ListItem
        attachment_files = item.attachmentFiles
        ctx.load(attachment_files)
        ctx.execute_query()
        # 3. Enumerate and save attachments
        for attachment_file in attachment_files:
            download_file_name = os.path.join(
                download_path,
                os.path.basename(attachment_file.properties["FileName"]))
            with open(download_file_name, 'wb') as fh:
                content = attachment_file.read()
                fh.write(content)
web_to_delete.context.execute_query() print("Web site has been deleted") def print_webs_recursively(parent_web): print(parent_web.properties["ServerRelativeUrl"]) webs = parent_web.webs parent_web.context.load(webs) parent_web.context.execute_query() for web in webs: print_webs_recursively(web) if __name__ == '__main__': ctxAuth = AuthenticationContext(url=settings['url']) if ctxAuth.acquire_token_for_user( username=settings['user_credentials']['username'], password=settings['user_credentials']['password']): ctx = ClientContext(settings['url'], ctxAuth) # web = load_web(ctx) # web = create_web(ctx) # update_web(web) # delete_web(web) root_web = ctx.web ctx.load(root_web) ctx.execute_query() print_webs_recursively(root_web) else: print(ctxAuth.get_last_error())
import os
import tempfile

from settings import settings
from office365.runtime.auth.client_credential import ClientCredential
from office365.sharepoint.client_context import ClientContext

# Connect with app-only client credentials.
ctx = ClientContext(settings['url']).with_credentials(
    ClientCredential(settings['client_credentials']['client_id'],
                     settings['client_credentials']['client_secret']))

# retrieve files from library
library_files = ctx.web.lists.get_by_title("Documents").rootFolder.files
ctx.load(library_files)
ctx.execute_query()

download_path = tempfile.mkdtemp()
for file in library_files:
    print("Downloading file: {0} ...".format(
        file.properties["ServerRelativeUrl"]))
    download_file_name = os.path.join(
        download_path, os.path.basename(file.properties["Name"]))
    with open(download_file_name, "wb") as local_file:
        file.download(local_file)
        ctx.execute_query()
    print("[Ok] file has been downloaded: {0}".format(download_file_name))
class SharepointUpload(Downloader):
    """Downloader subclass that pushes build artifacts to SharePoint and
    records them in the "product_list" list."""

    def __init__(self, settings_path):
        super().__init__(__version__, r"/home/downloader_settings", settings_path)
        # App-only authentication against the target site.
        auth_ctx = AuthenticationContext(url=site_url)
        auth_ctx.acquire_token_for_app(
            client_id=app_principal['client_id'],
            client_secret=app_principal['client_secret'])
        self.ctx = ClientContext(site_url, auth_ctx)

    @retry(Exception, 4, delay=60, backoff=2)
    def upload_file_to_sp(self, file_path, *remote_path):
        """Upload *file_path* under Shared Documents/beta_builds/<remote_path>,
        verify the remote size matches, and return the folder URL."""
        folder_url = "/".join(["Shared Documents", "beta_builds"] + list(remote_path))
        target_folder = self.ctx.web.ensure_folder_path(folder_url)
        size_chunk = 100 * 1024 * 1024  # MB
        file_size = os.path.getsize(file_path)
        if file_size > size_chunk:
            # Large files go through a chunked upload session.
            result_file = target_folder.files.create_upload_session(
                file_path, size_chunk, self.print_upload_progress, file_size)
        else:
            with open(file_path, 'rb') as content_file:
                file_content = content_file.read()
            name = os.path.basename(file_path)
            result_file = target_folder.upload_file(name, file_content)
        self.ctx.execute_query()
        # Sanity-check the uploaded size via a direct streamed GET.
        request = RequestOptions(
            r"{0}web/getFileByServerRelativeUrl('{1}')/\$value".format(
                self.ctx.service_root_url(), result_file.serverRelativeUrl))
        request.stream = True
        response = self.ctx.execute_request_direct(request)
        remote_size = int(response.headers['Content-Length'])
        if abs(file_size - remote_size) > 0.05 * file_size:
            raise UploaderError("File size difference is more than 5%")
        logging.info('File {0} has been uploaded successfully'.format(
            result_file.serverRelativeUrl))
        return folder_url

    @retry(Exception, 4)
    def add_list_item(self, file_url, build_date, folder_url):
        """Record an uploaded build in the "product_list" list."""
        product_list = self.ctx.web.lists.get_by_title("product_list")
        product_list.add_item({
            "Title": self.settings.version,
            "build_date": build_date,
            "relative_url": file_url,
            "shareable_folder": f"{site_url}/{folder_url}"
        })
        self.ctx.execute_query()

    @retry(Exception, 4)
    def get_list_items(self):
        """Return all items of the "product_list" list."""
        product_list = self.ctx.web.lists.get_by_title("product_list")
        items = product_list.items
        self.ctx.load(items)
        self.ctx.execute_query()
        return items

    @staticmethod
    def print_upload_progress(offset, total_size):
        logging.info("Uploaded '{}' MB from '{}'...[{}%]".format(
            round(offset / 1024 / 1024, 2),
            round(total_size / 1024 / 1024, 0),
            round(offset / total_size * 100, 2)))
from settings import settings
from office365.runtime.auth.client_credential import ClientCredential
from office365.sharepoint.client_context import ClientContext


def set_proxy(request):
    """Route the outgoing request through a local debugging proxy.

    NOTE(review): requests expects proxy-dict keys to be URL schemes
    ('http'/'https'), not the target URL -- confirm this mapping works.
    """
    proxies = {settings['url']: 'https://127.0.0.1:8888'}
    request.proxies = proxies


ctx = ClientContext(settings['url']).with_credentials(
    ClientCredential(settings['client_credentials']['client_id'],
                     settings['client_credentials']['client_secret']))
ctx.pending_request().beforeExecute += set_proxy

target_web = ctx.web
ctx.load(target_web)
ctx.execute_query()
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.sharepoint.client_context import ClientContext
from office365.sharepoint.files.file import File
import io
import pandas as pd

url = 'https://eafit.sharepoint.com/sites/Proyectoinformedecoyunturaeconomica'
username = '******'
password = ''
relative_url = r'/sites/Proyectoinformedecoyunturaeconomica/Documentos compartidos/PowerBI/Data/Base de datos.xlsx'

# Authenticate with user credentials and open the site.
ctx_auth = AuthenticationContext(url)
ctx_auth.acquire_token_for_user(username, password)
ctx = ClientContext(url, ctx_auth)
web = ctx.web
ctx.load(web)
ctx.execute_query()

# Download the workbook into an in-memory, seekable buffer.
response = File.open_binary(ctx, relative_url)
bytes_file_obj = io.BytesIO()
bytes_file_obj.write(response.content)
bytes_file_obj.seek(0)  # set file object to start

# Read the sheets of interest.
df_embig = pd.read_excel(bytes_file_obj, sheet_name='25.EMBIG')
df_ise = pd.read_excel(bytes_file_obj, sheet_name='29.Indice de calidad y cu')
df_ise["date"] = pd.period_range('2016-01-01', '2020-12-01', freq='M')
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.sharepoint.client_context import ClientContext

tenant_url = "https://company.sharepoint.com"
site_url = "https://company.sharepoint.com/sites/sbdev"

ctx_auth = AuthenticationContext(tenant_url)
if ctx_auth.acquire_token_for_user("*****@*****.**", "mypassword"):
    ctx = ClientContext(site_url, ctx_auth)
    all_lists = ctx.web.lists
    ctx.load(all_lists)
    ctx.execute_query()
    # Print the title of every list in the web.
    for current_list in all_lists:
        print(current_list.properties["Title"])
else:
    print(ctx_auth.get_last_error())
class SharePoint(object):
    # Wraps an authenticated SharePoint ClientContext and provides
    # list/download/archive helpers for a download pipeline.
    #
    # NOTE(review): the methods reference module-level names not visible
    # here (``log``, ``LOG``, ``qrs``, ``File``, ``os``, ``re``,
    # ``datetime``); in the exception handlers ``log.critical`` uses the
    # module-level logger rather than ``self.log`` -- confirm intended.

    def __init__(self, url, username, password, log):
        """
        SharePoint object initialization.

        :param url: SharePoint destination URL
        :param username: Username to access SharePoint
        :param password: Password for the above user
        :param log: where to send log messages
        :type log: logging.Logger

        :raises ValueError: when no auth token could be obtained.
        """
        self._url = url
        self._username = username
        self._password = password
        self.log = log
        # Authenticate for this run.
        log.debug(
            "SharePoint: Acquiring AuthenticationContext {} for {}".format(
                url, username))
        self._ctx_auth = AuthenticationContext(url=url)
        # The Office365-REST-Python-Client library (at time of writing)
        # does not surface lower-level failures well, so errors may land
        # in the log file without reaching standard output.
        log.debug("SharePoint: Acquiring token from {} for {}".format(
            url, username))
        self._ctx_auth.acquire_token_for_user(username=username,
                                              password=password)
        # Kludgy success test -- this check will break if a different
        # authentication provider is used.
        if self._ctx_auth.provider.token is None:
            _msg = "SharePoint: Unable to get SharePoint authorization. \
Error: {}".format(
                self._ctx_auth.get_last_error())
            log.critical(_msg)
            raise ValueError(_msg)
        log.debug(
            "SharePoint: Authentication token obtained from {} for {}".format(
                url, username))
        # Build the ClientContext (obtains session cookies).
        log.debug("SharePoint: Getting cookies from {} for {}".format(
            url, username))
        self._ctx = ClientContext(self.url, self._ctx_auth)
        log.debug("SharePoint: Obtained ClientContext from {} for {}".format(
            url, username))

    @property
    def url(self):
        # Destination SharePoint URL given at construction.
        return self._url

    @property
    def username(self):
        # Username given at construction.
        return self._username

    @property
    def password(self):
        # Password given at construction.
        return self._password

    def list_files(self, path):
        """
        Get a list of the filenames in passed folder.

        :param path: Path to the folder from which to get the list of files
        :type path: string
        :return: dict mapping filename -> "TimeCreated" property for every
            file found in the folder
        """
        self.log.debug(
            "list_files: Get list of files for path: {}".format(path))
        # Build and execute the folder query.
        folder = self._ctx.web.get_folder_by_server_relative_url(path)
        self._ctx.load(folder)
        self._ctx.execute_query()
        # Build and execute the files query from the folder query results.
        files = folder.files
        self._ctx.load(files)
        self._ctx.execute_query()
        # Collect filename -> creation time for every file in the path.
        _file_dict = {}
        for _f in files:
            self.log.debug("list_files: Filename: {}".format(
                _f.properties["Name"]))
            _file_dict[_f.properties["Name"]] = _f.properties["TimeCreated"]
        return _file_dict

    def check_archived_folder(self, src_path):
        """
        After downloading files, they're moved to an Archived folder on
        SharePoint. Check that the Archived folders exist or create them.

        The Archived folder path is comprised of two parts, "Archived" and
        a "Month Year" folder, placed within the download folder path:

            .../Shared Documents/NPT/Sep 2018           Download source path
            .../Shared Documents/NPT/Archived/Sep 2018  Archived path

        :param src_path: Download source base directory
            (e.g. /sites/SageDS/Shared Documents/NPT/Oct 2018)
        """
        self.log.debug("check_archived_folders: src_path:{}".format(src_path))
        # Split the source into its parent path and the date folder name.
        _base_path = os.path.dirname(
            src_path
        )  # e.g. /sites/SageDS/Shared Documents/NPT
        _date_folder = os.path.basename(
            src_path)  # e.g. Sep 2018
        self.log.debug(
            "check_archived_folders: base_path:{} date_folder:{}".format(
                _base_path, _date_folder))
        # Get the list of folders in our base folder.
        _folder = self._ctx.web.get_folder_by_server_relative_url(_base_path)
        _folders = _folder.folders
        self._ctx.load(_folders)
        self._ctx.execute_query()
        # Look for an "Archived" folder in the base folder.
        _archived_found = False
        for _f in _folders:
            self.log.debug(
                "check_archived_folders: is 'Archived' = '{}'".format(
                    _f.properties["Name"]))
            if "Archived" == _f.properties["Name"]:
                self.log.debug(
                    "check_archived_folders: 'Archived' folder found, no need to create"
                )
                _archived_found = True
        # Create the "Archived" folder when it is missing.
        if not _archived_found:
            self.log.debug(
                "check_archived_folders: Creating 'Archived' folder")
            _folders.add("Archived")
            self._ctx.execute_query()
            self.log.debug("check_archived_folders: Created 'Archived' folder")
        # Now check for the date folder within the Archived folder.
        _base_path += "/Archived"
        _folder = self._ctx.web.get_folder_by_server_relative_url(_base_path)
        _folders = _folder.folders
        self._ctx.load(_folders)
        self._ctx.execute_query()
        # Look for the date folder (taken from the original path) within
        # the Archived folder.
        _date_folder_found = False
        for _f in _folders:
            self.log.debug("check_archived_folders: is '{}' = '{}'".format(
                _date_folder, _f.properties["Name"]))
            if _date_folder == _f.properties["Name"]:
                self.log.debug(
                    "check_archived_folders: '{}' folder found, no need to create"
                    .format(_date_folder))
                _date_folder_found = True
        # Create the date folder when it is missing.
        if not _date_folder_found:
            self.log.debug(
                "check_archived_folders: Creating '{}' folder".format(
                    _date_folder))
            _folders.add(_date_folder)
            self._ctx.execute_query()
            self.log.debug(
                "check_archived_folders: '{}' folder created ".format(
                    _date_folder))

    def download_files(self, src, dest, file_pattern, period, worksheet_names,
                       table_names, exec_id, script_path):
        """
        Download all the files in a src SharePoint directory to local dest
        directory, record each download via ``qrs``, trigger the DB upload
        script, and archive each downloaded file on SharePoint.

        :param src: Source SharePoint folder from which to download files
        :type src: string
        :param dest: Destination local directory into which to download files
        :type dest: string
        :param file_pattern: regex (matched case-insensitively via upper())
            that selects which filenames to download
        :param period: period label written to the tracking table
        :param worksheet_names: worksheet names passed to the upload script
        :param table_names: table names passed to the upload script
        :param exec_id: execution id used for qrs logging
        :param script_path: directory containing the upload script
        :return: Number of files downloaded
        """
        # Split the source into its parent path and the date folder name
        # (src is like /sites/SageDS/Shared Documents/NPT/Oct 2018).
        _base_path = os.path.dirname(
            src
        )  # e.g. /sites/SageDS/Shared Documents/NPT
        _date_folder = os.path.basename(
            src)  # e.g. Oct 2018
        _move_to_path = "{}/Archived/{}/".format(_base_path, _date_folder)
        self.log.debug(
            "download_files: Starting src:{} base:{} date:{}".format(
                src, _base_path, _date_folder))
        # Make sure Archived folder exists in SharePoint before moving on.
        self.check_archived_folder(src)
        # Get the list of filenames in the SharePoint folder.
        _files = self.list_files(src)
        # Keep a count of the number of files downloaded.
        _num_downloaded = 0
        # Make sure there's something to download.
        if not len(_files):
            self.log.info(
                "download_files: No files found to download from {} of {}".
                format(src, self.url))
            return _num_downloaded
        self.log.info(
            "files_found: Number of files {} found from {} of {}".format(
                len(_files), src, self.url))
        # Walk the list of files, downloading each one into destination
        # directory.
        for _f, _fct in _files.items():
            _spn = "{}/{}".format(src, _f)  # Source pathname
            _dpn = os.path.join(dest, _f)  # Destination pathname
            _upper_file_name = _f.upper()
            _upper_file_pattern = file_pattern.upper()
            # Case-insensitive pattern match by upper-casing both sides.
            file_pattern_match = re.findall(_upper_file_pattern,
                                            _upper_file_name)
            if file_pattern_match:
                self.log.debug("download_files: Downloading {} to {}".format(
                    _spn, _dpn))
                # Record the download start in the tracking log.
                proc_name = _f
                statement_type = "Download File -> " + _f
                table_name = 'null'
                qrs.insert_log(exec_id, proc_name, statement_type, table_name,
                               'null', "Started")
                # Download the file.
                try:
                    with open(_dpn, 'wb') as _ofd:
                        _response = File.open_binary(self._ctx, _spn)
                        _ofd.write(_response.content)
                        qrs.insert_log(exec_id, proc_name, statement_type,
                                       table_name, 'null', "Completed")
                except Exception as e:
                    _msg = "{} error encountered creating destination file {}".format(
                        e, _dpn)
                    log.critical(_msg)
                    raise ValueError(_msg)
                # Local download timestamp for the tracking table.
                dct = datetime.fromtimestamp(os.path.getmtime(_dpn))
                # NOTE(review): SQL is built by string concatenation from
                # filenames/period -- consider a parameterized query.
                sqlcmd = "INSERT INTO [T_STG_FILES_DOWNLOADED]" \
                    + "([FILE_NAME_PATTERN],[FILE_NAME],[DOWNLOADED_TIME],[PERIOD],[PROJECT],[FILE_CREATION_TIME]) " \
                    + "VALUES(" \
                    + "'" + file_pattern + "'," \
                    + "'" + _f + "'," \
                    + "CAST(left('" + str(dct) + "',23)" + " AS datetime),'" \
                    + period + "'," \
                    + "'" + qrs.Project_Name + "'," \
                    + "CAST ('" + _fct + "' AS datetime))"
                qrs.dml_cursor(sqlcmd)
                LOG.info(
                    "Uploaded to data base strarts:Work sheet {} Table: {} ".
                    format(worksheet_names, table_names))
                # Hand the downloaded file to the DB upload script.
                os_cmd = script_path + 'Opex_dynamic_upload_to_DB.py ' + str(
                    exec_id
                ) + ' "' + _dpn + '" "' + table_names + '" "' + worksheet_names + '"'
                print('Initiating System Command -> \n' + os_cmd)
                qrs.run_os_cmd(os_cmd)
                LOG.info(
                    "All Work sheet {} Uploaded to Table: {} was successful".
                    format(worksheet_names, table_names))
                _num_downloaded += 1
                # File downloaded, move it to Archived folder.
                _to_path = _move_to_path + _f
                self.log.debug("download_files: Moving {} to {}".format(
                    _spn, _to_path))
                # Moving the file.
                try:
                    print("in moving block")
                    # NOTE(review): three args for two placeholders; the
                    # extra is ignored by str.format.
                    LOG.info("\n Sharepoint:File {} has been archived in {} ".
                             format(self._ctx, _spn, _to_path))
                    _resp = File.move(
                        self._ctx, _spn, _to_path
                    )
                except Exception as e:
                    _msg = "{} error encountered moving {} to file {}".format(
                        e, _spn, _to_path)
                    log.critical(_msg)
                    raise ValueError(_msg)
                # Was move successful?
                if _resp.status_code != 200:
                    self.log.error(
                        "download_files: Error: {} moving {} to {}".format(
                            _resp.status_code, _spn, _to_path))
                else:
                    self.log.debug("download_files: Moved {} to {}".format(
                        _spn, _to_path))
        # Return count of files downloaded.
        return _num_downloaded
import requests
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.sharepoint.client_context import ClientContext

# App-only authentication against the site.
# NOTE(review): the client secret is hard-coded here; it should live in
# secret storage and the exposed value rotated.
context_auth = AuthenticationContext(
    url='https://share-intra.philips.com/sites/STS020161021115113')
context_auth.acquire_token_for_app(
    client_id='a6a115ee-fe4b-401d-b706-fbeb557f8669',
    client_secret='GKlYLuxQTDOrrBsDwE2a3TQ9IJihKL9Yr2r5J/ZPSbw=')
# session = requests.Session()
# session.cookies
ctx = ClientContext('https://share-intra.philips.com/sites/STS020161021115113',
                    context_auth)

folder_url = "Shared%20Documents"  # folder url where to find
folder_name = "ADM"  # folder name to find

# Query for a child folder with the given name.
result = ctx.web.get_folder_by_server_relative_url(folder_url).folders.filter(
    "Name eq '{0}'".format(folder_name))
ctx.load(result)
ctx.execute_query()
if len(result) > 0:
    print("Folder has been found: {0}".format(result[0].properties["Name"]))
from office365.runtime.auth.client_credential import ClientCredential from office365.sharepoint.client_context import ClientContext from office365.sharepoint.listitems.caml.caml_query import CamlQuery download_path = tempfile.mkdtemp() client_creds = ClientCredential( settings['client_credentials']['client_id'], settings['client_credentials']['client_secret']) ctx = ClientContext(settings['url']).with_credentials(client_creds) list_obj = ctx.web.lists.get_by_title("Tasks123") #items = list_obj.get_items(CamlQuery.create_all_items_query()) #items = list_obj.get_items() items = list_obj.items ctx.load(items, ["ID", "UniqueId", "FileRef", "LinkFilename", "Title", "Attachments"]) ctx.execute_query() for item in items: if item.properties[ 'Attachments']: # 1. determine whether ListItem contains attachments # 2. Explicitly load attachments for ListItem attachment_files = item.attachmentFiles ctx.load(attachment_files) ctx.execute_query() # 3. Enumerate and save attachments for attachment_file in attachment_files: download_file_name = os.path.join( download_path, os.path.basename(attachment_file.properties["FileName"])) with open(download_file_name, 'wb') as fh: content = attachment_file.read()