def test_id_perm_changes():
    """Exercise id_perm_changes()/make_perm_changes() against cached Document-4780 data.

    Reads the document's basic info and permissions from the local JSON
    cache, computes the permission delta against PERM_DEFS.setA, prints
    the remove/change/add lists, then applies them.
    """
    fd = FileSys.file_read_json('Document-4780_DocBasic')
    permdata = FileSys.file_read_json('Document-4780_Perms')
    handle = 'Document-4780'
    # Renamed from 'set' so the builtin set() is not shadowed.
    perm_set = PERM_DEFS.setA
    removelist, changelist, addlist = id_perm_changes(handle, fd, permdata, perm_set)
    print('Remove:', removelist)
    print('Change:', changelist)
    print('Add:', addlist)
    # NOTE(review): 's' is not defined in this function — presumably a
    # module-level DCC session created elsewhere; confirm before running.
    make_perm_changes(s, handle, permdata, removelist, changelist, addlist)
def return_tree(s, target, rootfilename, **kwargs):
    """Return the tree for *target*, reusing a cached JSON file when allowed.

    kwargs:
        Load: True  -> load from disk if the cached file exists
              False -> always re-create via get_tree()
              None  -> ask the user when the cached file exists (default)
    """
    load_flag = kwargs.get("Load")
    if load_flag != False and FileSys.file_check_json(s, rootfilename):
        if load_flag == None:
            load_flag = MyUtil.get_yn("File already exists. [Load from disk = Y, re-create = N] (Y/N)?")
        # Bug fix: honor the user's answer. Previously the result of
        # get_yn() was ignored and the file was always loaded from disk.
        if load_flag:
            tr = FileSys.file_read_json(rootfilename)
            return tr
    tr = get_tree(s, target)
    # print_tree(s, tr)
    FileSys.file_write_json(tr, rootfilename, path=CF.dccfilepath)
    return tr
def return_tree(s, target, rootfilename, **kwargs):
    """Fetch or rebuild the tree rooted at *target*.

    The 'Load' kwarg selects the source: True reuses the on-disk JSON
    copy, False forces a rebuild, and None prompts the user when the
    cached file exists. The tree is written to CF.dccfilepath either way.
    """
    load_flag = kwargs.get('Load')
    file_exists = FileSys.file_check_json(s, rootfilename) == True
    if file_exists:
        if load_flag == None:
            print('File [', rootfilename, '] already exists - use existing file? ', sep='', end='')
            load_flag = MyUtil.get_yn('[Load from disk = Y, re-create = N] (Y/N)? ')
        if load_flag == True:
            tree = FileSys.file_read_json(rootfilename)
            FileSys.file_write_json(tree, rootfilename, path=CF.dccfilepath)
            return tree
    # Rebuild from the server and persist the result.
    tree = get_tree(s, target, **kwargs)
    FileSys.file_write_json(tree, rootfilename, path=CF.dccfilepath)
    # print_tree(s, tr)
    return tree
def return_tree(s, target, rootfilename, **kwargs):
    """Return the tree for *target*, optionally reusing an existing JSON file.

    'Load' kwarg: True loads the existing file, False re-creates the
    tree, None asks the user interactively when the file is present.
    The result is always written back under CF.dccfilepath.
    """
    use_existing = kwargs.get('Load')
    if FileSys.file_check_json(s, rootfilename) == True:
        # Ask only when the caller gave no explicit preference.
        if use_existing == None:
            print('File [', rootfilename, '] already exists ', sep='', end='')
            use_existing = MyUtil.get_yn('[Load from disk = Y, re-create = N] (Y/N)?')
        if use_existing == True:
            result = FileSys.file_read_json(rootfilename)
            FileSys.file_write_json(result, rootfilename, path=CF.dccfilepath)
            return result
    result = get_tree(s, target, **kwargs)
    FileSys.file_write_json(result, rootfilename, path=CF.dccfilepath)
    # print_tree(s, tr)
    return result
# my modules
import DCC
import Config as CF
import FileSys

# Prerequisite:
# 1. Have a collection number for which collections and documents you would like mirrored on
#    your personal computer. If there are collections you would like to exclude ensure
#    that they are appended to the 'exclude' variable with a comma
# 2. Have a directory in which you would like to save all the documents on your PC

# Expected Output:
# As this script runs all collections and documents (except for excluded ones)
# will be printed to the screen. Once traversed, it will then display all
# information about documents located in the collection chosen and determine if they already exist
# in the directory selected. Once the process is complete
# you will be able to locate all files in the directory selected.

# Login to DCC
s = DCC.login(CF.dcc_url + CF.dcc_login)

coll = 'Collection-13987'
# Renamed from 'dir' so the builtin dir() is not shadowed.
mirror_dir = '/Users/sroberts/Dropbox/TMT/Current Tasks/TCS Honeywell/'
exclude = []

FileSys.create_DCC_mirror(s, coll, mirror_dir, SaveFiles=True, MaxFileSize=20000000, Exclude=exclude)
def prop_get(s, handle, **kwargs):
    """Issue a DocuShare PROPFIND for *handle* and return the scraped properties.

    Results are served from the local JSON cache when available; DocDate
    is always fetched fresh.

    kwargs options:
      Depth - Level to get Collection children information ('0', '1' or 'infinity')
          '0' returns information on Collection itself
          '1' and 'infinity' return information on Collection content
      InfoSet = Children - Collection Children
      InfoSet = CollData - Information about Collection
      InfoSet = CollCont - Information about Collection Content (See Depth)
      InfoSet = DocBasic - Document basic information
      InfoSet = DocDate - Document last modified date
      InfoSet = Group - Group information
      InfoSet = Parents - Locations of documents or collections
      InfoSet = Perms - Document Permissions
      InfoSet = User - User information
      InfoSet = VerAll - All Version information
      RetDom - Return BeautifulSoup object rather than file data structure
      WriteProp = (True|False) - Write .html to disk?
      Print = (True|False) - Call print function on InfoSet?
    """
    url = DCC_URL + "/dsweb/PROPFIND/" + handle
    headers = {
        "DocuShare-Version": "5.0",
        "Content-Type": "text/xml",
        "Accept": "text/xml"
    }
    # XML <prop> payload fragment for each supported InfoSet.
    infoDic = {
        'Children': '<children/>',
        'CollCont': '<title/><summary/><entityowner/><getlastmodified/>',
        'CollData': '<title/><summary/><keywords/><entityowner/><getlastmodified/>',
        'DocBasic': '<author/><handle/><document/><getlastmodified/><size/><summary/><entityowner/><keywords/>',
        'DocDate': '<getlastmodified/>',
        'Group': '<entityowner/><handle/><parents/><children/>',
        'Parents': '<parents/>',
        'Perms': '<private/><acl/>',
        'Summary': '<summary/>',
        'Title': '<handle/>',
        'User': '******',
        'VerAll': '<revision_comments/><title/><version_number/><parents/><handle/><entityowner/><getlastmodified/>',
        'Versions': '<versions/><document_tree/>'
    }
    infoSet = kwargs.get('InfoSet', 'DocBasic')
    if debug:
        print('infoSet:', infoSet)
    writeRes = kwargs.get('WriteProp', False)
    retDom = kwargs.get('RetDom', False)
    depth = kwargs.get('Depth', '0')
    headers['Depth'] = depth
    printFlag = kwargs.get('Print', False)
    # CollCont results differ by depth, so the cache file name includes it.
    if infoSet == 'CollCont':
        fRoot = handle + '_' + infoSet + depth
    else:
        fRoot = handle + '_' + infoSet
    if debug:
        print('fRoot:', fRoot)
    # DocDate is never served from cache: the date must always be fresh.
    if infoSet == 'DocDate':
        isCached = False
    else:
        [isCached, fd] = FileSys.check_cache_fd_json(s, handle, infoSet, fRoot)
    if debug:
        print('isCached:', isCached)
    if not isCached:
        if debug:
            print('Calling DCC API from prop_get: ', infoSet, handle)
        if infoSet in infoDic:
            xml = """<?xml version="1.0" ?><propfind><prop>""" + infoDic[infoSet] + """</prop></propfind>"""
            r = s.post(url, data=xml, headers=headers)
        else:
            print('Calling without XML')
            r = s.post(url, headers=headers)
        if writeRes:
            FileSys.file_write_props(r, fRoot)
        # Bug fix: name the parser explicitly, matching the other prop_get
        # variant in this file. Omitting it makes bs4 pick whatever parser
        # is installed (nondeterministic output) and emits a warning.
        dom = BeautifulSoup(r.text, "html.parser")
        if retDom:
            return (dom)
        if infoSet in infoDic:
            fd = prop_scrape(dom, infoSet)
            FileSys.file_write_json(fd, fRoot)
    if printFlag:
        prop_print(infoSet, fd)
    return (fd)
def prop_get(s, handle, **kwargs):
    """Run a DocuShare PROPFIND against *handle*, with local JSON caching.

    kwargs options:
      Depth - Level to get Collection children information ('0', '1' or 'infinity')
          '0' returns information on Collection itself
          '1' and 'infinity' return information on Collection content
      InfoSet = Children - Collection Children
      InfoSet = CollData - Information about Collection
      InfoSet = CollCont - Information about Collection Content (See Depth)
      InfoSet = DocBasic - Document basic information
      InfoSet = DocDate - Document last modified date
      InfoSet = Group - Group information
      InfoSet = Parents - Locations of documents or collections
      InfoSet = Perms - Document Permissions
      InfoSet = User - User information
      InfoSet = VerAll - All Version information
      RetDom - Return BeautifulSoup object rather than file data structure
      WriteProp = (True|False) - Write .html to disk?
      Print = (True|False) - Call print function on InfoSet?
    """
    request_url = DCC_URL + "/dsweb/PROPFIND/" + handle
    hdrs = {"DocuShare-Version": "5.0", "Content-Type": "text/xml", "Accept": "text/xml"}
    # XML <prop> body fragment keyed by InfoSet name.
    payload = {
        'Children': '<children/>',
        'CollCont': '<title/><summary/><entityowner/><getlastmodified/>',
        'CollData': '<title/><summary/><keywords/><entityowner/><getlastmodified/>',
        'DocBasic': '<author/><handle/><document/><getlastmodified/><size/><summary/><entityowner/><keywords/>',
        'DocDate': '<getlastmodified/>',
        'Group': '<entityowner/><handle/><parents/><children/>',
        'Parents': '<parents/>',
        'Perms': '<private/><acl/>',
        'Summary': '<summary/>',
        'Title': '<handle/>',
        'User': '******',
        'VerAll': '<revision_comments/><title/><version_number/><parents/><handle/><entityowner/><getlastmodified/>',
        'Versions': '<versions/><document_tree/>'
    }
    info_set = kwargs.get('InfoSet', 'DocBasic')
    if debug:
        print('infoSet:', info_set)
    write_html = kwargs.get('WriteProp', False)
    want_dom = kwargs.get('RetDom', False)
    depth = kwargs.get('Depth', '0')
    hdrs['Depth'] = depth
    do_print = kwargs.get('Print', False)
    # CollCont cache files are depth-specific.
    cache_root = handle + '_' + info_set + depth if info_set == 'CollCont' else handle + '_' + info_set
    if debug:
        print('fRoot:', cache_root)
    if info_set == 'DocDate':
        # Modification dates are never read from cache.
        is_cached = False
    else:
        is_cached, fd = FileSys.check_cache_fd_json(s, handle, info_set, cache_root)
    if debug:
        print('isCached:', is_cached)
    if not is_cached:
        if debug:
            print('Calling DCC API from prop_get: ', info_set, handle)
        if info_set in payload:
            xml = """<?xml version="1.0" ?><propfind><prop>""" + payload[info_set] + """</prop></propfind>"""
            response = s.post(request_url, data=xml, headers=hdrs)
        else:
            print('Calling without XML')
            response = s.post(request_url, headers=hdrs)
        if write_html:
            FileSys.file_write_props(response, cache_root)
        soup = BeautifulSoup(response.text, "html.parser")
        if want_dom:
            return soup
        if info_set in payload:
            fd = prop_scrape(soup, info_set)
            FileSys.file_write_json(fd, cache_root)
    if do_print:
        prop_print(info_set, fd)
    return fd