def _unsafe_get_box_object_from_oid(self, client: Client, oid: str, object_type: OType, strict: bool) \
        -> Optional[BoxItem]:
    """Fetch and populate the Box item for *oid*, first as *object_type*, then as the other type.

    Returns None when no object with that id exists as either type.
    Raises CloudFileExistsError when *strict* is true and the object exists only as the
    OTHER type (i.e. the retval would not match the requested type).
    "Unsafe" presumably means no locking/caching guarantees — TODO confirm against callers.
    """
    assert isinstance(client, Client)
    assert object_type in (FILE, DIRECTORY)
    box_object = None
    try:
        with self._api():
            if object_type == FILE:
                box_object = client.file(file_id=oid)
            if object_type == DIRECTORY:
                box_object = client.folder(folder_id=oid)
            if box_object:
                # populate may raise CloudFileNotFoundError, which falls through to the retry below
                box_object = self._unsafe_box_object_populate(client, box_object)
            return box_object
    except CloudFileNotFoundError:
        # Not found under the requested type — fall through and retry as the other type.
        pass
    except (CloudFileExistsError, PermissionError):
        raise
    except Exception as e:
        log.exception(e)
        raise
    # try again with the other type
    log.debug("Trying again")
    if object_type == FILE:
        box_object = client.folder(folder_id=oid)
    if object_type == DIRECTORY:
        box_object = client.file(file_id=oid)
    # should raise FNF if the object doesn't exists
    box_object = self._unsafe_box_object_populate(client, box_object)
    if strict:
        # if we are here, then the object exists and retval does not comply with "strict"
        raise CloudFileExistsError()
    return box_object
def get(self, request, *args, **kwargs):
    """Stream the first file found in the Box root folder back as an attachment.

    NOTE(review): client_id/client_secret are hardcoded here — these are leaked
    credentials and should be rotated and moved to settings/environment.
    NOTE(review): `BoxUser.objects.order_by('-id')[0]` raises IndexError when no
    BoxUser rows exist, and `file_items[0]` raises when the folder has no files —
    confirm callers guarantee both.
    """
    from io import BytesIO
    from django.http import HttpResponse
    # Most recently created BoxUser row supplies the OAuth unique_id.
    boxuser = BoxUser.objects.order_by('-id')[0]
    oauth = RedisManagedOAuth2(
        client_id='5dn98104cyf535v4581cbb1wxnag6e5y',
        client_secret='8z6ysMEnsrickMWBwpnysxYJ9SvqaNlY',
        unique_id=boxuser.unique_id
    )
    client = Client(oauth)
    # Only the first page (100 items) of the root folder is examined.
    folder_items = (
        client.folder(folder_id='0').get_items(limit=100, offset=0))
    file_items = [f for f in folder_items if f.type == "file"]
    first_item_id = file_items[0].id
    file_name = client.file(file_id=first_item_id).get()['name']
    # RESPONSE
    response = HttpResponse(content_type='application/octet-stream')
    response['Content-Disposition'] = (
        'attachment; filename="%s"' % file_name)
    # Entire file is buffered in memory before being written to the response.
    content = BytesIO(client.file(file_id=first_item_id).content())
    file_content = content.getvalue()
    content.close()
    response.write(file_content)
    return response
class TransferData():
    # Define client ID, client secret, and developer token.
    def __init__(self):
        """Read box.cfg (3 lines: CLIENT_ID, CLIENT_SECRET, ACCESS_TOKEN) and build an
        authenticated boxsdk Client. Exits the process if the config cannot be read.
        """
        self.CLIENT_ID = None
        self.CLIENT_SECRET = None
        self.ACCESS_TOKEN = None
        try:
            with open('box.cfg', 'r') as box_cfg:
                self.CLIENT_ID = box_cfg.readline().rstrip('\n')
                self.CLIENT_SECRET = box_cfg.readline().rstrip('\n')
                self.ACCESS_TOKEN = box_cfg.readline().rstrip('\n')
            # Create OAuth2 object.
            self.oauth2 = OAuth2(self.CLIENT_ID,
                                 self.CLIENT_SECRET,
                                 access_token=self.ACCESS_TOKEN)
            # Create the authenticated client
            self.client = Client(self.oauth2)
        # NOTE(review): bare except also swallows auth errors, not just a missing file.
        except:
            print("box.cfg not found for boxsdk")
            print(
                "please make an box.cfg file with CLIENT_ID,CLIENT_SECRET, and ACCESS_TOKEN on 3 lines."
            )
            traceback.print_exc()
            exit()

    def upload_file(self, file_from):
        """upload a file to Box """
        try:
            # Same name used for source path and Box file name.
            self.client.folder('0').upload(file_from, file_from, preflight_check=True)
        except:
            pass
        # NOTE(review): the local file is removed even when the upload failed (the
        # exception above is swallowed) — confirm this move-to-Box semantic is intended.
        os.remove(file_from)

    def download_file(self, file_from):
        """download a file from Box """
        # Only the first 100 root-folder items are searched by name.
        file_list = self.client.folder(folder_id='0').get_items(limit=100, offset=0)
        for file1 in file_list:
            if (file1['name'] == file_from):
                #print("downloaded from box")
                with open(file_from, 'wb') as open_file:
                    self.client.file(file1.id).download_to(open_file)
                # Redundant: the with-statement already closed the handle.
                open_file.close()

    def delete_file(self, file_from):
        """delete file from box """
        # Only the first 100 root-folder items are searched by name.
        file_list = self.client.folder(folder_id='0').get_items(limit=100, offset=0)
        for file1 in file_list:
            if (file1['name'] == file_from):
                self.client.file(file1.id).delete()
def download_file(file_id, file_path):
    """
    Downloads a Box file given its identifier to a specific path.

    Retries up to BOX_RETRIES times, sleeping BOX_RTM seconds between attempts.

    :param file_id: File identifier.
    :param file_path: File path.
    :return: True if the download was successful, False otherwise.
    """
    box_client = Client(JWTAuth.from_settings_file(BOX_CONFIG_FILE_PATH))
    for attempt in range(BOX_RETRIES):
        try:
            # 'wb' truncates any partial file left by a previous failed attempt.
            with open(file_path, 'wb') as handle:
                box_client.file(file_id).download_to(handle)
            return True
        except Exception as e:
            # Give up on the last attempt without a pointless final sleep
            # (the original slept BOX_RTM even when no retry would follow).
            if attempt == BOX_RETRIES - 1:
                print(f'Error calling Box API downloading the file [{file_id}] to file [{file_path}]: {e}')
                return False
            time.sleep(BOX_RTM)
    return False
def file_id_to_path(file_id, client=None):
    """Return the absolute Box path ('/ancestor/.../name') for the given file id.

    A Client built from jwt_auth() is created lazily when *client* is not supplied.
    """
    if client is None:
        client = Client(jwt_auth())
    info = client.file(file_id).get()
    # path_collection lists ancestors root-first; skip entry 0 (the "All Files" root).
    ancestors = [entry.name for entry in info.path_collection['entries'][1:]]
    prefix = '/' + '/'.join(ancestors) + '/'
    return prefix + info.name
def upload_queue_processor():
    """Worker loop: pull callables off the global upload_queue and run each with retries.

    Queue entries are either a bare callable or a [last_modified_time, callable] pair;
    the pair form records version info in redis after a successful File upload.
    Runs forever; intended to be a daemon-thread target.
    :return:
    """
    while True:
        # NOTE(review): Queue.not_empty is an internal condition object and is always
        # truthy — this check does nothing; get() below is what actually blocks.
        if upload_queue.not_empty:
            callable_up = upload_queue.get()  # blocks
            # TODO: pass in the actual item being updated/uploaded, so we can do more intelligent retry mechanisms
            was_list = isinstance(callable_up, list)
            last_modified_time = None
            if was_list:
                last_modified_time, callable_up = callable_up
            # NOTE(review): the retry count appears twice (num_retries and range(15)) —
            # keep them in sync if this is ever tuned.
            num_retries = 15
            for x in range(15):
                try:
                    ret_val = callable_up()
                    if was_list:
                        item = ret_val  # is the new/updated item
                        if isinstance(item, File):
                            client = Client(oauth)
                            file_obj = client.file(
                                file_id=item.object_id).get()
                            # version_info[item.object_id] = version_info.get(file_obj['id'],
                            #                                                 {'fresh_download': True,
                            #                                                  'time_stamp': 0, 'etag': '0'})
                            # version_info[file_obj['id']]['fresh_download'] = False
                            # version_info[file_obj['id']]['time_stamp'] = last_modified_time
                            # version_info[file_obj['id']]['etag'] = file_obj['etag']
                            redis_set(file_obj, last_modified_time)
                    break
                except BoxAPIException as e:
                    print(traceback.format_exc())
                    if e.status == 409:
                        # 409 = name conflict: the item already exists, so retrying won't help.
                        print(
                            'Apparently Box says this item already exists...'
                            'and we were trying to create it. Need to handle this better'
                        )
                        break
                except (ConnectionError, BrokenPipeError, ProtocolError, ConnectionResetError):
                    # Transient network failure: back off briefly and retry.
                    time.sleep(3)
                    print(traceback.format_exc())
                    if x >= num_retries - 1:
                        print('Upload giving up on: {}'.format(callable_up))
                        # no immediate plans to do anything with this info, yet.
                        uploads_given_up_on.append(callable_up)
                except (TypeError, FileNotFoundError) as e:
                    # Non-retryable: bad callable arguments or the local file vanished.
                    print(traceback.format_exc())
                    break
            upload_queue.task_done()
def getFilesFromFolder(folderid="75058858589", frompath="", topath=""):
    """Download every file in the given Box folder into *topath*.

    Credentials are read from 'app.cfg' (3 lines: CLIENT_ID, CLIENT_SECRET,
    ACCESS_TOKEN). Each file is written under *topath* using its Box name.

    Fixes over the original: the download call referenced an undefined name
    `file_id` (NameError) and wrote every item to the hardcoded path
    'anyFileName.xlsx', overwriting it on each iteration; folders were not
    skipped. *frompath* is accepted for backward compatibility but unused.

    :param folderid: Box folder id to download from.
    :param frompath: Unused (kept for interface compatibility).
    :param topath: Local directory prefix for downloaded files ('' = cwd).
    """
    import os
    # Read app info from text file
    with open('app.cfg', 'r') as app_cfg:
        CLIENT_ID = app_cfg.readline().strip()
        CLIENT_SECRET = app_cfg.readline().strip()
        ACCESS_TOKEN = app_cfg.readline().strip()
    # Create OAuth2 object. It's already authenticated, thanks to the developer token.
    oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN)
    print("got authentication")
    # Create the authenticated client
    client = Client(oauth2)
    print("made client")
    for item in client.folder(folder_id=folderid).get_items(limit=1000):
        # Sub-folders cannot be downloaded with file(...).download_to — skip them.
        if item.type != 'file':
            continue
        path = os.path.join(topath, item.name)
        with open(path, 'wb') as f:
            client.file(item.id).download_to(f)
def update_file(file_id, file_path):
    """
    Updates a file (that must exist in Box folder) given its identifier.

    Retries up to BOX_RETRIES times, sleeping BOX_RTM seconds between attempts
    (consistent with download_file: no sleep after the final failed attempt).

    :param file_id: File identifier.
    :param file_path: File path.
    :return: File identifier if the update was successful, None otherwise.
    """
    box_client = Client(JWTAuth.from_settings_file(BOX_CONFIG_FILE_PATH))
    for attempt in range(BOX_RETRIES):
        try:
            return box_client.file(file_id).update_contents(file_path).id
        except Exception as e:
            if attempt == BOX_RETRIES - 1:
                print(f'Error calling Box API updating the file [{file_id}] with file [{file_path}]: {e}')
                return None
            time.sleep(BOX_RTM)
    return None
def upload_queue_processor():
    """
    Implements a simple re-try mechanism for pending uploads

    Queue entries are either a bare callable or a
    [last_modified_time, callable, oauth] triple; the triple form records the
    uploaded File's version info in redis after success. Runs forever.
    :return:
    """
    while True:
        # NOTE(review): Queue.not_empty is an internal condition object and is always
        # truthy — the real blocking happens in get() below.
        if upload_queue.not_empty:
            callable_up = upload_queue.get()  # blocks
            # TODO: pass in the actual item being updated/uploaded, so we can do more intelligent retry mechanisms
            was_list = isinstance(callable_up, list)
            last_modified_time = oauth = None
            if was_list:
                last_modified_time, callable_up, oauth = callable_up
            # Capture partial args (if any) purely for logging below.
            args = callable_up.args if isinstance(callable_up, partial) else None
            # NOTE(review): retry count appears twice (num_retries and range(15)) —
            # keep in sync if tuned.
            num_retries = 15
            for x in range(15):
                try:
                    ret_val = callable_up()
                    if was_list:
                        item = ret_val  # is the new/updated item
                        if isinstance(item, File):
                            client = Client(oauth)
                            file_obj = client.file(file_id=item.object_id).get()
                            redis_set(r_c, file_obj, last_modified_time, box_dir_path=BOX_DIR)
                    break
                except BoxAPIException as e:
                    crate_logger.debug('{the_args}, {the_trace}'.format(the_args=args,
                                                                        the_trace=traceback.format_exc()))
                    if e.status == 409:
                        # 409 = name conflict: the item already exists; retrying won't help.
                        crate_logger.debug('Apparently Box says this item already exists...'
                                           'and we were trying to create it. Need to handle this better')
                        break
                except (ConnectionError, BrokenPipeError, ProtocolError, ConnectionResetError):
                    # Transient network failure: back off briefly and retry.
                    time.sleep(3)
                    crate_logger.debug('{the_args}, {the_trace}'.format(the_args=args,
                                                                        the_trace=traceback.format_exc()))
                    if x >= num_retries - 1:
                        crate_logger.debug('Upload giving up on: {}'.format(callable_up))
                        # no immediate plans to do anything with this info, yet.
                        uploads_given_up_on.append(callable_up)
                except (TypeError, FileNotFoundError):
                    # Non-retryable: bad callable arguments or the local file vanished.
                    crate_logger.debug(traceback.format_exc())
                    break
            upload_queue.task_done()
def capture():
    """OAuth redirect handler: finish authentication, download 'test.txt', stop the server.

    Fixes over the original: the CSRF check now raises explicitly instead of using
    `assert` (asserts are stripped under -O); the downloaded file handle is closed
    via a with-statement; the werkzeug shutdown callable is checked for None before
    being invoked (it is absent when not running under werkzeug's dev server).
    """
    # Capture auth code and csrf token via state
    code = request.args.get('code')
    state = request.args.get('state')
    # If csrf token matches, fetch tokens
    if state != csrf_token:
        raise RuntimeError('CSRF token mismatch in OAuth redirect')
    # authenticate() stores the tokens on the oauth object; the return value is unused here.
    oauth.authenticate(code)
    ## Check user info
    client = Client(oauth)
    user = client.user().get()
    print('The current user ID is {}'.format(user.id))
    ### Get file info
    myDir = client.search().query('workWithJay', type=['folder'], content_type='names', limit=10, offset=0)
    myFile = client.search().query('test', file_extensions=['txt'], type=['file'], content_type='names',
                                   limit=10, offset=0)
    # for d in myDir:
    #     print(d)
    for f in myFile:
        print("Name: ", f.name, " -- ID: ", f.id)
        if f.name == "test.txt":
            box_file = client.file(file_id=f.id).get()
            # with-statement guarantees the handle is closed even if download_to raises.
            with open(box_file.name, 'wb') as output_file:
                box_file.download_to(output_file)
    ## Kill the server
    func = request.environ.get('werkzeug.server.shutdown')
    if func is not None:
        func()
    return 'Data have been downloaded. Please close firefox'
def downloadcsv(
        self,
        authPath: "path to your box config file, a json file",
        folderID: "the id of the folder on box drive containing the csv files",
        date: "the date that the csv file was created"):
    """Download the csv file from box.

    Scans the given Box folder and downloads every item whose name contains
    *date* into ./raw_csv/ under the current working directory.

    Fixes over the original: the unused `dir` local (which shadowed a builtin)
    is removed, and the output handle is managed by a with-statement so it is
    closed even when download_to raises.
    """
    from boxsdk import JWTAuth
    from boxsdk import Client
    import os
    client = Client(JWTAuth.from_settings_file(authPath))
    for item in client.folder(folder_id=folderID).get_items():
        if date in item.name:
            box_file = client.file(file_id=item.id).get()
            # NOTE(review): assumes ./raw_csv already exists — confirm or create it first.
            with open(os.path.join(os.getcwd(), 'raw_csv', box_file.name), 'wb') as output_file:
                box_file.download_to(output_file)
# Script: upload every file from a local folder to a Box folder, updating
# files that already exist there (409 name conflicts).
from boxsdk.exception import BoxAPIException
import logging
import sys
import os
# import time

# NOTE(review): JWTAuth and Client are used below but not imported in this
# chunk — presumably imported earlier in the file; verify.
# NOTE(review): user-specific absolute path — should come from config/env.
config_path='C:\\Users\\ischlesinger\\Code\\JWT_Server_Auth\\12922821_p90tqhsu_config.json'
sdk = JWTAuth.from_settings_file(config_path)
client = Client(sdk)

###Search automation_user box account for specified search-term:
#word = input("What word are you searching for? ")
#items = client.search().query(query=word, limit=100)
#for item in items:
#    print('The item ID is {0} and the item name is {1}'.format(item.id, item.name))

###Upload files from folder on Disk to Box.com. If file with same name exists, update file:
source = 'C:\\users\\ischlesinger\\Desktop\\_sdkTest\\'
files_in_src = os.listdir(source)
folder_id = '101217738692'
for i in files_in_src:
    try:
        new_file = client.folder(folder_id).upload(source+i)
        print('File "{0}" uploaded to Box with file ID {1}'.format(new_file.name, new_file.id))
    except BoxAPIException as e:
        #print('File #s exists. Updaing contents.' %source+i)
        # NOTE(review): Box's item_name_in_use payload usually carries conflicts as a
        # list (conflicts[0]['id']) — confirm this dict-style access against the API.
        file_id = e.context_info['conflicts']['id']
        updated_file = client.file(file_id).update_contents(source+i)
        print('File "{0}" has been updated'.format(updated_file.name))
) client = Client(oauth) root_folder = client.folder(folder_id='0') root_folder_with_info = root_folder.get() # shared_folder = root_folder.create_subfolder('shared_folder') # uploaded_file = shared_folder.upload('test.txt') # shared_link = shared_folder.get_shared_link() print('root_folder_with_info.name:' + root_folder_with_info.name) root_folder_with_limited_info = root_folder.get(fields=['owned_by']) print(root_folder_with_limited_info.owned_by) # print('root_folder_with_limited_info:' + root_folder_with_limited_info.owned_by) folder_info = client.folder(folder_id='me') print(folder_info) print(client.file(file_id='me')) me = client.user(user_id='me').get() print(me) print('name:' + me.name) print('login:'******'0').get() print('folder owner:' + root_folder.owned_by['login']) print('folder name:' + root_folder['name']) items = client.folder(folder_id='0').get_items(limit=100, offset=0) print('list files') for item in items: print('name:{0}, id:{1}'.format(item.name, item.id))
class Box(object):
    """Photobooth uploader backed by Box (Python 2 — note the print statements).

    Maintains a dedicated Box app user and target folder, persisting their ids
    in a local sqlite DB (photobooth.db) so they are reused across runs.
    """
    # Service credentials supplied by the project-level Configuration object.
    _CLIENT_ID = Configuration.CLIENT_ID
    _CLIENT_SECRET = Configuration.CLIENT_SECRET
    _ENTERPRISE_ID = Configuration.ENTERPRISE_ID
    _PASSPHRASE = Configuration.PASSPHRASE

    def __init__(self):
        # Local sqlite store remembering the created Box user and folder ids.
        self._db_engine = sqlalchemy.create_engine('sqlite+pysqlite:///photobooth.db')
        self._session_maker = sessionmaker(bind=self._db_engine, autoflush=True)
        self._session = self._session_maker()
        DeclarativeBase.metadata.create_all(self._db_engine)
        # Enterprise (service) auth used for admin operations such as create_user.
        self._auth = JWTAuth(
            client_id=self._CLIENT_ID,
            client_secret=self._CLIENT_SECRET,
            enterprise_id=self._ENTERPRISE_ID,
            rsa_private_key_file_sys_path='private_key.pem',
            rsa_private_key_passphrase=self._PASSPHRASE,
        )
        self._client = Client(self._auth)
        try:
            # Reuse the previously created app user if its id was persisted.
            user_id = self._session.query(PhotoBoothInfo).filter_by(key='user_id').one().value
            from boxsdk.object.user import User
            self._upload_user = User(None, user_id)
        except NoResultFound:
            # First run: create the app user and remember its id.
            self._upload_user = self._client.create_user('Photobooth Uploader')
            self._session.add(PhotoBoothInfo(key='user_id', value=self._upload_user.object_id))
            self._session.commit()
        # Second auth instance acting as the app user (uploads are attributed to it).
        self._uploader_auth = JWTAuth(
            client_id=self._CLIENT_ID,
            client_secret=self._CLIENT_SECRET,
            enterprise_id=self._ENTERPRISE_ID,
            rsa_private_key_file_sys_path='private_key.pem',
            rsa_private_key_passphrase=self._PASSPHRASE,
        )
        self._uploader_auth.authenticate_app_user(self._upload_user)
        self._uploader = Client(self._uploader_auth)
        try:
            # Reuse the previously created upload folder if its id was persisted.
            folder_id = self._session.query(PhotoBoothInfo).filter_by(key='folder_id').one().value
            self._folder = self._uploader.folder(folder_id)
        except NoResultFound:
            self._folder = self._uploader.folder('0').create_subfolder('Photobooth Images')
            self._session.add(PhotoBoothInfo(key='folder_id', value=self._folder.object_id))
            self._session.commit()

    def upload_photo(self, name, message, photo_sys_path):
        """Upload a photo and attach name/message metadata to it."""
        print 'uploading photo ', photo_sys_path, ' to box'
        photo = self._folder.upload(photo_sys_path)
        photo.metadata().create({
            'name': name,
            'message': message,
        })

    def download_photo(self, file_id, photo_sys_path):
        """Download the Box file *file_id* to *photo_sys_path*."""
        print 'downloading photo ', photo_sys_path, ' from box'
        with open(photo_sys_path, 'wb') as file_handle:
            self._client.file(file_id).download_to(file_handle)

    def list_files(self):
        """Return up to 1000 items from the upload folder."""
        return self._folder.get_items(1000)
class BoxAPI(StorageAPI):
    """StorageAPI implementation backed by Box, with OAuth tokens cached in a local file."""

    def __init__(self, credential_dir, credential_filename=CREDENTIAL_FILENAME):
        super(BoxAPI, self).__init__(credential_dir)
        self.auth_file = join(credential_dir, credential_filename)
        # store_tokens callback persists refreshed tokens back to auth_file.
        self.oauth = OAuth2(client_id=CLIENT_ID,
                            client_secret=CLIENT_SECRET,
                            store_tokens=self.write_access_token)
        try:
            self.get_tokens_from_file()
            self.authorize()
        except IOError:
            # No cached tokens yet — caller must drive the interactive flow.
            pass

    def get_auth_url(self):
        """Return the Box authorization URL (the csrf token is discarded)."""
        auth_url, csrf_token = self.oauth.get_authorization_url(REDIRECT_URI)
        return auth_url

    def build(self):
        """Create the authenticated client and ensure the app folder exists."""
        self.client = Client(self.oauth)
        self.create_folder("demerio")

    def create_folder(self, folder_name):
        """Find-or-create the app folder under the Box root and record its id.

        NOTE(review): the search uses *folder_name* but the create path hardcodes
        'demerio' — the parameter is effectively ignored when creating; confirm intent.
        """
        search_results = self.client.search(folder_name, limit=100, offset=0,
                                            ancestor_folders=[self.client.folder(folder_id='0')])
        folder_filter = [result for result in search_results if result._item_type == "folder"]
        if len(folder_filter) == 0:
            demerio_folder = self.client.folder(folder_id='0').create_subfolder('demerio')
        else:
            assert len(folder_filter) == 1
            demerio_folder = folder_filter[0].get(fields=["name"])
        self.root_folder_id = demerio_folder.id

    def get_tokens_from_file(self):
        """Read (access_token, refresh_token) from the two-line auth file."""
        with open(self.auth_file, "r") as f:
            access_token = f.readline().rstrip()
            refresh_token = f.readline().rstrip()
        return access_token, refresh_token

    def write_access_token(self, access_token, refresh_token):
        """Persist tokens to the auth file (boxsdk store_tokens callback)."""
        with open(self.auth_file, 'w') as f:
            f.write(access_token + "\n")
            f.write(refresh_token + "\n")

    def authorize(self):
        """Authorize from cached tokens, or run the interactive browser flow.

        NOTE(review): writes the private _access_token/_refresh_token attributes of
        the boxsdk OAuth2 object directly — fragile across SDK versions.
        """
        if os.path.exists(self.auth_file):
            access_token, refresh_token = self.get_tokens_from_file()
            self.oauth._access_token = access_token
            self.oauth._refresh_token = refresh_token
        else:
            # One-shot local HTTP server captures the OAuth redirect with the code.
            httpd = ClientRedirectServer(("localhost", 8888), ClientRedirectHandler)
            webbrowser.open(self.get_auth_url())
            httpd.handle_request()
            self.oauth.authenticate(httpd.query_params['code'])
        self.build()

    def is_connected(self):
        ## TODO: There must be a better way to check connection, with self.oauth ??
        try:
            self.client.user(user_id='me').get()
        # NOTE(review): bare except also hides AttributeError when build() never ran.
        except:
            return False
        return True

    def download_file(self, file_id, path_to_download):
        """Download the whole file content into *path_to_download* (buffered in memory)."""
        with open(path_to_download, "wb") as f:
            f.write(self.client.file(file_id=file_id).content())

    def upload_new_file(self, local_file_path):
        """Upload under a random remote name; return the new Box file id."""
        new_file = self.client.folder(folder_id=self.root_folder_id).upload(local_file_path,
                                                                            file_name = generate_random_string())
        return new_file.get()['id']

    def delete_file(self, file_id):
        """Delete the Box file with the given id."""
        self.client.file(file_id=file_id).delete()

    def update_file(self, local_file_path, file_id):
        """Replace the remote file's contents with the local file."""
        self.client.file(file_id=file_id).update_contents(local_file_path)
class BoxProvider(OAuthProvider):
    """OAuthProvider backed by Box (Python 2 — note the `unicode` check in _connect).

    Keeps a name->object_id cache (id_cache) of the app folder's contents so that
    get/put/delete can address files by name without a remote lookup.
    """
    BOX_ROOT_ID = '0'  # The root of the Box system (per Box docs)
    MAX_BOX_LIMIT = 1000  # the maximum number of items returned from a Box request

    @classmethod
    def provider_identifier(cls):
        return "box"

    @classmethod
    def provider_name(cls):
        return "Box"

    def __init__(self, credential_manager):
        super(BoxProvider, self).__init__(credential_manager)
        self.id_cache, self._email, self._app_credentials = None, "", None
        self.access_token, self.refresh_token = None, None
        self.write_tokens = True

    @contextmanager
    def exception_handler(self):
        """Map Box/network exceptions to project exceptions; always persist tokens."""
        try:
            yield
        except BoxOAuthException:
            raise exceptions.AuthFailure(self)
        except BoxAPIException:
            raise exceptions.ProviderOperationFailure(self)
        except ReadTimeout:
            raise exceptions.ConnectionFailure(self)
        except Exception:
            raise exceptions.ProviderOperationFailure(self)
        finally:
            # Tokens may have been refreshed mid-operation; save them regardless of outcome.
            self._persist_tokens()

    def start_connection(self):
        """Begin the OAuth flow; return the URL the user must visit."""
        self.oauth = OAuth2(client_id=self.app_credentials["client_id"],
                            client_secret=self.app_credentials["client_secret"])
        with self.exception_handler():
            authorize_url, self.csrf_token = self.oauth.get_authorization_url(self.get_oauth_redirect_url())
        return authorize_url

    def finish_connection(self, url):
        """Complete the OAuth flow from the redirect *url* and connect the client."""
        params = parse_url(url)
        try:  # get auth_token
            auth_token = params["code"]
            assert self.csrf_token == params["state"]
        except AssertionError:  # csrf mismatch or csrf not found
            raise exceptions.AuthFailure(self)
        except KeyError:
            # No code present: translate the OAuth error code into a project exception.
            try:
                error_code = params["error"]
            except KeyError:
                raise exceptions.ProviderOperationFailure(self)
            if error_code == "invalid_request" or error_code == "unsupported_response_type":
                raise exceptions.ProviderOperationFailure(self)
            elif error_code == "access_denied" or error_code == "server_error":
                raise exceptions.AuthFailure(self)
            elif error_code == "temporarily_unavailable":
                raise exceptions.ConnectionFailure(self)
            else:
                raise exceptions.ProviderOperationFailure(self)
        credentials = {}
        with self.exception_handler():
            credentials["access_token"], credentials["refresh_token"] = \
                self.oauth.authenticate(auth_token)
        self._connect(credentials)

    def _persist_tokens(self):
        """Write tokens through the credential manager, once per refresh."""
        if self.write_tokens and self.access_token is not None and self.uid is not None:
            user_credentials = {"access_token": self.access_token, "refresh_token": self.refresh_token}
            self.credential_manager.set_user_credentials(self.__class__, self.uid, user_credentials)
            self.write_tokens = False

    def _connect(self, user_credentials):
        """Build the authenticated client, resolve the app folder, and prime id_cache."""
        def store_tokens_callback(access_token, refresh_token):
            # Called by boxsdk on refresh; flag the new tokens for persistence.
            self.write_tokens = True
            self.access_token = access_token
            self.refresh_token = refresh_token

        def load_email():
            with self.exception_handler():
                self._email = self.client.user(user_id='me').get()['login']

        def make_app_folder():
            with self.exception_handler():
                box_root_folder = self.client.folder(self.BOX_ROOT_ID)
                try:  # make an app-specific folder if one does not already exist
                    _, folder_id, _ = box_root_folder.create_subfolder(self.ROOT_DIR)
                except BoxAPIException as e:
                    # 409 conflict: the folder already exists; reuse its id.
                    folder_id = e.context_info['conflicts'][0]['id']
                self.app_folder = self.client.folder(folder_id)

        def prime_cache():
            with self.exception_handler():
                # get all items: keep paging while every previous page came back full.
                files = []
                offset = 0
                while len(files) == offset:
                    files += self.app_folder.get_items(self.MAX_BOX_LIMIT, offset=offset)
                    offset += self.MAX_BOX_LIMIT
                self.id_cache = {user_file.name: user_file.object_id for user_file in files}

        # if this came from cache, it is a json string that needs to be converted
        if type(user_credentials) in [unicode, str]:
            user_credentials = json.loads(user_credentials)
        self.access_token, self.refresh_token = user_credentials["access_token"], user_credentials["refresh_token"]
        oauth = OAuth2(client_id=self.app_credentials["client_id"],
                       client_secret=self.app_credentials["client_secret"],
                       store_tokens=store_tokens_callback,
                       access_token=self.access_token,
                       refresh_token=self.refresh_token)
        self.client = Client(oauth)
        load_email()
        make_app_folder()
        prime_cache()

    @property
    def uid(self):
        # Provider identity is the Box account's login email.
        return self._email

    def get(self, filename):
        """Return the raw bytes of the cached file named *filename*."""
        with self.exception_handler():
            box_file = self.client.file(self.id_cache[filename])
            return box_file.content()

    def put(self, filename, data):
        """Create or update *filename* with *data*, keeping id_cache current."""
        data_stream = StringIO(data)
        with self.exception_handler():
            if filename in self.id_cache:
                existing_file = self.client.file(self.id_cache[filename])
                existing_file.update_contents_with_stream(data_stream)
            else:
                new_file = self.app_folder.upload_stream(data_stream, filename)
                self.id_cache[filename] = new_file.object_id

    def delete(self, filename):
        """Delete *filename* remotely and drop it from id_cache."""
        with self.exception_handler():
            box_file = self.client.file(self.id_cache[filename])
            box_file.delete()
            self.id_cache.pop(filename, None)

    def wipe(self):
        """Delete every cached file; the cache is cleared even on partial failure."""
        with self.exception_handler():
            try:
                for _, file_id in self.id_cache.items():
                    box_file = self.client.file(file_id)
                    box_file.delete()
            except:
                raise
            finally:
                self.id_cache = {}
def process_event(self, event, operation):
    """
    Wrapper to process the given event on the operation.

    Dispatches an inotify-style *event* to Box according to *operation*
    ('delete', 'move', 'create', 'modify', 'real_close'). For 'move', *event*
    is a (src_event, dest_event) pair.

    NOTE(review): several branches call `AssertionError(cond, msg)` — this only
    CONSTRUCTS an exception object and never raises it, so those checks are
    no-ops; they presumably were meant to be `assert cond, msg`.
    :param event:
    :param operation:
    :return:
    """
    if operation == 'delete':
        crate_logger.debug('Doing a delete on {}'.format(event.pathname))
        folders_to_traverse = self.folders_to_traverse(event.path)
        crate_logger.debug(folders_to_traverse)
        client = Client(self.oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (event.path, event.path[:-1],)
        cur_box_folder = self.traverse_path(client, event, cur_box_folder, folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): no-op — exception constructed but not raised.
            AssertionError(cur_box_folder['name'] == last_dir,
                           cur_box_folder['name'] + 'not equals ' + last_dir)
        event_was_for_dir = 'IN_ISDIR'.lower() in event.maskname.lower()
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                if not event_was_for_dir and entry['type'] == 'file' and entry['name'] == event.name:
                    if entry['id'] not in self.files_from_box:
                        cur_file = client.file(file_id=entry['id']).get()
                        # does not actually check correctly...unless not "ok" is false
                        if cur_file.delete():
                            # del version_info[cur_file['id']]
                            r_c.delete(redis_key(cur_file['id']))
                    else:
                        self.files_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                    break
                elif event_was_for_dir and entry['type'] == 'folder' and entry['name'] == event.name:
                    if entry['id'] not in self.folders_from_box:
                        self.get_folder(client, entry['id']).delete()
                        # cur_folder = client.folder(folder_id=entry['id']).get()
                        # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                    else:
                        self.folders_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                    break
    elif operation == 'move':
        crate_logger.debug('Doing a move on: {}'.format(event))
        src_event, dest_event = event
        folders_to_traverse = self.folders_to_traverse(dest_event.path)
        crate_logger.debug(folders_to_traverse)
        client = Client(self.oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        cur_box_folder = self.traverse_path(client, dest_event, cur_box_folder, folders_to_traverse)
        src_folders_to_traverse = self.folders_to_traverse(src_event.path)
        src_box_folder = box_folder
        src_box_folder = self.traverse_path(client, src_event, src_box_folder, src_folders_to_traverse)
        # Same parent path on both sides means this "move" is really a rename.
        is_rename = src_event.path == dest_event.path
        # is_a_directory = 'IN_ISDIR'.lower() in dest_event.maskname.lower()
        did_find_src_file = os.path.isdir(dest_event.pathname)  # true if we are a directory :)
        did_find_src_folder = os.path.isfile(dest_event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(dest_event.pathname)
        is_dir = os.path.isdir(dest_event.pathname)
        move_from_remote = False
        src_num_entries = src_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, src_num_entries, limit):
            for entry in src_box_folder.get_items(offset=offset, limit=limit):
                did_find_src_file = is_file and entry['name'] == src_event.name and entry['type'] == 'file'
                did_find_src_folder = is_dir and entry['name'] == src_event.name and entry['type'] == 'folder'
                if did_find_src_file:
                    src_file = client.file(file_id=entry['id']).get()
                    if is_rename:
                        src_file.rename(dest_event.name)
                    else:
                        did_find_cur_file = os.path.isdir(dest_event.pathname)  # should check box instead
                        did_find_cur_folder = os.path.isfile(dest_event.pathname)  # should check box instead
                        cur_num_entries = cur_box_folder['item_collection']['total_count']
                        for cur_offset in range(0, cur_num_entries, limit):
                            for cur_entry in cur_box_folder.get_items(offset=cur_offset, limit=limit):
                                matching_name = cur_entry['name'] == dest_event.name
                                did_find_cur_file = is_file and matching_name and isinstance(cur_entry,
                                                                                             File)
                                did_find_cur_folder = is_dir and matching_name and isinstance(cur_entry,
                                                                                              Folder)
                                if did_find_cur_file:
                                    # Destination already has a file of that name: update it,
                                    # then queue deletion of the source copy.
                                    self.upload_queue.put([os.path.getmtime(dest_event.pathname),
                                                           partial(cur_entry.update_contents,
                                                                   dest_event.pathname),
                                                           self.oauth])
                                    self.upload_queue.put(partial(src_file.delete))
                                    break
                                elif did_find_cur_folder:
                                    crate_logger.debug(
                                        'do not currently support movinga same name folder into parent with'
                                        'folder inside of the same name -- would may need to update the '
                                        'contents')
                                    break
                            if (is_file and did_find_cur_file) or (is_dir and did_find_cur_folder):
                                break
                        if is_file and not did_find_cur_file:
                            src_file.move(cur_box_folder)
                            # do not yet support moving and renaming in one go
                            assert src_file['name'] == dest_event.name
                elif did_find_src_folder:
                    src_folder = client.folder(folder_id=entry['id']).get()
                    if is_rename:
                        src_folder.rename(dest_event.name)
                    else:
                        src_folder.move(cur_box_folder)
                        # do not yet support moving and renaming in one go
                        assert src_folder['name'] == dest_event.name
                elif entry['name'] == dest_event.name:
                    move_from_remote = True
        if not move_from_remote:  # if it was moved from a different folder on remote, could be false still
            dest_box_folder = box_folder
            dest_folders_to_traverse = self.folders_to_traverse(dest_event.path)
            dest_box_folder = self.traverse_path(client, dest_event, dest_box_folder,
                                                 dest_folders_to_traverse)
            dest_num_entries = dest_box_folder['item_collection']['total_count']
            limit = 100
            for offset in range(0, dest_num_entries, limit):
                # NOTE(review): counts come from dest_box_folder but items are listed
                # from cur_box_folder — confirm these are intended to be the same folder.
                for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                    if entry['name'] == dest_event.name:
                        move_from_remote = True
                        break
            if not move_from_remote:
                if is_file and not did_find_src_file:
                    # src file [should] no longer exist[s]. this file did not originate in box, too.
                    last_modified_time = os.path.getmtime(dest_event.pathname)
                    self.upload_queue.put([last_modified_time,
                                           partial(cur_box_folder.upload, dest_event.pathname,
                                                   dest_event.name),
                                           self.oauth])
                elif is_dir and not did_find_src_folder:
                    self.upload_queue.put(partial(cur_box_folder.create_subfolder, dest_event.name))
                    wm.add_watch(dest_event.pathname, rec=True, mask=mask)
    elif operation == 'create':
        crate_logger.debug("Creating: {}".format(event.pathname))
        folders_to_traverse = self.folders_to_traverse(event.path)
        crate_logger.debug(folders_to_traverse)
        client = Client(self.oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (event.path, event.path[:-1],)
        cur_box_folder = self.traverse_path(client, event, cur_box_folder, folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            assert cur_box_folder['name'] == last_dir
        did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    if entry['id'] not in self.files_from_box:
                        # more accurately, was this created offline?
                        # NOTE(review): no-op — exception constructed but not raised.
                        AssertionError(False,
                                       'We should not be able to create a '
                                       'file that exists in box; should be a close/modify.')
                        crate_logger.debug('Update the file: {}'.format(event.pathname))
                        a_file = client.file(file_id=entry['id']).get()
                        # seem it is possible to get more than one create (without having a delete in between)
                        self.upload_queue.put(partial(a_file.update_contents, event.pathname))
                        # cur_box_folder.upload(event.pathname, event.name)
                    else:
                        self.files_from_box.remove(entry['id'])  # just downloaded it
                    break
                elif did_find_the_folder:
                    # we are not going to re-create the folder, but we are also not checking if the contents in this
                    # local creation are different from the contents in box.
                    if entry['id'] in self.folders_from_box:
                        self.folders_from_box.remove(entry['id'])  # just downloaded it
                    break
        if is_file and not did_find_the_file:
            crate_logger.debug('Upload the file: {}'.format(event.pathname))
            last_modified_time = os.path.getctime(event.pathname)
            self.upload_queue.put([last_modified_time,
                                   partial(cur_box_folder.upload, event.pathname, event.name),
                                   self.oauth])
        elif is_dir and not did_find_the_folder:
            crate_logger.debug('Upload the folder: {}'.format(event.pathname))
            self.upload_queue.put(partial(cur_box_folder.create_subfolder, event.name))
            wm.add_watch(event.pathname, rec=True, mask=mask)
    elif operation == 'modify':
        crate_logger.debug("{op}...: {pathname}".format(op=operation, pathname=event.pathname))
        folders_to_traverse = self.folders_to_traverse(event.path)
        crate_logger.debug(folders_to_traverse)
        client = Client(self.oauth)
        cur_box_folder = None
        folder_id = '0'
        retry_limit = 5
        cur_box_folder = get_box_folder(client, cur_box_folder, folder_id, retry_limit)
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (event.path, event.path[:-1],)
        cur_box_folder = self.traverse_path(client, event, cur_box_folder, folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): no-op — exception constructed but not raised.
            AssertionError(cur_box_folder['name'] == last_dir,
                           cur_box_folder['name'] + 'not equals ' + last_dir)
        did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    last_modified_time = os.path.getmtime(event.pathname)
                    if entry['id'] not in self.files_from_box:
                        cur_file = client.file(file_id=entry['id']).get()
                        can_update = True
                        was_versioned = r_c.exists(redis_key(cur_file['id']))
                        try:
                            info = redis_get(r_c, cur_file) if was_versioned else None
                            info = info if was_versioned else {'fresh_download': True,
                                                               'etag': '0', 'time_stamp': 0}
                            item_version = info
                            # Skip the upload when the remote etag matches and the stored
                            # version is at least as new as this local modification.
                            if cur_file['etag'] == item_version['etag'] and \
                                    ((item_version['fresh_download'] and item_version[
                                        'time_stamp'] >= last_modified_time) or
                                     (not item_version['fresh_download'] and item_version[
                                         'time_stamp'] >= last_modified_time)):
                                can_update = False
                            if can_update:
                                self.upload_queue.put([last_modified_time,
                                                       partial(cur_file.update_contents, event.pathname),
                                                       self.oauth])
                            else:
                                is_new_time_stamp = item_version['time_stamp'] >= last_modified_time
                                crate_logger.debug('Skipping the update because not versioned: {not_versioned}, '
                                                   'fresh_download: {fresh_download}, '
                                                   'version time_stamp >= '
                                                   'new time stamp: {new_time_stamp}, '
                                                   'event pathname: {path_name}, '
                                                   'cur file id: {obj_id}'.format(not_versioned=not was_versioned,
                                                                                  fresh_download=item_version[
                                                                                      'fresh_download'],
                                                                                  new_time_stamp=is_new_time_stamp,
                                                                                  path_name=event.pathname,
                                                                                  obj_id=cur_file['id']))
                        except TypeError:
                            crate_logger.debug(traceback.format_exc())
                        except Exception:
                            crate_logger.debug(traceback.format_exc())
                    else:
                        self.files_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                    break
                elif did_find_the_folder:
                    if entry['id'] not in self.folders_from_box:
                        crate_logger.debug('Cannot create a subfolder when it already exists: {}'.format(event.pathname))
                        # cur_folder = client.folder(folder_id=entry['id']).get()
                        # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                    else:
                        self.folders_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                    break
        if is_file and not did_find_the_file:
            crate_logger.debug('Uploading contents...: {}'.format(event.pathname))
            last_modified_time = os.path.getmtime(event.pathname)
            self.upload_queue.put([last_modified_time,
                                   partial(cur_box_folder.upload, event.pathname, event.name),
                                   self.oauth])
        if is_dir and not did_find_the_folder:
            crate_logger.debug('Creating a sub-folder...: {}'.format(event.pathname))
            self.upload_queue.put(partial(cur_box_folder.create_subfolder, event.name))
            wm.add_watch(event.pathname, rec=True, mask=mask)
    elif operation == 'real_close':
        crate_logger.debug("Real close...: {}".format(event.pathname))
        folders_to_traverse = self.folders_to_traverse(event.path)
        crate_logger.debug(folders_to_traverse)
        client = Client(self.oauth)
        cur_box_folder = None
        cur_box_folder = get_box_folder(client, cur_box_folder, '0', 5)
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (event.path, event.path[:-1],)
        cur_box_folder = self.traverse_path(client, event, cur_box_folder, folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): no-op — exception constructed but not raised.
            AssertionError(cur_box_folder['name'] == last_dir,
                           cur_box_folder['name'] + 'not equals ' + last_dir)
        did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    break
        # not a box file/folder (though could have been copied from a local box item)
        if is_file and not did_find_the_file:
            last_modified_time = os.path.getmtime(event.pathname)
            self.upload_queue.put([last_modified_time,
                                   partial(cur_box_folder.upload, event.pathname, event.name),
                                   self.oauth])
        elif is_dir and not did_find_the_folder:
            cur_box_folder.create_subfolder(event.name)
            wm.add_watch(event.pathname, rec=True, mask=mask, auto_add=True)
def process_event(self, event, operation):
    """
    Mirror a single local filesystem event into the Box account.

    Dispatches on `operation` ('delete', 'move', 'create', 'close',
    'real_close'); any other value is ignored. Each branch re-authenticates a
    Client from the module-level `oauth`, walks down from the Box root folder
    to the folder matching the event's local path, and then uploads, renames,
    moves or deletes the matching remote item. Local-path watches are added
    via the module-level `wm`/`mask` (presumably pyinotify — TODO confirm).

    :param event: filesystem event with .path/.pathname/.name/.maskname;
                  for 'move' a (src_event, dest_event) pair
    :param operation: one of 'delete', 'move', 'create', 'close', 'real_close'
    :return: None
    """
    if operation == 'delete':
        print('Doing a delete on, ', event.pathname)
        folders_to_traverse = self.folders_to_traverse(event.path)
        print(folders_to_traverse)
        client = Client(oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (
            event.path,
            event.path[:-1],
        )
        cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                            folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): this constructs an AssertionError but never raises
            # it, so the check is a no-op — likely meant `assert cond, msg`.
            AssertionError(
                cur_box_folder['name'] == last_dir,
                cur_box_folder['name'] + 'not equals ' + last_dir)
        event_was_for_dir = 'IN_ISDIR'.lower() in event.maskname.lower()
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        # Page through the remote folder looking for the item that was deleted
        # locally, then delete its remote counterpart (unless the local delete
        # was itself caused by a download we initiated).
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                if not event_was_for_dir and entry[
                        'type'] == 'file' and entry['name'] == event.name:
                    if entry['id'] not in self.files_from_box:
                        cur_file = client.file(file_id=entry['id']).get()
                        if cur_file.delete(
                        ):  # does not actually check correctly...unless not "ok" is false
                            # del version_info[cur_file['id']]
                            r_c.delete(redis_key(cur_file['id']))
                    else:
                        self.files_from_box.remove(
                            entry['id']
                        )  # just wrote if, assuming create event didn't run
                    break
                elif event_was_for_dir and entry[
                        'type'] == 'folder' and entry['name'] == event.name:
                    if entry['id'] not in self.folders_from_box:
                        self.get_folder(client, entry['id']).delete()
                        # cur_folder = client.folder(folder_id=entry['id']).get()
                        # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                    else:
                        self.folders_from_box.remove(
                            entry['id']
                        )  # just wrote if, assuming create event didn't run
                    break
    elif operation == 'move':
        print('Doing a move on, ', event)
        src_event, dest_event = event
        folders_to_traverse = self.folders_to_traverse(dest_event.path)
        print(folders_to_traverse)
        client = Client(oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        cur_box_folder = self.traverse_path(client, dest_event, cur_box_folder,
                                            folders_to_traverse)
        src_folders_to_traverse = self.folders_to_traverse(src_event.path)
        src_box_folder = box_folder
        src_box_folder = self.traverse_path(client, src_event, src_box_folder,
                                            src_folders_to_traverse)
        # Same parent dir on both sides means this is a rename, not a move.
        is_rename = src_event.path == dest_event.path
        # is_a_directory = 'IN_ISDIR'.lower() in dest_event.maskname.lower()
        # NOTE(review): these two initializers look swapped (file flag from
        # isdir, folder flag from isfile); both are overwritten inside the
        # loop, so they only matter when the source folder has no entries.
        did_find_src_file = os.path.isdir(
            dest_event.pathname)  # true if we are a directory :)
        did_find_src_folder = os.path.isfile(
            dest_event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(dest_event.pathname)
        is_dir = os.path.isdir(dest_event.pathname)
        move_from_remote = False
        src_num_entries = src_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, src_num_entries, limit):
            for entry in src_box_folder.get_items(offset=offset, limit=limit):
                did_find_src_file = is_file and entry[
                    'name'] == src_event.name and entry['type'] == 'file'
                did_find_src_folder = is_dir and entry[
                    'name'] == src_event.name and entry['type'] == 'folder'
                if did_find_src_file:
                    src_file = client.file(file_id=entry['id']).get()
                    if is_rename:
                        src_file.rename(dest_event.name)
                    else:
                        src_file.move(cur_box_folder)
                        # do not yet support moving and renaming in one go
                        assert src_file['name'] == dest_event.name
                elif did_find_src_folder:
                    src_folder = client.folder(folder_id=entry['id']).get()
                    if is_rename:
                        src_folder.rename(dest_event.name)
                    else:
                        src_folder.move(cur_box_folder)
                        # do not yet support moving and renaming in one go
                        assert src_folder['name'] == dest_event.name
                elif entry['name'] == dest_event.name:
                    # The destination name already lives in the source folder
                    # remotely: this move originated on the Box side.
                    move_from_remote = True
        if not move_from_remote:  # if it was moved from a different folder on remote, could be false still
            dest_box_folder = box_folder
            dest_folders_to_traverse = self.folders_to_traverse(
                dest_event.path)
            dest_box_folder = self.traverse_path(client, dest_event,
                                                 dest_box_folder,
                                                 dest_folders_to_traverse)
            dest_num_entries = dest_box_folder['item_collection'][
                'total_count']
            limit = 100
            # NOTE(review): this scans cur_box_folder while paging by
            # dest_box_folder's entry count — confirm they are the same folder.
            for offset in range(0, dest_num_entries, limit):
                for entry in cur_box_folder.get_items(offset=offset,
                                                      limit=limit):
                    if entry['name'] == dest_event.name:
                        move_from_remote = True
                        break
            if not move_from_remote:
                if is_file and not did_find_src_file:
                    # src file [should] no longer exist[s]. this file did not originate in box, too.
                    last_modified_time = os.path.getmtime(
                        dest_event.pathname)
                    upload_queue.put([
                        last_modified_time,
                        partial(cur_box_folder.upload, dest_event.pathname,
                                dest_event.name)
                    ])
                elif is_dir and not did_find_src_folder:
                    upload_queue.put(
                        partial(cur_box_folder.create_subfolder,
                                dest_event.name))
                    wm.add_watch(dest_event.pathname, rec=True, mask=mask)
    elif operation == 'create':
        print("Creating:", event.pathname)
        folders_to_traverse = self.folders_to_traverse(event.path)
        print(folders_to_traverse)
        client = Client(oauth)
        box_folder = client.folder(folder_id='0').get()
        cur_box_folder = box_folder
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (
            event.path,
            event.path[:-1],
        )
        cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                            folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            assert cur_box_folder['name'] == last_dir
        # NOTE(review): initializers look swapped (see 'move' branch); both
        # are overwritten by the scan loop below when the folder is non-empty.
        did_find_the_file = os.path.isdir(
            event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(
            event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry[
                    'type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry[
                    'type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    if entry['id'] not in self.files_from_box:
                        # more accurately, was this created offline?
                        # NOTE(review): constructed but never raised — no-op.
                        AssertionError(
                            False, 'We should not be able to create a '
                            'file that exists in box; should be a close/modify.'
                        )
                        print('Update the file: ', event.pathname)
                        a_file = client.file(file_id=entry['id']).get()
                        # seem it is possible to get more than one create (without having a delete in between)
                        upload_queue.put(
                            partial(a_file.update_contents, event.pathname))
                        # cur_box_folder.upload(event.pathname, event.name)
                    else:
                        self.files_from_box.remove(
                            entry['id'])  # just downloaded it
                    break
                elif did_find_the_folder:
                    # we are not going to re-create the folder, but we are also not checking if the contents in this
                    # local creation are different from the contents in box.
                    if entry['id'] in self.folders_from_box:
                        self.folders_from_box.remove(
                            entry['id'])  # just downloaded it
                    break
        if is_file and not did_find_the_file:
            print('Upload the file: ', event.pathname)
            last_modified_time = os.path.getctime(event.pathname)
            upload_queue.put([
                last_modified_time,
                partial(cur_box_folder.upload, event.pathname, event.name)
            ])
        elif is_dir and not did_find_the_folder:
            print('Upload the folder: ', event.pathname)
            upload_queue.put(
                partial(cur_box_folder.create_subfolder, event.name))
            wm.add_watch(event.pathname, rec=True, mask=mask)
    elif operation == 'close':
        print("Closing...:", event.pathname)
        folders_to_traverse = self.folders_to_traverse(event.path)
        print(folders_to_traverse)
        client = Client(oauth)
        box_folder = cur_box_folder = None
        # Retry the root-folder fetch up to 5 times on transient network errors.
        for _ in range(5):
            try:
                box_folder = client.folder(folder_id='0').get()
                cur_box_folder = box_folder
                break
            except (ConnectionError, BrokenPipeError, ProtocolError,
                    ConnectionResetError, BoxAPIException):
                print(traceback.format_exc())
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (
            event.path,
            event.path[:-1],
        )
        cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                            folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): constructed but never raised — no-op check.
            AssertionError(
                cur_box_folder['name'] == last_dir,
                cur_box_folder['name'] + 'not equals ' + last_dir)
        did_find_the_file = os.path.isdir(
            event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(
            event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry[
                    'type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry[
                    'type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    last_modified_time = os.path.getmtime(event.pathname)
                    if entry['id'] not in self.files_from_box:
                        cur_file = client.file(file_id=entry['id']).get()
                        can_update = True
                        was_versioned = r_c.exists(
                            redis_key(cur_file['id']))
                        try:
                            # Fall back to a synthetic "never downloaded"
                            # record when redis has no version info.
                            info = redis_get(
                                cur_file) if was_versioned else None
                            info = info if was_versioned else {
                                'fresh_download': True,
                                'etag': '0',
                                'time_stamp': 0
                            }
                            item_version = info
                            # Skip the upload when the remote etag matches and
                            # the recorded time stamp is not older than the
                            # local mtime (both fresh_download arms test the
                            # same condition).
                            if cur_file['etag'] == item_version['etag'] and \
                                    ((item_version['fresh_download'] and item_version[
                                        'time_stamp'] >= last_modified_time) or
                                     (not item_version['fresh_download'] and item_version[
                                         'time_stamp'] >= last_modified_time)):
                                can_update = False
                            if can_update:
                                upload_queue.put([
                                    last_modified_time,
                                    partial(cur_file.update_contents,
                                            event.pathname)
                                ])
                            else:
                                print(
                                    'Skipping the update because not versioned: {}, '
                                    'fresh_download: {}, '
                                    'version time_stamp >= '
                                    'new time stamp: {}'.format(
                                        not was_versioned,
                                        item_version['fresh_download'],
                                        item_version['time_stamp'] >=
                                        last_modified_time),
                                    event.pathname, cur_file['id'])
                        except TypeError as e:  # `e` unused; traceback is printed instead
                            print(traceback.format_exc())
                        except Exception:
                            print(traceback.format_exc())
                    else:
                        self.files_from_box.remove(
                            entry['id']
                        )  # just wrote if, assuming create event didn't run
                    break
                elif did_find_the_folder:
                    if entry['id'] not in self.folders_from_box:
                        print(
                            'Cannot create a subfolder when it already exists: ',
                            event.pathname)
                        # cur_folder = client.folder(folder_id=entry['id']).get()
                        # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                    else:
                        self.folders_from_box.remove(
                            entry['id']
                        )  # just wrote if, assuming create event didn't run
                    break
        if is_file and not did_find_the_file:
            print('Uploading contents...', event.pathname)
            last_modified_time = os.path.getmtime(event.pathname)
            upload_queue.put([
                last_modified_time,
                partial(cur_box_folder.upload, event.pathname, event.name)
            ])
        if is_dir and not did_find_the_folder:
            print('Creating a sub-folder...', event.pathname)
            upload_queue.put(
                partial(cur_box_folder.create_subfolder, event.name))
            wm.add_watch(event.pathname, rec=True, mask=mask)
    elif operation == 'real_close':
        print("Real close...:", event.pathname)
        folders_to_traverse = self.folders_to_traverse(event.path)
        print(folders_to_traverse)
        client = Client(oauth)
        box_folder = cur_box_folder = None
        # Retry the root-folder fetch up to 5 times on transient network errors.
        for _ in range(5):
            try:
                box_folder = client.folder(folder_id='0').get()
                cur_box_folder = box_folder
                break
            except (ConnectionError, BrokenPipeError, ProtocolError,
                    ConnectionResetError, BoxAPIException):
                print(traceback.format_exc())
        # if we're modifying in root box dir, then we've already found the folder
        is_base = BOX_DIR in (
            event.path,
            event.path[:-1],
        )
        cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                            folders_to_traverse)
        last_dir = os.path.split(event.path)[-1]
        if not is_base:
            # NOTE(review): constructed but never raised — no-op check.
            AssertionError(
                cur_box_folder['name'] == last_dir,
                cur_box_folder['name'] + 'not equals ' + last_dir)
        did_find_the_file = os.path.isdir(
            event.pathname)  # true if we are a directory :)
        did_find_the_folder = os.path.isfile(
            event.pathname)  # true if we are a regular file :)
        is_file = os.path.isfile(event.pathname)
        is_dir = os.path.isdir(event.pathname)
        num_entries = cur_box_folder['item_collection']['total_count']
        limit = 100
        for offset in range(0, num_entries, limit):
            for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                did_find_the_file = is_file and entry[
                    'type'] == 'file' and entry['name'] == event.name
                did_find_the_folder = is_dir and entry[
                    'type'] == 'folder' and entry['name'] == event.name
                if did_find_the_file:
                    break
        # not a box file/folder (though could have been copied from a local box item)
        if is_file and not did_find_the_file:
            last_modified_time = os.path.getmtime(event.pathname)
            upload_queue.put([
                last_modified_time,
                partial(cur_box_folder.upload, event.pathname, event.name)
            ])
        elif is_dir and not did_find_the_folder:
            cur_box_folder.create_subfolder(event.name)
            wm.add_watch(event.pathname, rec=True, mask=mask, auto_add=True)
# Box quick-start script: authenticate, print the current user, then scan the
# root folder and download the file named "7.json" into the working directory.
client = Client(auth)

# Collection-listing sample kept for reference:
# collections = client.collections()
# for collection in collections:
#     print('Collection "{0}" has ID {1}'.format(collection.name, collection.id))
# items = client.collection(collection_id='5731943133').get_items()
# for item in items:
#     print('{0} "{1}" is in the collection'.format(item.type.capitalize(), item.name))

user = client.user().get()
print('The current user ID is {0}'.format(user.id))
print('The current user name is {0}'.format(user.name))

items = client.folder(folder_id='0').get_items()
for item in items:
    print(item.id)
    if item.name == "7.json":
        print('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name))
        # FIX: dropped the explicit open_file.close() that the original called
        # inside the `with` block — the context manager already closes the file.
        with open(item.name, 'wb') as open_file:
            client.file(item.id).download_to(open_file)

# Upload sample kept for reference:
# file_name = 'abc.pdf'
# stream = open('abc.pdf', 'rb')
# folder_id = '0'
# new_file = client.folder(folder_id).upload_stream(stream, file_name)
# print('File "{0}" uploaded to Box with file ID {1}'.format(new_file.name, new_file.id))
class BoxComFSProvider(FSProvider):
    """FSProvider implementation backed by the Box.com API (boxsdk)."""

    def __init__(self, root, config, client):
        """
        :param root: the root path for this provider
        :param config: the dict of the configuration of the object
        :param client: plugin object exposing the "box_com_connection" preset
        """
        # Store the root without a leading slash.
        if len(root) > 0 and root[0] == '/':
            root = root[1:]
        self.root = root
        self.connection = client.get("box_com_connection")
        self.access_token = self.connection['access_token']
        self.cache_enabled = config.get("cache_enabled")
        if self.cache_enabled:
            # Key the cache on a hash of the token so two Box accounts never
            # share cache entries.
            cache_file_name = hashlib.sha1(self.access_token.encode('utf-8')).hexdigest()
        else:
            cache_file_name = None
        # A ready-made access token is supplied, so client id/secret are blank.
        auth = OAuth2(
            client_id="",
            client_secret="",
            access_token=self.access_token
        )
        main_session = AuthorizedSession(auth, network_layer=LessVerboseLoggingNetwork())
        self.client = Client(auth, main_session)
        self.user = self.client.user().get()
        self.box_item = BoxItem(cache_file_name, root, self.client)
        self.box_item.check_path_format(get_normalized_path(root))

    def close(self):
        """
        Perform any necessary cleanup
        """
        self.box_item.close()

    def stat(self, path):
        """
        Get the info about the object at the given path inside the provider's
        root, or None if the object doesn't exist
        """
        full_path = get_full_path(self.root, path)
        box_item = self.box_item.get_by_path(full_path)
        if box_item.not_exists():
            return None
        return box_item.get_stat()

    def set_last_modified(self, path, last_modified):
        """
        Set the modification time on the object denoted by path.
        Return False if not possible — not supported by this provider.
        """
        return False

    def browse(self, path):
        """
        List the file or directory at the given path, and its children
        (if directory)
        """
        normalized_path = get_normalized_path(path)
        full_path = get_full_path(self.root, path)
        item = self.box_item.get_by_path(get_rel_path(full_path))
        if item.not_exists():
            return {'fullPath': normalized_path, 'exists': False}
        if item.is_folder():
            return {'fullPath': normalized_path, 'exists': True, 'directory': True,
                    'children': item.get_children(normalized_path),
                    'lastModified': item.get_last_modified()}
        else:
            return item.get_as_browse()

    def enumerate(self, path, first_non_empty):
        """
        Enumerate files recursively from prefix. If first_non_empty, stop at
        the first non-empty file.
        If the prefix doesn't denote a file or folder, return None
        """
        full_path = get_full_path(self.root, path)
        normalized_path = get_normalized_path(path)
        item = self.box_item.get_by_path(full_path)
        if item.not_exists():
            return None
        paths = []
        if item.is_folder():
            paths = self.list_recursive(normalized_path, item.id, first_non_empty)
        else:
            # NOTE(review): lastModified is hard-coded to 0 for single files —
            # presumably a placeholder; confirm whether the real mtime is wanted.
            paths.append({'path': normalized_path.split("/")[-1],
                          'size': item.size,
                          'lastModified': int(0) * 1000})
        return paths

    def list_recursive(self, path, folder_id, first_non_empty):
        """Walk the Box folder tree, collecting {'path', 'size'} records."""
        paths = []
        if path == "/":
            path = ""
        for child in self.client.folder(folder_id).get_items(fields=['modified_at', 'name', 'type', 'size']):
            if child.type == self.box_item.BOX_FOLDER:
                paths.extend(self.list_recursive(path + '/' + child.name, child.id, first_non_empty))
            else:
                paths.append({'path': path + '/' + child.name, 'size': child.size})
                if first_non_empty:
                    # Caller only needs to know one non-empty file exists.
                    return paths
        return paths

    def delete_recursive(self, path):
        """
        Delete recursively from path. Return the number of deleted files (optional)
        """
        full_path = get_full_path(self.root, path)
        item = self.box_item.get_by_path(full_path, force_no_cache=True)
        if item.not_exists():
            return 0
        else:
            ret = item.delete()
            # The cache no longer matches the remote tree; drop it entirely.
            self.box_item.cache.reset()
            return ret

    def move(self, from_path, to_path):
        """
        Move a file or folder to a new path inside the provider's root.
        Return false if the moved file didn't exist
        """
        full_from_path = get_full_path(self.root, from_path)
        full_to_path = get_full_path(self.root, to_path)
        from_base, from_item_name = os.path.split(full_from_path)
        to_base, to_item_name = os.path.split(full_to_path)
        from_item = self.box_item.get_by_path(full_from_path, force_no_cache=True)
        if from_item.not_exists():
            return False
        from_item_id = from_item.get_id()
        from_item_is_folder = from_item.is_folder()
        to_item = self.box_item.get_by_path(full_to_path, force_no_cache=True)
        if to_item.not_exists():
            # Destination does not exist yet: move into its parent folder.
            to_item = self.box_item.get_by_path(to_base, force_no_cache=True)
        destination_folder = self.client.folder(to_item.get_id())
        if from_item_is_folder:
            source = self.client.folder(from_item_id)
        else:
            source = self.client.file(from_item_id)
        # Same leaf name => relocation; different leaf name => in-place rename.
        if from_item_name == to_item_name:
            source.move(destination_folder)
        else:
            source.rename(to_item_name)
        return True

    def read(self, path, stream, limit):
        """Copy up to `limit` bytes of the file at `path` into `stream`."""
        full_path = get_full_path(self.root, path)
        byte_range = None
        # BUG FIX: the original tested `limit is not "-1"` — an *identity*
        # comparison against a string literal (SyntaxWarning on CPython >= 3.8
        # and implementation-dependent). Compare by value instead.
        if limit is not None and limit != "-1":
            int_limit = int(limit)
            if int_limit > 0:
                byte_range = (0, int_limit - 1)
        item = self.box_item.get_by_path(full_path)
        if item.not_exists():
            raise Exception('Path doesn t exist')
        shutil.copyfileobj(item.get_stream(byte_range), stream)

    def write(self, path, stream):
        """
        Write the stream to the object denoted by path into the stream
        """
        full_path = get_full_path(self.root, path)
        item = self.box_item.create_path(full_path, force_no_cache=True)
        # NOTE(review): create_path appears to return the parent folder on
        # success (hence the is_folder() check) — confirm against BoxItem.
        if item.is_folder():
            item.write_stream(stream)
        else:
            raise Exception('Not a file name')
# Box JWT quick check: authenticate as a service account and print the
# metadata of four hard-coded file ids.
from boxsdk import JWTAuth
from boxsdk import Client
import datetime
import sys
# NOTE(review): datetime and sys are unused in this visible chunk — they may
# be used further down the file.

# Service-account (JWT) authentication from a downloaded app-settings file.
auth = JWTAuth.from_settings_file(
    '/Users/tanegu/Desktop/197894819_4r2t9oka_config.json')
client = Client(auth)

#file_path = '/Users/tanegu/Desktop/boxapi/test2.rtf'
#file_name = 'test2'
#folder_id = '0'

# Fetch and print the info object of each file id, without trailing newlines.
file_id_01 = '460527349299'
file_info_01 = client.file(file_id_01).get()
print(file_info_01, end="")
file_id_02 = '460527606671'
file_info_02 = client.file(file_id_02).get()
print(file_info_02, end="")
file_id_03 = '460530323749'
file_info_03 = client.file(file_id_03).get()
print(file_info_03, end="")
file_id_04 = '460540008243'
file_info_04 = client.file(file_id_04).get()
def sendfiles2box(remotepath, filenames, remotefilenames=None,
                  overwrite=False):
    """Send a file(s) to Box.

    Walks/creates the remote folder path component by component, then uploads
    each local file into the resulting folder.

    Args:
      remotepath (str): remote directory to send file(s) to
      filenames (str or list): local files to send to box
      remotefilenames (str or list, optional): same size as filenames and
        optional as to if they should have different names or not
      overwrite (bool): should this overwrite existing files, default `False`

    Returns:
      list of ids of the uploaded content (None entries on failed uploads;
      NOTE(review): a failed overwrite `continue`s without appending, so the
      list can be shorter than `filenames` — confirm callers tolerate this)
    """
    # Normalize both name arguments to lists of equal length.
    if isinstance(filenames, string_types):
        filenames = [filenames, ]
    if isinstance(remotefilenames, string_types):
        remotefilenames = [remotefilenames, ]
    if remotefilenames is None:
        remotefilenames = [os.path.basename(f) for f in filenames]
    iemprops = get_properties()
    oauth = OAuth2(
        client_id=iemprops['boxclient.client_id'],
        client_secret=iemprops['boxclient.client_secret'],
        access_token=iemprops['boxclient.access_token'],
        refresh_token=iemprops['boxclient.refresh_token'],
        store_tokens=_store_tokens
    )
    client = Client(oauth)
    # Resolve (or create) each path component, starting at the root folder.
    folder_id = 0
    for token in remotepath.split("/"):
        if token.strip() == '':
            continue
        offset = 0
        found = False
        # Page through the current folder (100 items per page) looking for a
        # case-insensitive subfolder name match.
        while not found:
            LOG.debug("folder(%s).get_items(offset=%s)", folder_id, offset)
            items = client.folder(
                folder_id=folder_id).get_items(limit=100, offset=offset)
            for item in items:
                if (item.type == 'folder' and
                        item.name.lower() == token.lower()):
                    folder_id = item.id
                    found = True
                    break
            if len(items) != 100:
                # Short page => no more pages to fetch.
                break
            offset += 100
        if not found:
            LOG.debug("Creating folder %s inside of %s", token, folder_id)
            item = client.folder(folder_id=folder_id).create_subfolder(token)
            folder_id = item.id
    LOG.debug("Now we upload to folder_id: %s", folder_id)
    res = []
    for localfn, remotefn in zip(filenames, remotefilenames):
        LOG.debug("uploading %s", localfn)
        try:
            item = client.folder(folder_id=folder_id).upload(localfn,
                                                             remotefn)
            res.append(item.id)
        except Exception as exp:
            # A name conflict surfaces as an exception carrying context_info;
            # when overwriting, update the conflicting file's contents instead.
            # NOTE(review): assumes context_info['conflicts'] is a dict with
            # an 'id' key — confirm against the boxsdk version in use.
            if overwrite and hasattr(exp, 'context_info'):
                _fileid = exp.context_info['conflicts']['id']
                LOG.info("overwriting %s fid: %s", remotefn, _fileid)
                try:
                    item = client.file(_fileid).update_contents(localfn)
                    res.append(_fileid)
                    continue
                except Exception as exp2:
                    LOG.debug(
                        "Upload_Contents of %s resulted in exception: %s",
                        localfn, exp2
                    )
                    continue
            LOG.debug(
                "Upload of %s resulted in exception: %s",
                localfn, exp
            )
            res.append(None)
    return res
#get actaul RID number with 4 digits sub = subBOX[0:3] + "0" + subBOX[3:] #print(sub) savepath = join(path , f"sub-{sub}") utils.checkPathAndMake(path, join(savepath, "ses-implant01", "anat")) utils.checkPathAndMake(path, join(savepath, "ses-implant01", "ct")) utils.checkPathAndMake(path, join(savepath, "ses-implant01", "ieeg")) items = client.folder(folder_id=subfolderID).get_items() for item in items: if item.name == "electrodenames_coordinates_native_and_T1.csv": newName = f"sub-{sub}_ses-implant01_electrodes.csv" print(join(item.name)) with open(join(savepath,"ses-implant01", "ieeg", newName), 'wb') as open_file: client.file(file_id=item.id).download_to(open_file); open_file.close() coordinates = pd.read_csv(join(savepath,"ses-implant01", "ieeg", newName), sep = ",", header=None) coordinatesT1 = coordinates.iloc[:,[0, 10, 11, 12]] size = np.zeros(( len(coordinates))) size[:] = 1 coordinatesT1 = pd.concat( [coordinatesT1, pd.DataFrame(size ) ] , axis= 1 ) coordinatesT1.columns = ["name", "x", "y", "z", "size"] outnameCoordinates = join(savepath, "ses-implant01", "ieeg", f"sub-{sub}_ses-implant01_space-T00_electrodes.tsv" ) coordinatesT1.to_csv( outnameCoordinates, sep="\t", index=False, header=True) coordinatesCT = coordinates.iloc[:,[0, 2, 3, 4]] size = np.zeros(( len(coordinates))) size[:] = 1 coordinatesCT = pd.concat( [coordinatesCT, pd.DataFrame(size ) ] , axis= 1 )
def pull():
    """Sync image files (.png/.jpg/.jpeg) from a Box folder to a local
    directory and append a timestamp to a time log.

    NOTE: this function uses Python 2 syntax (`print x` statements) and will
    not run under Python 3 as written.
    """
    # initialize paths for config file
    private_path = Path(__file__).resolve().parent
    config_path = private_path / "config.json"
    key_path = private_path / "private.pem"
    print('keypath = ' + str(key_path))
    print('config = ' + str(config_path))
    print('private = ' + str(private_path))
    with config_path.open() as f:
        config = json.load(f)
    # initialize client variables from config tokens
    client_id = config["clientID"]
    client_secret = config["clientSecret"]
    enterprise_id = config["enterpriseID"]
    # initialize a new client (JWT / service-account auth)
    auth = JWTAuth(client_id=client_id,
                   client_secret=client_secret,
                   enterprise_id=enterprise_id,
                   jwt_key_id='0l74oi4p',
                   rsa_private_key_file_sys_path="private.pem")
    access_token = auth.authenticate_instance()
    client = Client(auth)
    me = client.user(user_id='me').get()
    folderId = config["folderId"]  # id of box folder to sync with
    # returns a list of all items inside box folder
    itemsToSync = client.folder(folder_id=folderId).get_items(limit=100,
                                                              offset=0)
    # create a list of file names to parse and id number to identify and pull data
    # NOTE(review): names/ids are scraped from str(item) with regexes —
    # fragile; item.name / item.id would be the robust way. Left as-is.
    nameList = []
    idList = []
    # counter variables
    i = 0
    while i < len(itemsToSync):
        s = str(itemsToSync[i])
        name = str(re.search('\(([^)]+)', s).group(1))
        idNum = re.findall(r'\d{12}', s)  # finds string of 12 digits as file id number
        # only searches for .png types
        # saves name and id number to separate lists
        if name.endswith('.png') or name.endswith('.jpg') or name.endswith(
                '.jpeg'):
            idList.append(idNum)
            nameList.append(name)
        else:
            pass
        i += 1
    j = 0
    destinationPath = config["destinationPath"]  # path files will be saved to
    timelogPath = config["timelogPath"]
    now = datetime.datetime.now()  # get current timestamp
    strNow = now.strftime("%Y-%m-%d %H:%M:%S")
    while j < len(nameList):
        print nameList[j]
        print idList[j]
        completeName = os.path.join(
            destinationPath,
            nameList[j])  # concatenates destination with name of file
        idNum = ''.join(
            idList[j])  # parses id number to be pulled from box folder
        filecontent = client.file(
            file_id=idNum).content()  # saves file content as variable
        # NOTE(review): text-mode 'w' for binary image content — works on
        # Python 2 only; would need 'wb' under Python 3.
        newfile = open(completeName, 'w')
        newfile.write(filecontent)
        newfile.close()
        j += 1
    with open(timelogPath, "a") as f:  # logs sync date and time in timelog.txt
        f.write("Last box sync at " + strNow + "\n")
#print (dcm_file) file_destination="c:/temp/"+file_id+".dcm" #pydicom.dcmwrite("c:/temp/altered_file.dcm", dcm_file) pydicom.dcmwrite(file_destination, dcm_file) #274974226735, 273962712480, 273961696958, 273961309534, 273939676673 ''' file_id_list = [ '274974226735', '273962712480', '273961696958', '273961309534', '273939676673' ] #file_id_list=['270819570112'] for file_id in file_id_list: print(file_id) url = client.file(file_id).content() with BytesIO(url) as dcmbox: dcm_file = pydicom.dcmread(dcmbox) file_destination = "c:/temp/pending_dexafit_files/DICOM" + file_id + ".dcm" pydicom.dcmwrite(file_destination, dcm_file) ''' # Import two classes from the boxsdk module - Client and OAuth2 from boxsdk import Client, OAuth2 # Define client ID, client secret, and developer token. CLIENT_ID = None CLIENT_SECRET = None ACCESS_TOKEN = None # Read app info from text file with open('app.cfg', 'r') as app_cfg:
def sendfiles2box(remotepath, filenames, remotefilenames=None,
                  overwrite=False):
    """Send a file(s) to Box.

    Walks/creates the remote folder path component by component, then uploads
    each local file into the resulting folder.

    Args:
      remotepath (str): remote directory to send file(s) to
      filenames (str or list): local files to send to box
      remotefilenames (str or list, optional): same size as filenames and
        optional as to if they should have different names or not
      overwrite (bool): should this overwrite existing files, default `False`

    Returns:
      list of ids of the uploaded content (None entries on failed uploads;
      NOTE(review): a failed overwrite `continue`s without appending, so the
      list can be shorter than `filenames` — confirm callers tolerate this)
    """
    # Normalize both name arguments to lists of equal length.
    if isinstance(filenames, string_types):
        filenames = [
            filenames,
        ]
    if isinstance(remotefilenames, string_types):
        remotefilenames = [
            remotefilenames,
        ]
    if remotefilenames is None:
        remotefilenames = [os.path.basename(f) for f in filenames]
    iemprops = get_properties()
    oauth = OAuth2(client_id=iemprops['boxclient.client_id'],
                   client_secret=iemprops['boxclient.client_secret'],
                   access_token=iemprops['boxclient.access_token'],
                   refresh_token=iemprops['boxclient.refresh_token'],
                   store_tokens=_store_tokens)
    client = Client(oauth)
    # Resolve (or create) each path component, starting at the root folder.
    folder_id = 0
    for token in remotepath.split("/"):
        if token.strip() == '':
            continue
        offset = 0
        found = False
        # Page through the current folder (100 items per page) looking for a
        # case-insensitive subfolder name match.
        while not found:
            LOG.debug("folder(%s).get_items(offset=%s)", folder_id, offset)
            items = client.folder(folder_id=folder_id).get_items(limit=100,
                                                                 offset=offset)
            for item in items:
                if (item.type == 'folder'
                        and item.name.lower() == token.lower()):
                    folder_id = item.id
                    found = True
                    break
            if len(items) != 100:
                # Short page => no more pages to fetch.
                break
            offset += 100
        if not found:
            LOG.debug("Creating folder %s inside of %s", token, folder_id)
            item = client.folder(folder_id=folder_id).create_subfolder(token)
            folder_id = item.id
    LOG.debug("Now we upload to folder_id: %s", folder_id)
    res = []
    for localfn, remotefn in zip(filenames, remotefilenames):
        LOG.debug("uploading %s", localfn)
        try:
            item = client.folder(folder_id=folder_id).upload(localfn,
                                                             remotefn)
            res.append(item.id)
        except Exception as exp:
            # A name conflict surfaces as an exception carrying context_info;
            # when overwriting, update the conflicting file's contents instead.
            # NOTE(review): assumes context_info['conflicts'] is a dict with
            # an 'id' key — confirm against the boxsdk version in use.
            if overwrite and hasattr(exp, 'context_info'):
                _fileid = exp.context_info['conflicts']['id']
                LOG.info("overwriting %s fid: %s", remotefn, _fileid)
                try:
                    item = client.file(_fileid).update_contents(localfn)
                    res.append(_fileid)
                    continue
                except Exception as exp2:
                    LOG.debug(
                        "Upload_Contents of %s resulted in exception: %s",
                        localfn, exp2)
                    continue
            LOG.debug("Upload of %s resulted in exception: %s", localfn, exp)
            res.append(None)
    return res
def long_poll_event_listener():
    """Listen for Box events via long polling and mirror them locally.

    Runs forever: renames, uploads, and deletions seen in the Box event
    stream are applied to the local mirror under BOX_DIR, with file version
    metadata tracked in redis (r_c) and downloads delegated to
    download_queue. Any per-iteration failure is printed and the loop
    restarts with a fresh stream position.
    :return: never returns
    """
    client = Client(oauth=oauth)
    while True:
        try:
            stream_position = client.events().get_latest_stream_position()
            for event in client.events().generate_events_with_long_polling(
                    stream_position=stream_position):
                print(event, ' happened!')
                # The long-poll channel asks us to re-establish the
                # connection; break out so the outer loop reconnects.
                if event.get('message', '').lower() == 'reconnect':
                    break
                if event['event_type'] == 'ITEM_RENAME':
                    obj_id = event['source']['id']
                    obj_type = event['source']['type']
                    if obj_type == 'file':
                        # Rebuild the item's path relative to the synced
                        # root; entry [0] of path_collection is "All Files"
                        # and is skipped.
                        if int(event['source']['path_collection']
                               ['total_count']) > 1:
                            path = '{}'.format(os.path.sep).join([
                                folder['name'] for folder in event['source']
                                ['path_collection']['entries'][1:]
                            ])
                        else:
                            path = ''
                        path = os.path.join(BOX_DIR, path)
                        file_path = os.path.join(path,
                                                 event['source']['name'])
                        file_obj = client.file(file_id=obj_id).get()
                        # Previous local path, if we have version info cached.
                        src_file_path = None if not r_c.exists(
                            redis_key(obj_id)) else redis_get(
                                file_obj)['file_path']
                        if src_file_path and os.path.exists(src_file_path):
                            # Known locally: rename in place and update the
                            # cached version metadata.
                            version_info = redis_get(obj=file_obj)
                            src_file_path = version_info['file_path']
                            os.rename(src_file_path, file_path)
                            version_info['file_path'] = file_path
                            version_info['etag'] = file_obj['etag']
                            r_c.set(redis_key(obj_id),
                                    json.dumps(version_info))
                        else:
                            # Not known locally: fetch it fresh.
                            download_queue.put([file_obj, file_path])
                elif event['event_type'] == 'ITEM_UPLOAD':
                    obj_id = event['source']['id']
                    obj_type = event['source']['type']
                    if obj_type == 'file':
                        if int(event['source']['path_collection']
                               ['total_count']) > 1:
                            path = '{}'.format(os.path.sep).join([
                                folder['name'] for folder in event['source']
                                ['path_collection']['entries'][1:]
                            ])
                        else:
                            path = ''
                        path = os.path.join(BOX_DIR, path)
                        if not os.path.exists(
                                path
                        ):  # just in case this is a file in a new subfolder
                            os.makedirs(path)
                        # Queue the download; the worker decides whether the
                        # local copy is already current.
                        download_queue.put([
                            client.file(file_id=obj_id).get(),
                            os.path.join(path, event['source']['name'])
                        ])
                elif event['event_type'] == 'ITEM_TRASH':
                    obj_id = event['source']['id']
                    obj_type = event['source']['type']
                    if obj_type == 'file':
                        if int(event['source']['path_collection']
                               ['total_count']) > 1:
                            path = '{}'.format(os.path.sep).join([
                                folder['name'] for folder in event['source']
                                ['path_collection']['entries'][1:]
                            ])
                        else:
                            path = ''
                        path = os.path.join(BOX_DIR, path)
                        file_path = os.path.join(path,
                                                 event['source']['name'])
                        # Remove the local mirror copy and its cached
                        # version metadata, if present.
                        if os.path.exists(file_path):
                            os.unlink(file_path)
                        if r_c.exists(redis_key(obj_id)):
                            r_c.delete(redis_key(obj_id))
                elif event['event_type'] == 'ITEM_DOWNLOAD':
                    # Downloads by other clients need no local action.
                    pass
        except Exception:
            # Best-effort loop: log the traceback and reconnect.
            print(traceback.format_exc())
def rename(args, user_integration): bxc = BOX_Credentials.objects.get(user_integration_id=user_integration) ACCESS_TOKEN = bxc.BOX_ACCESS_TOKEN REFRESH_TOKEN = bxc.BOX_REFRESH_TOKEN #print(ACCESS_TOKEN) REFRESH_TOKEN = bxc.BOX_REFRESH_TOKEN #print(REFRESH_TOKEN + " refreshtoken") # Log the token we're using & starting call logging.info('Using Refresh Token: %s' % REFRESH_TOKEN) # Get new access & refresh tokens getTokens = requests.post(oauth2URL, data={ 'grant_type': 'refresh_token', 'refresh_token': REFRESH_TOKEN, 'client_id': clientId, 'client_secret': clientSecret }) # If the above gives a 4XX or 5XX error # getTokens.raise_for_status() # Get the JSON from the above newTokens = getTokens.json() # Get the new access token, valid for 60 minutes accessToken = newTokens['access_token'] refreshToken = newTokens['refresh_token'] #print("New accessToken " + accessToken) #print("New refreshToken " + refreshToken) bxc.BOX_REFRESH_TOKEN = refreshToken bxc.BOX_ACCESS_TOKEN = accessToken bxc.save() CLIENT_ID = settings.CLIENT_ID CLIENT_SECRET = settings.CLIENT_SECRET oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) old_name = args['Old-Name'] new_name = args['New-Name'] client = Client(oauth2, LoggingNetwork()) items = client.folder(folder_id='0').get_items(limit=1000, offset=0) #print("searching for item...") flag = 0 for item in items: # print(item['name']) if item['name'] == old_name: #print("Name: " + item['name'] + " ID: " + item['id']) file_id = item['id'] flag = 1 break #print("File name altered") field = AttachmentFieldsClass() attachment = MessageAttachmentsClass() if flag == 1: field.title = old_name + " has been renamed to " + new_name attachment.attach_field(field) client.file(file_id=int(file_id)).rename(new_name) else: field.title = old_name + " does not exist." attachment.attach_field(field) m = MessageClass() m.attach(attachment) return m
class BoxInstance(object):
    """Thin wrapper around the Box SDK client.

    Loads client credentials from a config file, performs OAuth2
    authorization (caching tokens in ~/.box_tokens.pkl) and exposes
    list/walk/upload/download/delete helpers.
    """

    def __init__(self,
                 number_to_process=-1,
                 credential_file=HOMEDIR + '/.box/credentials'):
        """Init function.

        :param number_to_process: stop after this many items (-1 = no limit)
        :param credential_file: file holding redirect_uri/client_id/client_secret
        """
        self.credential_file = credential_file
        self.redirect_uri = ''
        self.client_id = ''
        self.client_secret = ''
        self.list_of_keys = {}
        self.list_of_mimetypes = {}
        self.items_processed = 0
        self.list_of_folders = {}
        self.list_of_items = {}
        self.number_to_process = number_to_process
        self.read_credentials()
        self.client = self.get_auth()

    def store_tokens(self, access_token, refresh_token):
        """Persist OAuth tokens; used as the SDK's store_tokens callback."""
        # Bug fix: pickle.dump requires a binary file handle; the original
        # opened the file in text mode ('w'), which raises TypeError.
        with open(os.path.join(HOMEDIR, '.box_tokens.pkl'), 'wb') as credfile:
            tmp = (access_token, refresh_token)
            pickle.dump(obj=tmp,
                        file=credfile,
                        protocol=pickle.HIGHEST_PROTOCOL)

    def read_credentials(self, credential_file=HOMEDIR + '/.box/credentials'):
        """Read redirect_uri/client_id/client_secret from key-value lines."""
        with open(credential_file, 'r') as credfile:
            for line in credfile:
                key_, val_ = line.split()[:2]
                for key in ('redirect_uri', 'client_id', 'client_secret'):
                    if key.lower() == key_.strip().lower():
                        setattr(self, key, val_)

    def get_auth(self):
        """Do authorization, reusing cached tokens when available."""
        if os.path.exists(os.path.join(HOMEDIR, '.box_tokens.pkl')):
            with open(os.path.join(HOMEDIR, '.box_tokens.pkl'),
                      'rb') as pfile:
                self.access_token, self.refresh_token = pickle.load(pfile)
            self.oauth = OAuth2(client_id=self.client_id,
                                client_secret=self.client_secret,
                                store_tokens=self.store_tokens,
                                access_token=self.access_token,
                                refresh_token=self.refresh_token)
        else:
            # No cached tokens: run the interactive authorization flow.
            self.oauth = OAuth2(client_id=self.client_id,
                                client_secret=self.client_secret,
                                store_tokens=self.store_tokens)
            auth_url, csrf_token = self.oauth.get_authorization_url(
                self.redirect_uri)
            code = get_auth_code(auth_url, self.redirect_uri)
            print(code)
            self.access_token, self.refresh_token = \
                self.oauth.authenticate(code)
        self.client = Client(self.oauth)
        return self.client

    def list_files(self, callback_fn, number_to_process=-1):
        """Walk the folder tree, calling callback_fn on each non-folder item.

        :param callback_fn: called with each file's raw response dict
        :param number_to_process: stop after this many items (-1 = no limit)
        """
        fields = [
            'id', 'size', 'etag', 'description', 'parent', 'name', 'type',
            'modified_at', 'sha1'
        ]
        # dict so the nested closure can mutate the counter (py2-compatible).
        number_complete = {'count': 0}

        def walk_nodes(parentid='0'):
            parent_node = self.client.folder(folder_id=parentid).get()
            cur_offset = 0
            while True:
                new_items = parent_node.get_items(limit=100,
                                                  offset=cur_offset,
                                                  fields=fields)
                if not new_items:
                    break
                for item in new_items:
                    if number_to_process > 0 \
                            and number_complete['count'] > number_to_process:
                        break
                    number_complete['count'] += 1
                    item = item._response_object
                    item['parentid'] = parentid
                    if item.get('type', '') == 'folder':
                        walk_nodes(parentid=item['id'])
                    else:
                        callback_fn(item)
                cur_offset += 100

        walk_nodes(parentid='0')

    def get_folders(self, callback_fn, number_to_process=-1):
        """Walk the folder tree, calling callback_fn on each folder's dict."""
        number_complete = {'count': 0}

        def walk_nodes(parentid='0'):
            parent_node = self.client.folder(folder_id=parentid).get()
            item_col = parent_node._response_object.get('item_collection', {})
            entries = item_col.get('entries', [])
            for item in entries:
                item['parentid'] = parentid
                if item.get('type', '') == 'folder':
                    if number_to_process > 0 \
                            and number_complete['count'] > number_to_process:
                        return
                    number_complete['count'] += 1
                    node = self.client.folder(folder_id=item['id']).get()
                    node = node._response_object
                    node['parentid'] = item['parentid']
                    callback_fn(node)
                    walk_nodes(parentid=item['id'])

        walk_nodes(parentid='0')

    def download(self, did, exportfile, sha1sum=None):
        """Download file `did` to `exportfile`, optionally verifying its sha1.

        Writes to `exportfile + '.new'` first and renames into place so a
        failed download never clobbers an existing file.
        """
        dirname = os.path.dirname(os.path.abspath(exportfile))
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        # Bug fix: download_to() writes bytes; the target must be opened in
        # binary mode ('wb'), not text mode ('w').
        with open(exportfile + '.new', 'wb') as outfile:
            self.client.file(file_id=did).download_to(outfile)
        if sha1sum:
            from sync_app.util import get_sha1
            sha = get_sha1(exportfile + '.new')
            if sha != sha1sum:
                raise TypeError('%s %s' % (sha, sha1sum))
        os.rename('%s.new' % exportfile, exportfile)
        return True

    def upload(self, fname, parent_id='0'):
        """Upload fname under folder parent_id; return the item's dict."""
        bname = os.path.basename(fname)
        parent = self.client.folder(folder_id=parent_id)
        try:
            file_obj = parent.upload(file_path=fname, file_name=bname).get()
        except BoxAPIException as exc:
            print('BoxAPIException upload %s' % exc)
            raise
        item = file_obj._response_object
        item['parentid'] = parent_id
        return item

    def create_directory(self, dname, parent_id='0'):
        """Create directory dname under parent_id; return its item dict."""
        if not parent_id:
            raise ValueError('need to specify parent_id')
        parent = self.client.folder(folder_id=parent_id)
        try:
            parent.create_subfolder(dname)
        except BoxAPIException as exc:
            # Folder may already exist; fall through and look it up below.
            print('create_directory BoxAPIException %s %s' % (dname, exc))
        parent = parent.get()
        item = parent._response_object
        items = item.get('item_collection', {}).get('entries', [])
        for item in items:
            if item['type'] == 'folder' and item['name'] == dname:
                item['parentid'] = parent_id
                return item

    def delete_directory(self, dirid):
        """Delete directory by folder id."""
        return self.client.folder(folder_id=dirid).delete()

    def delete_file(self, fileid):
        """Delete file by file id."""
        return self.client.file(file_id=fileid).delete()
user = client.user().get() print("Login : {} and User Name : {} and User Mobile : {}".format( user.login, user.name, user.phone)) folder_id = '111881680762' fileName = client.folder(folder_id).upload( 'C:\\Users\\dell\\Desktop\\OOPS.pdf') print('File "{0}" uploaded to Box with file ID {1}'.format( fileName.name, fileName.id)) def viewSharedLink(file_id): url = client.file(file_id).get_shared_link() webbrowser.open(url, new=2) def getSharedLink(file_id): url = client.file(file_id).get_shared_link() return "The file shared link URL is {0}".format(url) file_id = '661659166400' downloadURL = client.file(file_id).get_download_url() webbrowser.open(downloadURL, new=2) print('File is currenty being Downloaded...') print(getSharedLink(file_id)) viewSharedLink(file_id)
class BoxAPI(StorageAPI):
    """Box backend for the StorageAPI interface.

    Tokens are cached in a two-line file (access token, refresh token)
    inside credential_dir; the SDK's store_tokens callback keeps it fresh.
    """

    def __init__(self, credential_dir, credential_filename=CREDENTIAL_FILENAME):
        super(BoxAPI, self).__init__(credential_dir)
        self.auth_file = join(credential_dir, credential_filename)
        self.oauth = OAuth2(client_id=CLIENT_ID,
                            client_secret=CLIENT_SECRET,
                            store_tokens=self.write_access_token)
        try:
            self.get_tokens_from_file()
            self.authorize()
        except IOError:
            # No cached token file yet; authorization happens on demand.
            pass

    def get_auth_url(self):
        """Return the Box OAuth2 authorization URL for our redirect URI."""
        auth_url, csrf_token = self.oauth.get_authorization_url(REDIRECT_URI)
        return auth_url

    def build(self):
        """Create the authenticated client and ensure the app folder exists."""
        self.client = Client(self.oauth)
        self.create_folder("demerio")

    def create_folder(self, folder_name):
        """Find or create the named folder under the Box root ('0').

        Sets self.root_folder_id to the folder's id.
        """
        search_results = self.client.search(
            folder_name,
            limit=100,
            offset=0,
            ancestor_folders=[self.client.folder(folder_id='0')])
        folder_filter = [
            result for result in search_results
            if result._item_type == "folder"
        ]
        if len(folder_filter) == 0:
            demerio_folder = self.client.folder(
                folder_id='0').create_subfolder('demerio')
        else:
            assert len(folder_filter) == 1
            demerio_folder = folder_filter[0].get(fields=["name"])
        self.root_folder_id = demerio_folder.id

    def get_tokens_from_file(self):
        """Read (access_token, refresh_token) from the auth file."""
        with open(self.auth_file, "r") as f:
            access_token = f.readline().rstrip()
            refresh_token = f.readline().rstrip()
        return access_token, refresh_token

    def write_access_token(self, access_token, refresh_token):
        """Persist tokens; used as the SDK's store_tokens callback."""
        with open(self.auth_file, 'w') as f:
            f.write(access_token + "\n")
            f.write(refresh_token + "\n")

    def authorize(self):
        """Authorize from the cached token file, or run the browser flow."""
        if os.path.exists(self.auth_file):
            access_token, refresh_token = self.get_tokens_from_file()
            # NOTE(review): pokes SDK-private attributes; passing the tokens
            # to the OAuth2 constructor would be the supported path.
            self.oauth._access_token = access_token
            self.oauth._refresh_token = refresh_token
        else:
            httpd = ClientRedirectServer(("localhost", 8888),
                                         ClientRedirectHandler)
            webbrowser.open(self.get_auth_url())
            httpd.handle_request()
            self.oauth.authenticate(httpd.query_params['code'])
        self.build()

    def is_connected(self):
        ## TODO: There must be a better way to check connection, with self.oauth ??
        # Bug fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        try:
            self.client.user(user_id='me').get()
        except Exception:
            return False
        return True

    def download_file(self, file_id, path_to_download):
        """Download the file's entire content to a local path."""
        with open(path_to_download, "wb") as f:
            f.write(self.client.file(file_id=file_id).content())

    def upload_new_file(self, local_file_path):
        """Upload a local file under a random name; return its Box id."""
        new_file = self.client.folder(folder_id=self.root_folder_id).upload(
            local_file_path, file_name=generate_random_string())
        return new_file.get()['id']

    def delete_file(self, file_id):
        """Delete the file with the given id."""
        self.client.file(file_id=file_id).delete()

    def update_file(self, local_file_path, file_id):
        """Replace the file's contents with the local file."""
        self.client.file(file_id=file_id).update_contents(local_file_path)
# print ("CLIENT_ID:%s" % CLIENT_ID) # print ("CLIENT_SECRET:%s" % CLIENT_SECRET) # print ("ACCESS_TOKEN:%s" % ACCESS_TOKEN) # Create OAuth2 object. It's already authenticated, thanks to the developer token. oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) # Create the authenticated client client = Client(oauth2) # file_id: csv formatted list of faculty names (columns 1,2), email (col 3) and netid (4) my_file_id=YOUR_FILE_ID import html2text lines = client.file(file_id=my_file_id).content().decode("utf-8", "replace").splitlines() import requests import json import numpy baseURL = "https://experts.illinois.edu/ws/api/59/persons/" endpoint = "research-outputs.json" apiKey = YOUR_API_KEY f1=open('author_pubs.json','w') f2=open('author_pubs.csv','w') category="Microbes" for line in lines[1:]:
class Box:
    """Small Box client wrapper: OAuth flow plus list/upload/download helpers.

    The OAuth redirect handler (elsewhere) stores the authorization code in
    redis under the CSRF-token key; authorization() polls for it.
    """

    def __init__(self):
        self.client = None
        # NOTE(review): despite the name, this holds the CSRF token returned
        # by get_authorization_url(), reused as the redis key.
        self.refresh_token = None
        self.authorization_url = None
        self.authorization_code = None
        self.access_token = None
        self.user = None
        self.oauth = OAuth2(client_id=config['the_box']['client_id'],
                            client_secret=config['the_box']['client_secret'])
        self.redirect_url = config['redirect']['url']
        self.root_directory = config['the_box']['root_directory']

    def authorization(self):
        """Run the interactive OAuth flow; returns False on timeout."""
        self.authorization_url, self.refresh_token = \
            self.oauth.get_authorization_url(self.redirect_url)
        webbrowser.open(self.authorization_url)
        if not self.get_auth_tocken():
            return False
        self.access_token, dummy = self.oauth.authenticate(
            self.authorization_code)
        self.client = Client(self.oauth)
        self.user = self.client.user(user_id='me').get()

    def get_auth_tocken(self):
        """
        get auth code for oauth
        :return: True once the code arrives via redis, False after 60s
        """
        r = redis.StrictRedis(host=config['redis']['host'],
                              port=config['redis']['port'],
                              db=config['redis']['db'])
        # Poll redis (1s interval, 60 tries) for the redirect handler to
        # drop the authorization code under the CSRF-token key.
        for i in range(60):
            auth_code = r.get(self.refresh_token)
            if not auth_code:
                sleep(1)
            else:
                self.authorization_code = auth_code
                return True
        return False

    def list(self, folder_id, limits=100):
        """
        list files in a remote folder
        :param folder_id:
        :param limits:
        :return: a list of files metadata
        """
        items = self.client.folder(folder_id=folder_id).get_items(
            limit=limits, offset=0)
        for file in items:
            is_folder = 'folder' if self.is_folder(file) else 'file'
            print(file.name, colored('(id: ' + file.id + ')', 'blue'),
                  colored(is_folder, 'yellow'))
        return items

    def download(self, file, destination):
        """
        download a file to local
        :param file: a File object or a file id
        :param destination: local directory to write into
        :return: True
        """
        if not isinstance(file, File):
            file = self.client.file(file)
        # Perf fix: the original called file.get() three separate times
        # (three API round-trips); fetch the metadata once and reuse it.
        info = file.get()
        with open(os.path.join(destination, info.name), 'wb') as f:
            file.download_to(f)
        print(info.name, 'downloaded from the box to', destination)
        return True

    def upload(self, file, folder_id):
        """Upload a local file into the given Box folder."""
        self.client.folder(folder_id=folder_id).upload(file_path=file)
        print(os.path.basename(file), 'uploaded to the box at',
              self.client.folder(folder_id=folder_id).get().name)
        return True

    def is_folder(self, file):
        """
        check if file is a folder
        :param file:
        :return: True when the item is a Folder
        """
        return isinstance(file, Folder)
########################################## ## This is where stuff actually happens ## ########################################## #''' #fileName,workingFolder=download_file() # ##wb = load_workbook("input-"+fileName) #wb = load_workbook('../Python/'+"input-"+fileName) #ws = wb.active## #makelists() #addNewNames(markhours()) # ##file = open(fileName, 'a+') #file = open('../Python/'+fileName, 'a+') #file.close() #wb.save(fileName) #uploadFile(fileName,workingFolder) #os.remove('../Python/'+"input-"+fileName) #os.remove('../Python/'+fileName) #'''and None open_file = open(names, "r") client.file(144656917590).update_contents(names) ########################################## ## End of stuff happening ## ##########################################
def file_content(args, user_integration): #print("The file has\n") bxc = BOX_Credentials.objects.get(user_integration_id=user_integration) ACCESS_TOKEN = bxc.BOX_ACCESS_TOKEN REFRESH_TOKEN = bxc.BOX_REFRESH_TOKEN #print(ACCESS_TOKEN) REFRESH_TOKEN = bxc.BOX_REFRESH_TOKEN #print(REFRESH_TOKEN + " refreshtoken") # Log the token we're using & starting call logging.info('Using Refresh Token: %s' % REFRESH_TOKEN) # Get new access & refresh tokens getTokens = requests.post(oauth2URL, data={ 'grant_type': 'refresh_token', 'refresh_token': REFRESH_TOKEN, 'client_id': clientId, 'client_secret': clientSecret }) # If the above gives a 4XX or 5XX error # getTokens.raise_for_status() # Get the JSON from the above newTokens = getTokens.json() # Get the new access token, valid for 60 minutes accessToken = newTokens['access_token'] refreshToken = newTokens['refresh_token'] #print("New accessToken " + accessToken) #print("New refreshToken " + refreshToken) bxc.BOX_REFRESH_TOKEN = refreshToken bxc.BOX_ACCESS_TOKEN = accessToken bxc.save() CLIENT_ID = settings.CLIENT_ID CLIENT_SECRET = settings.CLIENT_SECRET oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) file_name = args['File-Name'] client = Client(oauth2, LoggingNetwork()) items = client.folder(folder_id='0').get_items(limit=1000, offset=0) flag = 0 for item in items: # print(item['name']) if item['name'] == file_name: print("Name: " + item['name'] + " ID: " + item['id']) file_id = item['id'] flag = 1 field = AttachmentFieldsClass() attachment = MessageAttachmentsClass() if flag == 1: s = client.file(file_id=file_id).content() response = json.loads(s) response = response['atext']['text'] field.title = "The file contains" field.value = response attachment.attach_field(field) else: field.title = file_name + " does not exist." attachment.attach_field(field) m = MessageClass() m.attach(attachment) response = json.loads(s) response = response['atext']['text'] return m
def sync_box_to_gcs(box: BoxClient, bucket: Bucket, cache: dict) -> List[Future]: """Sync Box account files to a GCS bucket. For versioned Box files, the latest version will always be synced back to the GCS bucket. Non-current versions will not be deliberately preserved, though syncing to a versioned bucket will have this effect. Arguments: box {BoxClient} -- [description] bucket {Bucket} -- [description] cache {dict} -- A dictionary that will opportunistically be filled with Box item paths/objects. Returns: List[Future] -- [description] """ # constuct an executor for copy tasks executor = ThreadPoolExecutor(max_workers=cpu_count()) futures = [] # sync box files back to GCS for path, item in box_walk(box.root_folder()): LOG.debug("Box directory walk found: {}".format(path)) # cache the Box item in module scope cache[path] = item # get the blob to overwrite, or make a new one blob_exists = True blob = bucket.get_blob(path) if not blob: blob_exists = False blob = Blob(path, bucket) # branch depending on whether file or folder if item.type == "folder": if not blob_exists: blob.metadata = { # Not an important value. BOX_MTIME_KEY: datetime.now().isoformat() } # create directory marker, used by UI and FUSE LOG.info("Creating directory marker in GCS: {}".format( blob.name)) blob.upload_from_string(b'') elif item.type == "file": box_file = box.file(item.id) box_mtime = box_file.get().modified_at if should_copy_box_to_gcs(box_file, box_mtime, blob, blob_exists): LOG.info( "Box file {} is not found in GCS or updated since last sync. Copying to {}." .format(item.name, blob.name)) blob.metadata = { BOX_MTIME_KEY: box_mtime } # This change will "follow" the upload into GCS temp_file = BytesIO() reader = box_file.download_to writer = blob.upload_from_file future = executor.submit(concurrent_upload, reader, writer, temp_file) futures.append(future) else: LOG.info("Ignoring item of type {}".format(item.type)) return futures
class Provider(StorageABC):
    """Box storage provider for cloudmesh.

    All public methods are best-effort: errors are reported through
    Console.error and the method returns None rather than raising.
    """

    def __init__(self, service=None, config="~/.cloudmesh/cloudmesh4.yaml"):
        # JWT (server-auth) credentials come from the path stored in the
        # cloudmesh config under credentials['config_path'].
        super().__init__(service=service, config=config)
        self.sdk = JWTAuth.from_settings_file(self.credentials['config_path'])
        self.client = Client(self.sdk)

    def put(self, service=None, source=None, destination=None,
            recursive=False):
        """
        uploads file to Box, if source is directory and recursive is true
        uploads all files in source directory

        :param source: local file or directory to be uploaded
        :param destination: cloud directory to upload to
        :param recursive: if true upload all files in source directory,
            source must be directory not file
        :return: file dict(s) that have been uploaded
        """
        try:
            dest = basename(destination)
            sourcepath = change_path(source)
            sourcebase = basename(sourcepath)
            uploaded = []
            files = []
            if dest == '':
                # Empty destination means the Box root folder ('0').
                files += [
                    item for item in self.client.folder('0').get_items()
                    if item.type == 'file'
                ]
                folder_id = '0'
            else:
                # Resolve the destination folder by search; if absent, try
                # to create it under its parent.
                items = self.client.search().query(dest, type='folder')
                folders = [item for item in items]
                folder_id = get_id(dest, folders, 'folder')
                if folder_id is not None:
                    files += [
                        item
                        for item in self.client.folder(folder_id).get_items()
                        if item.type == 'file'
                    ]
                else:
                    items = self.client.search().query(basename(
                        dirname(destination)), type='folder')
                    folders = [item for item in items]
                    folder_id = get_id(dest, folders, 'folder')
                    if folder_id is not None:
                        new_folder = self.client.folder(
                            folder_id).create_subfolder(dest)
                        folder_id = new_folder.id
                    else:
                        Console.error("Invalid destination.")
                        return
            if not recursive:
                if os.path.isfile(sourcepath):
                    filename = sourcebase
                else:
                    Console.error("Invalid source path.")
                    return
                # Upload new, or update contents when a same-named file
                # already exists in the destination.
                file_id = get_id(filename, files, 'file')
                if file_id is None:
                    file = self.client.folder(folder_id).upload(sourcepath)
                    files_dict = update_dict(file)
                    return files_dict
                else:
                    file = self.client.file(file_id).update_contents(
                        sourcepath)
                    files_dict = update_dict(file)
                    return files_dict
            else:
                # Breadth-first upload of the source directory's entries.
                folder_ids = [folder_id]
                uploads = [[s for s in os.listdir(source)]]
                while len(uploads) > 0:
                    for s in uploads[0]:
                        # NOTE(review): `s` is a bare name, so isdir/listdir
                        # resolve relative to the CWD, not `source` — likely
                        # should be os.path.join(source, s); confirm.
                        if os.path.isdir(s):
                            uploads += [d for d in os.listdir(s)]
                            new = self.client.folder(
                                folder_ids[0]).create_subfolder(s)
                            folder_ids.append(new.id)
                        else:
                            s_id = get_id(s, files, 'file')
                            if s_id is None:
                                file = self.client.folder(
                                    folder_ids[0]).upload(sourcepath + '/' + s)
                                uploaded.append(file)
                            else:
                                file = self.client.file(s_id).update_contents(
                                    sourcepath + '/' + s)
                                uploaded.append(file)
                    uploads.pop(0)
                    folder_ids.pop(0)
                files_dict = update_dict(uploaded)
                return files_dict
        except Exception as e:
            Console.error(e)

    def get(self, service=None, source=None, destination=None,
            recursive=False):
        """
        downloads file from Box, if recursive is true and source is
        directory downloads all files in directory

        :param source: cloud file or directory to download
        :param destination: local directory to be downloaded into
        :param recursive: if true download all files in source directory,
            source must be directory
        :return: file dict(s) that have been downloaded
        """
        try:
            target = basename(source)
            dest = change_path(destination)
            downloads = []
            if recursive:
                if target == '':
                    files = [
                        item for item in self.client.folder('0').get_items()
                        if item.type == 'file'
                    ]
                    folders = [
                        item for item in self.client.folder('0').get_items()
                        if item.type == 'folder'
                    ]
                else:
                    results = [
                        item for item in self.client.search().query(
                            target, type='folder')
                    ]
                    folder_id = get_id(target, results, 'folder')
                    if folder_id:
                        files = [
                            item for item in self.client.folder(
                                folder_id).get_items() if item.type == 'file'
                        ]
                        folders = [
                            item for item in self.client.folder(
                                folder_id).get_items() if item.type == 'folder'
                        ]
                    else:
                        Console.error("Source directory not found.")
                        return
                # Flatten the whole subtree: drain the folder queue, adding
                # each folder's files and subfolders.
                while len(folders) > 0:
                    files += [
                        item for item in self.client.folder(
                            folders[0].id).get_items() if item.type == 'file'
                    ]
                    folders += [
                        item for item in self.client.folder(
                            folders[0].id).get_items() if item.type == 'folder'
                    ]
                    folders.pop(0)
                for f in files:
                    if f.type == 'file':
                        file = self.client.file(f.id).get()
                        full_dest = join(dest, file.name)
                        with open(full_dest, 'wb') as file_dest:
                            self.client.file(file.id).download_to(file_dest)
                        downloads.append(file)
                files_dict = update_dict(downloads)
                return files_dict
            else:
                results = [item for item in self.client.search().query(target)]
                if not any(result.name == target for result in results):
                    Console.error("Source file not found.")
                else:
                    file_id = get_id(target, results, 'file')
                    if file_id is not None:
                        file = self.client.file(file_id).get()
                        full_dest = join(dest, file.name)
                        with open(full_dest, 'wb') as f:
                            self.client.file(file.id).download_to(f)
                        files_dict = update_dict(file)
                        return files_dict
        except Exception as e:
            Console.error(e)

    def search(self, service=None, directory=None, filename=None,
               recursive=False):
        """
        searches directory for file, if recursive searches all
        subdirectories

        :param directory: cloud directory to search in
        :param filename: name of file to search for
        :param recursive: if true search all child directories of original
            directory
        :return: file dict(s) matching filename in specified directory
        """
        try:
            cloud_dir = basename(directory)
            results = []
            if cloud_dir == '':
                files = [
                    item for item in self.client.folder('0').get_items()
                    if item.type == 'file'
                ]
                folders = [
                    item for item in self.client.folder('0').get_items()
                    if item.type == 'folder'
                ]
            else:
                items = self.client.search().query(cloud_dir, type='folder')
                folder_id = get_id(cloud_dir, items, 'folder')
                # NOTE(review): when the directory is not found, execution
                # continues with folder_id=None and the folder() calls below
                # will fail (caught by the broad except) — confirm intended.
                if not folder_id:
                    Console.error("Directory not found.")
                files = [
                    item for item in self.client.folder(folder_id).get_items()
                    if item.type == 'file'
                ]
                folders = [
                    item for item in self.client.folder(folder_id).get_items()
                    if item.type == 'folder'
                ]
            # Substring match on the file name.
            for file in files:
                if filename in file.name:
                    results.append(file)
            if not recursive:
                if len(results) > 0:
                    files_dict = update_dict(results)
                    return files_dict
                else:
                    Console.error("No files found.")
            else:
                # Walk subfolders breadth-first, collecting matches.
                while len(folders) > 0:
                    files = [
                        item for item in self.client.folder(
                            folders[0].id).get_items() if item.type == 'file'
                    ]
                    folders += [
                        item for item in self.client.folder(
                            folders[0].id).get_items() if item.type == 'folder'
                    ]
                    for file in files:
                        if filename in file.name:
                            results.append(file)
                    folders.pop(0)
                if len(results) > 0:
                    files_dict = update_dict(results)
                    return files_dict
                else:
                    Console.error("No files found.")
        except Exception as e:
            Console.error(e)

    def create_dir(self, service=None, directory=None):
        """
        creates a new directory

        :param directory: path for new directory
        :return: dict of new directory
        """
        try:
            path = directory.split('/')
            new_dir = basename(directory)
            if len(path) == 1:
                Console.error('Invalid path specified.')
            else:
                # Parent is the path component just before the new leaf.
                parent = path[len(path) - 2]
                if parent == '':
                    folder = self.client.folder('0').create_subfolder(new_dir)
                    folder_dict = update_dict(folder)
                    return folder_dict
                folders = [
                    item for item in self.client.search().query(
                        parent, type='folder')
                ]
                if len(folders) > 0:
                    # First search hit wins as the parent folder.
                    parent = folders[0].id
                    folder = self.client.folder(parent).create_subfolder(
                        new_dir)
                    folder_dict = update_dict(folder)
                    return folder_dict
                else:
                    Console.error("Destination directory not found")
        except Exception as e:
            Console.error(e)

    def list(self, service=None, source=None, recursive=False):
        """
        lists all contents of directory, if recursive lists contents of
        subdirectories as well

        :param source: cloud directory to list all contents of
        :param recursive: if true list contents of all child directories
        :return: dict(s) of files and directories
        """
        try:
            result_list = []
            subfolders = []
            path = basename(source)
            if path == '':
                contents = [
                    item for item in self.client.folder('0').get_items()
                ]
                for c in contents:
                    if c.type == 'folder':
                        subfolders.append(c)
                    result_list.append(c)
            else:
                folders = [
                    item for item in self.client.search().query(
                        path, type='folder')
                ]
                folder_id = get_id(path, folders, 'folder')
                if folder_id:
                    contents = [
                        result for result in self.client.folder(
                            folder_id).get_items()
                    ]
                    for c in contents:
                        if c.type == 'folder':
                            subfolders.append(c)
                        result_list.append(c)
                else:
                    Console.error("Directory " + path + " not found.")
            if recursive:
                # Drain the subfolder queue, appending each folder's items.
                while len(subfolders) > 0:
                    contents = [
                        item for item in self.client.folder(
                            subfolders[0].id).get_items()
                    ]
                    for c in contents:
                        if c.type == 'folder':
                            subfolders.append(c)
                        result_list.append(c)
                    subfolders.pop(0)
            list_dict = update_dict(result_list)
            return list_dict
        except Exception as e:
            Console.error(e)

    def delete(self, service=None, source=None, recursive=False):
        """
        deletes file or directory

        :param source: file or directory to be deleted
        :return: None
        """
        try:
            path = source.strip('/').split('/')
            name = path[len(path) - 1]
            # Search both files and folders for the leaf name.
            items = self.client.search().query(name, type='file')
            files = [item for item in items]
            items2 = self.client.search().query(name, type='folder')
            folders = [item2 for item2 in items2]
            results = files + folders
            deleted = []
            if not any(result.name == name for result in results):
                Console.error("Source not found.")
            else:
                # Delete the first exact-name match only.
                item_ind = next((index
                                 for (index, result) in enumerate(results)
                                 if (result.name == name)), None)
                item_id = results[item_ind].id
                item_type = results[item_ind].type
                deleted.append(results[item_ind])
                if item_type == 'folder':
                    self.client.folder(item_id).delete()
                elif item_type == 'file':
                    self.client.file(item_id).delete()
                result_list = update_dict(deleted)
                return result_list
        except Exception as e:
            Console.error(e)
client_secret='YKatTFXOH1icNc9uxD3K2TMLCiulQJ0M', access_token='W8PeTR4cgnYIRYI6L5Q7HeU35g4dMqwl' # store_tokens=your_store_tokens_callback_method, ) auth_url, csrf_token = oauth.get_authorization_url( 'https://psu.app.box.com/folder/0') # Redirect user to auth_url, where they will enter their Box credentials response = requests.get("https://account.box.com/api/oauth2/authorize/") # print(response.status_code) client = Client(oauth) # client: object = requests.get("https://api.box.com/2.0/folders/:9dj7qs0aimiaywmxm2mo/").folder.get() # Mitford_Digital_Archives folder = client.folder(folder_id='907565446').get() sampleFile = client.file(file_id='33034276995') comments = sampleFile.get_comments() type = sampleFile.get().type print(str(comments)) print(str(type)) commentsall = [] for comment in comments: # print(str(comment)) contents = 'Comment was left by {0} at {1}: {2}'.format( comment.created_by.name, comment.created_at, comment.message) # print(contents) commentsall.append(contents) commentsComp = '\n'.join(commentsall) if len(commentsComp) > 0: print(str(commentsComp))