def oauth2_callback(request):
    """Complete the Box OAuth2 flow for the current Django user.

    Validates the CSRF ``state`` against the value stashed in the session,
    exchanges the auth code for tokens, and persists them as a BoxUserToken.
    Always redirects back to the integration index page.
    """
    auth_code = request.GET.get('code')
    state = request.GET.get('state')
    # The session must have been primed by the view that started the flow.
    if 'box' not in request.session:
        return HttpResponseBadRequest('Unexpected request')
    box = request.session['box']
    # CSRF protection: the state echoed back must match the one we issued.
    if state != box['state']:
        return HttpResponseBadRequest('Request expired')
    try:
        oauth = OAuth2(
            client_id=settings.BOX_APP_CLIENT_ID,
            client_secret=settings.BOX_APP_CLIENT_SECRET
        )
        access_token, refresh_token = oauth.authenticate(auth_code)
        client = Client(oauth)
        # save the token
        box_user = client.user(user_id=u'me').get()
        token = BoxUserToken(
            user=request.user,
            access_token=access_token,
            refresh_token=refresh_token,
            box_user_id=box_user.id,
        )
        token.save()
    except BoxException as exc:
        logger.exception('Unable to complete Box integration setup: %s' % exc)
        messages.error(request,
                       'Oh no! An unexpected error occurred while trying to set '
                       'up the Box.com application. Please try again.')
    return HttpResponseRedirect(reverse('box_integration:index'))
def initializeClientAndAuthObjects():
    """Lazily create the module-global JWT auth and Box client objects.

    IMPORTANT: You will need to place your RSA private key in /box/rsakey.pem

    FIX: Python 2 print statements converted to print() calls so the module
    is valid under Python 3.
    """
    global authObject
    try:
        authObject
    except NameError:
        authObject = None
    if authObject is None:
        print("refreshing auth ")
        authObject = JWTAuth(
            client_id=client_id,
            client_secret=client_secret,
            enterprise_id=eid,
            rsa_private_key_file_sys_path=os.path.join(
                os.path.dirname(__file__), 'rsakey.pem'),
            store_tokens=store_tokens)

    global clientObject
    try:
        clientObject
    except NameError:
        clientObject = None
    if clientObject is None:
        print("refreshing client ")
        # If you don't want the network logging, use the commented line below
        # instead of the network_layer variant.
        clientObject = Client(authObject, network_layer=getLogger())
        print("initializing " + str(clientObject))
        # clientObject = Client(authObject)
    return
def user_detail(user_id):
    """Render the detail view for one managed user.

    Authenticates *as* that app user via JWT, lists the root of their Box
    account (with preview links built in), and passes a user-scoped token
    to the template.

    FIX: Python 2 print statements converted to print() calls, and the
    credential-scrubbed line (``"******"``) reconstructed.
    """
    print('### Sending detail view ###')
    client = Client(g.auth, network_layer=customLogger)
    user = client.user(user_id=user_id).get()
    # As an admin, we can act on behalf of other users by creating new auth
    # and client objects. We should also be caching this token. For the
    # purposes of this quickstart we only cache access for one user (the admin).
    # NOTE(review): the original print argument was scrubbed to `"******"`;
    # presumably it printed the user login — confirm against upstream source.
    print("AUTHENTICATING USER: " + user.login + " (" + user.name + ")")
    user_auth = JWTAuth(client_id=app.config['CLIENT_ID'],
                        client_secret=app.config['CLIENT_SECRET'],
                        enterprise_id=app.config['EID'],
                        jwt_key_id=app.config['KEY_ID'],
                        rsa_private_key_file_sys_path=os.path.join(
                            os.path.dirname(__file__), 'rsakey.pem'))
    user_auth.authenticate_app_user(user)  # <--- Authenticate as the user
    user_client = Client(user_auth)
    # Make API calls as the user by using the user_client object
    files = user_client.folder(folder_id='0').get_items(limit=100)
    # Build the preview link into any files sent to the client
    for f in files:
        if f._item_type == "file":
            f.preview_url = f.get(
                fields=['expiring_embed_link']).expiring_embed_link['url']
    token = user_auth.access_token
    return render_template("user_detail.html", user=user,
                           files_list=files, token=token)
def get(self, request, *args, **kwargs):
    """Stream the first file found in the Box root folder back to the
    browser as an attachment download.
    """
    from io import BytesIO
    from django.http import HttpResponse

    # Most recently created BoxUser wins.
    boxuser = BoxUser.objects.order_by('-id')[0]
    # NOTE(review): app credentials are hard-coded here; move to settings.
    oauth = RedisManagedOAuth2(
        client_id='5dn98104cyf535v4581cbb1wxnag6e5y',
        client_secret='8z6ysMEnsrickMWBwpnysxYJ9SvqaNlY',
        unique_id=boxuser.unique_id
    )
    client = Client(oauth)
    items = client.folder(folder_id='0').get_items(limit=100, offset=0)
    files_only = [item for item in items if item.type == "file"]
    target_id = files_only[0].id
    file_name = client.file(file_id=target_id).get()['name']

    # Build the octet-stream response around the downloaded bytes.
    response = HttpResponse(content_type='application/octet-stream')
    response['Content-Disposition'] = (
        'attachment; filename="%s"' % file_name)
    buffer = BytesIO(client.file(file_id=target_id).content())
    payload = buffer.getvalue()
    buffer.close()
    response.write(payload)
    return response
def folder_detail(folder_id):
    """Render a Box folder together with up to 100 of its items."""
    client = Client(g.auth, network_layer=customLogger)
    folder = client.folder(folder_id=folder_id).get()
    contents = folder.get_items(limit=100)
    return render_template("folder_detail.html", folder=folder,
                           files_list=contents)
def index():
    """Render the admin index listing all users and groups.

    FIX: Python 2 print statement converted to a print() call.
    """
    print('### Sending Index view ###')
    client = Client(g.auth, network_layer=customLogger)
    # NEVER SEND AN ADMIN TOKEN TO THE CLIENT
    # I only provide it here so that you can use this app to quickly get a token.
    return render_template("index.html",
                           users_list=client.users(),
                           groups_list=client.groups(),
                           token=g.auth.access_token)
def box_callback():
    """OAuth2 redirect handler: exchange the code for tokens and upload a
    sample image into a fixed Box folder.

    FIX: Python 2 print statement converted to a print() call.
    """
    code = request.args.get('code')
    access_token, refresh_token = oauth.authenticate(code)
    client = Client(oauth)
    # NOTE(review): hard-coded local path and folder id — parameterize.
    file_path = '/Users/davidbliu/desktop/wd/filtering/comp/IMG_0860_comp.png'
    file_name = id_generator() + '.png'
    folder_id = '7173575673'
    box_file = client.folder(folder_id).upload(file_path, file_name)
    print(box_file)
    return 'uploaded box file'
def get_folders(self, **kwargs):
    """Return the child folders of the requested Box folder — or the
    synthetic "Full Box" root entry when no folder_id is given — shaped
    for the addon folder-picker UI.
    """
    folder_id = kwargs.get('folder_id')
    if folder_id is None:
        return [{
            'id': '0',
            'path': '/',
            'addon': 'box',
            'kind': 'folder',
            'name': '/ (Full Box)',
            'urls': {
                # 'folders': node.api_url_for('box_folder_list', folderId=0),
                'folders': api_v2_url(
                    'nodes/{}/addons/box/folders/'.format(self.owner._id),
                    params={'id': '0'}
                )
            }
        }]
    try:
        Provider(self.external_account).refresh_oauth_key()
        oauth = OAuth2(client_id=settings.BOX_KEY,
                       client_secret=settings.BOX_SECRET,
                       access_token=self.external_account.oauth_key)
        client = Client(oauth)
    except BoxAPIException:
        raise HTTPError(http.FORBIDDEN)
    try:
        metadata = client.folder(folder_id).get()
    except BoxAPIException:
        raise HTTPError(http.NOT_FOUND)
    except MaxRetryError:
        raise HTTPError(http.BAD_REQUEST)

    ancestor_names = [entry['name']
                      for entry in metadata['path_collection']['entries']]
    folder_path = '/'.join(ancestor_names + [metadata['name']])

    return [
        {
            'addon': 'box',
            'kind': 'folder',
            'id': item['id'],
            'name': item['name'],
            'path': os.path.join(folder_path, item['name']).replace('All Files', ''),
            'urls': {
                'folders': api_v2_url(
                    'nodes/{}/addons/box/folders/'.format(self.owner._id),
                    params={'id': item['id']}
                )
            }
        }
        for item in metadata['item_collection']['entries']
        if item['type'] == 'folder'
    ]
def main():
    """Go Main Go"""
    iemprops = get_properties()
    oauth = OAuth2(
        client_id=iemprops['boxclient.client_id'],
        client_secret=iemprops['boxclient.client_secret'],
        store_tokens=_store_tokens
    )
    # Manual OAuth dance: show the URL, then paste the code back in.
    print(oauth.get_authorization_url('https://mesonet.agron.iastate.edu'))
    oauth.authenticate(input("What was the code? "))
    box = Client(oauth)
    print(box.user(user_id='me').get())
def delete_user(user_id):
    """Force-delete a Box user after a 'yes' confirmation from the form.

    FIX: Python 2 print statement converted to a print() call.
    """
    if request.form['deleteconf'].lower() == 'yes':
        print("DELETING USER: {0}".format(user_id))
        flash("Deleted user: {0}".format(user_id))
        client = Client(g.auth, network_layer=customLogger)
        user = client.user(user_id=user_id)
        user.delete(params={"force": True})  # Use the force
        time.sleep(1)  # Forcing thread sync or waiting for the DB to catch up
        return redirect(url_for('index'))
    else:
        flash("Must type YES to confirm", 'error')
        return redirect(url_for('delete_user', user_id=user_id))
def get_auth(self):
    """Authorize against Box, reusing pickled tokens when available,
    otherwise running the interactive OAuth flow. Returns the client.
    """
    token_file = os.path.join(HOMEDIR, '.box_tokens.pkl')
    if os.path.exists(token_file):
        # Reuse previously pickled tokens.
        with open(token_file, 'rb') as pfile:
            self.access_token, self.refresh_token = pickle.load(pfile)
        self.oauth = OAuth2(
            client_id=self.client_id,
            client_secret=self.client_secret,
            store_tokens=self.store_tokens,
            access_token=self.access_token,
            refresh_token=self.refresh_token)
    else:
        # First run: perform the interactive authorization dance.
        self.oauth = OAuth2(
            client_id=self.client_id,
            client_secret=self.client_secret,
            store_tokens=self.store_tokens)
        auth_url, csrf_token = self.oauth.get_authorization_url(self.redirect_uri)
        code = get_auth_code(auth_url, self.redirect_uri)
        print(code)
        self.access_token, self.refresh_token = self.oauth.authenticate(code)
    self.client = Client(self.oauth)
    return self.client
def testAndUpdateCredentials():
    """Validate the stored Box credentials by fetching the current user,
    then return the (re-read) configuration object.
    """
    # NOTE(review): SafeConfigParser is deprecated in Python 3; consider
    # ConfigParser.
    config = SafeConfigParser()
    config.read(configfile_name)
    oauth = OAuth2(client_id=config.get('boxCredentials', 'clientID'),
                   client_secret=config.get('boxCredentials', 'clientSecret'),
                   access_token=config.get('boxCredentials', 'accessToken'),
                   refresh_token=config.get('boxCredentials', 'refreshToken'),
                   store_tokens=store_tokens)
    client = Client(oauth)
    # Any authenticated call refreshes the tokens if needed.
    me = client.user(user_id='me').get()
    # Re-read in case store_tokens rewrote the file during the call above.
    config.read(configfile_name)
    return config
def create_user():
    """Create a Box user from the submitted form, optionally running the
    user-initialization steps, then redirect to the index.
    """
    if not request.form['name']:
        flash("Name required for user creation.", "error")
        return redirect(url_for('index'))
    client = Client(g.auth, network_layer=customLogger)
    new_user = client.create_user(request.form['name'],
                                  job_title=request.form['job'],
                                  phone=request.form['phone'],
                                  address=request.form['address'])
    if request.form.get('initialize'):
        # User init scripts go here
        add_user_to_group(client, new_user, "SuchGroup")
        flash("Initialized user: {0}".format(request.form['name']))
    else:
        flash("Created new user: {0} ".format(request.form['name']))
    return redirect(url_for('index'))
def upload_queue_processor():
    """
    Implements a simple re-try mechanism for pending uploads

    FIX: the retry loop hard-coded ``range(15)`` next to ``num_retries = 15``;
    the loop now uses the variable so the two cannot drift apart.
    :return:
    """
    while True:
        if upload_queue.not_empty:
            callable_up = upload_queue.get()  # blocks
            # TODO: pass in the actual item being updated/uploaded, so we can
            # do more intelligent retry mechanisms
            was_list = isinstance(callable_up, list)
            last_modified_time = oauth = None
            if was_list:
                last_modified_time, callable_up, oauth = callable_up
            args = callable_up.args if isinstance(callable_up, partial) else None
            num_retries = 15
            for x in range(num_retries):
                try:
                    ret_val = callable_up()
                    if was_list:
                        item = ret_val  # is the new/updated item
                        if isinstance(item, File):
                            client = Client(oauth)
                            file_obj = client.file(file_id=item.object_id).get()
                            redis_set(r_c, file_obj, last_modified_time,
                                      box_dir_path=BOX_DIR)
                    # success — stop retrying
                    break
                except BoxAPIException as e:
                    crate_logger.debug('{the_args}, {the_trace}'.format(
                        the_args=args, the_trace=traceback.format_exc()))
                    if e.status == 409:
                        crate_logger.debug('Apparently Box says this item already exists...'
                                           'and we were trying to create it. Need to handle this better')
                        break
                except (ConnectionError, BrokenPipeError, ProtocolError,
                        ConnectionResetError):
                    time.sleep(3)
                    crate_logger.debug('{the_args}, {the_trace}'.format(
                        the_args=args, the_trace=traceback.format_exc()))
                    if x >= num_retries - 1:
                        crate_logger.debug('Upload giving up on: {}'.format(callable_up))
                        # no immediate plans to do anything with this info, yet.
                        uploads_given_up_on.append(callable_up)
                except (TypeError, FileNotFoundError):
                    crate_logger.debug(traceback.format_exc())
                    break
            upload_queue.task_done()
def handle_callback(self, response):
    """View called when the Oauth flow is completed. Adds a new UserSettings
    record to the user and saves the user's access token and account info.
    """
    oauth = OAuth2(
        access_token=response['access_token'],
        refresh_token=response['refresh_token'],
        client_id=settings.BOX_KEY,
        client_secret=settings.BOX_SECRET,
    )
    about = Client(oauth).user().get()
    return {
        'provider_id': about['id'],
        'display_name': about['name'],
        'profile_url': 'https://app.box.com/profile/{0}'.format(about['id'])
    }
def _folder_data(self, folder_id):
    # Split out from set_folder for ease of testing, due to
    # outgoing requests. Should only be called by set_folder
    # NOTE(review): the folder_id parameter is unused; self.folder_id is
    # read instead — confirm that is intended.
    try:
        Provider(self.external_account).refresh_oauth_key(force=True)
    except InvalidGrantError:
        raise exceptions.InvalidAuthError()
    try:
        oauth = OAuth2(client_id=settings.BOX_KEY,
                       client_secret=settings.BOX_SECRET,
                       access_token=self.external_account.oauth_key)
        folder_data = Client(oauth).folder(self.folder_id).get()
    except BoxAPIException:
        raise exceptions.InvalidFolderError()

    folder_name = folder_data['name'].replace('All Files', '') or '/ (Full Box)'
    ancestor_names = [entry['name']
                      for entry in folder_data['path_collection']['entries']
                      if entry['name']]
    folder_path = '/'.join(
        ancestor_names + [folder_data['name']]
    ).replace('All Files', '') or '/'
    return folder_name, folder_path
def __init__(self):
    """Set up the photobooth database, a service-account Box client, a
    dedicated app user for uploads, and the target upload folder.
    """
    self._db_engine = sqlalchemy.create_engine('sqlite+pysqlite:///photobooth.db')
    self._session_maker = sessionmaker(bind=self._db_engine, autoflush=True)
    self._session = self._session_maker()
    DeclarativeBase.metadata.create_all(self._db_engine)

    # Enterprise (service-account) authentication.
    self._auth = JWTAuth(
        client_id=self._CLIENT_ID,
        client_secret=self._CLIENT_SECRET,
        enterprise_id=self._ENTERPRISE_ID,
        rsa_private_key_file_sys_path='private_key.pem',
        rsa_private_key_passphrase=self._PASSPHRASE,
    )
    self._client = Client(self._auth)

    # Reuse the previously created uploader app user, or create one.
    try:
        user_id = self._session.query(PhotoBoothInfo).filter_by(key='user_id').one().value
        from boxsdk.object.user import User
        self._upload_user = User(None, user_id)
    except NoResultFound:
        self._upload_user = self._client.create_user('Photobooth Uploader')
        self._session.add(PhotoBoothInfo(key='user_id',
                                         value=self._upload_user.object_id))
        self._session.commit()

    # A second auth/client pair that acts as the uploader app user.
    self._uploader_auth = JWTAuth(
        client_id=self._CLIENT_ID,
        client_secret=self._CLIENT_SECRET,
        enterprise_id=self._ENTERPRISE_ID,
        rsa_private_key_file_sys_path='private_key.pem',
        rsa_private_key_passphrase=self._PASSPHRASE,
    )
    self._uploader_auth.authenticate_app_user(self._upload_user)
    self._uploader = Client(self._uploader_auth)

    # Reuse the previously created upload folder, or create one.
    try:
        folder_id = self._session.query(PhotoBoothInfo).filter_by(key='folder_id').one().value
        self._folder = self._uploader.folder(folder_id)
    except NoResultFound:
        self._folder = self._uploader.folder('0').create_subfolder('Photobooth Images')
        self._session.add(PhotoBoothInfo(key='folder_id',
                                         value=self._folder.object_id))
        self._session.commit()
def get_context_data(self, **kwargs):
    """Add the latest BoxUser — and every other BoxUser — plus their Box
    root-folder listings to the template context.
    """
    context = super(HomeView, self).get_context_data(**kwargs)
    if BoxUser.objects.count():
        boxuser = BoxUser.objects.order_by('-id')[0]
        # NOTE(review): app credentials are hard-coded here; move to settings.
        oauth = RedisManagedOAuth2(
            client_id='5dn98104cyf535v4581cbb1wxnag6e5y',
            client_secret='8z6ysMEnsrickMWBwpnysxYJ9SvqaNlY',
            unique_id=boxuser.unique_id
        )
        client = Client(oauth)
        context['boxuser'] = client.user(user_id='me').get()
        context['folder_items'] = (
            client.folder(folder_id='0').get_items(limit=100, offset=0))

        # Everyone except the primary (most recent) user.
        context['others'] = []
        for other in BoxUser.objects.exclude(id=boxuser.id):
            other_auth = RedisManagedOAuth2(
                client_id='5dn98104cyf535v4581cbb1wxnag6e5y',
                client_secret='8z6ysMEnsrickMWBwpnysxYJ9SvqaNlY',
                unique_id=other.unique_id
            )
            other_client = Client(other_auth)
            context['others'].append({
                'boxuser': other_client.user(user_id='me').get(),
                'folder_items': (
                    other_client
                    .folder(folder_id='0')
                    .get_items(limit=100, offset=0))
            })
    return context
def __init__(self, shouldAuthenticate=False):
    """Initialize the Restore class.

    Mainly provides OAuth2 authentication via an interactive console flow
    (Python 2 era: uses raw_input).
    :return:
    """
    if not shouldAuthenticate:
        return
    self.oauth = OAuth2(Constants.CLIENT_ID, Constants.CLIENT_SECRET,
                        self.store_tokens)
    self.auth_url, self.csrf_token = \
        self.oauth.get_authorization_url('https://127.0.0.1')
    print(self.auth_url)
    auth_code = raw_input("Please enter auth_code after granting access: ")
    logging.info(auth_code)
    access_token, refresh_token = self.oauth.authenticate(auth_code)
    logging.info(access_token + " " + refresh_token)
    self.client = Client(self.oauth)
def iem_token_callback(self, access_token, refresh_token):
    """Persist refreshed Box tokens to the IEM properties table and
    rebuild the Box client around them.
    """
    oauth = OAuth2(client_id=self.client_id,
                   client_secret=self.client_secret,
                   access_token=access_token,
                   refresh_token=refresh_token,
                   store_tokens=self.iem_token_callback)
    self.client = Client(oauth)
    pgconn = psycopg2.connect(database='mesosite', host='iemdb')
    cursor = pgconn.cursor()
    for propname, propvalue in [
            (self.IEM_PROPERTIES_ACCESS_TOKEN, access_token),
            (self.IEM_PROPERTIES_REFRESH_TOKEN, refresh_token)]:
        cursor.execute("""
        UPDATE properties SET propvalue = %s WHERE propname = %s
        """, (propvalue, propname))
    cursor.close()
    pgconn.commit()
def dbbootstrap(self, store_tokens):
    """Get configuration from IEM Database"""
    pgconn = psycopg2.connect(database='mesosite', host='iemdb')
    cursor = pgconn.cursor()

    def _prop(propname):
        # One properties-table lookup per configuration value.
        cursor.execute("""SELECT propvalue from properties where propname = %s""",
                       (propname,))
        return cursor.fetchone()[0]

    self.client_id = _prop(self.IEM_PROPERTIES_CLIENT_ID)
    self.client_secret = _prop(self.IEM_PROPERTIES_CLIENT_SECRET)
    access_token = _prop(self.IEM_PROPERTIES_ACCESS_TOKEN)
    refresh_token = _prop(self.IEM_PROPERTIES_REFRESH_TOKEN)
    oauth = OAuth2(client_id=self.client_id,
                   client_secret=self.client_secret,
                   access_token=access_token,
                   refresh_token=refresh_token,
                   store_tokens=store_tokens)
    self.client = Client(oauth)
def __init__(self, client_id=None, client_secret=None, access_token=None,
             refresh_token=None, store_tokens=None):
    """constructor

    Args:
      client_id (str): The application box client_id
      client_secret (str): The application box client_secret
      access_token (str): The Oauth2 access_token
      refresh_token (str): The Oauth2 refresh_token
      store_tokens (function): The Oauth2 callback on new tokens
    """
    if store_tokens is None:
        st = self.iem_token_callback
    else:
        st = store_tokens
    if client_id is None:
        # No explicit credentials: bootstrap everything from the database
        # (dbbootstrap also builds self.client).
        self.dbbootstrap(st)
    else:
        self.client_id = client_id
        self.client_secret = client_secret
        oauth = OAuth2(client_id=self.client_id,
                       client_secret=self.client_secret,
                       access_token=access_token,
                       refresh_token=refresh_token,
                       store_tokens=st)
        self.client = Client(oauth)
class BoxApp(AppIntegration):
    """BoxApp integration"""
    # Box caps event pages at 500 entries per request.
    _MAX_CHUNK_SIZE = 500

    def __init__(self, config):
        super(BoxApp, self).__init__(config)
        self._client = None
        self._next_stream_position = None

    @classmethod
    def _type(cls):
        return 'admin_events'

    @classmethod
    def service(cls):
        return 'box'

    @classmethod
    def date_formatter(cls):
        """Return a format string for a date, ie: 2017-11-01T00:29:51-00:00

        This format is consistent with the format recommended by Box docs:
            https://developer.box.com/reference#section-date-format
        """
        return '%Y-%m-%dT%H:%M:%S-00:00'

    @classmethod
    def _load_auth(cls, auth_data):
        """Load JWTAuth from Box service account JSON keyfile

        Args:
            auth_data (dict): The loaded keyfile data from a Box service
                account JSON file

        Returns:
            boxsdk.JWTAuth Instance of JWTAuth that allows the client to
                authenticate or False if there was an issue loading the auth
        """
        try:
            auth = JWTAuth.from_settings_dictionary(auth_data)
        except (TypeError, ValueError, KeyError):
            LOGGER.exception('Could not load JWT from settings dictionary')
            return False
        return auth

    def _create_client(self):
        """Box requests must be signed with a JWT keyfile

        Returns:
            bool: True if the Box client was successfully created or False
                if any errors occurred during the creation of the client
        """
        if self._client:
            LOGGER.debug('Client already instantiated for %s', self.type())
            return True

        auth = self._load_auth(self._config.auth['keyfile'])
        if not auth:
            return False

        self._client = Client(auth)
        return bool(self._client)

    @safe_timeout
    def _make_request(self):
        """Make the request using the Box client

        The inner function of `_perform_request` is used to handle a single
        retry in the event of a ConnectionError. If this fails twice, the
        function will return

        Returns:
            dict: Response from Box
                (boxsdk.session.box_session.BoxResponse) that is json loaded
                into a dictionary.
        """
        # Create the parameters for this request, 100 is the max value for limit
        params = {
            'limit': self._MAX_CHUNK_SIZE,
            'stream_type': EnterpriseEventsStreamType.ADMIN_LOGS,
        }

        # From Box's docs: Box responds to the created_before and created_after
        # parameters only if the stream_position parameter is not included.
        if self._next_stream_position:
            params['stream_position'] = self._next_stream_position
        else:
            params['created_after'] = self._last_timestamp

        def _perform_request(allow_retry=True):
            try:
                # Get the events using a make_request call with the box api. This is to
                # support custom parameters such as 'created_after' and 'created_before'
                box_response = self._client.make_request(
                    'GET',
                    self._client.get_url('events'),
                    params=params,
                    timeout=self._DEFAULT_REQUEST_TIMEOUT)
            except BoxException:
                LOGGER.exception('Failed to get events for %s', self.type())
                return False, None  # Return a tuple to conform to return value of safe_timeout
            except ConnectionError:
                # In testing, the requests connection seemed to get reset for no
                # obvious reason, and a simple retry once works fine so catch it
                # and retry once, but after that return False
                LOGGER.exception('Bad response received from host, will retry once')
                if allow_retry:
                    return _perform_request(allow_retry=False)

                return False, None  # Return a tuple to conform to return value of safe_timeout

            # Return a successful status and the JSON from the box response
            # Return a tuple to conform to return value of safe_timeout
            return True, box_response.json()

        return _perform_request()

    def _gather_logs(self):
        """Gather the Box Admin Events

        The ideal way to do this would be to use the boxsdk.events.Events class
        and the `get_events` method to retrieve these events. However, this
        method does allow you to pass keyword arguments (such as params) which
        are needed for specifying the 'created_after' parameter.

        Returns:
            bool or list: If the execution fails for some reason, return False.
                Otherwise, return a list of box admin event entries.
        """
        if not self._create_client():
            LOGGER.error('Could not create box client for %s', self.type())
            return False

        result, response = self._make_request()

        # If the result is False, errors would be previously logged up
        # the stack before this, so just return False
        if not result:
            return False

        if not response:
            LOGGER.error('No results received from the Box API request for %s',
                         self.type())
            return False

        self._more_to_poll = int(response['chunk_size']) >= self._MAX_CHUNK_SIZE

        events = response.get('entries', [])
        if not events:
            LOGGER.info('No events in response from the Box API request for %s',
                        self.type())
            return False

        self._next_stream_position = response['next_stream_position']

        self._last_timestamp = events[-1]['created_at']

        return events

    @classmethod
    def _required_auth_info(cls):
        # Use a validation function to ensure the file the user provides is valid
        def keyfile_validator(keyfile):
            """A JSON formatted Box service account private key file key"""
            try:
                with open(keyfile.strip(), 'r') as json_keyfile:
                    auth_data = json.load(json_keyfile)
            except (IOError, ValueError):
                return False

            if not cls._load_auth(auth_data):
                return False

            return auth_data

        return {
            'keyfile': {
                'description': ('the path on disk to the JSON formatted Box '
                                'service account private key file'),
                'format': keyfile_validator
            }
        }

    def _sleep_seconds(self):
        """Return the number of seconds this polling function should sleep for
        between requests to avoid failed requests. The Box API has a limit of
        10 API calls per second per user, which we will not hit, so return 0
        here.

        Returns:
            int: Number of seconds that this function should sleep for between
                requests
        """
        return 0
class Provider(StorageABC):
    kind = "box"

    sample = "TODO: missing"
    output = {}  # "TODO: missing"

    def __init__(self, service=None):
        super().__init__(service=service)
        # JWT service-account auth from the configured Box settings file.
        self.sdk = JWTAuth.from_settings_file(self.credentials['config_path'])
        self.client = Client(self.sdk)

    def put(self, source=None, destination=None, recursive=False):
        """
        uploads file to Box, if source is directory and recursive is true
        uploads all files in source directory

        :param source: local file or directory to be uploaded
        :param destination: cloud directory to upload to
        :param recursive: if true upload all files in source directory,
                          source must be directory not file
        :return: file dict(s) that have been uploaded
        """
        try:
            dest = basename(destination)
            sourcepath = change_path(source)
            sourcebase = basename(sourcepath)
            uploaded = []
            files = []
            # Resolve (or create) the destination folder id.
            if dest == '':
                files += [item for item in self.client.folder('0').get_items()
                          if item.type == 'file']
                folder_id = '0'
            else:
                items = self.client.search().query(dest, type='folder')
                folders = [item for item in items]
                folder_id = get_id(dest, folders, 'folder')
                if folder_id is not None:
                    files += [item for item
                              in self.client.folder(folder_id).get_items()
                              if item.type == 'file']
                else:
                    # Destination folder missing: look up its parent and
                    # create the destination inside it.
                    items = self.client.search().query(
                        basename(dirname(destination)), type='folder')
                    folders = [item for item in items]
                    # NOTE(review): this re-queries with the parent name but
                    # still matches on `dest` — confirm intended.
                    folder_id = get_id(dest, folders, 'folder')
                    if folder_id is not None:
                        new_folder = self.client.folder(
                            folder_id).create_subfolder(dest)
                        folder_id = new_folder.id
                    else:
                        Console.error("Invalid destination.")
                        return
            if not recursive:
                if os.path.isfile(sourcepath):
                    filename = sourcebase
                else:
                    Console.error("Invalid source path.")
                    return
                file_id = get_id(filename, files, 'file')
                if file_id is None:
                    file = self.client.folder(folder_id).upload(sourcepath)
                    files_dict = update_dict(file)
                    return files_dict
                else:
                    # Same name already present: update contents instead.
                    file = self.client.file(file_id).update_contents(sourcepath)
                    files_dict = update_dict(file)
                    return files_dict
            else:
                # Breadth-first upload of the source tree.
                folder_ids = [folder_id]
                uploads = [[s for s in os.listdir(source)]]
                while len(uploads) > 0:
                    for s in uploads[0]:
                        if os.path.isdir(s):
                            uploads += [d for d in os.listdir(s)]
                            new = self.client.folder(
                                folder_ids[0]).create_subfolder(s)
                            folder_ids.append(new.id)
                        else:
                            s_id = get_id(s, files, 'file')
                            if s_id is None:
                                file = self.client.folder(folder_ids[0]).upload(
                                    sourcepath + '/' + s)
                                uploaded.append(file)
                            else:
                                file = self.client.file(s_id).update_contents(
                                    sourcepath + '/' + s)
                                uploaded.append(file)
                    uploads.pop(0)
                    folder_ids.pop(0)
                files_dict = update_dict(uploaded)
                return files_dict
        except Exception as e:
            Console.error(e)

    def get(self, source=None, destination=None, recursive=False):
        """
        downloads file from Box, if recursive is true and source is directory
        downloads all files in directory

        :param source: cloud file or directory to download
        :param destination: local directory to be downloaded into
        :param recursive: if true download all files in source directory,
                          source must be directory
        :return: file dict(s) that have been downloaded
        """
        try:
            target = basename(source)
            dest = change_path(destination)
            downloads = []
            if recursive:
                if target == '':
                    files = [item for item in self.client.folder('0').get_items()
                             if item.type == 'file']
                    folders = [item for item in self.client.folder('0').get_items()
                               if item.type == 'folder']
                else:
                    results = [item for item in
                               self.client.search().query(target, type='folder')]
                    folder_id = get_id(target, results, 'folder')
                    if folder_id:
                        files = [item for item in
                                 self.client.folder(folder_id).get_items()
                                 if item.type == 'file']
                        folders = [item for item in
                                   self.client.folder(folder_id).get_items()
                                   if item.type == 'folder']
                    else:
                        Console.error("Source directory not found.")
                        return
                # Breadth-first walk of subfolders, accumulating files.
                while len(folders) > 0:
                    files += [item for item in
                              self.client.folder(folders[0].id).get_items()
                              if item.type == 'file']
                    folders += [item for item in
                                self.client.folder(folders[0].id).get_items()
                                if item.type == 'folder']
                    folders.pop(0)
                for f in files:
                    if f.type == 'file':
                        file = self.client.file(f.id).get()
                        full_dest = join(dest, file.name)
                        with open(full_dest, 'wb') as file_dest:
                            self.client.file(file.id).download_to(file_dest)
                        downloads.append(file)
                files_dict = update_dict(downloads)
                return files_dict
            else:
                results = [item for item in self.client.search().query(target)]
                if not any(result.name == target for result in results):
                    Console.error("Source file not found.")
                else:
                    file_id = get_id(target, results, 'file')
                    if file_id is not None:
                        file = self.client.file(file_id).get()
                        full_dest = join(dest, file.name)
                        with open(full_dest, 'wb') as f:
                            self.client.file(file.id).download_to(f)
                        files_dict = update_dict(file)
                        return files_dict
        except Exception as e:
            Console.error(e)

    def search(self, directory=None, filename=None, recursive=False):
        """
        searches directory for file, if recursive searches all subdirectories

        :param directory: cloud directory to search in
        :param filename: name of file to search for
        :param recursive: if true search all child directories of original
                          directory
        :return: file dict(s) matching filename in specified directory
        """
        try:
            cloud_dir = basename(directory)
            results = []
            if cloud_dir == '':
                files = [item for item in self.client.folder('0').get_items()
                         if item.type == 'file']
                folders = [item for item in self.client.folder('0').get_items()
                           if item.type == 'folder']
            else:
                items = self.client.search().query(cloud_dir, type='folder')
                folder_id = get_id(cloud_dir, items, 'folder')
                if not folder_id:
                    Console.error("Directory not found.")
                # NOTE(review): execution continues with folder_id=None here;
                # the listing below would then fail into the except — confirm.
                files = [item for item in
                         self.client.folder(folder_id).get_items()
                         if item.type == 'file']
                folders = [item for item in
                           self.client.folder(folder_id).get_items()
                           if item.type == 'folder']
            for file in files:
                if filename in file.name:
                    results.append(file)
            if not recursive:
                if len(results) > 0:
                    files_dict = update_dict(results)
                    return files_dict
                else:
                    Console.error("No files found.")
            else:
                # Walk every subfolder, matching on substring of the name.
                while len(folders) > 0:
                    files = [item for item in
                             self.client.folder(folders[0].id).get_items()
                             if item.type == 'file']
                    folders += [item for item in
                                self.client.folder(folders[0].id).get_items()
                                if item.type == 'folder']
                    for file in files:
                        if filename in file.name:
                            results.append(file)
                    folders.pop(0)
                if len(results) > 0:
                    files_dict = update_dict(results)
                    return files_dict
                else:
                    Console.error("No files found.")
        except Exception as e:
            Console.error(e)

    def create_dir(self, directory=None):
        """
        creates a new directory

        :param directory: path for new directory
        :return: dict of new directory
        """
        try:
            path = directory.split('/')
            new_dir = basename(directory)
            if len(path) == 1:
                Console.error('Invalid path specified.')
            else:
                parent = path[len(path) - 2]
                if parent == '':
                    # Parent is the root folder.
                    folder = self.client.folder('0').create_subfolder(new_dir)
                    folder_dict = update_dict(folder)
                    return folder_dict
                folders = [item for item in
                           self.client.search().query(parent, type='folder')]
                if len(folders) > 0:
                    parent = folders[0].id
                    folder = self.client.folder(parent).create_subfolder(new_dir)
                    folder_dict = update_dict(folder)
                    return folder_dict
                else:
                    Console.error("Destination directory not found")
        except Exception as e:
            Console.error(e)

    def list(self, source=None, recursive=False):
        """
        lists all contents of directory, if recursive lists contents of
        subdirectories as well

        :param source: cloud directory to list all contents of
        :param recursive: if true list contents of all child directories
        :return: dict(s) of files and directories
        """
        try:
            result_list = []
            subfolders = []
            path = basename(source)
            if path == '':
                contents = [item for item in self.client.folder('0').get_items()]
                for c in contents:
                    if c.type == 'folder':
                        subfolders.append(c)
                    result_list.append(c)
            else:
                folders = [item for item in
                           self.client.search().query(path, type='folder')]
                folder_id = get_id(path, folders, 'folder')
                if folder_id:
                    contents = [result for result in
                                self.client.folder(folder_id).get_items()]
                    for c in contents:
                        if c.type == 'folder':
                            subfolders.append(c)
                        result_list.append(c)
                else:
                    Console.error("Directory " + path + " not found.")
            if recursive:
                while len(subfolders) > 0:
                    contents = [item for item in
                                self.client.folder(subfolders[0].id).get_items()]
                    for c in contents:
                        if c.type == 'folder':
                            subfolders.append(c)
                        result_list.append(c)
                    subfolders.pop(0)
            list_dict = update_dict(result_list)
            return list_dict
        except Exception as e:
            Console.error(e)

    def delete(self, source=None, recursive=False):
        """
        deletes file or directory

        :param source: file or directory to be deleted
        :param recursive: copy the directory recurseively
        :return: None
        """
        try:
            path = source.strip('/').split('/')
            name = path[len(path) - 1]
            items = self.client.search().query(name, type='file')
            files = [item for item in items]
            items2 = self.client.search().query(name, type='folder')
            folders = [item2 for item2 in items2]
            results = files + folders
            deleted = []
            if not any(result.name == name for result in results):
                Console.error("Source not found.")
            else:
                # Delete the first exact-name match (file or folder).
                item_ind = next((index for (index, result) in enumerate(results)
                                 if (result.name == name)), None)
                item_id = results[item_ind].id
                item_type = results[item_ind].type
                deleted.append(results[item_ind])
                if item_type == 'folder':
                    self.client.folder(item_id).delete()
                elif item_type == 'file':
                    self.client.file(item_id).delete()
                result_list = update_dict(deleted)
                return result_list
        except Exception as e:
            Console.error(e)
from boxsdk import OAuth2 from boxsdk import Client import argparse import os oauth = OAuth2( client_id='bde3fxtg8ysjbrtdhlflftc1u9brsnbl', client_secret='jxfAFzhTdPA2DXBAIXyz4fIPl4OjzwAR', access_token='Sri58hN43NPONxezfk74vJgVeLlNSmyv', ) client = Client(oauth) root_folder = client.folder(folder_id='0') # print('root_folder_with_info.name:' + root_folder_with_info.name) parser = argparse.ArgumentParser(description='download a file from Box') parser.add_argument('--file', '-f', help='file path') parser.add_argument('--dstpath', '-d', help='destination path') args = parser.parse_args() if not args.file: print("Please specify the file" + "Usage: upload.py -f file/path -d Box/path") exit(1) dwn_file = args.file dst_file = args.dstpath # download def download(filedata): if args.dstpath: if os.path.isdir(dst_file):
if args.credentials_file is not None: credentials.to_pickle(args.credentials_file) oauth = OAuth2( client_id=credentials.loc['client_id'], client_secret=credentials.loc['client_secret'], ) auth_url, csrf_token = oauth.get_authorization_url(args.redirect_url) print(f'Authorization URL: {auth_url}') access_code = getpass('Access code (from redirect URL): ') access_token, refresh_token = oauth.authenticate(access_code) client = Client(oauth) folder_files_path = os.path.join(args.source_directory, args.file_pattern) folder_files = glob(folder_files_path) file_info_df = pd.DataFrame(columns=[ 'file_path', 'file_name', 'file_sha1', 'box_share_url', 'wget_command' ]) # get current files in folder box_items = list( client.folder(folder_id=args.target_box_directory).get_items()) box_items = {f.name: f for f in box_items} n_items_in_folder = len(box_items) for file_idx, file in enumerate(folder_files):
def __init__(self, service=None): super().__init__(service=service) self.sdk = JWTAuth.from_settings_file(self.credentials['config_path']) self.client = Client(self.sdk)
from boxsdk import JWTAuth from boxsdk import Client import datetime import sys auth = JWTAuth.from_settings_file( '/Users/tanegu/Desktop/197894819_4r2t9oka_config.json') client = Client(auth) #file_path = '/Users/tanegu/Desktop/boxapi/test2.rtf' #file_name = 'test2' #folder_id = '0' file_id_01 = '460527349299' file_info_01 = client.file(file_id_01).get() print(file_info_01, end="") file_id_02 = '460527606671' file_info_02 = client.file(file_id_02).get() print(file_info_02, end="") file_id_03 = '460530323749' file_info_03 = client.file(file_id_03).get() print(file_info_03, end="") file_id_04 = '460540008243' file_info_04 = client.file(file_id_04).get()
ACCESS_TOKEN = 'LlutB466K4MJcGj2r1JkoLZLeqR40DqL' # box app access token (or developer token, in configuration tab) CLIENT_ID = '66zcvd8k07vlxvuuokqwq0h8rihgksuk' # OAuth2.0 client ID for box app (in configuration tab) CLIENT_SECRET = '8eMkGQBVhuR0F0l5zSWNewYcwGXqcPKE' # OAuth2.0 client secret (in configuration tab) # Define Box folder ID (can find from URL), and path to deposit folder folderID = '104816745525' # ID of box folder to download on box path = '/media/arevell/sharedSSD/linux/data/BIDS/PIER/' auth = OAuth2( client_id=CLIENT_ID, client_secret=CLIENT_SECRET, access_token=ACCESS_TOKEN, ) client = Client(auth) folderID = "102659909076" #CNT reconstruction folder name folder = client.folder(folder_id=folderID).get() subfolders = client.folder(folder_id=folderID).get_items() #get all patient folder IDS for subfolder in subfolders: if not "OLD" in subfolder.name: if not "RNS" in subfolder.name : #print(f"{subfolder.name} {subfolder.id}") subBOX = subfolder.name[0:6] #get RID subfolderID = subfolder.id if subBOX[0:3] == "RID": #If folder name actually begins with RID #get actaul RID number with 4 digits sub = subBOX[0:3] + "0" + subBOX[3:]
from boxsdk import OAuth2 from boxsdk import Client import io from boxsdk.exception import BoxAPIException oauth = OAuth2( client_id='bde3fxtg8ysjbrtdhlflftc1u9brsnbl', client_secret='jxfAFzhTdPA2DXBAIXyz4fIPl4OjzwAR', access_token='Zd3HyETzTvivOXZFqksBbIgZZlWrbgMe', ) client = Client(oauth) root_folder = client.folder(folder_id='0') root_folder_with_info = root_folder.get() # shared_folder = root_folder.create_subfolder('shared_folder') # uploaded_file = shared_folder.upload('test.txt') # shared_link = shared_folder.get_shared_link() print('root_folder_with_info.name:' + root_folder_with_info.name) root_folder_with_limited_info = root_folder.get(fields=['owned_by']) print(root_folder_with_limited_info.owned_by) # print('root_folder_with_limited_info:' + root_folder_with_limited_info.owned_by) folder_info = client.folder(folder_id='me') print(folder_info) print(client.file(file_id='me')) me = client.user(user_id='me').get() print(me) print('name:' + me.name) print('login:' + me.login)
class BoxComFSProvider(FSProvider):
    """Filesystem provider backed by the Box.com API."""

    def __init__(self, root, config, client):
        """
        :param root: the root path for this provider
        :param config: the dict of the configuration of the object
        :param client: dict-like config holding the "box_com_connection" preset
        """
        # Normalise the root: drop a single leading slash.
        if len(root) > 0 and root[0] == '/':
            root = root[1:]
        self.root = root
        self.connection = client.get("box_com_connection")
        self.access_token = self.connection['access_token']
        self.cache_enabled = config.get("cache_enabled")
        if self.cache_enabled:
            # Cache file keyed on the token so each connection gets its
            # own cache file.
            cache_file_name = hashlib.sha1(self.access_token.encode('utf-8')).hexdigest()
        else:
            cache_file_name = None
        # client_id/secret are unused when a ready access token is supplied.
        auth = OAuth2(
            client_id="",
            client_secret="",
            access_token=self.access_token
        )
        main_session = AuthorizedSession(auth, network_layer=LessVerboseLoggingNetwork())
        self.client = Client(auth, main_session)
        self.user = self.client.user().get()
        self.box_item = BoxItem(cache_file_name, root, self.client)
        self.box_item.check_path_format(get_normalized_path(root))

    def close(self):
        """
        Perform any necessary cleanup
        """
        self.box_item.close()

    def stat(self, path):
        """
        Get the info about the object at the given path inside the provider's root, or None
        if the object doesn't exist
        """
        full_path = get_full_path(self.root, path)
        box_item = self.box_item.get_by_path(full_path)
        if box_item.not_exists():
            return None
        return box_item.get_stat()

    def set_last_modified(self, path, last_modified):
        """
        Set the modification time on the object denoted by path. Return False if not possible
        """
        # Not supported through the Box API here; always report failure.
        return False

    def browse(self, path):
        """
        List the file or directory at the given path, and its children (if directory)
        """
        normalized_path = get_normalized_path(path)
        full_path = get_full_path(self.root, path)
        item = self.box_item.get_by_path(get_rel_path(full_path))
        if item.not_exists():
            return {'fullPath': normalized_path, 'exists': False}
        if item.is_folder():
            return {'fullPath': normalized_path, 'exists': True, 'directory': True,
                    'children': item.get_children(normalized_path),
                    'lastModified': item.get_last_modified()}
        else:
            return item.get_as_browse()

    def enumerate(self, path, first_non_empty):
        """
        Enumerate files recursively from prefix. If first_non_empty, stop at the first
        non-empty file. If the prefix doesn't denote a file or folder, return None
        """
        full_path = get_full_path(self.root, path)
        normalized_path = get_normalized_path(path)
        item = self.box_item.get_by_path(full_path)
        if item.not_exists():
            return None
        paths = []
        if item.is_folder():
            paths = self.list_recursive(normalized_path, item.id, first_non_empty)
        else:
            paths.append({'path': normalized_path.split("/")[-1],
                          'size': item.size,
                          'lastModified': int(0) * 1000})
        return paths

    def list_recursive(self, path, folder_id, first_non_empty):
        """Depth-first listing of folder_id, returning {'path','size'} dicts."""
        paths = []
        if path == "/":
            path = ""
        for child in self.client.folder(folder_id).get_items(
                fields=['modified_at', 'name', 'type', 'size']):
            if child.type == self.box_item.BOX_FOLDER:
                paths.extend(self.list_recursive(path + '/' + child.name,
                                                 child.id, first_non_empty))
            else:
                paths.append({'path': path + '/' + child.name, 'size': child.size})
                if first_non_empty:
                    return paths
        return paths

    def delete_recursive(self, path):
        """
        Delete recursively from path. Return the number of deleted files (optional)
        """
        full_path = get_full_path(self.root, path)
        item = self.box_item.get_by_path(full_path, force_no_cache=True)
        if item.not_exists():
            return 0
        else:
            ret = item.delete()
            # The cache no longer matches the remote tree; drop it wholesale.
            self.box_item.cache.reset()
            return ret

    def move(self, from_path, to_path):
        """
        Move a file or folder to a new path inside the provider's root.
        Return false if the moved file didn't exist
        """
        full_from_path = get_full_path(self.root, from_path)
        full_to_path = get_full_path(self.root, to_path)
        from_base, from_item_name = os.path.split(full_from_path)
        to_base, to_item_name = os.path.split(full_to_path)
        from_item = self.box_item.get_by_path(full_from_path, force_no_cache=True)
        if from_item.not_exists():
            return False
        from_item_id = from_item.get_id()
        from_item_is_folder = from_item.is_folder()
        to_item = self.box_item.get_by_path(full_to_path, force_no_cache=True)
        if to_item.not_exists():
            # Target does not exist yet: resolve its parent folder instead.
            to_item = self.box_item.get_by_path(to_base, force_no_cache=True)
        destination_folder = self.client.folder(to_item.get_id())
        if from_item_is_folder:
            source = self.client.folder(from_item_id)
        else:
            source = self.client.file(from_item_id)
        # Same name -> real move; different name -> rename in place.
        if from_item_name == to_item_name:
            source.move(destination_folder)
        else:
            source.rename(to_item_name)
        return True

    def read(self, path, stream, limit):
        """Copy the file at path into stream, honouring an optional byte limit."""
        full_path = get_full_path(self.root, path)
        byte_range = None
        # BUG FIX: the original tested `limit is not "-1"`, an *identity*
        # comparison against a str literal (implementation-defined and a
        # SyntaxWarning on modern CPython); equality is what was meant.
        if limit is not None and limit != "-1":
            int_limit = int(limit)
            if int_limit > 0:
                byte_range = (0, int(limit) - 1)
        item = self.box_item.get_by_path(full_path)
        if item.not_exists():
            raise Exception('Path doesn t exist')
        shutil.copyfileobj(item.get_stream(byte_range), stream)

    def write(self, path, stream):
        """
        Write the stream to the object denoted by path into the stream
        """
        full_path = get_full_path(self.root, path)
        item = self.box_item.create_path(full_path, force_no_cache=True)
        if item.is_folder():
            item.write_stream(stream)
        else:
            raise Exception('Not a file name')
thick_border = Border(left=Side(style=None), right=Side(style=None), top=Side(style='medium'), bottom=Side(style='medium')) yellowFill = PatternFill(start_color='FFFF00', end_color='FFFF00', fill_type='solid') grayFill = PatternFill(start_color='BFBFBF', end_color='BFBFBF', fill_type='solid') # Login Credentials # CLIENT_ID = "akbuknoxh68mlusnhqio5n4lh43emdxf" CLIENT_SECRET = "WCxlaGEehP9itIwzLV1oJ2CYrsm9Kb4l" oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) client = Client(oauth2) workingFolder = '0' def download_file(): fid = '0' # All of the items in the box user's 'root' directory. items = client.folder(folder_id=fid).get_items(limit=100, offset=0) # Find the appropriate directory for x in items: if "Senior Design Test" in str(x): fid = int(str(x).split("(")[0].split()[-1]) workingFolder = fid items = client.folder(folder_id=fid).get_items(limit=100, offset=0)
def build(self): self.client = Client(self.oauth) self.create_folder("demerio")
} data = { 'client_id': client_id, 'client_secret': client_secret, 'refresh_token': access_token, 'grant_type': 'refresh_token' } response = requests.post('https://api.box.com/oauth2/token', headers=headers, data=data) return response if __name__ == "__main__": # Read app info from text file with open('app.cfg', 'r') as app_cfg: CLIENT_ID = app_cfg.readline() CLIENT_SECRET = app_cfg.readline() ACCESS_TOKEN = app_cfg.readline() try: # Create OAuth2 object. It's already authenticated, thanks to the developer token. oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) # Create the authenticated client client = Client(oauth2, LoggingNetwork()) my = client.user(user_id='me').get() root_folder = client.folder('0') root_folder_with_info = root_folder.get() except: refresh_access_token(CLIENT_ID, CLIENT_SECRET, ACCESS_TOKEN) pass
# Read app info from text file with open('app.cfg', 'r') as app_cfg: CLIENT_ID = app_cfg.readline() CLIENT_SECRET = app_cfg.readline() ACCESS_TOKEN = app_cfg.readline() # print ("CLIENT_ID:%s" % CLIENT_ID) # print ("CLIENT_SECRET:%s" % CLIENT_SECRET) # print ("ACCESS_TOKEN:%s" % ACCESS_TOKEN) # Create OAuth2 object. It's already authenticated, thanks to the developer token. oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) # Create the authenticated client client = Client(oauth2) # file_id: csv formatted list of faculty names (columns 1,2), email (col 3) and netid (4) my_file_id=YOUR_FILE_ID import html2text lines = client.file(file_id=my_file_id).content().decode("utf-8", "replace").splitlines() import requests import json import numpy baseURL = "https://experts.illinois.edu/ws/api/59/persons/" endpoint = "research-outputs.json" apiKey = YOUR_API_KEY
end_color='FFFFFF', fill_type='solid') grayFill = PatternFill(start_color='BFBFBF', end_color='BFBFBF', fill_type='solid') brownFill = PatternFill(start_color='DDD9C3', end_color='DDD9C3', fill_type='solid') bold = Font(bold=True) # Login Credentials # CLIENT_ID = "ka44qzs9e9cnabzmaaxmmi2mrpll9j32" #"akbuknoxh68mlusnhqio5n4lh43emdxf" CLIENT_SECRET = "T8Oxv8xK4ObJY6SqMzLQXZIucFxGqlvs" #"WCxlaGEehP9itIwzLV1oJ2CYrsm9Kb4l" if DEVMODE == False: oauth2 = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=ACCESS_TOKEN) client = Client(oauth2) # workingFolder='0' workingFolder = '4263683603' def download_file(): fid = '4263683603' # All of the items in the box user's 'root' directory. items = client.folder(folder_id=fid).get_items(limit=100, offset=0) # Find the appropriate directory for x in items: if "Attendance Management" in str(x): fid = int(str(x).split("(")[0].split()[-1])
class BoxInstance(object):
    """Thin wrapper around the Box python SDK.

    Handles OAuth2 credential storage/refresh and exposes list, upload,
    download and delete helpers used by the sync code.
    """

    def __init__(self, number_to_process=-1,
                 credential_file=HOMEDIR + '/.box/credentials'):
        """ init function """
        self.credential_file = credential_file
        self.redirect_uri = ''
        self.client_id = ''
        self.client_secret = ''
        self.list_of_keys = {}
        self.list_of_mimetypes = {}
        self.items_processed = 0
        self.list_of_folders = {}
        self.list_of_items = {}
        self.number_to_process = number_to_process
        self.read_credentials()
        self.client = self.get_auth()

    def store_tokens(self, access_token, refresh_token):
        """Persist OAuth tokens (callback invoked by the SDK on refresh).

        BUG FIX: pickle needs a binary file handle; the original opened
        the file in text mode ('w'), which raises TypeError on Python 3
        (get_auth already reads the file back with 'rb').
        """
        with open(os.path.join(HOMEDIR, '.box_tokens.pkl'), 'wb') as credfile:
            tmp = (access_token, refresh_token)
            pickle.dump(obj=tmp, file=credfile, protocol=pickle.HIGHEST_PROTOCOL)

    def read_credentials(self, credential_file=HOMEDIR + '/.box/credentials'):
        """ read credentials from file """
        # Each line: "<key> <value>"; only the three known keys are kept.
        with open(credential_file, 'r') as credfile:
            for line in credfile:
                key_, val_ = line.split()[:2]
                for key in ('redirect_uri', 'client_id', 'client_secret'):
                    if key.lower() == key_.strip().lower():
                        setattr(self, key, val_)

    def get_auth(self):
        """ do authorization, reusing pickled tokens when present """
        if os.path.exists(os.path.join(HOMEDIR, '.box_tokens.pkl')):
            # Tokens cached from a previous run; no interactive flow needed.
            with open(os.path.join(HOMEDIR, '.box_tokens.pkl'), 'rb') as pfile:
                self.access_token, self.refresh_token = pickle.load(pfile)
            self.oauth = OAuth2(
                client_id=self.client_id,
                client_secret=self.client_secret,
                store_tokens=self.store_tokens,
                access_token=self.access_token,
                refresh_token=self.refresh_token)
        else:
            # First run: interactive OAuth flow through the browser.
            self.oauth = OAuth2(
                client_id=self.client_id,
                client_secret=self.client_secret,
                store_tokens=self.store_tokens)
            auth_url, csrf_token = self.oauth.get_authorization_url(self.redirect_uri)
            code = get_auth_code(auth_url, self.redirect_uri)
            print(code)
            self.access_token, self.refresh_token = \
                self.oauth.authenticate(code)
        self.client = Client(self.oauth)
        return self.client

    def list_files(self, callback_fn, number_to_process=-1):
        """ list non-directory files, invoking callback_fn on each item """
        fields = [
            'id', 'size', 'etag', 'description', 'parent', 'name', 'type',
            'modified_at', 'sha1'
        ]
        # Dict so the nested function can mutate the counter (py2-compatible
        # alternative to `nonlocal`).
        number_complete = {'count': 0}

        def walk_nodes(parentid='0'):
            # Depth-first walk, paging 100 items at a time.
            parent_node = self.client.folder(folder_id=parentid).get()
            cur_offset = 0
            while True:
                new_items = parent_node.get_items(limit=100,
                                                  offset=cur_offset,
                                                  fields=fields)
                if not new_items:
                    break
                for item in new_items:
                    if number_to_process > 0 \
                            and number_complete['count'] > number_to_process:
                        break
                    number_complete['count'] += 1
                    item = item._response_object
                    item['parentid'] = parentid
                    if item.get('type', '') == 'folder':
                        walk_nodes(parentid=item['id'])
                    else:
                        callback_fn(item)
                #print(parent_node._response_object['name'], cur_offset)
                cur_offset += 100

        walk_nodes(parentid='0')

    def get_folders(self, callback_fn, number_to_process=-1):
        """ get folders, invoking callback_fn on each folder node """
        number_complete = {'count': 0}

        def walk_nodes(parentid='0'):
            parent_node = self.client.folder(folder_id=parentid).get()
            item_col = parent_node._response_object.get('item_collection', {})
            entries = item_col.get('entries', [])
            for item in entries:
                item['parentid'] = parentid
                if item.get('type', '') == 'folder':
                    if number_to_process > 0 \
                            and number_complete['count'] > number_to_process:
                        return
                    number_complete['count'] += 1
                    node = self.client.folder(folder_id=item['id']).get()
                    node = node._response_object
                    node['parentid'] = item['parentid']
                    callback_fn(node)
                    walk_nodes(parentid=item['id'])

        walk_nodes(parentid='0')

    def download(self, did, exportfile, sha1sum=None):
        """ download file `did` to exportfile, verifying sha1sum if given

        BUG FIX: download_to() writes bytes, so the temp file must be
        opened in binary mode ('wb'), not text mode.
        """
        dirname = os.path.dirname(os.path.abspath(exportfile))
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        # Write to a '.new' temp file and rename only after validation,
        # so a failed download never clobbers an existing file.
        with open(exportfile + '.new', 'wb') as outfile:
            self.client.file(file_id=did).download_to(outfile)
        if sha1sum:
            from sync_app.util import get_sha1
            sha = get_sha1(exportfile + '.new')
            if sha != sha1sum:
                raise TypeError('%s %s' % (sha, sha1sum))
        os.rename('%s.new' % exportfile, exportfile)
        return True

    def upload(self, fname, parent_id='0'):
        """ upload fname and assign parent_id if provided """
        bname = os.path.basename(fname)
        parent = self.client.folder(folder_id=parent_id)
        try:
            file_obj = parent.upload(file_path=fname, file_name=bname).get()
        except BoxAPIException as exc:
            print('BoxAPIException upload %s' % exc)
            raise
        item = file_obj._response_object
        item['parentid'] = parent_id
        return item

    def create_directory(self, dname, parent_id='0'):
        """ create directory, assign parent_id if supplied """
        if not parent_id:
            raise ValueError('need to specify parent_id')
        parent = self.client.folder(folder_id=parent_id)
        try:
            parent.create_subfolder(dname)
        except BoxAPIException as exc:
            # Most likely the folder already exists; fall through and
            # look it up below.
            print('create_directory BoxAPIException %s %s' % (dname, exc))
        parent = parent.get()
        item = parent._response_object
        items = item.get('item_collection', {}).get('entries', [])
        for item in items:
            if item['type'] == 'folder' and item['name'] == dname:
                item['parentid'] = parent_id
                return item

    def delete_directory(self, dirid):
        """ delete directory by folderid """
        return self.client.folder(folder_id=dirid).delete()

    def delete_file(self, fileid):
        """ delete file by fileid """
        return self.client.file(file_id=fileid).delete()
class Session(object):
    """Represents a ongoing / running session with Box API"""

    IEM_PROPERTIES_ACCESS_TOKEN = 'boxclient.access_token'
    IEM_PROPERTIES_REFRESH_TOKEN = 'boxclient.refresh_token'
    IEM_PROPERTIES_CLIENT_ID = 'boxclient.client_id'
    IEM_PROPERTIES_CLIENT_SECRET = 'boxclient.client_secret'

    def __init__(self, client_id=None, client_secret=None, access_token=None,
                 refresh_token=None, store_tokens=None):
        """constructor

        Args:
          client_id (str): The application box client_id
          client_secret (str): The application box client_secret
          access_token (str): The Oauth2 access_token
          refresh_token (str): The Oauth2 refresh_token
          store_tokens (function): The Oauth2 callback on new tokens
        """
        st = self.iem_token_callback if store_tokens is None else store_tokens
        if client_id is None:
            # No explicit credentials: pull everything from the IEM database.
            self.dbbootstrap(st)
        else:
            self.client_id = client_id
            self.client_secret = client_secret
            oauth = OAuth2(client_id=self.client_id,
                           client_secret=self.client_secret,
                           access_token=access_token,
                           refresh_token=refresh_token,
                           store_tokens=st)
            self.client = Client(oauth)

    def dbbootstrap(self, store_tokens):
        """Get configuration from IEM Database"""
        pgconn = psycopg2.connect(database='mesosite', host='iemdb')
        cursor = pgconn.cursor()
        cursor.execute("""SELECT propvalue from properties where
        propname = %s""", (self.IEM_PROPERTIES_CLIENT_ID,))
        self.client_id = cursor.fetchone()[0]
        cursor.execute("""SELECT propvalue from properties where
        propname = %s""", (self.IEM_PROPERTIES_CLIENT_SECRET,))
        self.client_secret = cursor.fetchone()[0]
        cursor.execute("""SELECT propvalue from properties where
        propname = %s""", (self.IEM_PROPERTIES_ACCESS_TOKEN,))
        access_token = cursor.fetchone()[0]
        cursor.execute("""SELECT propvalue from properties where
        propname = %s""", (self.IEM_PROPERTIES_REFRESH_TOKEN,))
        refresh_token = cursor.fetchone()[0]
        oauth = OAuth2(client_id=self.client_id,
                       client_secret=self.client_secret,
                       access_token=access_token,
                       refresh_token=refresh_token,
                       store_tokens=store_tokens)
        self.client = Client(oauth)

    def iem_token_callback(self, access_token, refresh_token):
        """Persist refreshed tokens to the IEM database and rebuild the client."""
        oauth = OAuth2(client_id=self.client_id,
                       client_secret=self.client_secret,
                       access_token=access_token,
                       refresh_token=refresh_token,
                       store_tokens=self.iem_token_callback)
        self.client = Client(oauth)
        pgconn = psycopg2.connect(database='mesosite', host='iemdb')
        cursor = pgconn.cursor()
        for propname, propvalue in zip([self.IEM_PROPERTIES_ACCESS_TOKEN,
                                        self.IEM_PROPERTIES_REFRESH_TOKEN],
                                       [access_token, refresh_token]):
            cursor.execute("""
                UPDATE properties SET propvalue = %s WHERE propname = %s
            """, (propvalue, propname))
        cursor.close()
        pgconn.commit()

    def get_folder(self, remote_folder):
        """Get or Create a remote folder on Box

        Args:
          remote_folder (str): the full remote path of the folder
        """
        # print("get_folder(%s)" % (repr(remote_folder),))
        dirs = remote_folder.split("/")
        root = self.client.folder(folder_id=0)
        for dirname in dirs:
            if dirname == '':
                continue
            # BUG folders over 1000 items :/
            found = False
            for item in root.get_items(1000):
                if item.name == dirname:
                    root = self.client.folder(item.object_id)
                    found = True
                    break
            if not found:
                root = root.create_subfolder(dirname)
        return root

    def rmirror(self, local_folder, remote_folder):
        """Recursively send local_folder to remote_folder"""
        for root, _, filenames in os.walk(local_folder):
            # BUG FIX: the original used root.lstrip(local_folder), but
            # str.lstrip strips a *character set*, not a prefix, so it
            # could eat leading characters of the subdirectory names.
            # Compute the true relative path instead.
            rel = os.path.relpath(root, local_folder)
            if rel == '.':
                rel = ''
            boxpath = os.path.join(remote_folder, rel)
            localfns = ["%s/%s" % (root, f) for f in filenames]
            self.uploads(localfns, boxpath, filenames)

    def upload(self, localfn, remote_folder, remotefn=None):
        """Upload a single file to remote path"""
        remotefn = localfn if remotefn is None else remotefn
        self.uploads([localfn, ], remote_folder, [remotefn, ])

    def uploads(self, localfns, remote_folder, remotefns=None):
        """Upload multiple files to remote path

        BUG FIX: the default for remotefns was a mutable list literal;
        None is used instead (empty/None still means "use local names").
        """
        if not remotefns:
            remotefns = localfns
        root = self.get_folder(remote_folder)
        currentitems = {}
        for item in root.get_items(1000):
            currentitems[item.name] = item
        for localfn, remotefn in tqdm(zip(localfns, remotefns),
                                      desc=remote_folder,
                                      disable=(not sys.stdout.isatty())):
            if remotefn in currentitems:
                # Already on Box; no update/versioning attempted.
                continue
            root.upload(localfn, remotefn if remotefn is not None else localfn)
class BoxClient():
    """Convenience wrapper around a Box ``Client`` for bulk user, group,
    upload and folder operations.

    Relies on the module-level globals ``auth``, ``as_users``, ``admins``
    and ``logger`` defined elsewhere in this file.
    """

    def __init__(self):
        self.client = Client(auth)
        self.client_creator = self.client.user()
        self.client_created_time = datetime.datetime.now()
        self.as_users = as_users
        self.logger = logging.getLogger(__name__)

    # Common User Methods
    def get_users(self):
        """
        Return a dictionary with users and their ids
        :return: {user_id: user_name}
        """
        users_dict = dict()
        users = self.client.users(user_type='all')
        for user in users:
            users_dict[user.id] = user.name
        return users_dict

    def get_user_by_email(self, login):
        """
        Searches for a user by email and returns a Box User Object
        :param login: login/email to match exactly
        :return: the matching user, or None
        """
        users = self.client.users(filter_term=login)
        for user in users:
            if user.login == login:
                return user
        # BUG FIX: the original fell through to `return user`, which
        # returned the *last* filtered user even when no login matched
        # exactly -- callers such as delete_user would then act on the
        # wrong account.
        return None

    def create_users(self, upload_method, file, group_name, query):
        """
        Create users at scale
        :param upload_method: 'excel', 'json' or 'db'
        :param file: path of the excel/json source file
        :param group_name: group the new users are assigned to
        :param query: SQL query used when upload_method == 'db'
        :return: a dictionary containing on how many users were created
                 and how many failed to be created
        """
        success_count = 0
        fail_count = 0
        group_id = self.get_group_id(group_name)
        # If the group doesn't exist, create it.
        if not group_id:
            user_input = input(
                "The group %s doesn't exist. Would you like to create it (yes/no)? " % group_name)
            if user_input == "yes":
                group_response = self.create_groups(logger, group_name)
                msg = "Group '%s' successfully created" % group_name
                logger.info(msg)
            elif user_input == "no":
                msg = "No users were added because user opted to not create a new group"
                logger.warning(msg)
                return {
                    'success_count': success_count,
                    'fail_count': fail_count
                }
        # Excel Handler
        if upload_method == 'excel':
            # create a Pandas Dataframe to store Excel Data
            df = pd.read_excel(file)
            row_count = len(df)
            user_input = input(
                "You are about to create %s new user accounts. Are you sure you'd like to continue? (yes/no): " % row_count)
            if user_input == "yes":
                if row_count > 10:
                    payload = list()
                    # generate payload
                    for row in df.itertuples():
                        payload_tuple = (row._1 + ' ' + row._2, row.Email,
                                         group_name)
                        payload.append(payload_tuple)
                    self.create_users_with_thread(payload)
                else:
                    for row in df.itertuples():
                        # BUG FIX: create_user takes a single payload
                        # tuple, not three positional args; also join the
                        # name parts with a space like the threaded path.
                        create_user_response = self.create_user(
                            (row._1 + ' ' + row._2, row.Email, group_name))
                        if create_user_response:
                            success_count += 1
                        else:
                            fail_count += 1
            else:
                logger.info("User chose not to create accounts")
        # JSON Handler
        elif upload_method == 'json':
            with open(file) as json_file:
                data = json.load(json_file)
                row_count = len(data)
                user_input = input(
                    "You are about to create %s new user accounts. Are you sure you'd like to continue? (yes/no): " % row_count)
                if user_input == "yes":
                    if row_count > 10:
                        payload = list()
                        for current_user in data:
                            payload_tuple = (current_user['first_name'] + ' ' +
                                             current_user['last_name'],
                                             current_user['email'], group_name)
                            payload.append(payload_tuple)
                        self.create_users_with_thread(payload)
                    else:
                        for current_user in data:
                            create_user_response = self.create_user(
                                (current_user['first_name'] + ' ' +
                                 current_user['last_name'],
                                 current_user['email'], group_name))
                            if create_user_response:
                                success_count += 1
                            else:
                                fail_count += 1
                else:
                    logger.info("User chose not to create accounts")
        # PostgreSQL Handler
        elif upload_method == 'db':
            db = DB()
            with db.conn.cursor() as cursor:
                cursor.execute(query)
                records = cursor.fetchall()
                num_rows = cursor.rowcount
                user_input = input(
                    "You are about to create %s new user accounts. Are you sure you'd like to continue? (yes/no): " % num_rows)
                if user_input == "yes":
                    if num_rows > 10:
                        payload = list()
                        for row in records:
                            payload_tuple = (row[1] + row[2], row[3],
                                             group_name)
                            payload.append(payload_tuple)
                        self.create_users_with_thread(payload)
                    else:
                        for row in records:
                            login = row[3]
                            # BUG FIX: pass a single payload tuple.
                            create_user_response = self.create_user(
                                (row[1] + row[2], login, group_name))
                            if create_user_response:
                                success_count += 1
                            else:
                                fail_count += 1
                else:
                    logger.info("User chose not to create accounts")
        return {'success_count': success_count, 'fail_count': fail_count}

    def create_user(self, payload):
        """
        Creates a single user
        :param payload: tuple of (name, login, group_name)
        :return: True when the account was created, else False
        """
        # Payload Unpacking
        name = payload[0]
        login = payload[1]
        group_name = payload[2]
        success = False
        group_id = None
        if name is None or login is None:
            return success
        # BUG FIX: the original tested `if group_name == None` before the
        # lookup, so a *real* group name was never resolved and the user
        # was never added to the group.
        if group_name is not None:
            group_id = self.get_group_id(group_name)
        try:
            user = self.client.create_user(name, login)
            if group_id is not None:
                membership_response = self.client.group(
                    group_id=group_id).add_member(user)
        # if an error is thrown by the API, handle it by sending to failed_array
        # the most common error is that user already exists
        except exception.BoxAPIException as e:
            msg = "Status Code: %s. %s: <%s>" % (e.status, e.message, login)
            logger.error(msg)
            return success
        else:
            msg = "User was successfully created: %s " % user
            logger.info(msg)
            success = True
        return success

    def delete_all_users(self, force):
        """
        Delete all users at scale. Can not be undone.
        :param force: passed through to the delete API
        :return: dict with success/fail counts
        """
        success_count = 0
        fail_count = 0
        users = self.client.users(user_type='all')
        # Hoisted out of the loop: the admin identity does not change
        # between iterations (the original re-fetched it per user).
        current_admin = self.client.user().get()
        for user in users:
            # if the current user is accessed, which is also the admin, don't delete it.
            if user == current_admin or user.id in admins:
                continue
            delete_response = self.client.user(user.id).delete(force=force)
            if delete_response:
                msg = 'Deleted: {0} (User ID: {1})'.format(user.name, user.id)
                logger.info(msg)
                success_count += 1
            else:
                msg = 'Unable to delete user. %s : %s' % (user.id, user.login)
                logger.error(msg)
                fail_count += 1
        return {'success_count': success_count, 'fail_count': fail_count}

    def delete_user(self, email, force):
        """
        Delete a single user
        :param email: login/email of the user to delete
        :param force: passed through to the delete API
        :return: True when the user was deleted
        """
        success = False
        user = self.get_user_by_email(email)
        if user is None:
            return success
        success = self.client.user(user.id).delete(force=force)
        if success:
            msg = 'Deleted: {0} (User ID: {1})'.format(user.name, user.id)
            logger.info(msg)
        else:
            msg = 'Unable to delete user. %s : %s' % (user.id, user.login)
            logger.error(msg)
        return success

    def generate_payload(self, upload_method, file, group_name, query):
        """Build the (name, login, group) payload list for bulk creation.

        BUG FIX: the original only assigned `payload` inside the
        `row_count > 10` branches, so every other path raised
        UnboundLocalError on the final `return payload`.
        """
        payload = list()
        # Excel Handler
        if upload_method == 'excel':
            # create a Pandas Dataframe to store Excel Data
            df = pd.read_excel(file)
            if len(df) > 10:
                # generate payload
                for row in df.itertuples():
                    payload.append((row._1 + ' ' + row._2, row.Email,
                                    group_name))
        # JSON Handler
        elif upload_method == 'json':
            with open(file) as json_file:
                data = json.load(json_file)
            if len(data) > 10:
                for current_user in data:
                    payload.append((current_user['first_name'] + ' ' +
                                    current_user['last_name'],
                                    current_user['email'], group_name))
        # PostgreSQL Handler
        elif upload_method == 'db':
            db = DB()
            with db.conn.cursor() as cursor:
                cursor.execute(query)
                records = cursor.fetchall()
                if cursor.rowcount > 10:
                    for row in records:
                        payload.append((row[1] + row[2], row[3], group_name))
        return payload

    def create_users_with_thread(self, payload):
        """Create the payload of users on a small thread pool."""
        print("Begin Threading Operation")
        with ThreadPoolExecutor(max_workers=3) as executors:
            for _ in executors.map(self.create_user, payload):
                print("Thread Executor")

    # Common Group Methods
    def create_groups(self, logger, group_name):
        """Create group_name unless it already exists."""
        # BUG FIX: is_a_group is an instance method taking only the group
        # name; the original also passed self.client, raising TypeError.
        if self.is_a_group(group_name):
            logger.warning('Group already exists.')
        else:
            response = self.client.create_group(group_name)

    def is_a_group(self, group_to_check):
        """
        returns whether a group is a present in the enterprise
        :param group_to_check:
        :return: bool
        """
        groups = self.client.get_groups(group_to_check)
        return any(group.name == group_to_check for group in groups)

    def get_group_id(self, group_name):
        """
        Returns the group id as an int by searching for the group name
        :param group_name:
        :return: int, or None when no exact match is found
        """
        group_id = None
        groups = self.client.get_groups(group_name)
        for group in groups:
            if group.name == group_name:
                group_id = group.id
                return group_id
        return group_id

    # Common Upload Methods
    def upload_single_file(self, source, destination_folder_id):
        """Upload one local file into the given Box folder."""
        response = self.client.folder(
            folder_id=destination_folder_id).upload(source)
        return response

    def upload_all_files_from_directory(self, source, destination_folder_id):
        """Recursively upload every file found under `source`."""
        # 1. Check if the path exists
        if not os.path.exists(source):
            print("Path doesn't exist.")
            return 0
        content = []
        # 2. Get all files in directory
        for path, subdirs, files in os.walk(source):
            for name in files:
                content.append(os.path.join(path, name))
        for file in content:
            print(file)
            self.upload_single_file(
                source=file, destination_folder_id=destination_folder_id)

    # Common Folder Methods
    def get_items_in_folder(self, folder_id):
        """Return {item_id: item_name} for the folder's direct children."""
        items_dict = dict()
        items = self.client.folder(folder_id).get_items()
        for item in items:
            items_dict[item.id] = item.name
        return items_dict
from boxsdk import OAuth2, Client auth = OAuth2( client_id='c0hjuh3tjr90sloycc4g0jcdvid1yjs2', client_secret='w5oJLroCuN4TQ8bEjmJycwFmOexZXB2g', access_token='GS3u5KorxW0jVURWMmBtNogkEhP2YwUu', ) client = Client(auth) # collections = client.collections() # for collection in collections: # print('Collection "{0}" has ID {1}'.format(collection.name, collection.id)) # items = client.collection(collection_id='5731943133').get_items() # for item in items: # print('{0} "{1}" is in the collection'.format(item.type.capitalize(), item.name)) user = client.user().get() print('The current user ID is {0}'.format(user.id)) print('The current user name is {0}'.format(user.name)) items = client.folder(folder_id='0').get_items() for item in items: print(item.id) if item.name == "7.json": print('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name)) with open(item.name, 'wb') as open_file: client.file(item.id).download_to(open_file) open_file.close()
logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) print_handler = logging.StreamHandler(stream=sys.stdout) file_handler = logging.FileHandler(filename=config['logging_fname'], mode='a') formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s - %(message)s') print_handler.setFormatter(formatter) file_handler.setFormatter(formatter) logger.addHandler(print_handler) logger.addHandler(file_handler) # Authenticate using JWTAuth credentials stored in a JSON file sdk = JWTAuth.from_settings_file(config['auth_fname']) session_client = Client(sdk) app_user = session_client.user(user_id=str(config['app_user_id'])) logger.info('Successfully authenticated to the Box API as the app user "%s"!', app_user.get().name) def catch_exception(err_type, value, trace): """Report any exceptions that were raised during the pipeline run.""" logger = logging.getLogger(__name__ + '.catch_exception') # Extract the error message from the exception traceback error_msg = traceback.format_exception_only(err_type, value)[0] fail_notif = {"status": "FAIL", "message": f"Failed to update {config['db_name']}!",
def sync_box_to_gcs(box: BoxClient, bucket: Bucket, cache: dict) -> List[Future]: """Sync Box account files to a GCS bucket. For versioned Box files, the latest version will always be synced back to the GCS bucket. Non-current versions will not be deliberately preserved, though syncing to a versioned bucket will have this effect. Arguments: box {BoxClient} -- [description] bucket {Bucket} -- [description] cache {dict} -- A dictionary that will opportunistically be filled with Box item paths/objects. Returns: List[Future] -- [description] """ # constuct an executor for copy tasks executor = ThreadPoolExecutor(max_workers=cpu_count()) futures = [] # sync box files back to GCS for path, item in box_walk(box.root_folder()): LOG.debug("Box directory walk found: {}".format(path)) # cache the Box item in module scope cache[path] = item # get the blob to overwrite, or make a new one blob_exists = True blob = bucket.get_blob(path) if not blob: blob_exists = False blob = Blob(path, bucket) # branch depending on whether file or folder if item.type == "folder": if not blob_exists: blob.metadata = { # Not an important value. BOX_MTIME_KEY: datetime.now().isoformat() } # create directory marker, used by UI and FUSE LOG.info("Creating directory marker in GCS: {}".format( blob.name)) blob.upload_from_string(b'') elif item.type == "file": box_file = box.file(item.id) box_mtime = box_file.get().modified_at if should_copy_box_to_gcs(box_file, box_mtime, blob, blob_exists): LOG.info( "Box file {} is not found in GCS or updated since last sync. Copying to {}." .format(item.name, blob.name)) blob.metadata = { BOX_MTIME_KEY: box_mtime } # This change will "follow" the upload into GCS temp_file = BytesIO() reader = box_file.download_to writer = blob.upload_from_file future = executor.submit(concurrent_upload, reader, writer, temp_file) futures.append(future) else: LOG.info("Ignoring item of type {}".format(item.type)) return futures
def insert_box_events():
    """Append Box enterprise events to a Tableau Hyper extract.

    Relies on module-level config (box_hyper_file, box_schema,
    box_events_table, box_config, month_lookback) and on get_box_events()
    filling the module-level `box_events` list as a side effect.
    """
    # Hyper file instantiation
    path_to_database = Path(box_hyper_file)
    hyper_file_exists = Path.exists(path_to_database)
    # Start the Hyper API process
    with HyperProcess(telemetry=Telemetry.SEND_USAGE_DATA_TO_TABLEAU) as hyper:
        # Check if the Hyper file exists or not. CreateMode.NONE will append.
        # CreateMode.CREATE_AND_REPLACE will create a net new file.
        create_mode = None
        if hyper_file_exists:
            create_mode = CreateMode.NONE
        else:
            create_mode = CreateMode.CREATE_AND_REPLACE
        # Open a new connection
        with Connection(endpoint=hyper.endpoint, database=path_to_database,
                        create_mode=create_mode) as connection:
            # Create the schema if it does not exist
            connection.catalog.create_schema_if_not_exists(schema=box_schema)
            # Instantiate the table schema
            box_events_table_def = TableDefinition(
                table_name=TableName(box_schema, box_events_table),
                columns=[
                    TableDefinition.Column(name='event_id', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='event_type', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='created_at', type=SqlType.timestamp_tz(), nullability=NULLABLE),
                    TableDefinition.Column(name='created_by_id', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='created_by_name', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='created_by_login', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='source', type=SqlType.json(), nullability=NULLABLE),
                    TableDefinition.Column(name='ip_address', type=SqlType.text(), nullability=NULLABLE),
                    TableDefinition.Column(name='additional_details', type=SqlType.json(), nullability=NULLABLE)
                ])
            print('Found schema: {0} and table def: {1}'.format(
                box_events_table_def.table_name.schema_name,
                box_events_table_def.table_name))
            # Create the table if it does not exist and get the Box events table
            connection.catalog.create_table_if_not_exists(
                table_definition=box_events_table_def)
            table_name = TableName(box_schema, box_events_table)
            # Get the MAX row by created_at
            last_event_created_at = connection.execute_scalar_query(
                query=
                f"SELECT MAX(created_at) FROM {box_events_table_def.table_name}"
            )
            if last_event_created_at is not None:
                print('Found last event in hyper file: {0}'.format(
                    last_event_created_at.to_datetime()))
            # Get the Box service account client
            auth = JWTAuth.from_settings_file(box_config)
            box_client = Client(auth)
            service_account = box_client.user().get()
            print(
                'Found Service Account with name: {0}, id: {1}, and login: {2}'
                .format(service_account.name, service_account.id,
                        service_account.login))
            # Get the current date and the date for one month ago if there is
            # no latest event
            today = datetime.utcnow()
            if last_event_created_at is None:
                last_event_created_at = today - relativedelta.relativedelta(
                    months=month_lookback)
            else:
                # Hyper returns a naive timestamp; pin it to UTC then convert
                # to the local timezone before using it as a range start.
                last_event_created_at = last_event_created_at.to_datetime(
                ).replace(tzinfo=timezone.utc).astimezone(tz=None)
            # Get the Box enterprise events for a given date range
            print(
                'Using date range for events today: {0} and starting datetime: {1}'
                .format(today, last_event_created_at))
            get_box_events(box_client, 0, last_event_created_at, today)
            # Insert the Box enterprise events into the Hyper file
            with Inserter(connection, box_events_table_def) as inserter:
                inserter.add_rows(rows=box_events)
                inserter.execute()
            # Number of rows in the "Box"."Events" table.
            row_count = connection.execute_scalar_query(
                query=f"SELECT COUNT(*) FROM {table_name}")
            print(f"The number of rows in table {table_name} is {row_count}.")
        print("The connection to the Hyper file has been closed.")
    print("The Hyper process has been shut down.")
def impersonate_mirror_user(box: BoxClient, login: str) -> BoxClient: mirror_user = [x for x in box.users() if x.login == login][0] LOG.info("Mirroring user: {}, login: {}".format(mirror_user, mirror_user.login)) return box.as_user(mirror_user)
parser.add_argument('--client_secret', required=True, help=''' Client secret for boxsdk. ''') parser.add_argument('--developer_token', required=True, help=''' Developer token for boxsdk. ''') parser.add_argument('--folder_id', required=True, help=''' Folder ID of the target Box folder. ''') parser.add_argument('--file_path', required=True, help=''' Path to file to be uploaded. ''') args = parser.parse_args() from boxsdk import Client, OAuth2 oauth = OAuth2( client_id=args.client_id, client_secret=args.client_secret, access_token=args.developer_token, ) client = Client(oauth) folder_id = args.folder_id print('Start uploading {}'.format(args.file_path)) new_file = client.folder(folder_id).upload(args.file_path) print('Finish uploading {}'.format(args.file_path))
import netmiko from netmiko import ConnectHandler import datetime import threading from threading import Thread # Configure JWT auth object sdk = JWTAuth(client_id="Qn6p8cio5i1nr93kzlyac70aj6xjprbw", client_secret="TMrbjJLq8Ek3kht8hGH9FAgZbOJkKaJl", enterprise_id="62977688", jwt_key_id="qun049ut", rsa_private_key_file_sys_path= "C:\\cygwin64\\home\\User-12\\private_Jing.json", rsa_private_key_passphrase='#####Round'.encode()) client = Client(sdk) def runner(ipp, u_name, p_word, s_word): ssh_connection = ConnectHandler( device_type='checkpoint_gaia', ip=ipp, username=u_name, password=p_word, #secret=s_word ) result = ssh_connection.find_prompt() + "\n" result += ssh_connection.send_command('clish -c "show configuration"', delay_factor=4) result += ssh_connection.send_command('clish -c "fw getifs"',
class BoxProvider(OAuthProvider):
    """OAuth-based storage provider backed by Box (Python 2 era code).

    Maintains a name->object_id cache of the app folder's contents and
    persists refreshed tokens through the credential manager.
    """

    BOX_ROOT_ID = '0'  # The root of the Box system (per Box docs)
    MAX_BOX_LIMIT = 1000  # the maximum number of items returned from a Box request

    @classmethod
    def provider_identifier(cls):
        # Machine-readable provider key.
        return "box"

    @classmethod
    def provider_name(cls):
        # Human-readable provider name.
        return "Box"

    def __init__(self, credential_manager):
        super(BoxProvider, self).__init__(credential_manager)
        self.id_cache, self._email, self._app_credentials = None, "", None
        self.access_token, self.refresh_token = None, None
        # When True, _persist_tokens() will write the current tokens out.
        self.write_tokens = True

    @contextmanager
    def exception_handler(self):
        """Translate SDK/network errors into provider exceptions.

        Always persists tokens on exit, even when an exception is raised.
        """
        try:
            yield
        except BoxOAuthException:
            raise exceptions.AuthFailure(self)
        except BoxAPIException:
            raise exceptions.ProviderOperationFailure(self)
        except ReadTimeout:
            raise exceptions.ConnectionFailure(self)
        except Exception:
            raise exceptions.ProviderOperationFailure(self)
        finally:
            self._persist_tokens()

    def start_connection(self):
        """Begin the OAuth dance; returns the URL the user must visit."""
        # NOTE(review): `app_credentials` is presumably a property on the
        # base class backed by _app_credentials — confirm.
        self.oauth = OAuth2(client_id=self.app_credentials["client_id"],
                            client_secret=self.app_credentials["client_secret"])
        with self.exception_handler():
            authorize_url, self.csrf_token = self.oauth.get_authorization_url(
                self.get_oauth_redirect_url())
        return authorize_url

    def finish_connection(self, url):
        """Complete the OAuth dance from the redirect URL Box called back with."""
        params = parse_url(url)
        try:
            # get auth_token
            auth_token = params["code"]
            assert self.csrf_token == params["state"]
        except AssertionError:
            # csrf mismatch or csrf not found
            raise exceptions.AuthFailure(self)
        except KeyError:
            # No "code" param: map Box's error code onto provider exceptions.
            try:
                error_code = params["error"]
            except KeyError:
                raise exceptions.ProviderOperationFailure(self)
            if error_code == "invalid_request" or error_code == "unsupported_response_type":
                raise exceptions.ProviderOperationFailure(self)
            elif error_code == "access_denied" or error_code == "server_error":
                raise exceptions.AuthFailure(self)
            elif error_code == "temporarily_unavailable":
                raise exceptions.ConnectionFailure(self)
            else:
                raise exceptions.ProviderOperationFailure(self)
        credentials = {}
        with self.exception_handler():
            credentials["access_token"], credentials["refresh_token"] = \
                self.oauth.authenticate(auth_token)
        self._connect(credentials)

    def _persist_tokens(self):
        # Write tokens through the credential manager at most once per change.
        if self.write_tokens and self.access_token is not None and self.uid is not None:
            user_credentials = {"access_token": self.access_token,
                                "refresh_token": self.refresh_token}
            self.credential_manager.set_user_credentials(self.__class__,
                                                         self.uid,
                                                         user_credentials)
            self.write_tokens = False

    def _connect(self, user_credentials):
        """Build the Box client, resolve the app folder and prime the id cache."""
        def store_tokens_callback(access_token, refresh_token):
            # SDK calls this after a token refresh; flag tokens for persisting.
            self.write_tokens = True
            self.access_token = access_token
            self.refresh_token = refresh_token

        def load_email():
            with self.exception_handler():
                self._email = self.client.user(user_id='me').get()['login']

        def make_app_folder():
            with self.exception_handler():
                box_root_folder = self.client.folder(self.BOX_ROOT_ID)
                try:
                    # make an app-specific folder if one does not already exist
                    _, folder_id, _ = box_root_folder.create_subfolder(self.ROOT_DIR)
                except BoxAPIException as e:
                    # Name conflict: reuse the existing folder's id.
                    folder_id = e.context_info['conflicts'][0]['id']
                self.app_folder = self.client.folder(folder_id)

        def prime_cache():
            with self.exception_handler():
                # get all items, one MAX_BOX_LIMIT-sized page at a time; the
                # loop stops on the first short (non-full) page.
                files = []
                offset = 0
                while len(files) == offset:
                    files += self.app_folder.get_items(self.MAX_BOX_LIMIT,
                                                       offset=offset)
                    offset += self.MAX_BOX_LIMIT
                self.id_cache = {user_file.name: user_file.object_id
                                 for user_file in files}

        # if this came from cache, it is a json string that needs to be converted
        if type(user_credentials) in [unicode, str]:
            user_credentials = json.loads(user_credentials)
        self.access_token, self.refresh_token = \
            user_credentials["access_token"], user_credentials["refresh_token"]
        oauth = OAuth2(client_id=self.app_credentials["client_id"],
                       client_secret=self.app_credentials["client_secret"],
                       store_tokens=store_tokens_callback,
                       access_token=self.access_token,
                       refresh_token=self.refresh_token)
        self.client = Client(oauth)
        load_email()
        make_app_folder()
        prime_cache()

    @property
    def uid(self):
        # The user's Box login email doubles as the unique id.
        return self._email

    def get(self, filename):
        """Return the raw content of a cached file by name."""
        with self.exception_handler():
            box_file = self.client.file(self.id_cache[filename])
            return box_file.content()

    def put(self, filename, data):
        """Create or overwrite `filename` in the app folder with `data`."""
        data_stream = StringIO(data)
        with self.exception_handler():
            if filename in self.id_cache:
                existing_file = self.client.file(self.id_cache[filename])
                existing_file.update_contents_with_stream(data_stream)
            else:
                new_file = self.app_folder.upload_stream(data_stream, filename)
                self.id_cache[filename] = new_file.object_id

    def delete(self, filename):
        """Delete `filename` from Box and drop it from the cache."""
        with self.exception_handler():
            box_file = self.client.file(self.id_cache[filename])
            box_file.delete()
            self.id_cache.pop(filename, None)

    def wipe(self):
        """Delete every cached file; the cache is cleared even on failure."""
        with self.exception_handler():
            try:
                for _, file_id in self.id_cache.items():
                    box_file = self.client.file(file_id)
                    box_file.delete()
            except:
                raise
            finally:
                self.id_cache = {}
# # Forward user to Box's login page # req = 'https://app.box.com/api/oauth2/authorize?response_type=code&client_id='+CLIENT_ID+'&state=security_token%3DKnhMJatFipTAnM0nHlZA' # resp = requests.get(req) # print(resp.content) class LoggingNetwork(DefaultNetwork): def request(self, method, url, access_token, **kwargs): """ Base class override. Pretty-prints outgoing requests and incoming responses. """ print('\x1b[36m{} {} {}\x1b[0m'.format(method, url, pformat(kwargs))) response = super(LoggingNetwork, self).request(method, url, access_token, **kwargs) if response.ok: print('\x1b[32m{}\x1b[0m'.format(response.content)) else: print('\x1b[31m{}\n{}\n{}\x1b[0m'.format( response.status_code, response.headers, pformat(response.content), )) return response oauth = OAuth2(CLIENT_ID, CLIENT_SECRET, access_token=DEVELOPER_TOKEN) client = Client(oauth, LoggingNetwork()) root_folder = client.folder(folder_id='0').get() items = client.folder(folder_id='0').get_items(limit=100, offset=0) shared_link = client.folder(folder_id='3800889110').get_shared_link()
def box_client(box_oauth, mock_box_network): # pylint:disable=redefined-outer-name return Client(box_oauth, network_layer=mock_box_network)
oauth = OAuth2(client_id='37zh1wo00w7h8qphpviwjkia7ng8g1j4', client_secret='YKatTFXOH1icNc9uxD3K2TMLCiulQJ0M', access_token='W8PeTR4cgnYIRYI6L5Q7HeU35g4dMqwl' # store_tokens=your_store_tokens_callback_method, ) auth_url, csrf_token = oauth.get_authorization_url( 'https://psu.app.box.com/folder/0') # Redirect user to auth_url, where they will enter their Box credentials response = requests.get("https://account.box.com/api/oauth2/authorize/") # print(response.status_code) client = Client(oauth) # client: object = requests.get("https://api.box.com/2.0/folders/:9dj7qs0aimiaywmxm2mo/").folder.get() # Mitford_Digital_Archives folder = client.folder(folder_id='907565446').get() sampleFile = client.file(file_id='33034276995') comments = sampleFile.get_comments() type = sampleFile.get().type print(str(comments)) print(str(type)) commentsall = [] for comment in comments: # print(str(comment)) contents = 'Comment was left by {0} at {1}: {2}'.format( comment.created_by.name, comment.created_at, comment.message) # print(contents) commentsall.append(contents) commentsComp = '\n'.join(commentsall)
def download(file_id): with open('C:\\Users\\dhussai\\Desktop\\down_box\\read.txt','wb') as open_file: client.file(file_id).download_to(open_file) def event(): t = Test() c = pycurl.Curl() c.setopt(c.URL,"https://api.box.com/2.0/events?stream_position=1442239977411") c.setopt(pycurl.HTTPHEADER, ['Authorization: Bearer lI2GbdKyZfIVLK2dY3vQtoFArO7AWQPEWfXmAAn6cpzqVaARyXfe3JoG9hiPDVqr']) c.setopt(c.WRITEFUNCTION, t.body_callback) c.perform() c.close() events=(t.contents) jsond=(json.JSONDecoder().decode(events)) return jsond oauth2 = OAuth2("68akmqusbktx65oelq9n6e166rmqw8el", "iE96DxCTUfw6USg3GsU7opw5hWArXtam", access_token="lI2GbdKyZfIVLK2dY3vQtoFArO7AWQPEWfXmAAn6cpzqVaARyXfe3JoG9hiPDVqr") client = Client(oauth2) my_jnj_box = client.user(user_id='me').get() print('user_login: '******'login']) vendor_ids,name=ids("4308645477") #print(vendor_ids) def tree(): create_parentfolders("vendor") vendor_ids,name=ids(parent_folder) seed=vendor_ids[-1] create_subfolders(seed) seed_branch,names= ids(seed) ID_incoming= seed_branch[0] create_branch(ID_incoming) #metatcreate(37030354650,"tag","accepted") #metadata_files=info(37030354650) #jsondict_file=(json.JSONDecoder().decode(metadata_files))
import urllib2 as urllib from boxsdk import OAuth2, Client from box_utils import get_folder from config import CustomConfig import sendgrid from sendgrid.helpers.mail import * c = CustomConfig(path=os.path.abspath(os.path.dirname(__file__))) # Authentication oauth = OAuth2( client_id=os.environ.get('CLIENT_ID'), client_secret=os.environ.get('CLIENT_SECRET'), access_token=os.environ.get('DEVELOPER_TOKEN'), ) client = Client(oauth) root_folder = client.folder(folder_id=c.root_folder_id) shared_folder = get_folder(client, root_folder, c.demo_folder_name) # Grab the file from a URL and store it in Box file = io.BytesIO(urllib.urlopen(c.file_url).read()) rn = str(int(random.random() * 10000)) uploaded_file = shared_folder.upload_stream( file, '{0}_{1}.png'.format(c.file_name, rn)) shared_link = uploaded_file.get_shared_link() # Send Email with link to file stored on Box sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY')) from_email = Email(email=c.from_email, name=c.from_email_name) subject = "SendGrid BoxDev Demo!"
def get_box_client(): oauth = OAuth2(client_id=None, client_secret=None, access_token=BOX_ACCESS_TOKEN) return Client(oauth=oauth)
def check_update_for_user(user):
    """Sync a user's IVLE workbins to each cloud provider they have linked.

    Reads provider tokens off the `user` model, refreshes them where needed
    (saving the new tokens), then delegates to the per-provider sync helpers.
    """
    logger.info('Updating user')
    # Get modules info from IVLE
    logger.info('Getting IVLE modules')
    modules = get_ivle_modules(user)
    # Get all module workbins
    logger.info('Getting IVLE file structure')
    ivle_file_structure = get_ivle_file_structure(user, modules)
    if user.dropbox_token:
        # Get Dropbox info
        logger.info('Updating dropbox')
        dbx = dropbox.Dropbox(user.dropbox_token)
        # Update Dropbox using IVLE file structure
        sync_dropbox(user, dbx, modules, ivle_file_structure)
    if user.box_access_token and user.box_refresh_token:
        # TODO: I'm sure this will work 99% of the time. But need to confirm.
        oauth = OAuth2(client_id=BOX_CLIENT_ID,
                       client_secret=BOX_CLIENT_SECRET,
                       access_token=user.box_access_token,
                       refresh_token=user.box_refresh_token)
        # Immediately regenerate a new token for the next use
        new_access_token, new_refresh_token = oauth.refresh(
            access_token_to_refresh=user.box_access_token)
        user.box_access_token = new_access_token
        user.box_refresh_token = new_refresh_token
        user.save()
        # This consumes our old tokens
        box = Client(oauth, DefaultNetwork())
        sync_box(user, box, modules, ivle_file_structure)
    if user.gdrive_token:
        logger.info('Updating google drive for ' + user.ivle_user_id)
        # configure sdk
        gauth = GoogleAuth('api/client_secrets.yaml')
        # Splice the user's refresh token into a copy of the credentials file
        # via a temp file, then load that. NEED TRY CATCH HERE.
        tf = tempfile.NamedTemporaryFile('r+')
        with open("api/credentials.json", "r+") as jsonFile:
            data = json.load(jsonFile)
            data["refresh_token"] = user.gdrive_token
            data["client_secret"] = GDRIVE_CLIENT_SECRET
            data["client_id"] = GDRIVE_CLIENT_ID
            json.dump(data, tf)
            tf.seek(0)
        # Try to load saved client credentials
        gauth.LoadCredentialsFile(tf.name)
        if gauth.credentials is None:
            # Authenticate if they're not there
            logger.error('Authentication failed for user for google drive')
            # gauth.LocalWebserverAuth()
        else:
            # Refresh them
            logger.info('Refreshing Gdrive token')
            gauth.Refresh()
        drive = GoogleDrive(gauth)
        sync_gdrive(user, drive, modules, ivle_file_structure)
        logger.info('Sync done for Gdrive')
    if user.onedrive_access_token:
        # OneDrive SDK is async: give it a dedicated event loop for this run.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        client = get_onedrive_client(loop)
        attach_onedrive_session(client, user)
        client.auth_provider.refresh_token()
        # Save the new tokens after refreshing
        user.onedrive_access_token = client.auth_provider._session.access_token
        user.onedrive_refresh_token = client.auth_provider._session.refresh_token
        user.save()
        sync_onedrive(user, client, modules, ivle_file_structure)
        loop.close()
    def process_event(self, event, operation):
        """
        Wrapper to process the given inotify event against Box.

        :param event: inotify event (a (src, dest) tuple for 'move')
        :param operation: one of 'delete', 'move', 'create', 'modify',
            'real_close'
        :return: None — work is done via the Box API and self.upload_queue
        NOTE(review): the bare ``AssertionError(...)`` calls below construct an
        exception object without raising it — almost certainly intended to be
        ``assert`` statements; left as found.
        """
        if operation == 'delete':
            crate_logger.debug('Doing a delete on {}'.format(event.pathname))
            folders_to_traverse = self.folders_to_traverse(event.path)
            crate_logger.debug(folders_to_traverse)
            client = Client(self.oauth)
            box_folder = client.folder(folder_id='0').get()
            cur_box_folder = box_folder
            # if we're modifying in root box dir, then we've already found the folder
            is_base = BOX_DIR in (event.path, event.path[:-1],)
            cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                                folders_to_traverse)
            last_dir = os.path.split(event.path)[-1]
            if not is_base:
                AssertionError(cur_box_folder['name'] == last_dir,
                               cur_box_folder['name'] + 'not equals ' + last_dir)
            event_was_for_dir = 'IN_ISDIR'.lower() in event.maskname.lower()
            num_entries = cur_box_folder['item_collection']['total_count']
            limit = 100
            # Page through the folder looking for the matching remote item.
            for offset in range(0, num_entries, limit):
                for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                    if not event_was_for_dir and entry['type'] == 'file' and entry['name'] == event.name:
                        if entry['id'] not in self.files_from_box:
                            cur_file = client.file(file_id=entry['id']).get()
                            if cur_file.delete():  # does not actually check correctly...unless not "ok" is false
                                # del version_info[cur_file['id']]
                                r_c.delete(redis_key(cur_file['id']))
                        else:
                            self.files_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                        break
                    elif event_was_for_dir and entry['type'] == 'folder' and entry['name'] == event.name:
                        if entry['id'] not in self.folders_from_box:
                            self.get_folder(client, entry['id']).delete()
                            # cur_folder = client.folder(folder_id=entry['id']).get()
                            # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                        else:
                            self.folders_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                        break
        elif operation == 'move':
            crate_logger.debug('Doing a move on: {}'.format(event))
            src_event, dest_event = event
            folders_to_traverse = self.folders_to_traverse(dest_event.path)
            crate_logger.debug(folders_to_traverse)
            client = Client(self.oauth)
            box_folder = client.folder(folder_id='0').get()
            cur_box_folder = box_folder
            # if we're modifying in root box dir, then we've already found the folder
            cur_box_folder = self.traverse_path(client, dest_event,
                                                cur_box_folder,
                                                folders_to_traverse)
            src_folders_to_traverse = self.folders_to_traverse(src_event.path)
            src_box_folder = box_folder
            src_box_folder = self.traverse_path(client, src_event,
                                                src_box_folder,
                                                src_folders_to_traverse)
            is_rename = src_event.path == dest_event.path
            # is_a_directory = 'IN_ISDIR'.lower() in dest_event.maskname.lower()
            did_find_src_file = os.path.isdir(dest_event.pathname)  # true if we are a directory :)
            did_find_src_folder = os.path.isfile(dest_event.pathname)  # true if we are a regular file :)
            is_file = os.path.isfile(dest_event.pathname)
            is_dir = os.path.isdir(dest_event.pathname)
            move_from_remote = False
            src_num_entries = src_box_folder['item_collection']['total_count']
            limit = 100
            for offset in range(0, src_num_entries, limit):
                for entry in src_box_folder.get_items(offset=offset, limit=limit):
                    did_find_src_file = is_file and entry['name'] == src_event.name and entry['type'] == 'file'
                    did_find_src_folder = is_dir and entry['name'] == src_event.name and entry['type'] == 'folder'
                    if did_find_src_file:
                        src_file = client.file(file_id=entry['id']).get()
                        if is_rename:
                            src_file.rename(dest_event.name)
                        else:
                            did_find_cur_file = os.path.isdir(dest_event.pathname)  # should check box instead
                            did_find_cur_folder = os.path.isfile(dest_event.pathname)  # should check box instead
                            cur_num_entries = cur_box_folder['item_collection']['total_count']
                            # Look in the destination folder for a same-named item.
                            for cur_offset in range(0, cur_num_entries, limit):
                                for cur_entry in cur_box_folder.get_items(offset=cur_offset, limit=limit):
                                    matching_name = cur_entry['name'] == dest_event.name
                                    did_find_cur_file = is_file and matching_name and isinstance(cur_entry, File)
                                    did_find_cur_folder = is_dir and matching_name and isinstance(cur_entry, Folder)
                                    if did_find_cur_file:
                                        # Overwrite the destination file, then delete the source.
                                        self.upload_queue.put([os.path.getmtime(dest_event.pathname),
                                                               partial(cur_entry.update_contents,
                                                                       dest_event.pathname),
                                                               self.oauth])
                                        self.upload_queue.put(partial(src_file.delete))
                                        break
                                    elif did_find_cur_folder:
                                        crate_logger.debug(
                                            'do not currently support movinga same name folder into parent with'
                                            'folder inside of the same name -- would may need to update the '
                                            'contents')
                                        break
                                if (is_file and did_find_cur_file) or (is_dir and did_find_cur_folder):
                                    break
                            if is_file and not did_find_cur_file:
                                src_file.move(cur_box_folder)
                                # do not yet support moving and renaming in one go
                                assert src_file['name'] == dest_event.name
                    elif did_find_src_folder:
                        src_folder = client.folder(folder_id=entry['id']).get()
                        if is_rename:
                            src_folder.rename(dest_event.name)
                        else:
                            src_folder.move(cur_box_folder)
                            # do not yet support moving and renaming in one go
                            assert src_folder['name'] == dest_event.name
                    elif entry['name'] == dest_event.name:
                        move_from_remote = True
            if not move_from_remote:  # if it was moved from a different folder on remote, could be false still
                dest_box_folder = box_folder
                dest_folders_to_traverse = self.folders_to_traverse(dest_event.path)
                dest_box_folder = self.traverse_path(client, dest_event,
                                                     dest_box_folder,
                                                     dest_folders_to_traverse)
                dest_num_entries = dest_box_folder['item_collection']['total_count']
                limit = 100
                for offset in range(0, dest_num_entries, limit):
                    for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                        if entry['name'] == dest_event.name:
                            move_from_remote = True
                            break
                if not move_from_remote:
                    if is_file and not did_find_src_file:
                        # src file [should] no longer exist[s]. this file did not originate in box, too.
                        last_modified_time = os.path.getmtime(dest_event.pathname)
                        self.upload_queue.put([last_modified_time,
                                               partial(cur_box_folder.upload,
                                                       dest_event.pathname,
                                                       dest_event.name),
                                               self.oauth])
                    elif is_dir and not did_find_src_folder:
                        self.upload_queue.put(partial(cur_box_folder.create_subfolder,
                                                      dest_event.name))
                        wm.add_watch(dest_event.pathname, rec=True, mask=mask)
        elif operation == 'create':
            crate_logger.debug("Creating: {}".format(event.pathname))
            folders_to_traverse = self.folders_to_traverse(event.path)
            crate_logger.debug(folders_to_traverse)
            client = Client(self.oauth)
            box_folder = client.folder(folder_id='0').get()
            cur_box_folder = box_folder
            # if we're modifying in root box dir, then we've already found the folder
            is_base = BOX_DIR in (event.path, event.path[:-1],)
            cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                                folders_to_traverse)
            last_dir = os.path.split(event.path)[-1]
            if not is_base:
                assert cur_box_folder['name'] == last_dir
            did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
            did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
            is_file = os.path.isfile(event.pathname)
            is_dir = os.path.isdir(event.pathname)
            num_entries = cur_box_folder['item_collection']['total_count']
            limit = 100
            for offset in range(0, num_entries, limit):
                for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                    did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                    did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                    if did_find_the_file:
                        if entry['id'] not in self.files_from_box:
                            # more accurately, was this created offline?
                            AssertionError(False,
                                           'We should not be able to create a '
                                           'file that exists in box; should be a close/modify.')
                            crate_logger.debug('Update the file: {}'.format(event.pathname))
                            a_file = client.file(file_id=entry['id']).get()
                            # seem it is possible to get more than one create (without having a delete in between)
                            self.upload_queue.put(partial(a_file.update_contents, event.pathname))
                            # cur_box_folder.upload(event.pathname, event.name)
                        else:
                            self.files_from_box.remove(entry['id'])  # just downloaded it
                        break
                    elif did_find_the_folder:
                        # we are not going to re-create the folder, but we are also not checking if the contents in this
                        # local creation are different from the contents in box.
                        if entry['id'] in self.folders_from_box:
                            self.folders_from_box.remove(entry['id'])  # just downloaded it
                        break
            if is_file and not did_find_the_file:
                crate_logger.debug('Upload the file: {}'.format(event.pathname))
                last_modified_time = os.path.getctime(event.pathname)
                self.upload_queue.put([last_modified_time,
                                       partial(cur_box_folder.upload,
                                               event.pathname, event.name),
                                       self.oauth])
            elif is_dir and not did_find_the_folder:
                crate_logger.debug('Upload the folder: {}'.format(event.pathname))
                self.upload_queue.put(partial(cur_box_folder.create_subfolder,
                                              event.name))
                wm.add_watch(event.pathname, rec=True, mask=mask)
        elif operation == 'modify':
            crate_logger.debug("{op}...: {pathname}".format(op=operation,
                                                            pathname=event.pathname))
            folders_to_traverse = self.folders_to_traverse(event.path)
            crate_logger.debug(folders_to_traverse)
            client = Client(self.oauth)
            cur_box_folder = None
            folder_id = '0'
            retry_limit = 5
            cur_box_folder = get_box_folder(client, cur_box_folder, folder_id,
                                            retry_limit)
            # if we're modifying in root box dir, then we've already found the folder
            is_base = BOX_DIR in (event.path, event.path[:-1],)
            cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                                folders_to_traverse)
            last_dir = os.path.split(event.path)[-1]
            if not is_base:
                AssertionError(cur_box_folder['name'] == last_dir,
                               cur_box_folder['name'] + 'not equals ' + last_dir)
            did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
            did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
            is_file = os.path.isfile(event.pathname)
            is_dir = os.path.isdir(event.pathname)
            num_entries = cur_box_folder['item_collection']['total_count']
            limit = 100
            for offset in range(0, num_entries, limit):
                for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                    did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                    did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                    if did_find_the_file:
                        last_modified_time = os.path.getmtime(event.pathname)
                        if entry['id'] not in self.files_from_box:
                            cur_file = client.file(file_id=entry['id']).get()
                            can_update = True
                            was_versioned = r_c.exists(redis_key(cur_file['id']))
                            # Compare the redis-cached version info against the
                            # local mtime to decide whether an upload is needed.
                            try:
                                info = redis_get(r_c, cur_file) if was_versioned else None
                                info = info if was_versioned else {'fresh_download': True,
                                                                   'etag': '0',
                                                                   'time_stamp': 0}
                                item_version = info
                                if cur_file['etag'] == item_version['etag'] and \
                                        ((item_version['fresh_download'] and item_version[
                                            'time_stamp'] >= last_modified_time) or
                                         (not item_version['fresh_download'] and item_version[
                                             'time_stamp'] >= last_modified_time)):
                                    can_update = False
                                if can_update:
                                    self.upload_queue.put([last_modified_time,
                                                           partial(cur_file.update_contents,
                                                                   event.pathname),
                                                           self.oauth])
                                else:
                                    is_new_time_stamp = item_version['time_stamp'] >= last_modified_time
                                    crate_logger.debug('Skipping the update because not versioned: {not_versioned}, '
                                                       'fresh_download: {fresh_download}, '
                                                       'version time_stamp >= '
                                                       'new time stamp: {new_time_stamp}, '
                                                       'event pathname: {path_name}, '
                                                       'cur file id: {obj_id}'.format(not_versioned=not was_versioned,
                                                                                      fresh_download=item_version[
                                                                                          'fresh_download'],
                                                                                      new_time_stamp=is_new_time_stamp,
                                                                                      path_name=event.pathname,
                                                                                      obj_id=cur_file['id']))
                            except TypeError:
                                crate_logger.debug(traceback.format_exc())
                            except Exception:
                                crate_logger.debug(traceback.format_exc())
                        else:
                            self.files_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                        break
                    elif did_find_the_folder:
                        if entry['id'] not in self.folders_from_box:
                            crate_logger.debug('Cannot create a subfolder when it already exists: {}'.format(event.pathname))
                            # cur_folder = client.folder(folder_id=entry['id']).get()
                            # upload_queue.put(partial(cur_folder.update_contents, event.pathname))
                        else:
                            self.folders_from_box.remove(entry['id'])  # just wrote if, assuming create event didn't run
                        break
            if is_file and not did_find_the_file:
                crate_logger.debug('Uploading contents...: {}'.format(event.pathname))
                last_modified_time = os.path.getmtime(event.pathname)
                self.upload_queue.put([last_modified_time,
                                       partial(cur_box_folder.upload,
                                               event.pathname, event.name),
                                       self.oauth])
            if is_dir and not did_find_the_folder:
                crate_logger.debug('Creating a sub-folder...: {}'.format(event.pathname))
                self.upload_queue.put(partial(cur_box_folder.create_subfolder,
                                              event.name))
                wm.add_watch(event.pathname, rec=True, mask=mask)
        elif operation == 'real_close':
            crate_logger.debug("Real close...: {}".format(event.pathname))
            folders_to_traverse = self.folders_to_traverse(event.path)
            crate_logger.debug(folders_to_traverse)
            client = Client(self.oauth)
            cur_box_folder = None
            cur_box_folder = get_box_folder(client, cur_box_folder, '0', 5)
            # if we're modifying in root box dir, then we've already found the folder
            is_base = BOX_DIR in (event.path, event.path[:-1],)
            cur_box_folder = self.traverse_path(client, event, cur_box_folder,
                                                folders_to_traverse)
            last_dir = os.path.split(event.path)[-1]
            if not is_base:
                AssertionError(cur_box_folder['name'] == last_dir,
                               cur_box_folder['name'] + 'not equals ' + last_dir)
            did_find_the_file = os.path.isdir(event.pathname)  # true if we are a directory :)
            did_find_the_folder = os.path.isfile(event.pathname)  # true if we are a regular file :)
            is_file = os.path.isfile(event.pathname)
            is_dir = os.path.isdir(event.pathname)
            num_entries = cur_box_folder['item_collection']['total_count']
            limit = 100
            for offset in range(0, num_entries, limit):
                for entry in cur_box_folder.get_items(offset=offset, limit=limit):
                    did_find_the_file = is_file and entry['type'] == 'file' and entry['name'] == event.name
                    did_find_the_folder = is_dir and entry['type'] == 'folder' and entry['name'] == event.name
                    if did_find_the_file:
                        break
            # not a box file/folder (though could have been copied from a local box item)
            if is_file and not did_find_the_file:
                last_modified_time = os.path.getmtime(event.pathname)
                self.upload_queue.put([last_modified_time,
                                       partial(cur_box_folder.upload,
                                               event.pathname, event.name),
                                       self.oauth])
            elif is_dir and not did_find_the_folder:
                cur_box_folder.create_subfolder(event.name)
                wm.add_watch(event.pathname, rec=True, mask=mask, auto_add=True)
def _get_client(self):
    """Lazily construct and cache the Box ``Client`` for this connection.

    Returns:
        Client: the cached client, created on first call from ``self.auth``.
    """
    if not self._client:
        self._client = Client(self.auth)
        # BUG FIX: log message typo corrected ("successfull" -> "successful").
        # NOTE(review): logged on client construction only — no API call has
        # actually verified the connection at this point.
        log.debug('Box connection is successful.')
    return self._client
class BoxAPI(StorageAPI):
    """Box.com backend for :class:`StorageAPI` using the OAuth2 web flow.

    Tokens are cached as two plain-text lines (access token, then refresh
    token) in ``credential_dir/credential_filename``. If a cache exists the
    instance authorizes immediately; otherwise the caller must trigger the
    interactive browser flow via :meth:`authorize`.
    """

    def __init__(self, credential_dir, credential_filename=CREDENTIAL_FILENAME):
        super(BoxAPI, self).__init__(credential_dir)
        self.auth_file = join(credential_dir, credential_filename)
        # store_tokens keeps the on-disk cache in sync whenever the SDK
        # refreshes the token pair.
        self.oauth = OAuth2(client_id=CLIENT_ID,
                            client_secret=CLIENT_SECRET,
                            store_tokens=self.write_access_token)
        try:
            self.get_tokens_from_file()
            self.authorize()
        except IOError:
            # No cached tokens yet: authorization happens later, on demand.
            pass

    def get_auth_url(self):
        """Return the Box consent URL for the configured redirect URI."""
        auth_url, csrf_token = self.oauth.get_authorization_url(REDIRECT_URI)
        return auth_url

    def build(self):
        """Create the API client and ensure the application root folder exists."""
        self.client = Client(self.oauth)
        self.create_folder("demerio")

    def create_folder(self, folder_name):
        """Find-or-create ``folder_name`` under the Box root; remember its id.

        Stores the folder id in ``self.root_folder_id``.
        """
        search_results = self.client.search(
            folder_name, limit=100, offset=0,
            ancestor_folders=[self.client.folder(folder_id='0')])
        folder_filter = [result for result in search_results
                         if result._item_type == "folder"]
        if len(folder_filter) == 0:
            # BUG FIX: the original created a hard-coded 'demerio' subfolder,
            # ignoring the folder_name parameter (identical behavior for the
            # only call site, which passes "demerio").
            demerio_folder = self.client.folder(folder_id='0').create_subfolder(folder_name)
        else:
            assert len(folder_filter) == 1
            demerio_folder = folder_filter[0].get(fields=["name"])
        self.root_folder_id = demerio_folder.id

    def get_tokens_from_file(self):
        """Read and return ``(access_token, refresh_token)`` from the cache file.

        Raises:
            IOError: if the cache file does not exist.
        """
        with open(self.auth_file, "r") as f:
            access_token = f.readline().rstrip()
            refresh_token = f.readline().rstrip()
            return access_token, refresh_token

    def write_access_token(self, access_token, refresh_token):
        """Persist the token pair, one per line (``store_tokens`` callback)."""
        with open(self.auth_file, 'w') as f:
            f.write(access_token + "\n")
            f.write(refresh_token + "\n")

    def authorize(self):
        """Load cached tokens, or run the interactive browser consent flow."""
        if os.path.exists(self.auth_file):
            access_token, refresh_token = self.get_tokens_from_file()
            # NOTE(review): pokes the SDK's private fields; boxsdk's OAuth2
            # constructor also accepts access_token/refresh_token kwargs.
            self.oauth._access_token = access_token
            self.oauth._refresh_token = refresh_token
        else:
            # Spin up a one-shot local HTTP server to catch the redirect
            # carrying the authorization code.
            httpd = ClientRedirectServer(("localhost", 8888), ClientRedirectHandler)
            webbrowser.open(self.get_auth_url())
            httpd.handle_request()
            self.oauth.authenticate(httpd.query_params['code'])
        self.build()

    def is_connected(self):
        """Probe the API with a cheap ``users/me`` call; True on success."""
        ## TODO: There must be a better way to check connection, with self.oauth ??
        try:
            self.client.user(user_id='me').get()
        # BUG FIX: narrowed from a bare ``except:`` so KeyboardInterrupt /
        # SystemExit propagate instead of being reported as "not connected".
        except Exception:
            return False
        return True

    def download_file(self, file_id, path_to_download):
        """Write the remote file's bytes to ``path_to_download``."""
        with open(path_to_download, "wb") as f:
            f.write(self.client.file(file_id=file_id).content())

    def upload_new_file(self, local_file_path):
        """Upload a local file under a random remote name; return its Box id."""
        new_file = self.client.folder(folder_id=self.root_folder_id).upload(
            local_file_path, file_name=generate_random_string())
        return new_file.get()['id']

    def delete_file(self, file_id):
        """Permanently delete the remote file."""
        self.client.file(file_id=file_id).delete()

    def update_file(self, local_file_path, file_id):
        """Replace the remote file's contents with the local file."""
        self.client.file(file_id=file_id).update_contents(local_file_path)
from boxsdk import Client
from boxsdk import JWTAuth
from flask import Flask, render_template

import config

# Configure JWT (server-to-server) auth for the Box service account.
auth = JWTAuth(client_id=config.client_id,
               client_secret=config.client_secret,
               enterprise_id=config.enterprise_id,
               jwt_key_id=config.jwt_key_id,
               rsa_private_key_file_sys_path=config.private_key_path,
               rsa_private_key_passphrase=config.private_key_passphrase)

# Prime the connection once at startup. ``access_token`` is kept as a module
# attribute for backward compatibility with any importers.
access_token = auth.authenticate_instance()
client = Client(auth)

# Render HTML using Flask
app = Flask(__name__)


@app.route('/')
def hello():
    """Render the template with a *current* access token.

    BUG FIX: the original always served the token fetched at startup; JWT
    access tokens expire, so a long-running server eventually handed the
    template a dead token. Read the live token on every request and
    re-authenticate if the SDK has none cached.
    """
    token = auth.access_token or auth.authenticate_instance()
    return render_template('template.html', fid=config.fid, at=token)


if __name__ == '__main__':
    app.run()
def __init__(self):
    """Build a service-account Box client plus impersonation bookkeeping.

    Relies on the module-level ``auth`` and ``as_users`` objects.
    """
    self.logger = logging.getLogger(__name__)
    # The client is timestamped at creation so callers can judge staleness.
    self.client = Client(auth)
    self.client_created_time = datetime.datetime.now()
    self.client_creator = self.client.user()
    self.as_users = as_users
if item.type == 'folder' and item.sync_state in [ 'synced', 'partially_synced' ]: tree[item.name] = (item, get_tree(item.id)) elif item.type == 'file': tree[item.name] = (item, None) return tree def sync(): mkdir(_sync_dir) _sync_sub(_sync_dir, get_tree('0')) def _sync_sub(dir_path, tree): for name, (item, tree) in tree.items(): path = dir_path + '/' + name mtime = int(iso8601.parse_date(item.get().modified_at).timestamp()) if item.type == 'folder': mkdir(path) _sync_sub(path, tree) else: if not os.path.exists(path) or os.path.getmtime(path) < mtime: with open(path, 'wb') as f: f.write(item.content()) count += 1 os.utime(path, (mtime, mtime)) client = Client(authenticate())