def in_progress(self):
    """Return True while the upload is still in progress.

    Queries the upload-status endpoint for this datapackage, logs the
    reported progress, and returns False once the status is 'done'.
    (The original docstring said the opposite of what the code does.)
    """
    query = dict(datapackage=self._descriptor_s3_url)
    answer = upload_status(params=query).json()
    args = self, answer['status'], answer['progress'], len(self)
    log.debug('%s is loading (%s) %s/%s', *args)
    return answer['status'] != 'done'
def do_GET(self):
    """Handle the OAuth callback and persist the token it carries."""
    log.debug('Callback received: %s', self.path)
    # Drop the first 6 characters of the path; presumably a fixed
    # query prefix such as '/?jwt=' — TODO confirm against the caller.
    received_token = self.path[6:]
    if not received_token:
        return
    payload = dumps({'token': received_token}, ensure_ascii=False)
    with io.open(TOKEN_FILE, 'w+', encoding='utf-8') as cache_file:
        cache_file.write(payload)
    log.info('Saved your token in %s', TOKEN_FILE)
def _uncache(cls, key):
    """Load the cached JSON value for *key*, or None when no cache file exists."""
    cached_path = join(settings.USER_DIR, key + '.json')
    try:
        handle = io.open(cached_path)
    except FileNotFoundError:
        return None
    with handle:
        payload = loads(handle.read())
    log.debug('Your %s is %s', key, payload)
    return payload
def _uncache_token():
    """Return the API token, preferring the environment over the file cache."""
    env_token = os.environ.get(TOKEN_ENV_VAR)
    if env_token is not None:
        return env_token
    with io.open(TOKEN_FILE) as cache_file:
        contents = loads(cache_file.read())
    token = contents['token']
    log.debug('Your token is %s', token)
    return token
def _uncache_token():
    """Return the cached JWT, preferring the environment variable when set."""
    if TOKEN_ENV_VAR in os.environ:
        return os.environ[TOKEN_ENV_VAR]
    with io.open(TOKEN_FILE) as stored:
        decoded = loads(stored.read())
    jwt = decoded['jwt']
    log.debug('Your token is %s', jwt)
    return jwt
def create_user():
    """Obtain the new user a token.

    Open-Spending uses Google OAuth2 for authentication, so you will
    need a valid Google email address to make this work. When you
    click on the link provided, you will be redirected to your
    browser to sign up. That's it.
    """

    def install_user_folder():
        # Create the per-user settings folder; fine if it already exists.
        try:
            mkdir(settings.USER_DIR)
        except FileExistsError:
            pass

    def request_new_token():
        localhost = 'http://%s:%s' % settings.LOCALHOST
        query = dict(next=localhost, callback_url=oauth_callback.url)
        response = authenticate_user(query)
        authorization = handle(response)
        # BUG FIX: the original called Thread.run(), which executes the
        # target synchronously in *this* thread (blocking forever inside
        # serve_forever before the prompt was ever shown) and returns
        # None, so the subsequent .join() would raise AttributeError.
        # Thread.start() runs the listener concurrently instead.
        listener = Thread(target=listen_for_token)
        listener.start()
        prompt_user(authorization)
        # NOTE(review): serve_forever() only returns if the server is
        # shut down elsewhere; confirm LocalHost stops the server after
        # caching the token, otherwise this join blocks indefinitely.
        listener.join()
        log.info('Well done, you now have a token!')

    def prompt_user(authorization):
        # The sign-up link comes from the OAuth provider payload.
        sign_up_url = authorization['providers']['google']['url']
        message = ('Please open a new private browsing window '
                   'and paste this link to get a token: %s')
        log.critical(message, sign_up_url)

    def listen_for_token():
        # Serve HTTP locally so the OAuth callback can reach us.
        server = HTTPServer(settings.LOCALHOST, LocalHost)
        server.serve_forever()

    def cache(info_, file):
        # Persist one piece of user info as pretty-printed JSON.
        with io.open(file, 'w+', encoding='utf-8') as json:
            json.write(dumps(info_, ensure_ascii=False, indent=4))

    install_user_folder()
    request_new_token()

    user_ = User()
    cached_info = (
        (user_.token, TOKEN_FILE),
        (user_.permissions, PERMISSIONS_FILE),
        (user_.authentication, AUTHENTICATION_FILE)
    )
    for info in cached_info:
        cache(*info)
        log.debug('Cached %s to %s', *info)

    return user_
def create_user():
    """Obtain the new user a token.

    Open-Spending uses Google OAuth2 for authentication, so you will
    need a valid Google email address to make this work. When you
    click on the link provided, you will be redirected to your
    browser to sign up. That's it.
    """

    def install_user_folder():
        # Create the per-user settings folder; fine if it already exists.
        try:
            mkdir(settings.USER_DIR)
        except FileExistsError:
            pass

    def request_new_token():
        localhost = 'http://%s:%s' % settings.LOCALHOST
        query = dict(next=localhost, callback_url=oauth_callback.url)
        response = authenticate_user(query)
        authorization = handle(response)
        # BUG FIX: the original called Thread.run(), which executes the
        # target synchronously in *this* thread (blocking forever inside
        # serve_forever before the prompt was ever shown) and returns
        # None, so the subsequent .join() would raise AttributeError.
        # Thread.start() runs the listener concurrently instead.
        listener = Thread(target=listen_for_token)
        listener.start()
        prompt_user(authorization)
        # NOTE(review): serve_forever() only returns if the server is
        # shut down elsewhere; confirm LocalHost stops the server after
        # caching the token, otherwise this join blocks indefinitely.
        listener.join()
        log.info('Well done, you now have a token!')

    def prompt_user(authorization):
        # The sign-up link comes from the OAuth provider payload.
        sign_up_url = authorization['providers']['google']['url']
        message = ('Please open a new private browsing window '
                   'and paste this link to get a token: %s')
        log.critical(message, sign_up_url)

    def listen_for_token():
        # Serve HTTP locally so the OAuth callback can reach us.
        server = HTTPServer(settings.LOCALHOST, LocalHost)
        server.serve_forever()

    def cache(info_, file):
        # Persist one piece of user info as pretty-printed JSON.
        with io.open(file, 'w+', encoding='utf-8') as json:
            json.write(dumps(info_, ensure_ascii=False, indent=4))

    install_user_folder()
    request_new_token()

    user_ = User()
    cached_info = ((user_.token, TOKEN_FILE),
                   (user_.permissions, PERMISSIONS_FILE),
                   (user_.authentication, AUTHENTICATION_FILE))
    for info in cached_info:
        cache(*info)
        log.debug('Cached %s to %s', *info)

    return user_
def _log(self):
    """Dump a verbose trace of the request/response pair to the debug log."""
    status = self.response.status_code
    reason = self.response.reason
    payload = to_json(self.response)
    banner = ' [%s] %s - %s (%s) '

    def marker(phase):
        # Centered, asterisk-padded banner marking a transaction edge.
        return '{:*^100}'.format(banner % (status, reason, self.endpoint, phase))

    log.debug(marker('begin'))
    for template, value in (
            ('Request endpoint: %s', self.endpoint.url),
            ('Request time: %s', self.response.elapsed),
            ('Request parameters: %s', self.params),
            ('Request payload: %s', self.request_payload),
            ('Request headers: %s', self.headers),
            ('Response headers: %s', self.response.headers),
            ('Response payload: %s', payload),
            ('Response cookies: %s', self.response.cookies),
            ('Request full URL: %s', self.url),
    ):
        log.debug(template, value)
    log.debug(dumps(payload, ensure_ascii=False,
                    indent=4 if settings.EXPANDED_LOG_STYLE else None))
    log.debug(marker('end'))
def in_progress(self):
    """Return True while the upload is still in progress.

    Polls the upload-status endpoint. A response that is not valid
    JSON is treated as "still in progress". Raises UploadError when
    the reported status is 'fail'. (The original docstring said the
    opposite of what the code does.)
    """
    query = dict(datapackage=self._descriptor_s3_url)
    try:
        answer = loads(upload_status(params=query).text)
    except JSONDecodeError:
        # The status endpoint may not have a JSON body ready yet;
        # assume the upload is still running.
        return True
    args = self, answer['status'], answer['progress'], len(self)
    log.debug('%s is loading (%s) %s/%s', *args)
    if answer['status'] == 'fail':
        raise UploadError(answer.get('error'))
    return answer['status'] not in {'done', 'fail'}
def summarize(feedback_, path_):
    """Build a three-part, human-readable summary of a GoodTables report."""
    meta = feedback_['meta']
    counts = dict(
        bad_rows=meta['bad_row_count'],
        total_rows=meta['row_count'],
        bad_cols=meta['bad_column_count'],
        total_cols=len(meta['columns']),
    )
    intro = 'GoodTables has detected some errors in %s.' % path_
    summary = (
        'There are {bad_rows} (out of {total_rows}) bad rows '
        'and {bad_cols} (out of {total_cols}) bad columns. '
    ).format(**counts)
    hint = 'Please check out the full report: %s.' % REPORT_FILENAME
    log.debug(intro + summary)
    return [intro, summary, hint]
def _push_to_s3(self, url, path, query, headers):
    """Queue one data file for asynchronous upload to the S3 bucket.

    Both the open stream and the pending future are kept on the
    instance so they can be cleaned up / awaited later.
    """
    log.debug('Started uploading %s to %s', path, url)
    log.debug('Headers: %s', headers)
    log.debug('Query parameters: %s', query)
    # The stream is deliberately left open: the session reads from it
    # in the background, and it is tracked in self._streams.
    data_stream = io.open(join(self.base_path, path), mode='rb')
    pending = self._session.put(url,
                                headers=headers,
                                data=data_stream,
                                params=query,
                                background_callback=self._s3_callback)
    self._streams.append(data_stream)
    self._futures.append(pending)
def uncache(cls, key):
    """Read and return the cached JSON value stored for *key*.

    A missing cache file is not caught here, so FileNotFoundError
    propagates to the caller.
    """
    with io.open(join(settings.USER_DIR, key + '.json')) as cached:
        value = loads(cached.read())
    log.debug('Your %s is %s', key, value)
    return value
def _save(self):
    """Serialize this snapshot as JSON to its designated filepath."""
    serialized = dumps(self, ensure_ascii=False)
    with io.open(self._filepath, 'w+', encoding='utf-8') as snapshot:
        snapshot.write(serialized)
    log.debug('Saved request + response to %s', self._filepath)