def toggle(self, to_state):
    """Toggle public access to a fiscal datapackage.

    Change the status of a fiscal data package from public to private or
    vice-versa. If something goes wrong while changing the status, a
    :class:`upload.ToggleError` is raised.

    :param to_state: the desired state, either "public" or "private"
    :return: the new state of the package, i.e. "public" or "private"
    """
    publish = to_state == 'public'
    package_id = self.user.id + ':' + self.name
    query = dict(
        jwt=self.user.permissions['os.datastore']['token'],
        id=package_id,
        publish=publish
    )
    answer = handle(toggle_publish(params=query))

    if not answer['success']:
        message = 'Unable to toggle datapackage to %s'
        raise ToggleError(message % to_state)

    log.info('%s is now %s', package_id, to_state)
    return to_state
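# A short usage sketch for toggle(), assuming `package` is an existing
# FiscalDataPackage instance that has already been uploaded. The variable
# name is an assumption for illustration only.
package.toggle('public')     # make the datapackage publicly visible
package.toggle('private')    # or hide it again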
def validate(self, raise_error=True):
    """Validate a datapackage schema.

    :param raise_error: raise an error on failure (default: True)
    :raise: :class:`ValidationError` if the schema is invalid
    :return: an empty list on success, or a list of error messages
             (only when `raise_error` is False)
    """
    if raise_error:
        super(FiscalDataPackage, self).validate()
    else:
        try:
            super(FiscalDataPackage, self).validate()
            message = '%s (%s) is a valid fiscal datapackage descriptor'
            log.info(message, self, self.path)
            return []

        except ValidationError:
            messages = []
            for error in self.iter_errors():
                messages.append(error.message)
                log.warn('%s ValidationError: %s', self, error.message)
            return messages
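# A usage sketch for validate(), assuming `package` is an existing
# FiscalDataPackage instance. With the default raise_error=True an invalid
# descriptor raises ValidationError; with raise_error=False the error
# messages are returned instead.
errors = package.validate(raise_error=False)
if errors:
    for message in errors:
        print(message)
else:
    print('The descriptor is valid')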
def upload(self, publish=False):
    """Upload a fiscal datapackage to Open-Spending.

    It does this in 3 steps:
        * request upload urls for AWS S3 storage
        * upload all files to the owner's S3 bucket
        * insert the data into the Open-Spending datastore (PostgreSQL)

    By default, newly uploaded packages are kept private, but you can
    change that with the `publish` flag. Also note that uploading the same
    fiscal data package again overwrites the previous version.

    For now, the only valid datafile format is CSV.

    :param publish: toggle the datapackage to "public" after upload
    :return: the url of the fiscal datapackage on Open-Spending
    """
    self.validate()
    log.info('Starting upload process for %s', self)

    for s3_target in self._request_s3_upload():
        self._push_to_s3(*s3_target)

    self._handle_promises()
    self._insert_into_datastore()

    while self.in_progress:
        sleep(POLL_PERIOD)

    if publish:
        self.toggle('public')

    return self.url
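# A minimal end-to-end sketch of the upload flow described above. The import
# path, constructor signature and descriptor filename are assumptions made
# for illustration; adapt them to your own setup.
from gobble.fiscal import FiscalDataPackage

package = FiscalDataPackage('budget/datapackage.json')  # hypothetical path
package.validate()                   # fail fast on a broken descriptor
url = package.upload(publish=True)   # S3 upload, datastore insert, toggle public
print('Your fiscal datapackage is live at %s' % url)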
def do_GET(self):
    log.debug('Callback received: %s', self.path)
    # Strip the leading part of the callback path to recover the raw token.
    token = self.path[6:]

    if token:
        with io.open(TOKEN_FILE, 'w+', encoding='utf-8') as file:
            file.write(dumps({'token': token}, ensure_ascii=False))
        log.info('Saved your token in %s', TOKEN_FILE)
def request_new_token():
    localhost = 'http://%s:%s' % settings.LOCALHOST
    query = dict(next=localhost, callback_url=oauth_callback.url)
    response = authenticate_user(query)
    authorization = handle(response)

    # Listen for the OAuth callback in a background thread, send the user to
    # the authorization page, then wait until the token has been received.
    new_thread = Thread(target=listen_for_token)
    new_thread.start()
    prompt_user(authorization)
    new_thread.join()

    log.info('Well done, you now have a token!')
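# An illustrative sketch of how the callback handler above could be served
# locally while waiting for the token. The handler class, port and response
# body are assumptions (Python 3 standard-library names shown), not the
# library's actual wiring; the real handler slices the token out of
# self.path and persists it to TOKEN_FILE, as in do_GET() above.
from http.server import BaseHTTPRequestHandler, HTTPServer

class CallbackHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Token received, you can close this tab.')

def listen_for_token_once(host='127.0.0.1', port=8000):
    server = HTTPServer((host, port), CallbackHandler)
    server.handle_request()   # serve exactly one request, then return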
def _request_s3_upload(self):
    """Request AWS S3 upload urls for all files."""
    response = request_upload(params=dict(jwt=self.user.token),
                              json=self.filedata)
    files = handle(response)['filedata']

    for path, info in files.items():
        message = '%s is ready for upload to %s'
        log.info(message, path, info['upload_url'])
        query = {k: v[0] for k, v in info['upload_query'].items()}
        yield (info['upload_url'], path, query,
               self._get_header(path, info['type']))
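# _request_s3_upload() yields (upload_url, path, query, headers) tuples. A
# hypothetical sketch of the consuming _push_to_s3 step, assuming the
# `requests` library; this is an illustration, not the library's actual
# implementation.
import io
import requests

def _push_to_s3(url, path, query, headers):
    with io.open(path, mode='rb') as datafile:
        response = requests.put(url, params=query, headers=headers,
                                data=datafile)
    response.raise_for_status()
    return response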
def _insert_into_datastore(self):
    """Transfer datafiles from S3 into the postgres datastore.

    :return: the url of the fiscal datapackage on Open-Spending
    """
    query = dict(jwt=self.user.token, datapackage=self._descriptor_s3_url)
    response = upload_package(params=query)
    handle(response)

    log.info('Congratulations, %s was uploaded successfully!', self)
    log.info('You can find your fiscal datapackage here: %s', self.url)

    return self.url
def _request_authentication(self):
    """Ask Open-Spending if the token is valid."""
    query = dict(jwt=self.token)
    response = authenticate_user(params=query)
    authentication = handle(response)

    if not authentication['authenticated']:
        message = 'Token has expired: request a new one'
        log.error(message)
        raise InvalidToken(message)

    name = authentication['profile']['name']
    log.info('Hello %s! You are logged into Open-Spending', name)

    return authentication
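# A hedged sketch of how the two authentication helpers can work together:
# verify the stored token, and fall back to the OAuth flow if it has
# expired. The `user` object and the exception handling shown here are
# assumptions for illustration.
try:
    user._request_authentication()
except InvalidToken:
    request_new_token()   # walks the user through the OAuth flow again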
def validate(self, raise_on_error=True, schema_only=True):
    """Validate a datapackage schema.

    By default, only the data-package schema is validated. To validate the
    data files too, set the `schema_only` flag to `False`. The method fails
    if an error is found, unless the `raise_on_error` flag is explicitly
    set to `False`.

    :param raise_on_error: raise an error on failure (default: True)
    :param schema_only: only validate the schema (default: True)
    :raise: :class:`ValidationError` if the schema is invalid
    :return: a list of error messages or an empty list
    """
    messages = []
    profile = Profile('fiscal-data-package')

    if raise_on_error:
        profile.validate(self.descriptor)
    else:
        try:
            profile.validate(self.descriptor)
            message = '%s (%s) is a valid fiscal data-package schema'
            log.info(message, self.path, self)

        except ValidationError as e:
            for error in e.errors:
                message = 'SCHEMA ERROR in %s: %s'
                args = self.path, error
                messages.append(message % args)
                log.warn(message, *args)

    if messages:
        messages.append('Aborting data validation due to invalid schema')
        return messages

    if not schema_only:
        return self._validate_data(raise_on_error)
    else:
        return messages
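# A usage sketch for the profile-based validate() above, assuming `package`
# is an existing FiscalDataPackage instance. Schema problems are collected
# instead of raised, and the data files are checked too when
# schema_only=False.
report = package.validate(raise_on_error=False, schema_only=False)
for line in report:
    print(line)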
def _s3_callback(_, response):
    handle(response)
    log.info('Successful S3 upload: %s', response.url)