def add_coverage_data(coverage_id, coverage, environment_type):
    """Attach an uploaded data file to a coverage environment.

    Expects the upload in ``request.files['file']``. The file is written
    to a temporary directory so ``data_handler.type_of_data`` can sniff
    its type. NTFS files (type "fusio") are saved on the coverage and
    pushed to Tyr; any other valid type is stored in GridFS and sent to
    Tyr asynchronously.

    :raises ObjectNotFound: unknown coverage or environment.
    :raises InvalidArguments: missing file, bad form param, or invalid file type.
    """
    if coverage is None:
        raise ObjectNotFound("Coverage {} not found.".format(coverage_id))
    if environment_type not in coverage.environments:
        # Fixed message: original read "Environment{}'" (missing opening quote).
        raise ObjectNotFound("Environment '{}' not found.".format(environment_type))
    if not request.files:
        raise InvalidArguments('No file provided.')
    # `request.files` is known truthy here, so test only the param name.
    if 'file' not in request.files:
        raise InvalidArguments('File provided with bad param ("file" param expected).')
    content = request.files['file']
    logger = logging.getLogger(__name__)
    logger.info('content received: %s', content)
    with tempfile.TemporaryDirectory() as tmpdirname:
        tmp_file = os.path.join(tmpdirname, content.filename)
        content.save(tmp_file)
        # TODO: improve this function so we don't have to write the file locally first
        file_type, file_name = data_handler.type_of_data(tmp_file)
        if file_type in [None, "tmp"]:
            logger.warning('invalid file provided: %s', content.filename)
            raise InvalidArguments('Invalid file provided: {}.'.format(content.filename))
        with open(tmp_file, 'rb') as file:
            if file_type == 'fusio':
                # ntfs is called fusio in type_of_data
                coverage.save_ntfs(environment_type, file)
                tasks.send_ntfs_to_tyr.delay(coverage_id, environment_type)
            else:
                # we need to temporarily save the file before sending it
                file_id = models.save_file_in_gridfs(file, filename=content.filename)
                tasks.send_file_to_tyr_and_discard.delay(coverage_id, environment_type, file_id)
    return {'message': 'Valid {} file provided : {}'.format(file_type, file_name)}, 200
def post(self, coverage_id):
    """Link an existing data source (id given in the request body) to a coverage.

    :raises ObjectNotFound: unknown coverage or data source.
    :raises InvalidArguments: missing 'id' attribute in the body.
    :raises DuplicateEntry: data source already attached to the coverage.
    :raises InternalServerError: persistence failure while updating the coverage.
    """
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound("Coverage {} not found.".format(coverage_id))
    if 'id' not in request.json:
        raise InvalidArguments('Missing data_source_id attribute in request body.')
    data_source_id = request.json['id']
    data_sources = models.DataSource.get(data_source_id=data_source_id)
    # Guard against both None and an empty result list: the original only
    # checked `is None` and then indexed [0], which could raise IndexError
    # when the lookup returned an empty list.
    if not data_sources:
        raise ObjectNotFound("Data source {} not found.".format(data_source_id))
    if coverage.has_data_source(data_sources[0]):
        raise DuplicateEntry('Data source id {} already exists in coverage {}.'.format(
            data_source_id, coverage_id))
    coverage.add_data_source(data_sources[0])
    try:
        coverage = models.Coverage.update(coverage_id, {"data_sources": coverage.data_sources})
    except (PyMongoError, ValueError):
        raise InternalServerError('Impossible to update coverage {} with data_source {}.'.format(
            coverage_id, data_source_id))
    return {'coverages': schema.CoverageSchema().dump([coverage], many=True).data}, 200
def get(self, coverage_id, environment_type, data_type):
    """Download the current NTFS file of a coverage environment as a zip.

    Only the 'ntfs' data type is supported.

    :raises InvalidArguments: unsupported data type.
    :raises ObjectNotFound: unknown coverage or environment.
    """
    available_data_types = ['ntfs']
    if data_type.lower() not in available_data_types:
        raise InvalidArguments('Bad data type {} (expected formats: {}).'
                               .format(data_type, ','.join(available_data_types)))
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound("Coverage {} not found.".format(coverage_id))
    if environment_type not in coverage.environments:
        # Fixed message: original read "Environment{}'" (missing opening quote).
        raise ObjectNotFound("Environment '{}' not found.".format(environment_type))
    ntfs_file_id = coverage.environments[environment_type].current_ntfs_id
    ntfs_file = models.get_file_from_gridfs(ntfs_file_id)
    return flask.send_file(ntfs_file, mimetype='application/zip')
def patch(self, contributor_id, data_source_id=None):
    """Partially update a data source of a contributor.

    :raises InvalidArguments: schema validation failure, attempted id change,
        or a ValueError from the model layer.
    :raises InternalServerError: persistence failure.
    """
    ds = models.DataSource.get(contributor_id, data_source_id)
    if len(ds) != 1:
        abort(404)
    schema_data_source = schema.DataSourceSchema(partial=True)
    errors = schema_data_source.validate(request.json, partial=True)
    if errors:
        # Validation failures are a client-input problem, not a missing
        # object: raise InvalidArguments with the error details, as the
        # sibling coverage/contributor patch handlers do. (The original
        # raised ObjectNotFound with a misleading "not found" message.)
        raise InvalidArguments(errors)
    if 'id' in request.json and ds[0].id != request.json['id']:
        raise InvalidArguments('The modification of the id is not possible')
    try:
        data_sources = models.DataSource.update(contributor_id, data_source_id, request.json)
    except ValueError as e:
        raise InvalidArguments(str(e))
    except PyMongoError:
        raise InternalServerError('impossible to update contributor with dataset {}'.format(
            request.json))
    return {'data_sources': schema.DataSourceSchema(many=True).dump(data_sources).data}, 200
def delete(self, coverage_id, data_source_id):
    """Detach a data source from a coverage.

    :raises ObjectNotFound: unknown coverage or data source id.
    :raises InternalServerError: persistence failure during removal.
    :returns: empty payload with HTTP 204 on success.
    """
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound('Unknown coverage id "{}".'.format(coverage_id))
    if data_source_id not in coverage.data_sources:
        raise ObjectNotFound('Unknown data source id "{}" attribute in uri.'.format(
            data_source_id))
    try:
        coverage.remove_data_source(data_source_id)
    except (PyMongoError, ValueError):
        # Give the error a message, consistent with the other handlers
        # (the original raised the bare exception class with no detail).
        raise InternalServerError('Impossible to remove data source {} from coverage {}.'.format(
            data_source_id, coverage_id))
    return {'data_sources': None}, 204
def patch(self, contributor_id, preprocess_id):
    """Partially update a preprocess of a contributor.

    :raises InvalidArguments: schema/preprocess validation failure.
    :raises InternalServerError: persistence failure.
    """
    ds = models.PreProcess.get(contributor_id, preprocess_id)
    if len(ds) != 1:
        abort(404)
    schema_preprocess = schema.PreProcessSchema(partial=True)
    errors = schema_preprocess.validate(request.json, partial=True)
    if errors:
        # Validation failures are a client-input problem, not a missing
        # object: raise InvalidArguments with the error details, as the
        # sibling patch handlers do. (The original raised ObjectNotFound
        # with a misleading "not found" message.)
        raise InvalidArguments(errors)
    p = request.json
    try:
        validate_preprocesses_or_raise([p])
        preprocesses = models.PreProcess.update(contributor_id, preprocess_id, p)
    except ValueError as e:
        raise InvalidArguments(str(e))
    except PyMongoError:
        raise InternalServerError('impossible to update contributor with preprocess {}'.format(p))
    return {'preprocesses': schema.PreProcessSchema(many=True).dump(preprocesses).data}, 200
def post(self, coverage_id):
    """Upload a grid-calendars zip archive for a coverage.

    The archive is checked for required files and compliant headers,
    saved on the coverage, and a navitia update is triggered for every
    environment that has a current NTFS.

    :raises ObjectNotFound: unknown coverage.
    :raises InvalidArguments: missing/bad archive or non-compliant content.
    """
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound("Coverage {} not found.".format(coverage_id))
    if not request.files:
        raise InvalidArguments('The archive is missing.')
    # Consistent with the other upload endpoints: a clear 400 instead of
    # an unhandled KeyError when the form param name is wrong.
    if 'file' not in request.files:
        raise InvalidArguments('File provided with bad param ("file" param expected).')
    content = request.files['file']
    logger = logging.getLogger(__name__)
    logger.info('content received: %s', content)
    if not zipfile.is_zipfile(content):
        raise InvalidArguments('Invalid ZIP.')
    zip_file = zipfile.ZipFile(content)
    valid_file, missing_files = is_valid_file(zip_file)
    if not valid_file:
        # ', '.join keeps the file list readable (the original joined
        # with '' and ran the names together).
        raise InvalidArguments('File(s) missing : {}.'.format(', '.join(missing_files)))
    # check files header
    valid_header, invalid_files = check_files_header(zip_file)
    if not valid_header:
        raise InvalidArguments('Non-compliant file(s) : {}.'.format(', '.join(invalid_files)))
    # The zip checks consumed the stream; rewind before persisting it.
    content.stream.seek(0)
    coverage.save_grid_calendars(content)
    zip_file.close()
    # run the update of navitia in background
    for k, env in coverage.environments.items():
        if env.current_ntfs_id:
            # TODO: use a chain later
            tasks.send_ntfs_to_tyr.delay(coverage.id, k)
    return {'message': 'OK'}, 200
def patch(self, coverage_id):
    """Partially update a coverage from the JSON request body.

    :raises ObjectNotFound: unknown coverage.
    :raises InvalidArguments: attempted id change or schema validation failure.
    :raises InternalServerError: persistence failure.
    """
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound("Coverage '{}' not found.".format(coverage_id))
    payload = request.json
    if 'id' in payload and coverage.id != payload['id']:
        raise InvalidArguments('The modification of the id is not possible')
    validation_errors = schema.CoverageSchema(partial=True).validate(payload, partial=True)
    if validation_errors:
        raise InvalidArguments(validation_errors)
    logging.debug(payload)
    try:
        coverage = models.Coverage.update(coverage_id, payload)
    except PyMongoError:
        raise InternalServerError('Impossible to update coverage with dataset {}'.format(payload))
    dumped = schema.CoverageSchema().dump([coverage], many=True)
    return {'coverages': dumped.data}, 200
def post(self, contributor_id):
    """Launch an export job for the given contributor and return it.

    :raises ObjectNotFound: unknown contributor.
    :returns: the created job with HTTP 201.
    """
    contributor = Contributor.get(contributor_id)
    if not contributor:
        raise ObjectNotFound('Contributor not found: {}'.format(contributor_id))
    export_job = self._export(contributor)
    serialized = JobSchema(strict=True).dump(export_job)
    return {'job': serialized.data}, 201
def delete(self, contributor_id, data_source_id=None):
    """Delete data source(s) of a contributor.

    :raises ObjectNotFound: nothing was deleted.
    :raises InvalidArguments: invalid arguments reported by the model layer.
    :returns: empty list with HTTP 204.
    """
    try:
        deleted_count = models.DataSource.delete(contributor_id, data_source_id)
        if deleted_count == 0:
            raise ObjectNotFound("Data source '{}' not found.".format(contributor_id))
    except ValueError as e:
        raise InvalidArguments(str(e))
    return {'data_sources': []}, 204
def delete(self, contributor_id, preprocess_id):
    """Delete a preprocess of a contributor.

    :raises ObjectNotFound: nothing was deleted.
    :raises InvalidArguments: invalid arguments reported by the model layer.
    :returns: empty list with HTTP 204.
    """
    try:
        deleted_count = models.PreProcess.delete(contributor_id, preprocess_id)
        if deleted_count == 0:
            raise ObjectNotFound("Preprocess '{}' not found.".format(contributor_id))
    except ValueError as e:
        raise InvalidArguments(str(e))
    return {'preprocesses': []}, 204
def get(self, job_id=None):
    """Return a single job (when job_id is given) or all jobs.

    :raises ObjectNotFound: job_id given but no matching job exists.
    """
    jobs = models.Job.get(job_id)
    if not job_id:
        # No id: list everything.
        return {'jobs': JobSchema(many=True, strict=True).dump(jobs).data}, 200
    if not jobs:
        raise ObjectNotFound('Job not found: {}'.format(job_id))
    single = JobSchema(many=False, strict=True).dump(jobs)
    return {'jobs': [single.data]}, 200
def get(self, contributor_id, data_source_id=None):
    """Return data source(s) of a contributor.

    An explicit data_source_id that matches nothing yields a 404; listing
    all data sources of a contributor may legitimately return an empty list.

    :raises ObjectNotFound: explicit data_source_id not found.
    :raises InvalidArguments: invalid arguments reported by the model layer.
    """
    try:
        ds = models.DataSource.get(contributor_id, data_source_id)
        # The original tested `ds is None`, but DataSource.get returns a
        # list (see the sibling patch/delete handlers), so an unknown id
        # silently produced an empty 200. Treat an empty result for an
        # explicit id as "not found", mirroring the preprocess getter.
        if not ds and data_source_id:
            raise ObjectNotFound("Data source '{}' not found.".format(data_source_id))
    except ValueError as e:
        raise InvalidArguments(str(e))
    return {'data_sources': schema.DataSourceSchema(many=True).dump(ds).data}, 200
def get(self, contributor_id, preprocess_id=None):
    """Return preprocess(es) of a contributor.

    :raises ObjectNotFound: explicit preprocess_id not found.
    :raises InvalidArguments: invalid arguments reported by the model layer.
    """
    try:
        preprocesses = models.PreProcess.get(contributor_id, preprocess_id)
        if preprocess_id and not preprocesses:
            raise ObjectNotFound("Preprocess '{}' not found.".format(preprocess_id))
    except ValueError as e:
        raise InvalidArguments(str(e))
    return {'preprocesses': schema.PreProcessSchema(many=True).dump(preprocesses).data}, 200
def get(self, contributor_id=None):
    """Return one contributor (by id) or the list of all contributors.

    :raises ObjectNotFound: explicit contributor_id not found.
    """
    if not contributor_id:
        # No id: list everything.
        contributors = models.Contributor.all()
        return {'contributors': schema.ContributorSchema(many=True).dump(contributors).data}, 200
    contributor = models.Contributor.get(contributor_id)
    if contributor is None:
        raise ObjectNotFound("Contributor '{}' not found.".format(contributor_id))
    dumped = schema.ContributorSchema().dump(contributor)
    return {'contributors': [dumped.data]}, 200
def get(self, coverage_id=None):
    """Return one coverage (by id) or the list of all coverages.

    :raises ObjectNotFound: explicit coverage_id not found.
    """
    if not coverage_id:
        # No id: list everything.
        coverages = models.Coverage.all()
        return {'coverages': schema.CoverageSchema(many=True).dump(coverages).data}, 200
    coverage = models.Coverage.get(coverage_id)
    if coverage is None:
        raise ObjectNotFound("Coverage '{}' not found.".format(coverage_id))
    dumped = schema.CoverageSchema().dump(coverage)
    return {'coverages': [dumped.data]}, 200
def post(self, data_source_id):
    """Upload a data file for a data source.

    The file is written to a temporary directory so that
    data_handler.type_of_data can sniff its type, then moved into the
    configured input directory via a ".tmp" rename (two-step move so the
    final name only ever appears fully written).

    :raises ObjectNotFound: unknown data source.
    :raises InvalidArguments: missing file, bad form param, or invalid file type.
    """
    datasource = models.DataSource.get(data_source_id=data_source_id)
    if datasource is None:
        raise ObjectNotFound(
            "Data source '{}' not found.".format(data_source_id))
    if not request.files:
        raise InvalidArguments('No file provided.')
    if request.files and 'file' not in request.files:
        raise InvalidArguments(
            'File provided with bad param ("file" param expected).')
    content = request.files['file']
    logger = logging.getLogger(__name__)
    logger.info('content received: %s', content)
    with tempfile.TemporaryDirectory() as tmpdirname:
        tmp_file = os.path.join(tmpdirname, content.filename)
        content.save(tmp_file)
        file_type, file_name = data_handler.type_of_data(tmp_file)
        if file_type in [None, "tmp"]:
            logger.warning('invalid file provided: %s', content.filename)
            raise InvalidArguments('Invalid file provided: {}.'.format(
                content.filename))
        # backup content
        # NOTE(review): `coverage` is not defined anywhere in this method
        # (only `datasource` was fetched above) — this line will raise
        # NameError at runtime. It presumably should read the input_dir
        # from the data source's owning coverage/contributor; needs fixing.
        input_dir = coverage.technical_conf.input_dir
        if not os.path.exists(input_dir):
            os.makedirs(input_dir)
        full_file_name = os.path.join(os.path.realpath(input_dir), content.filename)
        # Write under a ".tmp" suffix first, then rename into place.
        shutil.move(tmp_file, full_file_name + ".tmp")
        shutil.move(full_file_name + ".tmp", full_file_name)
    return {
        'message': 'Valid {} file provided : {}'.format(file_type, file_name)
    }, 200
def patch(self, contributor_id):
    """Partially update a contributor, merging data_sources by id.

    Existing data sources named in the payload are updated field-by-field;
    payload entries with unknown (or generated) ids are appended as new
    data sources. The "id" and "data_prefix" fields are immutable.

    :raises ObjectNotFound: unknown contributor.
    :raises InvalidArguments: schema validation failure or attempted change
        of an immutable field.
    :raises InternalServerError: persistence failure.
    """
    # "data_prefix" field is not modifiable, impacts of the modification
    # need to be checked. The previous value needs to be checked for an error
    contributor = models.Contributor.get(contributor_id)
    if contributor is None:
        raise ObjectNotFound(
            "Contributor '{}' not found.".format(contributor_id))
    request_data = request.json
    # checking errors before updating PATCH data
    # Assign ids to new data sources up-front so the schema validates them
    # and the merge below can match on 'id' uniformly.
    for ds in request_data.get('data_sources', []):
        if not ds.get('id', None):
            ds['id'] = str(uuid.uuid4())
    schema_contributor = schema.ContributorSchema(partial=True)
    errors = schema_contributor.validate(request_data, partial=True)
    if errors:
        raise InvalidArguments(errors)
    if 'data_prefix' in request_data and contributor.data_prefix != request_data[
            'data_prefix']:
        raise InvalidArguments(
            'The modification of the data_prefix is not possible ({} => {})'
            .format(contributor.data_prefix, request_data['data_prefix']))
    if 'id' in request_data and contributor.id != request_data['id']:
        raise InvalidArguments(
            'The modification of the id is not possible')
    existing_ds_id = [d.id for d in contributor.data_sources]
    logging.getLogger(__name__).debug(
        "PATCH : list of existing data_sources ids %s", str(existing_ds_id))
    # constructing PATCH data
    patched_data_sources = None
    if "data_sources" in request_data:
        # Start from the serialized current data sources, then overlay the
        # payload: known ids are merged in place, unknown ids are appended.
        patched_data_sources = schema.DataSourceSchema(many=True).dump(
            contributor.data_sources).data
        for ds in request_data["data_sources"]:
            if ds['id'] in existing_ds_id:
                pds = next(
                    (p for p in patched_data_sources if p['id'] == ds['id']),
                    None)
                if pds:
                    pds.update(ds)
            else:
                # adding a new data_source
                patched_data_sources.append(ds)
    if patched_data_sources:
        # Replace the payload's partial list with the fully merged one so
        # the update below does not drop untouched data sources.
        request_data['data_sources'] = patched_data_sources
    try:
        contributor = models.Contributor.update(contributor_id, request_data)
    except PyMongoError:
        raise InternalServerError(
            'impossible to update contributor with dataset {}'.format(
                request_data))
    return {
        'contributors': [schema.ContributorSchema().dump(contributor).data]
    }, 200
def delete(self, contributor_id):
    """Delete a contributor.

    :raises ObjectNotFound: unknown contributor id.
    :returns: empty body with HTTP 204.
    """
    deleted_count = models.Contributor.delete(contributor_id)
    if deleted_count == 0:
        raise ObjectNotFound(
            "Contributor '{}' not found.".format(contributor_id))
    return "", 204
def delete(self, coverage_id):
    """Delete a coverage.

    :raises ObjectNotFound: unknown coverage id.
    :returns: empty body with HTTP 204.
    """
    deleted_count = models.Coverage.delete(coverage_id)
    if deleted_count == 0:
        raise ObjectNotFound(
            "Coverage '{}' not found.".format(coverage_id))
    return "", 204