def test_uuid_generator_pattern_is_not_restricted_to_uuid4(self):
    """The UUID4 generator's pattern accepts any UUID-shaped string.

    It must match UUIDs whose version/variant nibbles are not valid
    for version 4 (e.g. a version-3 UUID, or invalid variant digits).
    """
    generator = generators.UUID4()
    samples = [
        'fd800e8d-e8e9-3cac-f502-816cbed9bb6c',  # version-3 UUID
        '00000000-0000-5000-a000-000000000000',  # wrong version nibble
        '00000000-0000-4000-e000-000000000000',  # wrong variant nibble
    ]
    for sample in samples:
        self.assertTrue(generator.match(sample))
def __init__(self, *args, **kwargs):
    """Build a fake request pre-populated with test-friendly defaults."""
    super(DummyRequest, self).__init__(*args, **kwargs)
    self.upath_info = '/v0/'

    # Registry carries a copy of the default settings and an id generator.
    self.registry = mock.MagicMock(settings=DEFAULT_SETTINGS.copy())
    self.registry.id_generator = generators.UUID4()

    # Empty request payload / metadata containers.
    self.GET = {}
    self.headers = {}
    self.json = {}
    self.validated = {}
    self.matchdict = {}

    self.errors = cornice_errors.Errors(request=self)

    # Fake an already-authenticated basicauth user.
    self.authenticated_userid = 'bob'
    self.authn_type = 'basicauth'
    self.prefixed_userid = 'basicauth:bob'

    self.response = mock.MagicMock(headers={})

    def route_url(*route_args, **route_kwargs):
        # XXX: refactor DummyRequest to take advantage of `pyramid.testing`
        url_parts = parse_url_overrides(route_kwargs)
        return ''.join(filter(None, url_parts))

    self.route_url = route_url
def test_uuid_generator_pattern_allows_uuid_only(self):
    """A string with extra characters around a UUID must be rejected."""
    generator = generators.UUID4()
    junk_prefixed = 'XXX-00000000-0000-5000-a000-000000000000'
    self.assertFalse(generator.match(junk_prefixed))
class Storage(StorageBase, SQLiteMigratorMixin):
    """Storage backend using SQLite.

    Recommended in testing to provide a permanent storage solution for
    test data.  Recommended in production for apps that need local
    persistent storage with transactions.

    SQLite does not require any username and password.  Hence, make sure
    the database file is stored in a secure location that cannot be
    accessed without application privileges.

    .. note::
        TODO(review): document how to enable this backend in
        configuration, and how the database location URI (presumably a
        path to a local SQLite file) is customized, once settled.
    """

    name = 'storage'
    schema_version = 0
    schema_file = os.path.join(HERE, 'sqlite_support/schema.sql')
    migrations_directory = os.path.join(HERE, 'migrations')
    id_generator = generators.UUID4()

    def __init__(self, client, max_fetch_size=50, readonly=False,
                 *args, **kwargs):
        """Initialize the storage backend.

        :param client: SQLite client to use.
        :param max_fetch_size: maximum number of records to fetch when a
            query returns more than one result.
        :param readonly: boolean indicating whether records are mutable.
        """
        super().__init__(*args, **kwargs)
        self.client = client
        self._max_fetch_size = max_fetch_size
        self._readonly = readonly

    def get_installed_version(self):
        """Return the current version of the schema."""
        return self.schema_version

    def initialize_schema(self, dry_run=False):
        """Create or migrate the database schema."""
        return self.create_or_migrate_schema(dry_run)

    def create(self, collection_id, parent_id, record, id_generator=None,
               id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        """Insert *record* and return the stored copy.

        :raises exceptions.UnicityError: if a record with the same id
            already exists for this parent/collection.
        """
        query = CREATE_QUERY
        table_name = "records"
        id_generator = id_generator or self.id_generator
        # Shallow copy so the caller's dict is never mutated.
        record = {**record}

        if id_field in record:
            # A client-chosen id must not collide with an existing record.
            try:
                existing = self.get(collection_id, parent_id,
                                    record[id_field])
            except exceptions.RecordNotFoundError:
                pass
            else:
                raise exceptions.UnicityError(id_field, existing)
        else:
            record[id_field] = id_generator()

        values_dict = {
            'id': record[id_field],
            'parent_id': parent_id,
            'collection_id': collection_id,
            'data': json.dumps(record),
            'last_modified': time.time(),
            'deleted': False,
        }
        with self.client.connect(commit=True) as conn:
            conn.execute(query.format(table_name=table_name), values_dict)
        # Re-read so the returned record carries the stored timestamp.
        return self.get(collection_id, parent_id, record[id_field])

    def get(self, collection_id, parent_id, object_id,
            id_field=DEFAULT_ID_FIELD,
            modified_field=DEFAULT_MODIFIED_FIELD,
            auth=None):
        """Fetch the record identified by *object_id*.

        :raises exceptions.RecordNotFoundError: if no such row exists.
        """
        query = READ_QUERY
        table_name = "records"
        values_dict = {
            'id': object_id,
            'parent_id': parent_id,
            'collection_id': collection_id,
        }
        with self.client.connect() as conn:
            results = conn.execute(query.format(table_name=table_name),
                                   values_dict)
            # `row` instead of `object`: avoid shadowing the builtin.
            row = results.fetchone()

        if row is None:  # was `== None`; identity test is the idiom
            raise exceptions.RecordNotFoundError(object_id)

        # The data blob is stored as a JSON string; the id and timestamp
        # live in dedicated columns and are merged back in here.
        record = json.loads(row['data'])
        record[id_field] = row['id']
        record[modified_field] = row['last_modified']
        return record

    def update(self, collection_id, parent_id, object_id, record,
               id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        """Write *record* under *object_id* and return the stored copy.

        A record carrying a ``'deleted'`` marker is treated as a
        tombstone write; in that case no record is returned (the row is
        not expected to be readable back through :meth:`get`).
        """
        query = UPDATE_QUERY
        table_name = 'records'
        # Shallow copy so the caller's dict is not mutated (consistent
        # with create()).
        record = {**record}
        # Tombstone write if the 'deleted' key is present (not None).
        deleted = record.get('deleted') is not None

        # id/modified/deleted live in dedicated columns, not in the blob.
        record.pop('deleted', None)
        record.pop(id_field, None)
        record.pop(modified_field, None)

        value_dict = {
            'id': object_id,
            'parent_id': parent_id,
            'collection_id': collection_id,
            'data': json.dumps(record),
            'last_modified': time.time(),
            'deleted': deleted,
        }
        with self.client.connect(commit=True) as conn:
            conn.execute(query.format(table_name=table_name), value_dict)

        try:
            return self.get(collection_id, parent_id, object_id,
                            id_field, modified_field)
        except exceptions.RecordNotFoundError:
            # For a tombstone write the record is legitimately not
            # readable back; for a normal update this is an error.
            if not deleted:
                raise exceptions.RecordNotFoundError(object_id)

    def delete(self, collection_id, parent_id, object_id,
               id_field=DEFAULT_ID_FIELD, with_deleted=True,
               modified_field=DEFAULT_MODIFIED_FIELD,
               deleted_field=DEFAULT_DELETED_FIELD,
               auth=None, last_modified=None):
        """Delete a record, either by tombstone or physically.

        :param with_deleted: when True (default), keep a tombstone row;
            otherwise remove the row from the table entirely.
        :raises exceptions.RecordNotFoundError: if no such row exists.
        """
        if with_deleted:
            # Soft delete: rewrite the record as a tombstone.
            record = self.get(collection_id, parent_id, object_id,
                              id_field=id_field,
                              modified_field=modified_field)
            record[deleted_field] = True
            # update() does not return a record for tombstone writes.
            self.update(collection_id, parent_id, object_id, record,
                        id_field=id_field, modified_field=modified_field)
            # Approximate the new timestamp: the row was written just
            # above with time.time() as its last_modified.
            record[modified_field] = time.time()
            return record

        # Hard delete.  Read the row first so its timestamp can be
        # reported (a DELETE statement yields no rows to fetch), and so
        # a missing record raises RecordNotFoundError early.
        record = self.get(collection_id, parent_id, object_id,
                          id_field=id_field, modified_field=modified_field)
        query = DELETE_QUERY
        table_name = "records"
        values_dict = {
            'id': object_id,
            'parent_id': parent_id,
            'collection_id': collection_id,
        }
        # commit=True: the original opened a read-only connection, so
        # the DELETE was never committed.
        with self.client.connect(commit=True) as conn:
            results = conn.execute(query.format(table_name=table_name),
                                   values_dict)
            # A cursor does not compare to an int; rowcount says whether
            # a row was actually removed.
            if results.rowcount <= 0:
                raise exceptions.RecordNotFoundError(object_id)

        return {modified_field: record[modified_field],
                deleted_field: True}

    def delete_all(self, collection_id, parent_id, filters=None,
                   sorting=None, pagination_rules=None, limit=None,
                   id_field=DEFAULT_ID_FIELD, with_deleted=True,
                   modified_field=DEFAULT_MODIFIED_FIELD,
                   deleted_field=DEFAULT_DELETED_FIELD,
                   auth=None):
        """Delete every matching record of a collection.

        .. warning:: Not implemented yet (both the tombstone and the
           physical-delete paths are TODO).
        """
        if with_deleted:
            # TODO: mark all matching records as deleted (tombstones).
            pass
        else:
            # TODO: actually delete all matching rows from the database.
            pass

    def get_all(self, collection_id, parent_id, filters=None, sorting=None,
                pagination_rules=None, limit=None, include_deleted=False,
                id_field=DEFAULT_ID_FIELD,
                modified_field=DEFAULT_MODIFIED_FIELD,
                deleted_field=DEFAULT_DELETED_FIELD,
                auth=None):
        """Fetch the records of a collection.

        .. warning:: Work in progress: sorting, pagination rules, data
           filters and the actual query execution are not implemented
           yet; like the original, this currently returns nothing.
        """
        query = GET_ALL_QUERY
        table_name = "records"
        values_dict = {'parent_id': parent_id,
                       'collection_id': collection_id}
        # Missing fragments default to '' when the template is formatted.
        format_values = defaultdict(str)

        # Parent condition: '*' acts as a wildcard over parent ids.
        if '*' in parent_id:
            format_values['parent_id_filter'] = 'parent_id LIKE :parent_id'
            values_dict['parent_id'] = parent_id.replace("*", "%")
        else:
            format_values['parent_id_filter'] = 'parent_id = :parent_id'

        if filters:
            conditions, holders, data_filters = \
                self._format_filtering_conditions(filters, id_field,
                                                  modified_field)
            # Fill each condition template with its values.  (The
            # original called map() without an iterable, which raised
            # TypeError as soon as filters were given.)
            clauses = [condition_sql.format(*condition_values)
                       for condition_sql, condition_values
                       in zip(conditions, holders)]
            # Stored in format_values — not values_dict — so it reaches
            # the query template like the other fragments.
            format_values['conditions_filter'] = \
                "AND {}".format(" AND ".join(clauses))

        if sorting:
            pass  # TODO: implement sorting
        if pagination_rules:
            pass  # TODO: implement pagination rules

        # Cap the page size at the backend maximum.
        limit = (min(self._max_fetch_size, limit)
                 if limit else self._max_fetch_size)
        values_dict["pagination_limit"] = limit

        if not include_deleted:
            format_values['conditions_deleted'] = 'AND NOT deleted'
        # TODO: execute the query and return the records (the original
        # also fell through here without returning).

    def _format_filtering_conditions(self, filters, id_field,
                                     modified_field):
        """Split *filters* into SQL conditions and data-blob filters.

        Filters on the id/last_modified columns become
        ``(condition_sql, condition_values)`` template pairs; every
        other filter applies to the JSON data blob and is returned
        untouched in ``data_filters``.

        :returns: tuple ``(conditions, holders, data_filters)``.
        """
        operators = {
            COMPARISON.EQ: '=',
            COMPARISON.NOT: '<>',
            COMPARISON.IN: 'IN',
            COMPARISON.EXCLUDE: 'NOT IN',
            COMPARISON.LIKE: 'LIKE',
            COMPARISON.CONTAINS: 'IN',
        }
        conditions = []
        holders = []
        data_filters = []
        # `fltr`: avoid shadowing the builtin filter().
        for fltr in filters:
            value = fltr.value
            field = fltr.field

            if field not in (id_field, modified_field):
                # Anything else lives inside the JSON blob.
                data_filters.append(fltr)
                continue

            # Map the API field name onto its SQL column.  (The original
            # computed sql_field and then never used it.)
            sql_field = 'id' if field == id_field else 'last_modified'
            if isinstance(value, int):
                value = str(value)

            if fltr.operator in (COMPARISON.EQ, COMPARISON.NOT):
                condition_sql = "{} " + operators[fltr.operator] + " {}"
                condition_values = (sql_field, value)
            elif fltr.operator in (COMPARISON.IN, COMPARISON.EXCLUDE):
                value = tuple(value)
                condition_sql = "{} " + operators[fltr.operator] + " {}"
                condition_values = (sql_field, value)
            elif fltr.operator == COMPARISON.LIKE:
                # Bare terms are wrapped for substring match; '*' is the
                # API wildcard, '%' the SQL one.
                if '*' not in value:
                    value = '*{}*'.format(value)
                value = value.replace('*', '%')
                condition_sql = "{} " + operators[COMPARISON.LIKE] + " {}"
                condition_values = (sql_field, value)
            elif fltr.operator == COMPARISON.CONTAINS_ANY:
                # NOTE(review): values come before the column here —
                # presumably "<values> IN <column>"; confirm against the
                # query template once get_all is finished.
                condition_sql = "{} " + operators[COMPARISON.IN] + " {}"
                condition_values = (value, sql_field)
            elif fltr.operator == COMPARISON.HAS:
                condition_sql = "{} IS {}"
                condition_values = ((sql_field, "NOT NULL") if value
                                    else (sql_field, "NULL"))

            conditions.append(condition_sql)
            holders.append(condition_values)

        return conditions, holders, data_filters

    def flush(self, auth=None):
        """No-op flush (nothing is buffered by this backend)."""
        pass