def _select_data(self, data):
    """Select content based on data.

    Args:
        data (str): Content data or part of it.

    Returns:
        Collection: Collection of selected content.
    """
    # Initialized here because the debug log at the end reads ``rows``
    # also when there is no connection or the query raised an exception.
    # Without the default the log line raised a NameError.
    rows = ()
    collection = Collection()
    if self._connection:
        query = ('SELECT * FROM contents WHERE data={0}'.format(
            self._placeholder))
        qargs = [Const.DELIMITER_DATA.join(map(Const.TEXT_TYPE, data))]
        # Lazy %-style arguments: the logging module formats the message
        # only when the record is actually emitted.
        self._logger.debug('search content with data attribute: %s', qargs)
        try:
            with closing(self._connection.cursor()) as cursor:
                cursor.execute(query, qargs)
                rows = cursor.fetchall()
                collection.convert(rows)
        except (sqlite3.Error, psycopg2.Error) as error:
            Cause.push(
                Cause.HTTP_500,
                'selecting content from database with data failed with exception: {}'
                .format(error))
    else:
        Cause.push(Cause.HTTP_500,
                   'internal error prevented searching from database')
    self._logger.debug('selected rows:\n%s', rows)

    return collection
def sort_fields(self, value):
    """Store sorted fields in internal presentation.

    Args:
        value (str,list,tuple): Sort fields with an optional direction sign.
    """
    # Compiled once before the loop instead of re-matching the pattern
    # literal on every field.
    re_sort_field = re.compile(r'''
        (?P<direction>-?)  # Catch sort direction sign (+/-).
        (?P<field>\S+)     # Catch fields.
        ''', re.IGNORECASE | re.VERBOSE)
    parsed = OrderedDict()
    fields = Parser.format_list(value, sort_=False)
    for field in fields:
        match = re_sort_field.match(field)
        # The None guard prevents an AttributeError when the regexp does
        # not match at all (for example an empty field).
        if match and match.group('field') in self.ATTRIBUTES:
            parsed[match.group('field')] = 'DESC' if match.group(
                'direction') == '-' else 'ASC'
        else:
            Cause.push(
                Cause.HTTP_BAD_REQUEST,
                'sort option validation failed for non existent field={}'.
                format(field))
    self._logger.debug(
        '{}: content attribute sort order from user: {}'.format(
            self._derived, fields))
    self._logger.debug(
        '{}: content attribute internal sort structure: {}'.format(
            self._derived, parsed))
    self._sort_fields = parsed  # pylint: disable=attribute-defined-outside-init
def dump_completion(cls, complete):
    """Dump shell completion script into a file.

    Args:
        complete (str): Name of the shell for completion.
    """
    filename = Config.get_operation_file()
    path, _ = os.path.split(filename)
    cls._logger.debug('exporting: %s :completion: %s', Config.complete, filename)
    if not os.path.exists(path) or not os.access(path, os.W_OK):
        Cause.push(
            Cause.HTTP_BAD_REQUEST,
            'cannot export: {} :completion file because path is not writable: {}'
            .format(complete, filename))

        return

    try:
        # Opening the file can fail just like writing it, so the open()
        # call is inside the same error handler.
        with open(filename, 'w') as outfile:
            outfile.write(Config.completion[Config.complete])
    except IOError as error:
        # The logging module uses %-style placeholders; the original
        # '{}' placeholders were never interpolated into the message.
        cls._logger.exception(
            'fatal failure when creating %s shell completion file: %s',
            filename, error)
        Cause.push(
            Cause.HTTP_INTERNAL_SERVER_ERROR,
            'fatal failure while exporting shell completion {}'.format(
                filename))
def _select(self, query, qargs):
    """Run a generic read query against the database.

    Args:
        query (str): SQL query with placeholders.
        qargs (list): Query arguments bound to the placeholders.

    Returns:
        tuple: Rows fetched from the database.
    """
    rows = ()
    if not self._connection:
        Cause.push(Cause.HTTP_500, 'internal error prevented reading from database')

        return rows

    try:
        with closing(self._connection.cursor()) as cursor:
            cursor.execute(query, qargs)
            rows = cursor.fetchall()
    except (sqlite3.DataError, psycopg2.DataError) as error:
        # PostgreSQL raises this when queried with an invalid content
        # external UUID field. The internal UUID used as a database
        # primary key is always assumed to work, hence no 'Internal
        # Server Error' here.
        #
        # Without the rollback, PostgreSQL reports: 'current transaction
        # is aborted, commands ignored until end of transaction block'.
        #
        # Cause is not set here because Sqlite and PostgreSQL behave
        # differently. The intention is to keep the end user behavior
        # identical from the error cause point of view.
        self._connection.rollback()
        self._set_data_error(error)
    except (sqlite3.Error, psycopg2.Error) as error:
        self._set_error(error)

    return rows
def select_all(self, scat):
    """Select all content from specific categories.

    Args:
        scat (tuple): Search category keyword list.

    Returns:
        Collection: Collection of all content in database.
    """
    collection = Collection()
    if self._connection:
        if not scat:
            # Without this guard an empty category list produced a
            # syntactically broken SQL query because the trailing-OR
            # removal chopped characters from the WHERE keyword.
            return collection
        self._logger.debug('select all contents from categories: %s', scat)
        conditions = ' OR '.join(
            'category={0}'.format(self._placeholder) for _ in scat)
        query = ('SELECT * FROM contents WHERE (' + conditions +
                 ') ORDER BY created ASC, brief ASC')
        qargs = list(scat)
        try:
            with closing(self._connection.cursor()) as cursor:
                cursor.execute(query, qargs)
                rows = cursor.fetchall()
                collection.convert(rows)
        except (sqlite3.Error, psycopg2.Error) as error:
            Cause.push(Cause.HTTP_500, 'selecting all from database failed with exception: {}'.format(error))
    else:
        Cause.push(Cause.HTTP_500, 'internal error prevented selecting all content from database')

    return collection
def _dump_term(self, use_ansi, debug_logs):
    """Convert collection into terminal format.

    Args:
        use_ansi (bool): Define if ANSI characters are used.
        debug_logs (bool): Define if debug information is included.

    Returns:
        str: Collection in text format.
    """
    if not self:
        Cause.push(Cause.HTTP_NOT_FOUND, 'cannot find content with given search criteria')

        return Const.EMPTY

    dumps = [
        resource.dump_term(index=i, use_ansi=use_ansi, debug_logs=debug_logs)
        for i, resource in enumerate(self.resources(), start=1)
    ]
    text = Const.EMPTY.join(dumps)

    # Exactly one empty line at the end of the string beautifies output.
    return text.rstrip() + Const.NEWLINE
def _call_editor(cls, template):
    """Run an editor session and return the edited text.

    Args:
        template (str): Default template shown in the editor.

    Returns:
        str: Text read back after the editor session.
    """
    import tempfile
    from subprocess import call

    # External dependencies are isolated in this method to ease
    # testing. This method is mocked to return the edited text.
    message = Const.EMPTY
    editor = cls._get_editor()
    cls._logger.debug('using %s as editor', editor)
    with tempfile.NamedTemporaryFile(prefix='snippy-edit-') as outfile:
        outfile.write(template.encode('UTF-8'))
        outfile.flush()
        try:
            call([editor, outfile.name])
            outfile.seek(0)
            message = outfile.read().decode('UTF-8')
        except OSError as error:
            Cause.push(
                Cause.HTTP_INTERNAL_SERVER_ERROR,
                'required editor %s not installed %s' % (editor, error))

    return message
def on_get(self, request, response, identity, field):
    """Get defined content field based on resource ID.

    If the given uuid matches to multiple resources or no resources at
    all, an error is returned. This conflicts against the JSON API v1.0
    specifications. See the Snippy documentation for more information.

    Args:
        request (obj): Falcon Request().
        response (obj): Falcon Response().
        identity (str): Partial or full message digest or UUID.
        field (str): Resource attribute.
    """
    self._logger.debug('run: %s %s', request.method, request.uri)
    api = Api(self._category, Api.SEARCH, {'identity': identity, 'fields': field})
    Config.load(api)
    self._content.run()
    hits = len(self._content.collection)
    if hits != 1:
        Cause.push(Cause.HTTP_NOT_FOUND, 'content identity: %s was not unique and matched to: %d resources' % (identity, hits))
    response.content_type = ApiResource.MEDIA_JSON_API
    if Cause.is_ok():
        response.body = Generate.resource(self._content.collection, request, response, identity, field=field, pagination=False)
    else:
        response.body = Generate.error(Cause.json_message())
    response.status = Cause.http_status()
    Cause.reset()
    self._logger.debug('end: %s %s', request.method, request.uri)
def format_versions(cls, versions):
    """Convert versions to utf-8 encoded tuple of versions.

    Only specific operators between key value versions are allowed.

    Args:
        versions (str,list,tuple): Versions in a string, list or tuple.

    Returns:
        tuple: Tuple of utf-8 encoded versions.
    """
    versions = cls._to_list(versions)

    # The operator order matters for the splitting logic. If < or > were
    # placed before >= and <=, a version would split into three values.
    # The longest match must therefore come first in the alternation.
    operators = ('>=', '<=', '!=', '==', '>', '<', '~')
    splitter = re.compile('|'.join(operators))
    accepted = []
    for version in versions:
        parts = splitter.split(version)
        if len(parts) == 2 and all(parts):
            accepted.append(version)
        else:
            Cause.push(Cause.HTTP_BAD_REQUEST, 'version: {} did not have key value pair with any of the supported operators: {}'.format(version, operators))

    return tuple(accepted)
def _select_uuid(self, suuid):
    """Select content based on uuid.

    Args:
        suuid (str): Content uuid or part of it.

    Returns:
        Collection: Collection of selected content.
    """
    # Initialized here because the debug log at the end reads ``rows``
    # also when there is no connection or the query raised an exception.
    # Without the default the log line raised a NameError.
    rows = ()
    collection = Collection()
    if self._connection:
        query = ('SELECT * FROM contents WHERE uuid = {0}'.format(self._placeholder))
        qargs = [suuid]
        self._logger.debug('running select uuid query: %s :with qargs: %s', query, qargs)
        try:
            with closing(self._connection.cursor()) as cursor:
                cursor.execute(query, qargs)
                rows = cursor.fetchall()
                collection.convert(rows)
        except (sqlite3.DataError, psycopg2.DataError) as error:
            # This method is used only with validated content which
            # should always have a valid external UUID field. Because of
            # this, the error here is an internal server error.
            self._connection.rollback()
            self._set_data_error(error)
            Cause.push(Cause.HTTP_500, 'invalid user data for search: {}'.format(qargs))
        except (sqlite3.Error, psycopg2.Error) as error:
            self._set_error(error)
    else:
        Cause.push(Cause.HTTP_500, 'internal error prevented searching from database')
    self._logger.debug('selected rows: %s', rows)

    return collection
def create_test_document(self):
    """Create test documentation from test files."""
    # The ImportError is the parent class of ModuleNotFoundError. The
    # latter exists only from Python 3.6 but the ImportError works also
    # with older Python versions.
    try:
        pkg_resources.resource_isdir('tests', '')
    except ImportError as error:
        Cause.push(
            Cause.HTTP_INTERNAL_SERVER_ERROR,
            'test cases are not packaged with release {}'.format(error))

        return

    # Test case file mock does not support iterators. Because of this,
    # each file is read directly into a list where it is parsed.
    regex = re.compile(r'test_wf.*\.py')
    filenames = [
        name for name in pkg_resources.resource_listdir('tests', Const.EMPTY)
        if regex.match(name)
    ]
    for filename in filenames:
        testfile = pkg_resources.resource_filename('tests', filename)
        with open(testfile, 'r') as infile:
            testcase = infile.readlines()
        wf_brief = Const.EMPTY
        for line_nbr, line in enumerate(testcase):
            brief, line = Reference.get_brief(line, line_nbr, testcase)
            if brief:
                wf_brief = brief
            wf_command = Reference.get_command(line)
            if wf_command:
                self.tests.append(wf_command + Reference.TEST_SEPARATOR + wf_brief)
def import_all(self):
    """Import content.

    When an operation digest or uuid is given, the import updates one
    existing resource. Otherwise the operation file is imported as is.
    """
    content_digest = Config.operation_digest
    content_uuid = Config.operation_uuid
    if content_digest or content_uuid:
        collection = self._storage.search(uuid=content_uuid, digest=content_digest)
        if len(collection) == 1:
            resource = next(collection.resources())
            digest = resource.digest
            updates = Migrate.load(Config.get_operation_file())
            # The original call passed a pre-formatted message plus an
            # extra positional argument with no placeholder, which made
            # the logging module report a formatting error instead of
            # logging the message.
            identity = ':with: uuid: %.16s' % content_uuid if content_uuid else ':with: digest: %.16s' % resource.digest
            self._logger.debug('updating: %s %s', resource.category, identity)
            if len(updates) == 1:
                resource.migrate(next(updates.resources()))
                self._storage.update(digest, resource)
            else:
                Cause.push(Cause.HTTP_BAD_REQUEST, 'updates for content: %.16s :could not be used' % digest)
        else:
            Config.validate_search_context(collection, 'import')
    else:
        self._logger.debug('importing content: %s', Config.get_operation_file())
        collection = Migrate.load(Config.get_operation_file())
        self._storage.import_content(collection)
def scat(self, value):
    """Store content categories.

    The ``scat`` option defines the content category or categories for
    the operation. A ``create`` operation accepts exactly one category.
    A ``search`` operation, or any operation that requires searching
    content, accepts multiple values. The keywords are stored in a
    tuple with one keyword per element.

    Any incorrect category sets an error. This is a simple error
    handling that fails the operation instead of trying to recover it.
    On failure the ``scat`` option is set to an unknown value because
    that minimizes the search results in the error scenario. Searching
    all categories despite the error could produce large result sets.
    """
    scat = Parser.format_search_keywords(value)
    if not scat:
        scat = (Const.SNIPPET, )
    if Const.ALL_CATEGORIES in scat:
        scat = Const.CATEGORIES
    if not set(scat).issubset(Const.CATEGORIES):
        Cause.push(Cause.HTTP_BAD_REQUEST, 'content categories: {} :are not a subset of: {}'.format(self._format_scat(scat), Const.CATEGORIES))  # noqa pylint: disable=line-too-long
        scat = (Const.UNKNOWN_CATEGORY, )
    # A create operation must target exactly one known category.
    unique_known = len(scat) == 1 and Const.UNKNOWN_CATEGORY not in scat
    if self.operation == self.CREATE and not unique_known:
        Cause.push(Cause.HTTP_BAD_REQUEST, 'content category must be unique when content is created: {}'.format(self._format_scat(scat)))  # noqa pylint: disable=line-too-long
    self._scat = scat  # pylint: disable=attribute-defined-outside-init
def on_get(self, request, response):
    """Search unique resource attributes.

    Search is made from all content categories by default.

    Args:
        request (obj): Falcon Request().
        response (obj): Falcon Response().
    """
    self._logger.debug('run: %s %s', request.method, request.uri)
    request.params.setdefault('scat', Const.CATEGORIES)
    api = Api(self._category, Api.UNIQUE, request.params)
    Config.load(api)
    self._content.run()
    if not self._content.uniques:
        Cause.push(
            Cause.HTTP_NOT_FOUND,
            'cannot find unique fields for %s attribute' % self._category)
    response.content_type = ApiResource.MEDIA_JSON_API
    if Cause.is_ok():
        response.body = Generate.fields(self._category, self._content.uniques, request, response)
    else:
        response.body = Generate.error(Cause.json_message())
    response.status = Cause.http_status()
    Cause.reset()
    self._logger.debug('end: %s %s', request.method, request.uri)
def insert(self, collection):
    """Insert collection into database.

    When any of the resources in the given collection is successfully
    inserted, the operation results in a Created status. Each failing
    resource produces its own failure cause.

    Args:
        collection (Collection): Content container to be stored.

    Returns:
        Collection: Collection of inserted content.
    """
    stored = Collection()
    if not collection:
        Cause.push(Cause.HTTP_NOT_FOUND, 'no content to be stored')

        return stored

    if self._insert(collection):
        Cause.push(Cause.HTTP_CREATED, 'content created')
        # Read every inserted resource back so the returned collection
        # reflects what the database actually stored.
        for resource in collection:
            stored.migrate(
                self.select(resource.category, digest=resource.digest))
    self._logger.debug('inserted: %d :out of: %d :content', len(stored), len(collection))

    return stored
def _operation_file_format(cls, filename):
    """Extract operation file format.

    The file format must be exactly as defined for supported file
    formats. In case of shell completion, there is no file extension
    and the check passes.

    Args:
        filename (str): Filename with file extension defining the format.

    Returns:
        str: Operation file format.
    """
    # Map every supported extension to the corresponding file format.
    extensions = {
        '.json': Const.CONTENT_FORMAT_JSON,
        '.md': Const.CONTENT_FORMAT_MKDN,
        '.mkdn': Const.CONTENT_FORMAT_MKDN,
        '.text': Const.CONTENT_FORMAT_TEXT,
        '.txt': Const.CONTENT_FORMAT_TEXT,
        '.yaml': Const.CONTENT_FORMAT_YAML,
        '.yml': Const.CONTENT_FORMAT_YAML,
    }
    name, extension = os.path.splitext(filename)
    if name and extension in extensions:
        return extensions[extension]

    if not cls.complete:
        Cause.push(
            Cause.HTTP_BAD_REQUEST,
            'cannot identify file format for file: {}'.format(filename))

    return Const.CONTENT_FORMAT_NONE
def run(self, collection=None):
    """Run operation.

    Args:
        collection (Collection): Container for the operation content. A
            fresh empty container is created when not given.
    """
    self._logger.debug('run: %s :content', self._category)
    Config.content_category = self._category
    # The original mutable default argument ``collection=Collection()``
    # shared one Collection instance between every call without an
    # explicit argument. A fresh container per call avoids state from
    # one operation leaking into the next.
    self.collection = collection if collection is not None else Collection()
    if Config.is_operation_create:
        self.create()
    elif Config.is_operation_search:
        self.search()
    elif Config.is_operation_unique:
        self.unique()
    elif Config.is_operation_update:
        self.update()
    elif Config.is_operation_delete:
        self.delete()
    elif Config.is_operation_export:
        self.export_all()
    elif Config.is_operation_import:
        self.import_all()
    else:
        Cause.push(Cause.HTTP_BAD_REQUEST, 'unknown operation for: {}'.format(self._category))
    self._logger.debug('end: %s :content', self._category)
def on_get(self, request, response, sall=None, stag=None, sgrp=None):
    """Search resources.

    Args:
        request (obj): Falcon Request().
        response (obj): Falcon Response().
        sall (str): Search all ``sall`` path parameter.
        stag (str): Search tags ``stag`` path parameter.
        sgrp (str): Search groups ``sgrp`` path parameter.
    """
    self._logger.debug('run: %s %s', request.method, request.uri)
    # Path parameters override the corresponding query parameters.
    for name, keywords in (('sall', sall), ('stag', stag), ('sgrp', sgrp)):
        if keywords:
            request.params[name] = keywords
    api = Api(self._category, Api.SEARCH, request.params)
    Config.load(api)
    self._content.run()
    if not self._content.collection and Config.search_limit != 0:
        Cause.push(Cause.HTTP_NOT_FOUND, 'cannot find resources')
    response.content_type = ApiResource.MEDIA_JSON_API
    if Cause.is_ok():
        response.body = Generate.collection(self._content.collection, request, response, pagination=True)
    else:
        response.body = Generate.error(Cause.json_message())
    response.status = Cause.http_status()
    Cause.reset()
    self._logger.debug('end: %s %s', request.method, request.uri)
def _set_integrity_error(self, error, resource):
    """Set integrity error.

    Parse the violating column name from the database error message in
    order to push a precise error cause. Fall back to a generic
    conflict cause when the message cannot be parsed.

    Args:
        error (Exception): Exception string from integrity error.
        resource (Resource): Resource which SQL operation caused exception.
    """
    digest = self._get_digest(resource)
    # Try to extract the violating column name from the database error.
    match = self._catch_violating_column.search(str(error))
    if match:
        if match.group('column') == 'uuid' and not Config.defaults:
            # A uuid conflict outside the default content import is an
            # internal error because uuids are generated internally.
            cause = Cause.HTTP_500
        else:
            cause = Cause.HTTP_CONFLICT
        Cause.push(
            cause,
            'content: {} :already exist with digest: {:.16}'.format(
                match.group('column'), digest))
    else:
        # Parsing failed: log the raw error and push a generic conflict.
        self._logger.info(
            'database integrity error parse failure: {}'.format(error))
        Cause.push(
            Cause.HTTP_CONFLICT,
            'content already exist with digest: {:.16}'.format(digest))
    if not Config.defaults:
        # Extra diagnostics are skipped during default content import
        # where integrity conflicts are expected to happen.
        self._logger.info(
            'database integrity error from database: {}'.format(
                traceback.format_exc()))
        self._logger.info(
            'database integrity error from resource: {}'.format(
                Logger.remove_ansi(str(resource))))
        self._logger.info(
            'database integrity error stack trace: {}'.format(
                traceback.format_stack(limit=20)))
def read_env(cls, option, default):
    """Read parameter from optional environment variable.

    Read parameter value from environment variable or return given
    default value. Environment variable names follow the same command
    line option naming convention with modifications:

      1. Leading hyphens are removed.
      2. Option casing is converted to full upper case.
      3. Hyphens are replaced with underscores.
      4. ``SNIPPY_`` prefix is added.

    For example corresponding environment variable for the
    ``--server-host`` command line option is ``SNIPPY_SERVER_HOST``.

    Args:
        option (str): Command line option.
        default: Default value.

    Returns:
        tuple: Same command line option name as received with value.
    """
    # Remove leading hyphens to allow calling the method with the name
    # of command line parameter. This helps finding related code.
    option = cls.RE_MATCH_OPT_LEADING_HYPHENS.sub('', option)

    # The getenv returns None if parameter was not set.
    value = os.getenv('SNIPPY_' + option.replace('-', '_').upper(), default)
    if value is None:
        return (option, default)

    # String values need no conversion and are returned directly. A bool
    # typed default gets True only if the value is read as expected.
    #
    # Environment variables are strings. A naive bool() conversion would
    # turn the value '0' into True because a non-empty string is truthy.
    # The value is therefore converted through int first. The bool check
    # must come before the int check because bool is a subclass of int.
    if isinstance(default, bool):
        try:
            value = bool(int(value))
        except ValueError:
            value = True if value.lower() == 'true' else default
    elif isinstance(default, int):
        try:
            value = int(value)
        except ValueError:
            value = default

    if option == 'storage_type' and value not in Const.STORAGES:
        Cause.push(Cause.HTTP_BAD_REQUEST, 'incorrect storage type: {} :is not a subset of: {}'.format(value, Const.STORAGES))
        value = Const.DB_SQLITE

    return (option, value)
def update(self, digest, resource):
    """Update existing content.

    Args:
        digest (str): Content digest that is updated.
        resource (Resource): Stored content in ``Resource()`` container.

    Returns:
        Collection: Collection of updated content.
    """
    stored = Collection()
    if not resource:
        Cause.push(Cause.HTTP_NOT_FOUND, 'no content to be updated')

        return stored

    if not self._connection:
        # This guard exists in the sibling select methods but was
        # missing here: without a connection the cursor access raised
        # an AttributeError instead of setting a clean error cause.
        Cause.push(Cause.HTTP_500, 'internal error prevented updating content in database')

        return stored

    query = '''
        UPDATE
            contents
        SET
            id          = {0},
            category    = {0},
            data        = {0},
            brief       = {0},
            description = {0},
            name        = {0},
            groups      = {0},
            tags        = {0},
            links       = {0},
            source      = {0},
            versions    = {0},
            filename    = {0},
            created     = {0},
            updated     = {0},
            uuid        = {0},
            digest      = {0}
        WHERE
            digest LIKE {0}
        '''.format(self._placeholder)
    qargs = resource.dump_qargs() + (digest, )
    try:
        with closing(self._connection.cursor()) as cursor:
            cursor.execute(query, qargs)
            self._connection.commit()
    except (sqlite3.IntegrityError, psycopg2.IntegrityError) as error:
        self._logger.info(
            'database integrity error with query: {}'.format(query))
        self._logger.info(
            'database integrity error with query arguments: {}'.format(
                qargs))
        self._set_integrity_error(error, resource)
    except (sqlite3.Error, psycopg2.Error) as error:
        self._set_error(error)

    stored.migrate(self.select(resource.category, digest=resource.digest))

    return stored
def _set_error(self, error):
    """Set generic database error.

    Args:
        error (Exception): Exception from the failed database operation.
    """
    self._logger.info('database error: {}'.format(traceback.format_exc()))
    self._logger.info('database error stack trace: {}'.format(traceback.format_stack(limit=20)))
    message = 'database operation failed with exception: {}'.format(error)
    Cause.push(Cause.HTTP_500, message.lower())
def _split_contents(self):
    """Split source text to multiple contents.

    This method parses a single text string and extracts a list of text
    contents from it.

    All line numbers with a content specific ``head tag`` are scanned.
    The head tag is the first field description in a content specific
    template. The whole text string is then split based on scanned line
    numbers where a head tag was found.

    An offset is subtracted from the line number where a ``head tag``
    was found. The offset is coming from informative description field
    of a text template that has few lines before the head tag.

    If the source text string contains template tags or examples, those
    are removed from each returned content.

    Returns:
        list: List of text contents.
    """
    contents = []
    # Each category has its own head tag and a category specific offset
    # of template description lines above the head tag.
    category = self._read_category(self._text)
    if category == Const.SNIPPET:
        offset = 2  # Two lines in the text template before the head tag.
        tag = '# Add mandatory snippet below'
    elif category == Const.SOLUTION:
        offset = 1  # One line in the text template before the content starts.
        tag = self.BRIEF[Const.SOLUTION]
    elif category == Const.REFERENCE:
        offset = 2  # Two lines in the text template before the head tag.
        tag = '# Add mandatory links below one link per line'
    else:
        Cause.push(
            Cause.HTTP_BAD_REQUEST,
            'could not identify content category - please keep template tags in place'
        )

        return contents

    lines = self._text.split(Const.NEWLINE)
    # Line numbers where a head tag starts a new content.
    line_numbers = [
        i for i, line in enumerate(lines) if line.startswith(tag)
    ]
    # Shift each start up by the offset so the template description
    # lines stay attached to their content. Clamped at zero.
    line_numbers[:] = [max(x - offset, 0) for x in line_numbers]
    if line_numbers:
        head = line_numbers.pop(0)
        # Every remaining start line terminates the previous content.
        for line in line_numbers:
            contents.append(Const.NEWLINE.join(lines[head:line]))
            head = line
        # NOTE(review): template fillers appear to be removed only from
        # the trailing content here, although the docstring says they
        # are removed from each returned content -- confirm intent.
        contents.append(
            self.remove_template_fillers(Const.NEWLINE.join(lines[head:])))

    return contents
def search_limit(self, value):
    """Store search result limit.

    The limit defines the maximum amount of search results.
    """
    limit = self.LIMIT_DEFAULT_API
    try:
        value = int(value)
        if value < 0:
            raise ValueError
        limit = value
    except ValueError:
        Cause.push(Cause.HTTP_BAD_REQUEST, 'search result limit is not a positive integer: {}'.format(value))
    self._search_limit = limit  # pylint: disable=attribute-defined-outside-init
def send(cls, request, response):
    """Send standard 405 Not Allowed HTTP response.

    Args:
        request (obj): Falcon Request().
        response (obj): Falcon Response().
    """
    Cause.push(
        Cause.HTTP_METHOD_NOT_ALLOWED,
        'fields api does not support method: {}'.format(request.method))
    body = Generate.error(Cause.json_message())
    response.content_type = ApiResource.MEDIA_JSON_API
    response.body = body
    response.status = Cause.http_status()
def read(cls, timestamp, template_format, template, collection):
    """Read content from editor.

    Args:
        timestamp (str): IS8601 timestamp to be used with created collection.
        template_format (str): Template format.
        template (str): Default template for editor.
        collection (Collection()): Collection to store parsed content.
    """
    edited = cls._call_editor(template)
    Parser(template_format, timestamp, edited, collection).read()
    if collection:
        return

    Cause.push(Cause.HTTP_BAD_REQUEST, 'edited: {} :content could not be read - please keep template tags in place'.format(template_format))  # noqa pylint: disable=line-too-long
def search_offset(self, value):
    """Store search offset.

    The search offset defines how many entries are skipped from the
    beginning of search results.
    """
    self._search_offset = self.OFFSET_DEFAULT  # pylint: disable=attribute-defined-outside-init
    try:
        value = int(value)
        if value >= 0:
            self._search_offset = value  # pylint: disable=attribute-defined-outside-init
        else:
            raise ValueError
    except ValueError:
        Cause.push(Cause.HTTP_BAD_REQUEST, 'search offset is not a positive integer: {}'.format(value))
def _run_server(self):
    """Run API server."""
    try:
        # The import is inside the method so that a CLI-only install
        # without the server extras still works; a missing module is
        # reported as a clean error cause below.
        from snippy.server.server import Server

        if Config.defaults:
            AllContent(self.storage).import_all()
        self.server = Server(self.storage)
        self.server.run()
    except ImportError:
        Cause.push(
            Cause.HTTP_INTERNAL_SERVER_ERROR,
            'install snippy as a server in order to run api server')
        Cause.print_message()
def on_delete(self, request, response, **kwargs):  # pylint: disable=unused-argument
    """Delete resource.

    Deleting without an identified resource is not supported and always
    results in a not found error.

    Args:
        request (obj): Falcon Request().
        response (obj): Falcon Response().
    """
    self._logger.debug('run: %s %s', request.method, request.uri)
    Cause.push(Cause.HTTP_NOT_FOUND, 'cannot delete content without identified resource')
    body = Generate.error(Cause.json_message())
    response.content_type = ApiResource.MEDIA_JSON_API
    response.body = body
    response.status = Cause.http_status()
    Cause.reset()
    self._logger.debug('end: %s %s', request.method, request.uri)
def import_all(self):
    """Import all default content.

    Importing the ``all`` category is supported only together with the
    default content option.
    """
    if not Config.defaults:
        Cause.push(
            Cause.HTTP_BAD_REQUEST,
            'import operation for content category \'all\' is supported only with default content'
        )

        return

    self._logger.debug('importing all default content')
    collection = Migrate.load(Config.default_content_file(Const.SNIPPET))
    for category in (Const.SOLUTION, Const.REFERENCE):
        collection.migrate(Migrate.load(Config.default_content_file(category)))
    self._storage.import_content(collection)