def execute_aql(self, aql, params):
    """Execute AQL"""
    try:
        cursor = self.database.aql.execute(
            aql, bind_vars=params, count=True, full_count=True,
            batch_size=1, ttl=10, optimizer_rules=['+all']
        )
        return cursor
    except exceptions.AQLQueryExecuteError as err:
        msg = db_err.get(1).format(err.message)
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
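# Usage sketch (illustrative): assuming `db` is a connected instance of this
# class, execute_aql() takes an AQL string plus its bind variables and returns
# a cursor that can be iterated; the collection name and filter value below
# are placeholders.
#
#   cursor = db.execute_aql(
#       'FOR doc IN @@collection FILTER doc.name == @name RETURN doc',
#       {'@collection': 'comp_unit', 'name': 'host-01'}
#   )
#   documents = [doc for doc in cursor]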
def create_database(self, name=''):
    """Create DB"""
    try:
        self.database = self._conn.create_database(name)
        return self.database
    except exceptions.DatabaseCreateError as err:
        if err.error_code == 1207:
            msg = db_err.get(1207).format(name)
            raise gmap_exceptions.DatabaseAlreadyExist(msg)
        else:
            msg = db_err.get(0).format(name, err.message)
            raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(0).format(name, str(err))
        raise gmap_exceptions.DatabaseException(msg)
def search_in_collection(self, collection, search, page=1, per_page=10):
    """Search Document"""
    try:
        per_page = per_page if per_page <= MAX_PER_PAGE else MAX_PER_PAGE
        offset = (page - 1) * per_page

        where, bind_vars = self.prepare_search(search)
        bind_vars['@collection'] = collection
        bind_vars['offset'] = offset
        bind_vars['count'] = per_page

        full_query = 'FOR doc IN @@collection {} ' \
            'LIMIT @offset, @count RETURN doc'.format(where)
        LOGGER.debug('Full Query: %s' % full_query)

        cursor = self.database.aql.execute(
            full_query, bind_vars=bind_vars, count=True, full_count=True,
            batch_size=1, ttl=10, optimizer_rules=['+all']
        )
        return cursor
    except exceptions.AQLQueryExecuteError as err:
        if err.error_code == 1203:
            msg = db_err.get(1203).format(collection)
            LOGGER.error(msg)
            raise gmap_exceptions.CollectionNotExist(msg)
        else:
            msg = db_err.get(1).format(err.message)
            LOGGER.error(msg)
            raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
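# Usage sketch (illustrative): `db` is assumed to be a connected instance and
# the `search` argument is assumed to follow the clause format accepted by
# prepare_search() (a list of AND-groups of field/operator/value dicts);
# collection and field names are placeholders.
#
#   cursor = db.search_in_collection(
#       'comp_unit',
#       [[{'field': 'name', 'operator': 'LIKE', 'value': 'host'}]],
#       page=1, per_page=25
#   )
#   results = [doc for doc in cursor]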
def delete_database(self, name=''):
    """Delete DB"""
    try:
        self._conn.delete_database(name)
        self.database = None
        return True
    except exceptions.DatabaseDeleteError as err:
        if err.error_code == 1228:
            msg = db_err.get(1228).format(name)
            raise gmap_exceptions.DatabaseNotExist(msg)
        else:
            msg = db_err.get(0).format(name, err.message)
            raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(0).format(name, str(err))
        raise gmap_exceptions.DatabaseException(msg)
def clear_collection(self, collection, search):
    """Clear collection by query"""
    try:
        where, bind_vars = self.prepare_search(search)
        bind_vars['@collection'] = collection

        full_query = 'FOR doc IN @@collection {} ' \
            'REMOVE doc._key IN @@collection'.format(where)
        LOGGER.debug('Full Query: %s' % full_query)

        cursor = self.database.aql.execute(
            full_query, bind_vars=bind_vars, count=True, full_count=True,
            batch_size=1, ttl=10, optimizer_rules=['+all']
        )
        return cursor
    except exceptions.AQLQueryExecuteError as err:
        if err.error_code == 1203:
            msg = db_err.get(1203).format(collection)
            LOGGER.error(msg)
            raise gmap_exceptions.CollectionNotExist(msg)
        else:
            msg = db_err.get(1).format(err.message)
            LOGGER.error(msg)
            raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
def has_database(self, name=''):
    """Return True if the database exists"""
    try:
        self.conn_database(name)
        self.database.properties()
    except exceptions.DatabasePropertiesError as err:
        LOGGER.error(err)
        return False
    except Exception as err:
        msg = db_err.get(0).format(name, str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
    else:
        return True
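# Usage sketch (illustrative): typical lifecycle of the database helpers
# above; 'gmap_example' is a placeholder database name.
#
#   if not db.has_database('gmap_example'):
#       db.create_database('gmap_example')
#   ...
#   db.delete_database('gmap_example')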
def count_in_document(self, collection, search=None):
    """Get document count from collection"""
    bind_vars = {}
    where = ''
    if search:
        where, bind_vars = self.prepare_search(search)

    bind_vars['@collection'] = collection
    full_query = 'FOR doc IN @@collection {} ' \
        'COLLECT WITH COUNT INTO length RETURN length'.format(where)

    try:
        cursor = self.database.aql.execute(
            full_query, bind_vars=bind_vars, count=True, full_count=True,
            batch_size=1, ttl=10, optimizer_rules=['+all']
        )
        return cursor
    except exceptions.AQLQueryExecuteError as err:
        msg = db_err.get(1).format(err.message)
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
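# Usage sketch (illustrative): the COLLECT WITH COUNT query above reduces the
# matched documents to a single integer, so the returned cursor yields one
# value; `db` and the search clause format are assumed as in the examples
# above.
#
#   cursor = db.count_in_document('comp_unit')
#   total = cursor.next()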
def validate_aql(self, aql):
    """Validate AQL"""
    try:
        self.database.aql.validate(aql)
    except exceptions.AQLQueryValidateError as err:
        if err.error_code == 1501:
            msg = aql_err.get(1501)
            LOGGER.error(msg)
            raise gmap_exceptions.QueryException(msg)
        else:
            msg = aql_err.get(0).format(err.message)
            LOGGER.error(msg)
            raise gmap_exceptions.QueryException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
def search_in_database(self, collection, field, value, offset=0, count=10):
    """Search Document"""
    # TODO: To use a better way to search
    try:
        if field and value:
            where = 'FILTER LOWER(doc.`{}`) LIKE "%{}%"'.format(
                field, value.lower())
        else:
            where = ''

        cursor = self.database.aql.execute(
            '''FOR doc IN {} {}
               LIMIT {}, {}
               RETURN doc'''.format(collection, where, offset, count),
            count=True, batch_size=1, ttl=10, optimizer_rules=['+all'])
        return cursor
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        raise gmap_exceptions.DatabaseException(msg)
def search_in_collections(self, collections, search, page=1, per_page=10):
    """Search Document"""
    try:
        per_page = per_page if per_page <= 100 else 100
        offset = (page - 1) * per_page

        union = [
            'a', 'b', 'c', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm'
        ]
        search_name = search[0]
        partial_query = []

        for index_name, name in enumerate(search_name):
            where, bind_vars = self.prepare_search([[name]])
            bind_vars['offset'] = offset
            bind_vars['count'] = per_page

            queries = []
            for index_coll, collection in enumerate(collections):
                idx = '@cl_{}'.format(index_coll)
                bind_vars[idx] = collection
                query = 'FOR doc IN @{} {} RETURN doc'.format(idx, where)
                queries.append(query)

            colls = '({})'.format(','.join(queries))
            if len(queries) > 1:
                colls = 'UNION{}'.format(colls)

            union_item = union[index_name]
            partial_query.append(
                'FOR {} IN {} '
                'SORT {}.name LIMIT @offset, @count RETURN {}'.format(
                    union_item, colls, union_item, union_item
                )
            )

        if len(partial_query) > 1:
            full_query = 'FOR full IN UNION({}) RETURN full'.format(
                ','.join(partial_query))
        else:
            full_query = partial_query[0]

        LOGGER.debug('Full Query: %s' % full_query)

        cursor = self.database.aql.execute(
            full_query, bind_vars=bind_vars, count=True, full_count=True,
            ttl=10, optimizer_rules=['+all'])
        return cursor
    except exceptions.AQLQueryExecuteError as err:
        if err.error_code == 1203:
            msg = db_err.get(1203).format(collection)
            LOGGER.error(msg)
            raise gmap_exceptions.CollectionNotExist(msg)
        else:
            msg = db_err.get(1).format(err.message)
            LOGGER.error(msg)
            raise gmap_exceptions.DatabaseException(msg)
    except Exception as err:
        msg = db_err.get(1).format(str(err))
        LOGGER.error(msg)
        raise gmap_exceptions.DatabaseException(msg)
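# Usage sketch (illustrative): the same search clause applied across several
# collections, merged with AQL UNION and paginated as a whole; collection
# names are placeholders.
#
#   cursor = db.search_in_collections(
#       ['comp_unit', 'zone'],
#       [[{'field': 'name', 'operator': 'LIKE', 'value': 'host'}]],
#       page=1, per_page=50
#   )
#   results = [doc for doc in cursor]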