def _construct_query(cls, name, collection, multiple=False, result=True, **kwargs):
    """
    Run the named simple query against *collection* and wrap the response.

    :param name the simple-query endpoint name, e.g. 'by-example'
    :param collection the Collection the query targets
    :param multiple when True return a list of documents, otherwise one
    :param result when False return the raw response dict untouched
    :param kwargs extra query parameters forwarded verbatim to the server
    :returns raw dict, None (no matches), a Document, or a list of Documents
    """
    query = {
        'collection': collection.name,
    }
    # Forward all extra keyword arguments into the query payload.
    query.update(kwargs)

    client = Client.instance()
    client.set_database(collection.database)
    api = client.api

    result_dict = api.simple(name).put(data=query)

    if not result:
        return result_dict

    if result_dict['count'] == 0:
        return None

    if multiple is True:
        return [
            create_document_from_result_dict(entry, api)
            for entry in result_dict['result']
        ]

    return create_document_from_result_dict(result_dict['result'][0], api)
def create(cls, url, databases):
    """
    Register a new endpoint on the server.

    If *databases* is empty, every database on the server becomes
    reachable through the endpoint, with _system as the default
    database. Otherwise only the listed databases are exposed and the
    first entry becomes the endpoint's default database, used whenever
    an incoming request does not name a database explicitly.

    *Note*: adding or reconfiguring endpoints is allowed in the system
    database only; calling this in any other database makes the server
    return an error. SSL endpoints can only be added at runtime if the
    server was started with SSL properly configured (e.g.
    --server.keyfile must have been set).

    :param url the endpoint specification, e.g. tcp://127.0.0.1:8530
    :param databases a list of database names the endpoint is responsible for.
    """
    payload = {
        'endpoint': url,
        'databases': databases,
    }

    return Client.instance().api.endpoint.post(data=payload)
def remove(cls, name):
    """
    Delete the user called *name* from the server.

    :param name user name to remove
    """
    Client.instance().api.user(name).delete()
def _send_follow(cls, request_data):
    """
    Execute a traversal request and wrap every visited vertex (except
    the start vertex) in a Document.

    :param request_data dict payload for the traversal API
    :returns list of Document instances for the related vertices
    """
    api = Client.instance().api

    response = api.traversal.post(data=request_data)
    vertices = response['result']['visited']['vertices']
    # The first visited vertex is the start vertex itself — drop it.
    vertices.pop(0)

    related_docs = []
    for vertex in vertices:
        doc = Document(
            id=vertex['_id'],
            key=vertex['_key'],
            collection=vertex['_id'].split('/')[0],
            api=api,
        )

        # Strip the system fields so only user data remains on the doc.
        for meta_key in ('_id', '_key', '_rev'):
            del vertex[meta_key]

        doc.data = vertex
        related_docs.append(doc)

    return related_docs
def save(self):
    """
    Create this index in the collection if it hasn't been already created.

    Updates ``index_type_obj.is_new`` and ``index_type_obj.id`` from the
    server response.
    """
    api = Client.instance().api

    index_details = {'type': self.index_type_obj.type_name}
    # Merge in any type-specific attributes (fields, size, …).
    index_details.update(self.index_type_obj.get_extra_attributes())

    result = api.index.post(
        data=index_details,
        collection=self.collection.name,
    )

    self.index_type_obj.is_new = result['isNewlyCreated']
    self.index_type_obj.id = result['id']
def remove(cls, id):
    """
    Deletes an index with id.

    :param id string/document-handle
    """
    Client.instance().api.index(id).delete()
def get(cls, name):
    """
    Fetch the user *name* from the server and return it as a user object.

    :param name user name to look up
    :returns instance of this class built from the server response
    """
    api = Client.instance().api

    data = api.user(name).get()

    return cls(
        name=data['user'],
        change_password=data['changePassword'],
        active=data['active'],
        extra=data['extra'],
        api=api,
    )
def destroy(cls, url):
    """
    Delete an existing endpoint from the list of all endpoints and make
    the server stop listening on it.

    *Note*: deleting and disconnecting an endpoint is allowed in the
    system database only; calling this in any other database makes the
    server return an error. Furthermore, the last remaining endpoint
    cannot be deleted as this would make the server unreachable.

    :param url The endpoint to delete, e.g. tcp://127.0.0.1:8529.
    """
    Client.instance().api.endpoint(url).delete()
def create(cls, name, password='', active=True, extra=None, change_password=False):
    """
    Create a user on the server and return a user object for it.

    :param name user name
    :param password initial password (empty string by default)
    :param active whether the account is enabled
    :param extra optional dict with arbitrary additional data
    :param change_password whether the user must change the password
    :returns instance of this class mirroring the created user
    """
    api = Client.instance().api

    # BUG FIX: the payload key was misspelled 'exta', so the extra data
    # never reached the server. Also pass data= explicitly, consistent
    # with every other API call in this module.
    api.user.post(data={
        'user': name,
        'passwd': password,
        'active': active,
        'extra': extra,
        'changePassword': change_password,
    })

    return cls(name=name, change_password=change_password,
               active=active, extra=extra, api=api)
def execute_raw(cls, query_string):
    """
    Execute *query_string* as a raw AQL query and return the results as
    Document objects.

    :param query_string AQL query text
    :returns list of Document objects created from the cursor result
    """
    logger.debug(query_string)

    api = Client.instance().api

    # FIX: removed the surrounding 'except Exception as err: raise err'
    # block — re-raising the exception unchanged was a no-op that only
    # obscured the code.
    start_time = time()
    post_result = api.cursor.post(data={'query': query_string})
    end_time = time()

    calculated_time = (end_time - start_time) * 1000
    logger.debug('Query took %s' % ('%s ms' % calculated_time))

    result = []
    # Each result entry may itself be a list of documents or one document.
    for entry in post_result['result']:
        if isinstance(entry, list):
            for result_dict in entry:
                result.append(create_document_from_result_dict(result_dict, api))
        else:
            result.append(create_document_from_result_dict(entry, api))

    return result
def execute_raw(cls, query_string):
    """
    Execute *query_string* as a raw AQL query and return the results as
    Document objects.

    :param query_string AQL query text
    :returns list of Document objects created from the cursor result
    """
    logger.debug(query_string)

    api = Client.instance().api

    # FIX: removed the surrounding 'except Exception as err: raise err'
    # block — re-raising the exception unchanged was a no-op that only
    # obscured the code.
    start_time = time()
    post_result = api.cursor.post(data={'query': query_string})
    end_time = time()

    calculated_time = (end_time - start_time) * 1000
    logger.debug('Query took %s' % ('%s ms' % calculated_time))

    result = []
    # Each result entry may itself be a list of documents or one document.
    for entry in post_result['result']:
        if isinstance(entry, list):
            for result_dict in entry:
                result.append(
                    create_document_from_result_dict(result_dict, api))
        else:
            result.append(create_document_from_result_dict(entry, api))

    return result
def start(self, transaction):
    """
    Compile *transaction* and submit it to the server for execution.

    :param transaction Transaction object to execute
    :returns the raw server response
    :raises TransactionController.InvalidTransactionException on HTTP error
    """
    statements = transaction.compile()

    client = Client.instance()
    api = client.api

    query = {
        'collections': transaction.collections,
        'action': statements,
    }
    logger.debug(query)

    try:
        return api.transaction.post(data=query)
    except HttpClientError as err:
        raise TransactionController.InvalidTransactionException(err.content)
def start(self, transaction):
    """
    Compile *transaction* and submit it to the server for execution.

    :param transaction Transaction object to execute
    :returns the raw server response
    :raises TransactionController.InvalidTransactionException on HTTP error
    """
    statements = transaction.compile()

    api = Client.instance().api

    payload = {
        'collections': transaction.collections,
        'action': statements,
    }
    logger.debug(payload)

    try:
        return api.transaction.post(data=payload)
    except HttpClientError as err:
        raise TransactionController.InvalidTransactionException(
            err.content)
def follow(cls, start_vertex, edge_collection, direction):
    """
    Traverse *edge_collection* from *start_vertex* in *direction* and
    return the visited vertices as Documents (start vertex excluded).

    :param start_vertex document handle of the starting vertex
    :param edge_collection name of the edge collection to follow
    :param direction traversal direction string passed to the server
    :returns list of Document instances for the related vertices
    """
    api = Client.instance().api

    result_dict = api.traversal.post(data={
        'startVertex': start_vertex,
        'edgeCollection': edge_collection,
        'direction': direction,
    })

    vertices = result_dict['result']['visited']['vertices']
    # The traversal always visits the start vertex first — skip it.
    del vertices[0]

    related_docs = []
    for vertex in vertices:
        doc = Document(
            id=vertex['_id'],
            key=vertex['_key'],
            collection=vertex['_id'].split('/')[0],
            api=api,
        )

        # Remove system fields before storing the payload on the doc.
        del vertex['_id'], vertex['_key'], vertex['_rev']

        doc.data = vertex
        related_docs.append(doc)

    return related_docs
def save(self):
    """
    Create this index in the collection if it hasn't been already created.

    Updates ``index_type_obj.is_new`` and ``index_type_obj.id`` from the
    server response.
    """
    api = Client.instance().api

    details = {'type': self.index_type_obj.type_name}
    # Copy over all index-type-specific attributes.
    for key, value in self.index_type_obj.get_extra_attributes().items():
        details[key] = value

    result = api.index.post(data=details, collection=self.collection.name)

    self.index_type_obj.is_new = result['isNewlyCreated']
    self.index_type_obj.id = result['id']
def all(cls):
    """
    Return a list of all configured endpoints the server is listening on.

    The result is a JSON hash keyed by endpoint, with the list of mapped
    database names as values. An empty list means every database can be
    accessed via that endpoint; if more than one database is listed, any
    of them may be accessed and the first one is treated as the
    endpoint's default database, used when an incoming request does not
    specify a database name explicitly.

    *Note*: retrieving the list of all endpoints is allowed in the
    system database only; calling this in any other database makes the
    server return an error.
    """
    return Client.instance().api.endpoint.get()
import timeit

from arangodb.api import Client, Database, Collection

##################################
# These tests
##################################

# Init client
from arangodb.query.advanced import Query
from arangodb.query.simple import SimpleQuery

client = Client('arango.nix.bra')

# Create database in which all collections are created
database_name = 'arangopy_speed_test_database'

try:
    Database.create(database_name)
except Exception:
    # FIX: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception. The database already
    # exists in this case, so just select it.
    client.set_database(database_name)

timer = timeit.default_timer


def timer_decorator(message):
    # Decorator factory: prints `message % elapsed_seconds` after each call.
    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            start = timer()
            func(*args, **kwargs)
            elapsed = timer() - start

            print(message % elapsed)
        # NOTE(review): as visible here, 'wrapper' is never returned —
        # confirm 'return wrapper' / 'return outer_wrapper' follow in the
        # remainder of the file; otherwise decorated functions become None.
# JSON Web Token settings: 5-minute access tokens, 7-day refresh window.
JWT_AUTH = {
    'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=300),
    'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),
    'JWT_ALLOW_REFRESH': True,
}

# Protocol used when the site builds absolute URLs.
MY_SITE_PROTOCOL = "http"

# Login/logout routes used by the browsable REST framework UI.
LOGIN_URL = 'rest_framework:login'
LOGOUT_URL = 'rest_framework:logout'

# Swagger UI configuration: session auth plus an API-key scheme that
# reads the token from the 'Authorization' header.
SWAGGER_SETTINGS = {
    'USE_SESSION_AUTH': True,
    'SECURITY_DEFINITIONS': {
        'api_key': {
            'type': 'apiKey',
            'in': 'header',
            'name': 'Authorization'
        }
    },
}

# ArangoDB connection used by the application.
# NOTE(review): credentials are hard-coded in source control — move the
# password into an environment variable or secrets store.
client = Client(hostname='127.0.0.1', database="_system", auth=('root', 'hrhk1234'))

# Allow cross-origin requests from any origin (development setting).
CORS_ORIGIN_ALLOW_ALL = True
# CORS_ORIGIN_WHITELIST = (
#     'localhost:8000',
#     '127.0.0.1:9000'
# )
import unittest

from arangodb.api import Client


class ExtendedTestCase(unittest.TestCase):
    def assertDocumentsEqual(self, doc1, doc2):
        """
        Assert that two documents share the same id and that every
        property of doc1's data matches doc2's.
        """
        self.assertEqual(doc1.id, doc2.id)

        for prop, expected in doc1.data.items():
            self.assertEqual(expected, doc2.data[prop])


client = Client(hostname='localhost', auth=('root', ''))
def on_init(self, model_class):
    """
    Set up the relation when the owning model class is initialised.

    Creates (or loads) the edge collection backing the relation and, if
    a related_name is configured, installs a mirror ManyToManyField on
    the related model class so the relation is navigable from both sides.

    :param model_class the model class this field is declared on
    """
    # PEP 8 idiom fix: 'is not None' instead of 'not ... is None'.
    if self.related_name is not None:
        relation_name = self._get_relation_collection_name(model_class)

        # The edge collection (type=3) may already exist from a previous
        # run — fall back to loading it.
        # FIX: narrowed the bare 'except:' to 'except Exception:'.
        try:
            self.relation_collection = Collection.create(
                name=relation_name,
                database=Client.instance().database,
                type=3,
            )
        except Exception:
            self.relation_collection = Collection.get_loaded_collection(
                name=relation_name)

        fields = self.relation_class._model_meta_data._fields
        otherside_field = ManyToManyField(to=model_class, related_name=None)
        fields[self.related_name] = otherside_field

        # Configure other side field so both directions share the same
        # relation collection.
        otherside_field.related_queryset = self.relation_class.objects.all()
        otherside_field.relation_collection = self.relation_collection

        self.related_queryset = self.relation_class.objects.all()
from arangodb.api import Client, Database, Collection, Document
from arangodb.index.api import Index
from arangodb.index.general import FulltextIndex, CapConstraintIndex
from arangodb.index.unique import HashIndex, SkiplistIndex, GeoIndex
from arangodb.orm.fields import CharField, ForeignKeyField, NumberField, DatetimeField, DateField, BooleanField, \
    UuidField, ManyToManyField, ChoiceField
from arangodb.orm.models import CollectionModel
from arangodb.query.advanced import Query, Traveser
from arangodb.query.utils.document import create_document_from_result_dict
from arangodb.query.simple import SimpleQuery, SimpleIndexQuery
from arangodb.server.endpoint import Endpoint
from arangodb.transaction.controller import Transaction, TransactionController
from arangodb.user import User

# Shared client for all tests; assumes a local ArangoDB instance.
client = Client(hostname='localhost')


class ExtendedTestCase(unittest.TestCase):
    # Base test case adding document-comparison helpers.

    def assertDocumentsEqual(self, doc1, doc2):
        """
        Assert that doc1 and doc2 have the same id and identical values
        for every property present in doc1's data.
        """
        self.assertEqual(doc1.id, doc2.id)

        for prop in doc1.data:
            doc1_val = doc1.data[prop]
            doc2_val = doc2.data[prop]

            self.assertEqual(doc1_val, doc2_val)
def remove(cls, id):
    """
    Delete the index identified by *id*.

    :param id string/document-handle of the index
    """
    Client.instance().api.index(id).delete()
def on_init(self, model_class, attribute_name):
    """
    Set up the many-to-many relation when the owning model class is
    initialised.

    Creates (or loads) the edge collection backing the relation and, if
    a related_name is configured, installs a mirror ManyToManyField on
    the related model class so the relation is navigable from both sides.

    :param model_class the model class this field is declared on
    :param attribute_name name of the attribute the field is bound to
    """
    super(ManyToManyField, self).on_init(model_class=model_class,
                                         attribute_name=attribute_name)

    # PEP 8 idiom fix: 'is not None' instead of 'not ... is None'.
    if self.related_name is not None:
        relation_name = self._get_relation_collection_name(model_class)

        # The edge collection (type=3) may already exist from a previous
        # run — fall back to loading it.
        # FIX: narrowed the bare 'except:' to 'except Exception:'.
        try:
            self.relation_collection = Collection.create(
                name=relation_name,
                database=Client.instance().database,
                type=3,
            )
        except Exception:
            self.relation_collection = Collection.get_loaded_collection(
                name=relation_name)

        fields = self.relation_class._model_meta_data._fields
        otherside_field = ManyToManyField(to=model_class, related_name=None)
        fields[self.related_name] = otherside_field

        # Configure other side field so both directions share the same
        # relation collection.
        otherside_field.related_queryset = self.relation_class.objects.all()
        otherside_field.relation_collection = self.relation_collection

        self.related_queryset = self.relation_class.objects.all()
def on_init(self, model_class):
    """
    Set up the relation's backing edge collection and register a mirror
    field on the related model class when a related_name is configured.

    :param model_class the model class this field is declared on
    """
    # PEP 8 idiom fix: 'is not None' instead of 'not ... is None'.
    if self.related_name is not None:
        relation_name = self._get_relation_collection_name(model_class)

        # The edge collection (type=3) may already exist from a previous
        # run — fall back to loading it.
        # FIX: narrowed the bare 'except:' to 'except Exception:'.
        try:
            self.relation_collection = Collection.create(
                name=relation_name,
                database=Client.instance().database,
                type=3,
            )
        except Exception:
            self.relation_collection = Collection.get_loaded_collection(
                name=relation_name)

        fields = self.relation_class._model_meta_data._fields
        fields[self.related_name] = ManyToManyField(to=model_class,
                                                    related_name=None)