def test_globaldb_endpoint_discovery(self):
        connection_policy = documents.ConnectionPolicy()
        connection_policy.EnableEndpointDiscovery = False

        read_location_client = document_client.DocumentClient(Test_globaldb_tests.read_location_host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        document_definition = { 'id': 'doc',
                                'name': 'sample document',
                                'key': 'value'}        
        
        # CreateDocument will fail for the read location client since EnableEndpointDiscovery is set to False, so the request
        # goes directly to the endpoint that was used to create the client instance (which happens to be a read endpoint)
        self.__AssertHTTPFailureWithStatus(
            403,
            3,
            read_location_client.CreateDocument,
            self.test_coll['_self'],
            document_definition)

        # Query databases will pass for the read location client as it's a GET operation
        list(read_location_client.QueryDatabases({
            'query': 'SELECT * FROM root r WHERE r.id=@id',
            'parameters': [
                { 'name':'@id', 'value': self.test_db['id'] }
            ]
        }))

        connection_policy.EnableEndpointDiscovery = True
        read_location_client = document_client.DocumentClient(Test_globaldb_tests.read_location_host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        # CreateDocument will go to the WriteEndpoint since EnableEndpointDiscovery is True and the client resolves the correct endpoint based on the operation
        created_document = read_location_client.CreateDocument(self.test_coll['_self'], document_definition)
        self.assertEqual(created_document['id'], document_definition['id'])
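
The __AssertHTTPFailureWithStatus helper called above is not shown in this example. A minimal sketch of what it might look like, assuming pydocumentdb.errors is imported as errors and that the sub-status surfaces through the x-ms-substatus response header:

    def __AssertHTTPFailureWithStatus(self, status_code, sub_status, func, *args, **kwargs):
        # Assert that calling func raises an HTTPFailure with the expected status and sub-status.
        try:
            func(*args, **kwargs)
            self.fail('function should have raised an HTTPFailure')
        except errors.HTTPFailure as inst:
            self.assertEqual(inst.status_code, status_code)
            # assumption: the sub-status code is carried in the response headers
            self.assertEqual(int(inst.headers.get('x-ms-substatus', 0)), sub_status)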
    def test_globaldb_preferred_locations(self):
        connection_policy = documents.ConnectionPolicy()
        connection_policy.EnableEndpointDiscovery = True

        client = document_client.DocumentClient(
            Test_globaldb_tests.host,
            {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        document_definition = {
            'id': 'doc',
            'name': 'sample document',
            'key': 'value'
        }

        created_document = client.CreateDocument(self.test_coll['_self'],
                                                 document_definition)
        self.assertEqual(created_document['id'], document_definition['id'])

        # Delay to allow the document to replicate to the read location (eventual consistency)
        time.sleep(5)

        client.ReadDocument(created_document['_self'])
        content_location = str(client.last_response_headers[
            http_constants.HttpHeaders.ContentLocation])

        content_location_url = urlparse(content_location)
        write_location_url = urlparse(Test_globaldb_tests.write_location_host)

        # If no preferred locations are set, the write endpoint is returned as the ReadEndpoint for better latency
        self.assertEqual(str(content_location_url.hostname),
                         str(write_location_url.hostname))
        self.assertEqual(client.ReadEndpoint,
                         Test_globaldb_tests.write_location_host)

        connection_policy.PreferredLocations = [
            Test_globaldb_tests.read_location2
        ]
        client = document_client.DocumentClient(
            Test_globaldb_tests.host,
            {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        document_definition['id'] = 'doc2'
        created_document = client.CreateDocument(self.test_coll['_self'],
                                                 document_definition)

        # Delay to allow the document to replicate to the read location (eventual consistency)
        time.sleep(5)

        client.ReadDocument(created_document['_self'])
        content_location = str(client.last_response_headers[
            http_constants.HttpHeaders.ContentLocation])

        content_location_url = urlparse(content_location)
        read_location2_url = urlparse(Test_globaldb_tests.read_location2_host)

        # Test that the preferred location is used as the ReadEndpoint instead of the default write endpoint when preferred locations are set
        self.assertEqual(str(content_location_url.hostname),
                         str(read_location2_url.hostname))
        self.assertEqual(client.ReadEndpoint,
                         Test_globaldb_tests.read_location2_host)
    def test_globaldb_read_write_endpoints(self):
        connection_policy = documents.ConnectionPolicy()
        connection_policy.EnableEndpointDiscovery = False

        client = document_client.DocumentClient(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        document_definition = { 'id': 'doc',
                                'name': 'sample document',
                                'key': 'value'}        
        
        # When EnableEndpointDiscovery is False, WriteEndpoint is set to the endpoint passed while creating the client instance
        created_document = client.CreateDocument(self.test_coll['_self'], document_definition)
        self.assertEqual(client.WriteEndpoint, Test_globaldb_tests.host)
        
        # Delay to allow the document to replicate to the read location (eventual consistency)
        time.sleep(5)

        client.ReadDocument(created_document['_self'])
        content_location = str(client.last_response_headers[http_constants.HttpHeaders.ContentLocation])

        content_location_url = urlparse(content_location)
        host_url = urlparse(Test_globaldb_tests.host)
        
        # When EnableEndpointDiscovery is False, ReadEndpoint is set to the endpoint passed while creating the client instance
        self.assertEqual(str(content_location_url.hostname), str(host_url.hostname))
        self.assertEqual(client.ReadEndpoint, Test_globaldb_tests.host)
        
        connection_policy.EnableEndpointDiscovery = True
        document_definition['id'] = 'doc2'

        client = document_client.DocumentClient(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy)

        # When EnableEndpointDiscovery is True, WriteEndpoint is set to the write endpoint
        created_document = client.CreateDocument(self.test_coll['_self'], document_definition)
        self.assertEqual(client.WriteEndpoint, Test_globaldb_tests.write_location_host)
        
        # Delay to allow the document to replicate to the read location (eventual consistency)
        time.sleep(5)

        client.ReadDocument(created_document['_self'])
        content_location = str(client.last_response_headers[http_constants.HttpHeaders.ContentLocation])
        
        content_location_url = urlparse(content_location)
        write_location_url = urlparse(Test_globaldb_tests.write_location_host)

        # If no preferred locations are set, the write endpoint is returned as the ReadEndpoint for better latency
        self.assertEqual(str(content_location_url.hostname), str(write_location_url.hostname))
        self.assertEqual(client.ReadEndpoint, Test_globaldb_tests.write_location_host)
Example #4
def delete():
    start = clock()
    client = document_client.DocumentClient(config['ENDPOINT'], {'masterKey': config['MASTERKEY']})
    database_link = 'dbs/' + 'dbFood'
    collection_link = database_link + '/colls/{0}'.format('FoodCollection')
    collection = client.ReadCollection(collection_link)
    param1 = request.form['param1']

    query = {'query': 'Select s.id from server s where s.TotalCalories > ' + param1}

    result_iterable = client.QueryDocuments(collection['_self'], query)
    results = list(result_iterable)
    print(results)
    b = []
    for i in range(len(results)):
        b.append(results[i]['id'])
    print(b)
    for rows in b:
        document_link = collection_link + '/docs/{0}'.format(rows)
        document_del = client.DeleteDocument(document_link)

    end = clock()
    ela = end - start
    elap = str(ela)
    # return str(results)
    # return render_template("display.html", image=results,time = elap)
    return "Deleted"
    def __init__(self, endpoint, key, database, collection):
        self.client = document_client.DocumentClient(endpoint,
                                                     {'masterKey': key})
        self.database = database
        self.collection = collection
        self.coll_link = self.GetDocumentCollectionLink(
            self.database, self.collection)
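
GetDocumentCollectionLink is referenced above but not shown; a minimal sketch, assuming it builds the id-based link form used elsewhere on this page ('dbs/{database}/colls/{collection}'):

    def GetDocumentCollectionLink(self, database, collection):
        # assumption: database and collection are plain ids, not resource dicts
        return 'dbs/' + database + '/colls/' + collection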
Example #6
    def test_default_retry_policy_for_create(self):
        connection_policy = documents.ConnectionPolicy()

        client = document_client.DocumentClient(
            Test_retry_policy_tests.host,
            {'masterKey': Test_retry_policy_tests.masterKey},
            connection_policy)

        document_definition = {
            'id': 'doc',
            'name': 'sample document',
            'key': 'value'
        }

        self.OriginalExecuteFunction = retry_utility._ExecuteFunction
        retry_utility._ExecuteFunction = self._MockExecuteFunctionConnectionReset

        created_document = {}
        try:
            created_document = client.CreateDocument(
                self.created_collection['_self'], document_definition)
        except Exception as err:
            self.assertEqual(err.status_code, 10054)

        self.assertDictEqual(created_document, {})
        self.assertEqual(self.counter, 7)

        retry_utility._ExecuteFunction = self.OriginalExecuteFunction
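
_MockExecuteFunctionConnectionReset is not shown in this example. A simplified sketch of the technique, assuming the mock counts every attempt and injects a connection-reset failure; the suite's real mock evidently also lets some calls through, which is how the counter values asserted above are reached:

    def _MockExecuteFunctionConnectionReset(self, function, *args, **kwargs):
        # count every attempt so the test can assert how many retries happened
        self.counter += 1
        # simulate a TCP connection reset (Windows socket error 10054)
        raise errors.HTTPFailure(10054, "Connection was reset", {})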
Example #7
    def test_document_ttl_with_no_defaultTtl(self):
        client = document_client.DocumentClient(
            Test_ttl_tests.host, {'masterKey': Test_ttl_tests.masterKey})

        created_db = client.CreateDatabase({'id': Test_ttl_tests.testDbName})

        collection_definition = {'id': 'sample collection'}

        created_collection = client.CreateCollection(created_db['_self'],
                                                     collection_definition)

        document_definition = {
            'id': 'doc1',
            'name': 'sample document',
            'key': 'value',
            'ttl': 5
        }

        dummy_document_definition = {'id': 'dummy doc'}

        created_document = client.CreateDocument(created_collection['_self'],
                                                 document_definition)

        time.sleep(6)

        # Upserting a dummy document here is a way to update the logical timestamp of the created document
        dummy_doc = client.UpsertDocument(created_collection['_self'],
                                          dummy_document_definition)

        # The created document still exists even after its ttl has passed, since TTL is disabled at the collection level (no defaultTtl property is defined)
        read_document = client.ReadDocument(created_document['_self'])
        self.assertEqual(created_document['id'], read_document['id'])
Example #8
def cf_cosmosdb_document(cli_ctx, kwargs):
    from pydocumentdb import document_client
    service_type = document_client.DocumentClient

    logger.debug('Getting data service client service_type=%s',
                 service_type.__name__)
    try:
        name = kwargs.pop('db_account_name', None)
        key = kwargs.pop('db_account_key', None)
        url_connection = kwargs.pop('db_url_connection', None)
        resource_group = kwargs.pop('db_resource_group_name', None)

        if name and resource_group and not key:
            # if resource group name is provided find key
            keys = cf_cosmosdb(cli_ctx).database_accounts.list_keys(
                resource_group, name)
            key = keys.primary_master_key

        url_connection = _get_url_connection(url_connection, name)

        if not key and not url_connection:
            raise CLIError(NO_CREDENTIALS_ERROR_MESSAGE)
        auth = {'masterKey': key}
        client = document_client.DocumentClient(url_connection=url_connection,
                                                auth=auth)
    except Exception as ex:
        if isinstance(ex, CLIError):
            raise ex

        raise CLIError(
            'Failed to instantiate an Azure Cosmos DB client using the provided credential '
            + str(ex))
    _add_headers(client)
    return client
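
_get_url_connection is a helper defined elsewhere in the module; a plausible sketch, assuming it derives the account endpoint from the account name when an explicit URL is not supplied:

def _get_url_connection(url_connection, account_name):
    # prefer an explicitly supplied endpoint; otherwise derive it from the account name
    if url_connection:
        return url_connection
    if not account_name:
        return None
    return 'https://{}.documents.azure.com:443'.format(account_name)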
Example #9
    def setUpClass(cls):
        # Creates the database and collection, and inserts all the documents.
        # We gain some speed-up in running the tests by creating the database, collection, and documents only once.
        
        if (cls.masterKey == '[YOUR_KEY_HERE]' or
                cls.host == '[YOUR_ENDPOINT_HERE]'):
            raise Exception(
                "You must specify your Azure Cosmos DB account values for "
                "'masterKey' and 'host' at the top of this class to run the "
                "tests.")
            
        CrossPartitionTopOrderByTest.cleanUpTestDatabase()
        
        cls.client = document_client.DocumentClient(cls.host, {'masterKey': cls.masterKey})
        cls.created_db = cls.client.CreateDatabase({ 'id': 'sample database' })        
        cls.created_collection = CrossPartitionTopOrderByTest.create_collection(cls.client, cls.created_db)
        cls.collection_link = cls.GetDocumentCollectionLink(cls.created_db, cls.created_collection)

        # create a document using the document definition
        cls.document_definitions = []
        for i in xrange(20):
            d = {'id' : str(i),
                 'name': 'sample document',
                 'spam': 'eggs' + str(i),
                 'cnt': i,
                 'key': 'value',
                 'spam2': 'eggs' + str(i) if (i == 3) else i,
                 'boolVar': (i % 2 == 0),
                 'number': 1.1 * i
                 
                 }
            cls.document_definitions.append(d)

        CrossPartitionTopOrderByTest.insert_doc()
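
create_collection and insert_doc are class helpers that are not shown here; a minimal sketch of insert_doc, assuming it simply creates each predefined document in the test collection:

    @classmethod
    def insert_doc(cls):
        # create every document definition prepared in setUpClass
        for d in cls.document_definitions:
            cls.client.CreateDocument(cls.collection_link, d)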
def updateRecord(blobURL, tags):

    try:
        #these function calls are used to locate the db client, database, collection, then the actual document
        #based off the URL provided
        #connects to documentdb
        client = document_client.DocumentClient(db_client,
                                                {'masterKey': db_client_key})
        #gets instance of database based on the client id
        db = next(
            (data for data in client.ReadDatabases() if data['id'] == db_name))
        #gets instance of coll based on the coll id and db path
        coll = next((coll for coll in client.ReadCollections(db['_self'])
                     if coll['id'] == db_collection))

        #Read documents and take first since blobURL should not be duplicated.
        doc = next((doc for doc in client.ReadDocuments(coll['_self'])
                    if doc['photo_url'] == blobURL))

        #changes the tags field to the new one provided by the user
        doc['tags'] = tags
        #replaces the current document with the new document, with updated tags
        replaced_document = client.ReplaceDocument(doc['_self'], doc)
        return "success"

    #returns error if method throws an exception
    except Exception:
        return "error"
Example #11
def result():
    replaced_document = {}
    client = document_client.DocumentClient(
        config_cosmos.COSMOSDB_HOST, {'masterKey': config_cosmos.COSMOSDB_KEY})

    # Read databases and take first since id should not be duplicated.
    db = next((data for data in client.ReadDatabases()
               if data['id'] == config_cosmos.COSMOSDB_DATABASE))

    # Read collections and take first since id should not be duplicated.
    coll = next((coll for coll in client.ReadCollections(db['_self'])
                 if coll['id'] == config_cosmos.COSMOSDB_COLLECTION))

    # Read documents and take first since id should not be duplicated.
    doc = next((doc for doc in client.ReadDocuments(coll['_self'])
                if doc['id'] == config_cosmos.COSMOSDB_DOCUMENT))

    replaced_document = client.ReplaceDocument(doc['_self'], doc)

    # Create a model to pass to results.html
    class VoteObject:
        choices = dict()
        total_votes = 0

    vote_object = VoteObject()
    vote_object.choices = {
        "Web Site": doc['Web Site'],
        "Cloud Service": doc['Cloud Service'],
        "Virtual Machine": doc['Virtual Machine']
    }
    vote_object.total_votes = sum(vote_object.choices.values())

    return render_template('results.html',
                           year=datetime.now().year,
                           vote_object=vote_object)
Example #12
def main(event: func.EventHubEvent):

    cosmosdb_order_masterKey = os.environ.get('cosmosdb_order_masterKey')
    cosmosdb_order_host = os.environ.get('cosmosdb_order_host')
    cosmosdb_order_databaseId = os.environ.get('cosmosdb_order_databaseId')
    cosmosdb_order_collectionId = os.environ.get('cosmosdb_order_collectionId')

    event_body = event.get_body().decode('utf-8')
    logging.info(event_body)

    combineOrderContent = os.environ.get('combineOrderContent')

    response = requests.post(combineOrderContent, data=event_body)
    doc_json = response.json()

    client = document_client.DocumentClient(
        cosmosdb_order_host, {'masterKey': cosmosdb_order_masterKey})

    for sale in doc_json:

        dbLink = 'dbs/' + cosmosdb_order_databaseId
        collLink = dbLink + '/colls/' + cosmosdb_order_collectionId

        sale['salesNumber'] = sale['headers']['salesNumber']

        client.CreateDocument(collLink, sale)
def createRecord(user, originalFilename, tags, time, url):
    #tries updloading metadata to documentdb
    try:
        #creates epoc based on the date 2/23/2016 (arbitrary date)
        epoc = datetime.datetime(2016, 2, 23, 3, 0, 00, 000000)
        #converts timestamp to value
        #this value is needed for searcing documentdb based on timestamp
        val = (time - epoc).total_seconds() * 1000000
        #connects to documentdb
        client = document_client.DocumentClient(db_client,
                                                {'masterKey': db_client_key})
        #gets instance of database based on the client id
        db = next(
            (data for data in client.ReadDatabases() if data['id'] == db_name))
        #gets instance of coll based on the coll id and db path
        coll = next((coll for coll in client.ReadCollections(db['_self'])
                     if coll['id'] == db_collection))

        #create document. Tags is an array, as passed.
        document = client.CreateDocument(
            coll['_self'], {
                "user_id": user,
                "file_name": originalFilename,
                "photo_url": url,
                "photo_id": val,
                "tags": tags
            })

        return "success"

    #returns error if the metadata wasn't uploaded
    except Exception:
        return "error"
Example #14
    def setUp(self):
        self.client = document_client.DocumentClient(
            Test_retry_policy_tests.host,
            {'masterKey': Test_retry_policy_tests.masterKey})

        # Create the test database only when it's not already present
        query_iterable = self.client.QueryDatabases(
            'SELECT * FROM root r WHERE r.id=\'' +
            Test_retry_policy_tests.test_db_name + '\'')
        it = iter(query_iterable)

        self.created_db = next(it, None)
        if self.created_db is None:
            self.created_db = self.client.CreateDatabase(
                {'id': Test_retry_policy_tests.test_db_name})

        # Create the test collection only when it's not already present
        query_iterable = self.client.QueryCollections(
            self.created_db['_self'], 'SELECT * FROM root r WHERE r.id=\'' +
            Test_retry_policy_tests.test_coll_name + '\'')
        it = iter(query_iterable)

        self.created_collection = next(it, None)
        if self.created_collection is None:
            self.created_collection = self.client.CreateCollection(
                self.created_db['_self'],
                {'id': Test_retry_policy_tests.test_coll_name})

        self.retry_after_in_milliseconds = 1000
Example #15
    def test_resource_throttle_retry_policy_max_wait_time(self):
        connection_policy = documents.ConnectionPolicy()
        connection_policy.RetryOptions = retry_options.RetryOptions(5, 2000, 3)

        client = document_client.DocumentClient(
            Test_retry_policy_tests.host,
            {'masterKey': Test_retry_policy_tests.masterKey},
            connection_policy)

        self.OriginalExecuteFunction = retry_utility._ExecuteFunction
        retry_utility._ExecuteFunction = self._MockExecuteFunction

        document_definition = {
            'id': 'doc',
            'name': 'sample document',
            'key': 'value'
        }

        try:
            client.CreateDocument(self.created_collection['_self'],
                                  document_definition)
        except errors.HTTPFailure as e:
            self.assertEqual(e.status_code, 429)
            self.assertGreaterEqual(
                client.last_response_headers[
                    http_constants.HttpHeaders.ThrottleRetryWaitTimeInMs],
                connection_policy.RetryOptions.MaxWaitTimeInSeconds * 1000)

        retry_utility._ExecuteFunction = self.OriginalExecuteFunction
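
_MockExecuteFunction here stands in for the transport call and forces throttling. A minimal sketch, assuming it raises a 429 carrying the retry-after header built from the value prepared in setUp (the header constant name is an assumption):

    def _MockExecuteFunction(self, function, *args, **kwargs):
        # always report 'Request rate is too large' so the throttle retry policy engages
        raise errors.HTTPFailure(
            429,
            "Request rate is too large",
            {http_constants.HttpHeaders.RetryAfterInMilliseconds:
                self.retry_after_in_milliseconds})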
Example #16
def conecta(methods=['POST']):
    if request.method=='POST':
        account_name=request.form['account']
        db_name=request.form['db_name']
        coll_name=request.form['coll_name']
        master_key=request.form['master_key']
        lon=float(request.form['lon'])
        lat=float(request.form['lat'])
        maps_api_key=request.form['maps_key']
        distancia_centro=str(request.form['dist'])

        cliente = document_client.DocumentClient('https://'+account_name+'.documents.azure.com', {'masterKey': master_key})
        bbdd=cliente.ReadDatabase('dbs/'+db_name)
        coleccion=cliente.ReadCollection('dbs/'+db_name+'/colls/'+coll_name)
        id_coleccion=coleccion['_self']
               
        opciones={}
        opciones['enableCrossPartitionQuery'] = True
        consulta='SELECT c.Location.coordinates[0] as lon, c.Location.coordinates[1] as lat, c.mass_grams as masa, c.Name as nombre FROM c WHERE ST_DISTANCE(c.Location, { "type": "Point", "coordinates": ['+str(lon)+', '+str(lat)+'] }) <'+distancia_centro

        salida = cliente.QueryDocuments(id_coleccion,consulta,opciones)
        lista_salida=salida.fetch_next_block()

        with open ('static/sitios.js','w') as fichero:
            fichero.write('myData = [\n')
            for elem in lista_salida:
                m_name=elem['nombre'].replace(u"\ufeff", "")[1:].replace("'","`")
                fichero.write('['+str(elem['lat'])+','+str(elem['lon'])+','+"'"+m_name+': '+str(elem['masa'])+" gramos'"+'],\n')
            fichero.write('];')
        return render_template('mapa.html', latitud=lat, longitud=lon, maps_api_key=maps_api_key)
Example #17
def caption():
    # Setting up the client configuration
    client = document_client.DocumentClient(config['ENDPOINT'], {'masterKey': config['MASTERKEY']})

    # The link for database with an id of Foo would be dbs/Foo
    database_link = 'dbs/' + 'db1'

    # The link for collection with an id of Bar in database Foo would be dbs/Foo/colls/Bar
    collection_link = database_link + '/colls/{0}'.format('kcoll')

    # Reading the documents in collection

    collection = client.ReadCollection(collection_link)
    # Query  in SQL
    #cap = request.form['word']
    cap = 'this'
    query = {
        'query': 'SELECT s.Cloud FROM server s WHERE CONTAINS(s.caption, @word)',
        'parameters': [{'name': '@word', 'value': str(cap)}]
    }


    # iterating through the document to get query result
    result_iterable = client.QueryDocuments(collection['_self'], query)
    results = list(result_iterable)

    picture = []
    for row in results:
        picture.append(row['Cloud'])
    return render_template("displayimage.html", image=picture)
    def test_globaldb_endpoint_discovery_retry_policy_mock(self):
        client = document_client.DocumentClient(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey})

        self.OriginalExecuteFunction = retry_utility._ExecuteFunction
        retry_utility._ExecuteFunction = self._MockExecuteFunction

        self.OriginalGetDatabaseAccount = client.GetDatabaseAccount
        client.GetDatabaseAccount = self._MockGetDatabaseAccount

        max_retry_attempt_count = 10
        retry_after_in_milliseconds = 500

        endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy.Max_retry_attempt_count = max_retry_attempt_count
        endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds = retry_after_in_milliseconds

        document_definition = { 'id': 'doc',
                                'name': 'sample document',
                                'key': 'value'} 

        self.__AssertHTTPFailureWithStatus(
            403,
            3,
            client.CreateDocument,
            self.test_coll['_self'],
            document_definition)

        retry_utility._ExecuteFunction = self.OriginalExecuteFunction
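
The two mocks swapped in above are not shown. A hedged sketch: _MockExecuteFunction forces the 403.3 write-forbidden response that triggers the endpoint discovery retry policy, and _MockGetDatabaseAccount returns a bare DatabaseAccount so the forced rediscovery has something to read:

    def _MockExecuteFunction(self, function, *args, **kwargs):
        # simulate a write rejected by a read-only region (403 with sub-status 3)
        raise errors.HTTPFailure(403, "Write Forbidden", {'x-ms-substatus': 3})

    def _MockGetDatabaseAccount(self, url_connection=None):
        # return an empty account topology; the retry policy only needs the call to succeed
        return documents.DatabaseAccount()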
Example #19
def download():
    # Setting up the client configuration
    client = document_client.DocumentClient(config['ENDPOINT'], {'masterKey': config['MASTERKEY']})

    # The link for database with an id of Foo would be dbs/Foo
    database_link = 'dbs/' + 'db1'

    # The link for collection with an id of Bar in database Foo would be dbs/Foo/colls/Bar
    collection_link = database_link + '/colls/{0}'.format('kcoll')

    # Reading the documents in collection

    collection = client.ReadCollection(collection_link)
    #new FeedOptions {EnableCrossPartitionQuery = true}

    #document_link = collection_link + '/docs/{0}'.format('image2')
    #document = client.ReadDocument(document_link)
    # Query  in SQL
    query = {'query': 'SELECT s.Cloud FROM server s'}


    # iterating through the document to get query result
    result_iterable = client.QueryDocuments(collection['_self'], query)
    #print "Hi"
    results = list(result_iterable)

    #return (str(results[0]))
    #return str(results)
    picture = []
    for row in results:
        picture.append(row['Cloud'])

    return render_template("displayimage.html", image=picture)
def init():
    from azureml.dataprep import datasource
    df = datasource.load_datasource('ratings.dsource')

    from pyspark.ml.recommendation import ALS
    als = ALS() \
        .setUserCol("userId") \
        .setRatingCol("rating") \
        .setItemCol("movieId")

    alsModel = als.fit(df)
    global userRecs
    userRecs = alsModel.recommendForAllUsers(10)

    # Query them in SQL
    import pydocumentdb.documents as documents
    import pydocumentdb.document_client as document_client
    import pydocumentdb.errors as errors
    import datetime

    MASTER_KEY = 'oX6tWPep8FCah8RM258s7cC3x9Kl8tWdbDxmNknXCP34ShW1Ag1ladvb5QWuBmMxuRISBO2HfrRFv3QeJYCSYg=='
    HOST = 'https://dcibrecommendationhack.documents.azure.com:443/'
    DATABASE_ID = "recommendation_engine"
    COLLECTION_ID = "user_recommendations"
    database_link = 'dbs/' + DATABASE_ID
    collection_link = database_link + '/colls/' + COLLECTION_ID

    global client, collection
    client = document_client.DocumentClient(HOST, {'masterKey': MASTER_KEY})
    collection = client.ReadCollection(collection_link=collection_link)
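
init() follows the Azure ML init/run web-service convention, but the run() entry point is not shown. A speculative sketch, assuming a hypothetical JSON payload of the form {"userId": ...} and that recommendations are simply filtered out of the userRecs DataFrame:

def run(input_str):
    import json
    payload = json.loads(input_str)            # hypothetical payload: {"userId": 123}
    user_id = payload['userId']
    rows = userRecs.filter(userRecs.userId == user_id).collect()
    recs = [row.recommendations for row in rows]
    return json.dumps({'userId': user_id, 'recommendations': str(recs)})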
Example #21
def run_sample():
    with IDisposable(document_client.DocumentClient(HOST, {'masterKey': MASTER_KEY} )) as client:
        try:
            # setup database for this sample
            try:
                client.CreateDatabase({"id": DATABASE_ID})

            except errors.DocumentDBError as e:
                if e.status_code == 409:
                    pass
                else:
                    raise errors.HTTPFailure(e.status_code)

            # setup collection for this sample
            try:
                client.CreateCollection(database_link, {"id": COLLECTION_ID})
                print('Collection with id \'{0}\' created'.format(COLLECTION_ID))

            except errors.DocumentDBError as e:
                if e.status_code == 409:
                    print('Collection with id \'{0}\' was found'.format(COLLECTION_ID))
                else:
                    raise errors.HTTPFailure(e.status_code)

            DocumentManagement.CreateDocuments(client)
            DocumentManagement.ReadDocument(client,'SalesOrder1')
            DocumentManagement.ReadDocuments(client)

        except errors.HTTPFailure as e:
            print('\nrun_sample has caught an error. {0}'.format(e.message))
        
        finally:
            print("\nrun_sample done")
Example #22
def makeMetadata(user, originalFilename, tags, time, url):
    try:

        epoc = datetime.datetime(2016, 2, 23, 3, 0, 00, 000000)
        val = (time - epoc).total_seconds() * 1000000

        client = document_client.DocumentClient(db_client,
                                                {'masterKey': db_client_key})

        #Not sure we need this. Client may be it.
        db = next(
            (data for data in client.ReadDatabases() if data['id'] == db_name))

        coll = next((coll for coll in client.ReadCollections(db['_self'])
                     if coll['id'] == db_collection))

        #create document. Tags is an array, as passed.
        document = client.CreateDocument(
            coll['_self'], {
                "user_id": user,
                "file_name": originalFilename,
                "photo_url": url,
                "photo_id": val,
                "tags": tags
            })

        returnlist = []

        return "success"

    except Exception:
        return "error"
def run_sample():
    with IDisposable(
            document_client.DocumentClient(
                HOST, {'masterKey': MASTER_KEY})) as client:
        try:
            # query for a database
            DatabaseManagement.find_database(client, DATABASE_ID)

            # create a database
            DatabaseManagement.create_database(client, DATABASE_ID)

            # get a database using its id
            DatabaseManagement.read_database(client, DATABASE_ID)

            # list all databases on an account
            DatabaseManagement.list_databases(client)

            # delete database by id
            DatabaseManagement.delete_database(client, DATABASE_ID)

        except errors.HTTPFailure as e:
            print('\nrun_sample has caught an error. {0}'.format(e.message))

        finally:
            print("\nrun_sample done")
    def test_default_retry_policy_for_query(self):
        connection_policy = Test_retry_policy_tests.connectionPolicy

        client = document_client.DocumentClient(Test_retry_policy_tests.host, {'masterKey': Test_retry_policy_tests.masterKey}, connection_policy)

        document_definition_1 = { 'id': 'doc1',
                                  'name': 'sample document',
                                  'key': 'value'} 
        document_definition_2 = { 'id': 'doc2',
                                  'name': 'sample document',
                                  'key': 'value'} 

        client.CreateDocument(self.created_collection['_self'], document_definition_1)
        client.CreateDocument(self.created_collection['_self'], document_definition_2)

        self.OriginalExecuteFunction = retry_utility._ExecuteFunction
        retry_utility._ExecuteFunction = self._MockExecuteFunctionConnectionReset

        docs = client.QueryDocuments(self.created_collection['_self'], "Select * from c", {'maxItemCount':1})
        
        result_docs = list(docs)
        self.assertEqual(result_docs[0]['id'], 'doc1')
        self.assertEqual(result_docs[1]['id'], 'doc2')
        self.assertEqual(self.counter, 12)

        self.counter = 0
        retry_utility._ExecuteFunction = self.OriginalExecuteFunction

        client.DeleteDocument(result_docs[0]['_self'])
        client.DeleteDocument(result_docs[1]['_self'])
Example #25
    def __init__(self, host, master_key, database_id):
        self.host = host
        self.master_key = master_key
        self.database_id = database_id
        self.client = documents_client.DocumentClient(
            host, {'masterKey': master_key})
        self.database_link = 'dbs/' + database_id
    def test_document_ttl_with_no_defaultTtl(self):
        client = document_client.DocumentClient(
            Test_ttl_tests.host, {'masterKey': Test_ttl_tests.masterKey},
            Test_ttl_tests.connectionPolicy)

        created_db = client.CreateDatabase({'id': Test_ttl_tests.testDbName})

        collection_definition = {'id': 'sample collection'}

        created_collection = client.CreateCollection(created_db['_self'],
                                                     collection_definition)

        document_definition = {
            'id': 'doc1',
            'name': 'sample document',
            'key': 'value',
            'ttl': 5
        }

        created_document = client.CreateDocument(created_collection['_self'],
                                                 document_definition)

        time.sleep(7)

        # The created document still exists even after its ttl has passed, since TTL is disabled at the collection level (no defaultTtl property is defined)
        read_document = client.ReadDocument(created_document['_self'])
        self.assertEqual(created_document['id'], read_document['id'])
Example #27
    def test_resource_throttle_retry_policy_query(self):
        connection_policy = documents.ConnectionPolicy()
        connection_policy.RetryOptions = retry_options.RetryOptions(5)

        client = document_client.DocumentClient(Test_retry_policy_tests.host, {'masterKey': Test_retry_policy_tests.masterKey}, connection_policy)
        
        document_definition = { 'id': 'doc',
                                'name': 'sample document',
                                'key': 'value'} 

        client.CreateDocument(self.created_collection['_self'], document_definition)

        self.OriginalExecuteFunction = retry_utility._ExecuteFunction
        retry_utility._ExecuteFunction = self._MockExecuteFunction

        try:
            list(client.QueryDocuments(
            self.created_collection['_self'],
            {
                'query': 'SELECT * FROM root r WHERE r.id=@id',
                'parameters': [
                    { 'name':'@id', 'value':document_definition['id'] }
                ]
            }))
        except errors.HTTPFailure as e:
            self.assertEqual(e.status_code, 429)
            self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, client.last_response_headers[http_constants.HttpHeaders.ThrottleRetryCount])
            self.assertGreaterEqual(client.last_response_headers[http_constants.HttpHeaders.ThrottleRetryWaitTimeInMs], connection_policy.RetryOptions.MaxRetryAttemptCount * self.retry_after_in_milliseconds)

        retry_utility._ExecuteFunction = self.OriginalExecuteFunction
Example #28
def getRecords(user, lastID, direction, tags):

    dir = ">"
    order = "ASC"

    if direction == True:
        dir = "<"
        order = "DESC"

    tagString = "[\"" + '", "'.join(tags) + "\"]"
    client = document_client.DocumentClient(db_client,
                                            {'masterKey': db_client_key})
    #Not sure we need this. Client may be it.
    #db = next((data for data in client.ReadDatabases() if data['id'] == db_name));
    #coll = next((coll for coll in client.ReadCollections(db['_self']) if coll['id'] == db_collection));

    queryString = 'SELECT TOP 20 '+ db_collection +'.user_id, '+ db_collection +'.photo_id, '+ \
                    db_collection +'.file_name, '+ db_collection +'.photo_url, '+ db_collection + \
                    '.tags FROM '+ db_collection +' WHERE '+ db_collection +'.user_id = "' \
                    + user + '" AND ' + db_collection + '.photo_id ' + dir + ' ' + lastID

    if len(tags) > 0:
        for taG in tags:
            #queryString += ' AND '
            queryString += ' AND ARRAY_CONTAINS(' + db_collection + '.tags ,"' + taG + '")'

    queryString += ' ORDER BY ' + db_collection + '.photo_id ' + order

    print(queryString)

    itterResult = client.QueryCollections(db_client, queryString)

    print(itterResult._client)
    def setUp(self):
        QueryExecutionContextEndToEndTests.cleanUpTestDatabase()
        
        self.client = document_client.DocumentClient(QueryExecutionContextEndToEndTests.host, {'masterKey': QueryExecutionContextEndToEndTests.masterKey}, QueryExecutionContextEndToEndTests.connectionPolicy)
        self.created_db = self.client.CreateDatabase({ 'id': 'sample database' })        
        self.created_collection = self.create_collection(self.client, self.created_db)
        self.collection_link = self.GetDocumentCollectionLink(self.created_db, self.created_collection)
        
        # sanity check:
        partition_key_ranges = list(self.client._ReadPartitionKeyRanges(self.collection_link))
        self.assertGreaterEqual(len(partition_key_ranges), 1)

        # create a document using the document definition
        self.document_definitions = []
        for i in xrange(20):
            d = {'id' : str(i),
                 'name': 'sample document',
                 'spam': 'eggs' + str(i),
                 'key': 'value'}
            self.document_definitions.append(d)
        self.insert_doc(self.client, self.created_db, self.collection_link, self.document_definitions)

        # sanity check: read documents after creation
        queried_docs = list(self.client.ReadDocuments(self.collection_link))
        self.assertEqual(
            len(queried_docs),
            len(self.document_definitions),
            'create should increase the number of documents')    
Example #30
def ObtainClient():
    connection_policy = documents.ConnectionPolicy()
    connection_policy.SSLConfiguration = documents.SSLConfiguration()
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    connection_policy.SSLConfiguration.SSLCaCerts = False
    return document_client.DocumentClient(HOST, {'masterKey': MASTER_KEY},
                                          connection_policy)
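
A short usage sketch for the client returned above (certificate verification is disabled, so this is only suitable for emulator or test endpoints):

client = ObtainClient()
for db in client.ReadDatabases():
    print(db['id'])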