Example #1
0
class Cosmos_Manager:
    """Thin convenience wrapper around CosmosClient.

    Credentials default to the Flask application's config when not supplied.
    Each request payload names its own database/container, so the wrapper
    resolves a fresh container client per call.
    """

    def __init__(self, cosmosEndpoint=None, cosmosKey=None):
        # Missing credentials fall back to the application configuration.
        endpoint = (cosmosEndpoint if cosmosEndpoint is not None
                    else app.config["NLP_COSMOS_ENDPOINT"])
        secret = (cosmosKey if cosmosKey is not None
                  else app.config["NLP_COSMOS_KEY"])
        self.client = CosmosClient(endpoint, secret)

    def _container(self, data):
        # Resolve the container client named by the payload.
        database = self.client.get_database_client(data["database"])
        return database.get_container_client(data["container"])

    def add(self, data):
        """Create ``data['value']`` in the database/container named by ``data``."""
        content = data["value"]
        self._container(data).create_item(content)

    def get(self, data):  #TODO: need to refactor
        """Run ``data['query']`` against the named container, returning all rows."""
        query = data["query"]
        return list(
            self._container(data).query_items(
                query=query,
                enable_cross_partition_query=True))
Example #2
0
 def __init__(self, url=None, key=None):
     """Open the 'camera-trap' database, defaulting credentials from env vars."""
     endpoint = url or os.environ['COSMOS_ENDPOINT']
     secret = key or os.environ['COSMOS_KEY']
     cosmos = CosmosClient(endpoint, credential=secret)
     self.database = cosmos.get_database_client('camera-trap')
Example #3
0
    def __init__(self, cosmosEndpoint=None, cosmosKey=None):
        """Create the Cosmos client, reading missing credentials from app config."""
        endpoint = (app.config["NLP_COSMOS_ENDPOINT"]
                    if cosmosEndpoint is None else cosmosEndpoint)
        secret = (app.config["NLP_COSMOS_KEY"]
                  if cosmosKey is None else cosmosKey)
        self.client = CosmosClient(endpoint, secret)
Example #4
0
 def __init__(self, url: Optional[str] = None, key: Optional[str] = None):
     """Open the 'camera-trap' database and its 'sequences' container.

     Credentials fall back to COSMOS_ENDPOINT / COSMOS_KEY env vars.
     """
     endpoint = url if url else os.environ['COSMOS_ENDPOINT']
     credential = key if key else os.environ['COSMOS_KEY']
     cosmos = CosmosClient(endpoint, credential=credential)
     self.database = cosmos.get_database_client('camera-trap')
     self.container_sequences = self.database.get_container_client('sequences')
Example #5
0
 def setUp(self, *args, **kwargs):
     """Build a data-plane Cosmos client against the test account."""
     super(CosmosDBReplaceOfferActionTest, self).setUp(*args, **kwargs)
     # Management-plane client, used only to fetch the account key.
     client = local_session(Session).client('azure.mgmt.cosmosdb.CosmosDB')
     # readonly=False: the read-write key is needed to replace offers later.
     key = CosmosDBChildResource.get_cosmos_key('test_cosmosdb',
                                                'cctestcosmosdb',
                                                client,
                                                readonly=False)
     self.data_client = CosmosClient(
         url_connection='https://cctestcosmosdb.documents.azure.com:443/',
         auth={'masterKey': key})
    def __init__(self, collection: Collection, writer: bool = False):
        """Open a container client for *collection*, read-only unless *writer*."""
        creds = Credentials.WRITER.value if writer else Credentials.READER.value

        self.cosmos_client = CosmosClient(url=DB_URL, credential=creds)
        self.db_client: DatabaseProxy = (
            self.cosmos_client.get_database_client(DB_NAME))
        self.client: ContainerProxy = (
            self.db_client.get_container_client(collection.value))
Example #7
0
class CosmosDBReplaceOfferActionTest(BaseTest):
    """Integration test for the cosmosdb-collection 'replace-offer' action.

    NOTE(review): tearDown reads self.initial_offer, which is only assigned
    inside test_replace_offer_collection_action — if the test fails before
    that assignment, tearDown will raise AttributeError.
    """

    def setUp(self, *args, **kwargs):
        """Create a data-plane client with the read-write account key."""
        super(CosmosDBReplaceOfferActionTest, self).setUp(*args, **kwargs)
        client = local_session(Session).client('azure.mgmt.cosmosdb.CosmosDB')
        # readonly=False because the test replaces offers (a write operation).
        key = CosmosDBChildResource.get_cosmos_key('test_cosmosdb',
                                                   'cctestcosmosdb',
                                                   client,
                                                   readonly=False)
        self.data_client = CosmosClient(
            url_connection='https://cctestcosmosdb.documents.azure.com:443/',
            auth={'masterKey': key})

    def tearDown(self, *args, **kwargs):
        """Restore the offer the test replaced, so runs stay idempotent."""
        super(CosmosDBReplaceOfferActionTest, self).tearDown(*args, **kwargs)
        self.data_client.ReplaceOffer(self.initial_offer['_self'],
                                      self.initial_offer)

    def test_replace_offer_collection_action(self):
        """Policy run should raise the matched collection's throughput to 500."""

        p = self.load_policy({
            'name':
            'test-azure-cosmosdb',
            'resource':
            'azure.cosmosdb-collection',
            'filters': [{
                'type': 'value',
                'key': 'id',
                'op': 'eq',
                'value': 'cccontainer'
            }, {
                'type': 'offer',
                'key': 'content.offerThroughput',
                'op': 'eq',
                'value': 400
            }],
            'actions': [{
                'type': 'replace-offer',
                'throughput': 500
            }]
        })
        collections = p.run()
        self.assertEqual(len(collections), 1)

        # Remember the pre-action offer so tearDown can restore it.
        self.initial_offer = collections[0]['c7n:offer'][0]
        self.collection = collections[0]
        self._assert_offer_throughput_equals(500, collections[0]['_self'])

    def _assert_offer_throughput_equals(self, throughput, resource_self):
        """Assert the offer bound to *resource_self* has the given throughput."""
        offers = self.data_client.ReadOffers()
        offer = next((o for o in offers if o['resource'] == resource_self),
                     None)
        self.assertIsNotNone(offer)
        self.assertEqual(offer['content']['offerThroughput'], throughput)
 def setUp(self, *args, **kwargs):
     """Set up management and data-plane clients for the throughput tests."""
     super(CosmosDBThroughputActionsTest, self).setUp(*args, **kwargs)
     self.client = local_session(Session).client('azure.mgmt.cosmosdb.CosmosDB')
     # Test account name embeds the last 12 chars of the subscription id.
     suffix = local_session(Session).get_subscription_id()[-12:]
     account_name = "cctestcosmosdb%s" % suffix
     master_key = CosmosDBChildResource.get_cosmos_key(
         'test_cosmosdb', account_name, self.client, readonly=False)
     endpoint = 'https://%s.documents.azure.com:443/' % account_name
     self.data_client = CosmosClient(url_connection=endpoint,
                                     auth={'masterKey': master_key})
     self.offer = None
Example #9
0
 def __init__(self, host: str, master_key: str):
     """
     Create a CosmosDbClient instance.
     :param host: The CosmosDb host url.
     :param master_key: The CosmosDb access key.
     """
     auth = {"masterKey": master_key}
     self._client = CosmosClient(host, auth)
Example #10
0
 def get_data_client(self, parent_resource):
     """Build a data-plane CosmosClient for the given parent account resource."""
     master_key = CosmosDBChildResource.get_cosmos_key(
         parent_resource['resourceGroup'],
         parent_resource.get('name'),
         self.get_parent_manager().get_client())
     endpoint = parent_resource.get('properties').get('documentEndpoint')
     return CosmosClient(url_connection=endpoint,
                         auth={'masterKey': master_key})
Example #11
0
 def get_cosmos_data_client_for_account(account_id, account_endpoint, manager, readonly=True):
     """Return a data-plane client for *account_id* served at *account_endpoint*."""
     group = ResourceIdParser.get_resource_group(account_id)
     name = ResourceIdParser.get_resource_name(account_id)
     master_key = CosmosDBChildResource.get_cosmos_key(
         group, name, manager.get_client(), readonly)
     return CosmosClient(url_connection=account_endpoint,
                         auth={'masterKey': master_key})
Example #12
0
    def process_resource_set(self, resources):
        """Join Cosmos offers onto each resource, then run the base value filter.

        Mutates each resource dict in place by attaching a 'c7n:offer' list.
        Returns the subset of *resources* whose offer matched the filter.
        """
        matched = []

        try:
            # Skip if offer key is present anywhere because we already
            # queried and joined offers in a previous filter instance
            if not resources[0].get('c7n:offer'):

                # Get the data client keys
                parent_key = resources[0]['c7n:parent-id']
                key = CosmosDBChildResource.get_cosmos_key(
                    ResourceIdParser.get_resource_group(parent_key),
                    ResourceIdParser.get_resource_name(parent_key),
                    self.manager.get_parent_manager().get_client())

                # Build a data client
                data_client = CosmosClient(
                    url_connection=resources[0]['c7n:document-endpoint'],
                    auth={'masterKey': key})

                # Get the offers
                offers = list(data_client.ReadOffers())

                # Match up offers to collections via the resource self-link.
                for resource in resources:
                    offer = [
                        o for o in offers if o['resource'] == resource['_self']
                    ]
                    resource['c7n:offer'] = offer

            # Pass each resource through the base filter
            for resource in resources:
                filtered_resource = super(CosmosDBOfferFilter,
                                          self).process(resource['c7n:offer'],
                                                        event=None)

                if filtered_resource:
                    matched.append(resource)

        # NOTE(review): deliberately best-effort — any failure (auth, network,
        # malformed resource) is logged and an empty/partial match returned
        # rather than aborting the policy run.
        except Exception as error:
            log.warning(error)

        return matched
Example #13
0
def configure_collections(db_name, collection_names, master_key,
                          url_connection):
    """Ensure the database exists, then configure each named collection.

    Throughput for every collection comes from Mongo.collection_throughput.
    """
    client = CosmosClient(url_connection=url_connection,
                          auth={"masterKey": master_key})
    database = configure_db(client, db_name)
    for name in collection_names:
        configure_collection(client,
                             database,
                             name,
                             desired_throughput=Mongo.collection_throughput)
Example #14
0
    def get_cosmos_data_client(resources, manager, readonly=True):
        """Create a data-plane client for the parent account of *resources*.

        Uses the first resource's parent id and document endpoint; all
        resources are assumed to share the same parent account.
        """
        parent_id = resources[0]['c7n:parent-id']
        endpoint = resources[0]['c7n:document-endpoint']

        # Fetch the account key (read-only by default).
        master_key = CosmosDBChildResource.get_cosmos_key(
            ResourceIdParser.get_resource_group(parent_id),
            ResourceIdParser.get_resource_name(parent_id),
            manager.get_client(), readonly)

        return CosmosClient(url_connection=endpoint,
                            auth={'masterKey': master_key})
Example #15
0
    def get_conn(self) -> CosmosClient:
        """Return a cosmos db client, creating and caching it on first use."""
        if self._conn:
            return self._conn

        conn = self.get_connection(self.conn_id)
        extras = conn.extra_dejson
        # Connection login/password carry the endpoint URI and master key.
        self.default_database_name = extras.get('database_name')
        self.default_collection_name = extras.get('collection_name')

        # Initialize the Python Azure Cosmos DB client
        self._conn = CosmosClient(conn.login, {'masterKey': conn.password})
        return self._conn
Example #16
0
    def __init__(self,
                 cfg_filename=None,
                 cfg_dict=None,
                 endpoint=None,
                 key=None,
                 dbname=None,
                 container_name=None,
                 test=False):
        """
        Initialise object
        :param cfg_filename: Path of configuration file
        :param cfg_dict: Configuration dict
        :param endpoint: URI of the database account
        :param key: primary key of the database account
        :param dbname: name of the database
        :param container_name: name of the database container
        :param test: test mode flag; default False
        :raises ValueError: if a required configuration value is missing
        """
        self.endpoint = endpoint
        self.key = key
        self.dbname = dbname
        self.container_name = container_name
        self.partition_key = None
        # Explicit arguments may be overridden by file/dict configuration.
        if cfg_filename is not None:
            self._load_cfg_filename(cfg_filename)
        elif cfg_dict is not None:
            self.__set_config(cfg_dict)

        # Check for missing required keys. The loop variable is named
        # 'required_key' so it no longer shadows the 'key' parameter.
        for required_key in CosmosDb.REQUIRED_KEYS:
            if self[required_key] is None:
                raise ValueError(f'Missing {required_key} configuration')

        # if not in test mode, create client
        if not test:
            self.client = CosmosClient(self.endpoint, {'masterKey': self.key})
Example #17
0
from azure.cosmos.cosmos_client import CosmosClient
import json
from flask import Flask, request

app = Flask(__name__)

# WARNING(review): a live Cosmos DB master key is hard-coded below and is
# committed to source control — it must be considered compromised. Rotate the
# key and load credentials from configuration/environment instead.
url = 'https://first-test.documents.azure.com:443/'
key = 'xLKkkwDJl6ZNvXZl00nyia8doLzMfjyug5w3QYRID3fNdbUiRxJrdHvSJZBc9UMS36j3yW0XSGNHO6Pw9QIFCQ=='
client = CosmosClient(url, credential=key)
database = client.get_database_client('dheeraj1234')
container = database.get_container_client('container1')


@app.route('/v1/clients', methods=['GET'])
def get_clients():
    """Return every document in container1 as a list of JSON strings."""
    items = container.query_items(
        query='SELECT * FROM container1',
        enable_cross_partition_query=True)
    return {'data': [json.dumps(item, indent=True) for item in items]}


@app.route('/v1/clients', methods=['POST'])
def add_client_row():
    """Upsert the posted JSON document into container1."""
    payload = request.get_json()
    container.upsert_item(payload)
    return 'Successful'

@app.route('/v2/clients', methods=['POST'])
Example #18
0
class CosmosDb:
    """
    CosmosDb client (legacy master-key SDK style: CreateDatabase/ReadContainer/
    UpsertItem/QueryItems operate on string resource links).
    """

    REQUIRED_KEYS = (
        'endpoint',  # URI of the database account
        'key'  # primary key of the database account
    )
    KEYS = REQUIRED_KEYS + (
        'dbname',  # name of the database
        'container_name'  # name of the database container
    )

    def __init__(self,
                 cfg_filename=None,
                 cfg_dict=None,
                 endpoint=None,
                 key=None,
                 dbname=None,
                 container_name=None,
                 test=False):
        """
        Initialise object
        :param cfg_filename: Path of configuration file
        :param cfg_dict: Configuration dict
        :param endpoint: URI of the database account
        :param key: primary key of the database account
        :param dbname: name of the database
        :param container_name: name of the database container
        :param test: test mode flag; default False
        :raises ValueError: if a required configuration value is missing
        """
        self.endpoint = endpoint
        self.key = key
        self.dbname = dbname
        self.container_name = container_name
        self.partition_key = None
        # Explicit arguments may be overridden by file/dict configuration.
        if cfg_filename is not None:
            self._load_cfg_filename(cfg_filename)
        elif cfg_dict is not None:
            self.__set_config(cfg_dict)

        # Check for missing required keys. The loop variable is named
        # 'required_key' so it does not shadow the 'key' parameter.
        for required_key in CosmosDb.REQUIRED_KEYS:
            if self[required_key] is None:
                raise ValueError(f'Missing {required_key} configuration')

        # if not in test mode, create client
        if not test:
            self.client = CosmosClient(self.endpoint, {'masterKey': self.key})

    def __set_config(self, config):
        """
        Set the configuration
        :param config: dict with settings; unknown keys are silently ignored
        """
        for cfg_key, value in config.items():
            if cfg_key in CosmosDb.KEYS:
                self[cfg_key] = value

    def _load_cfg_file(self, cfg_file):
        """
        Read settings from specified configuration file
        :param cfg_file: Configuration file descriptor to load
        """
        self.__set_config(load_cfg_file(cfg_file, CosmosDb.KEYS))

    def _load_cfg_filename(self, cfg_filename):
        """
        Read settings from specified configuration file
        :param cfg_filename: Path of configuration file to load
        """
        self.__set_config(load_cfg_filename(cfg_filename, CosmosDb.KEYS))

    def make_db_link(self, name=None):
        """
        Create a database link
        :param name: Optional database name, default is instance name
        :return: database link
        :rtype: string
        :raises ValueError: if no database name is configured
        """
        if name is None:
            name = self.dbname
        if name is None:
            raise ValueError('Database name not configured')
        link = f'dbs/{name}'
        logging.debug(link)
        return link

    def make_container_link(self, name=None, dbname=None):
        """
        Create the container link
        :param name: Optional container name, default is instance container
        :param dbname: Optional database name, default is instance name
        :return: container link
        :rtype: string
        :raises ValueError: if no container name is configured
        """
        if name is None:
            name = self.container_name
        if name is None:
            raise ValueError('Container name not configured')
        link = f'{self.make_db_link(name=dbname)}/colls/{name}'
        logging.debug(link)
        return link

    def make_doc_link(self, doc_id, container_name=None, dbname=None):
        """
        Create a document link
        :param doc_id: id of document
        :param container_name: Optional container name, default is instance container
        :param dbname: Optional database name, default is instance name
        :return: document link
        :rtype: string
        """
        link = f'{self.make_container_link(name=container_name, dbname=dbname)}/docs/{doc_id}'
        logging.debug(link)
        return link

    def create_database(self):
        """
        Create the database, if it does not already exist
        :return: the created (or existing) database
        """
        try:
            database = self.client.CreateDatabase({'id': self.dbname})
        except errors.HTTPFailure:
            # NOTE(review): any HTTP failure (not just CONFLICT) falls through
            # to a read; a genuine error will then surface from ReadDatabase.
            database = self.client.ReadDatabase(self.make_db_link())
        return database

    def database_exists(self):
        """
        Check if database exists
        :return: True if database exists
        :rtype: bool
        """
        try:
            database = self.client.ReadDatabase(self.make_db_link())
        except errors.HTTPFailure as e:
            if e.status_code == http_constants.StatusCodes.NOT_FOUND:
                database = None
            else:
                raise e
        return database is not None

    def create_container(self, partition_path='/id'):
        """
        Create the container, if it does not already exist
        :param partition_path: The document path(s) to use as the partition key
        :return: the created (or existing) container
        :raises ValueError: if container name or partition path is invalid
        """
        if self.container_name is None:
            raise ValueError('Container name not configured')

        # Normalise partition path to a list of paths.
        if isinstance(partition_path, str):
            partition_path_list = [partition_path]
        elif isinstance(partition_path, list):
            partition_path_list = partition_path
        else:
            raise ValueError(
                f'Invalid partition path configuration: expected str or list, got {type(partition_path)}'
            )

        self.partition_key = {
            'paths': partition_path_list,
            'kind': documents.PartitionKind.Hash
        }

        container_definition = {
            'id': self.container_name,
            'partitionKey': self.partition_key
        }
        try:
            container = self.client.CreateContainer(self.make_db_link(),
                                                    container_definition,
                                                    {'offerThroughput': 400})
        except errors.HTTPFailure as e:
            # CONFLICT means the container already exists: read it instead.
            if e.status_code == http_constants.StatusCodes.CONFLICT:
                container = self.client.ReadContainer(
                    self.make_container_link())
            else:
                raise e

        return container

    def container_exists(self):
        """
        Check if the container exists
        :return: True if container exists
        :rtype: bool
        """
        try:
            container = self.client.ReadContainer(self.make_container_link())
        except errors.HTTPFailure as e:
            if e.status_code == http_constants.StatusCodes.NOT_FOUND:
                container = None
            else:
                raise e

        return container is not None

    def upsert_item(self, item):
        """
        Upsert a document in a collection
        :param item: document to upsert
        :return: The upserted Document.
        :rtype: dict
        """
        # https://docs.microsoft.com/en-ie/python/api/azure-cosmos/azure.cosmos.cosmos_client.cosmosclient?view=azure-python#upsertitem-database-or-container-link--document--options-none-
        return self.client.UpsertItem(self.make_container_link(), item)

    def query_items_sql(self, query, options=None, partition_key=None):
        """
        Perform a query
        See https://docs.microsoft.com/en-ie/azure/cosmos-db/sql-query-getting-started
        :param query: SQL query string
        :param options: The request options for the request (default value None)
        :param partition_key: Partition key for the query (default value None)
        :return: List of json objects
        :rtype: List
        """
        # https://docs.microsoft.com/en-ie/python/api/azure-cosmos/azure.cosmos.cosmos_client.cosmosclient?view=azure-python#queryitems-database-or-container-link--query--options-none--partition-key-none-
        if options is None:
            options = {'enableCrossPartitionQuery': True}
        return self.client.QueryItems(self.make_container_link(),
                                      query,
                                      options=options,
                                      partition_key=partition_key)

    def query_items(self,
                    selection,
                    alias=ALIAS,
                    project=None,
                    where=None,
                    options=None,
                    partition_key=None):
        """
        Perform a query
        See https://docs.microsoft.com/en-ie/azure/cosmos-db/sql-query-getting-started
        :param selection: entries to select; may be
                - string, e.g. '*'
                - list, e.g. ['id', 'name']
                - dict (see the ``select`` helper for the accepted shape)
        :param alias: alias for the container in the generated query
        :param project: projection passed through to the ``select`` helper
        :param where: dict with 'key' as the property and 'value' as the required value
        :param options: The request options for the request (default value None)
        :param partition_key: Partition key for the query (default value None)
        :return: List of json objects
        :rtype: List
        """
        return self.query_items_sql(select(self.container_name,
                                           selection,
                                           alias=alias,
                                           project=project,
                                           where=where),
                                    options=options,
                                    partition_key=partition_key)

    def delete_items(self, partition_key, where=None):
        """
        Delete an item(s) from the database
        :param partition_key: value of partition path for container; e.g. partition path = '/day', partition key = 'monday'
        :param where: optional filter restricting which items are deleted
        :return: list of deleted documents (may contain None entries, see TODO)
        """
        # The SQL API in Cosmos DB does not support the SQL DELETE statement,
        # so query matching items and delete them one by one.
        deleted_items = []
        link = ''
        try:
            for item in self.query_items('*',
                                         where=where,
                                         options={
                                             'enableCrossPartitionQuery': True
                                         }):
                link = self.make_doc_link(item['id'])
                # TODO DeleteItem is supposed to return the deleted doc but only seems to be returning None
                deleted = self.client.DeleteItem(
                    link, {'partitionKey': partition_key})
                deleted_items.append(deleted)
        except errors.HTTPFailure as e:
            if e.status_code == http_constants.StatusCodes.NOT_FOUND:
                logging.warning(
                    f'Delete NOT_FOUND: {link} on partition "{partition_key}"')
                logging.warning(
                    f' Valid partition keys are: {self.partition_key["paths"]}'
                )
                deleted_items = []
            else:
                raise e

        return deleted_items

    def __setitem__(self, key, value):
        """
        Implement assignment to self[key]
        :param key: object property name
        :param value: value to assign
        :raises ValueError: if key is not a recognised configuration key
        """
        if key not in CosmosDb.KEYS:
            raise ValueError(f'The key "{key}" is not valid')
        self.__dict__[key] = value

    def __getitem__(self, key):
        """
        Implement evaluation of self[key]
        :param key: object property name
        :raises ValueError: if key is not a recognised configuration key
        """
        if key not in CosmosDb.KEYS:
            raise ValueError(f'The key "{key}" is not valid')
        return self.__dict__[key]
class CosmosDB:
    def __init__(self, collection: Collection, writer: bool = False):
        credentials = Credentials.READER.value

        if writer:
            credentials = Credentials.WRITER.value

        self.cosmos_client = CosmosClient(url=DB_URL, credential=credentials)
        self.db_client: DatabaseProxy = self.cosmos_client.get_database_client(
            DB_NAME)
        self.client: ContainerProxy = self.db_client.get_container_client(
            collection.value)

    def close(self):
        self.cosmos_client.__exit__()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self.close()

    def query_iter(
            self,
            query: str,
            params: ParametersType,
            partition: Union[str, None] = None) -> Iterable[ResponseType]:
        return self.client.query_items(
            query=query,
            parameters=params,
            enable_cross_partition_query=partition is None or None,
            partition_key=partition)

    def query(self,
              query: str,
              params: ParametersType,
              partition: Union[str, None] = None) -> List[ResponseType]:
        return list(self.query_iter(query, params, partition))

    def paginated_query(self,
                        query: str,
                        params: ParametersType,
                        partition: Union[str, None] = None,
                        limit: int = 1000,
                        page: Union[int, None] = None) -> PaginatedResponse:
        """

        Parameters
        ----------
        query: str
        params: ParametersType
        partition: Union[str, None]
        limit: int
        page: Union[int, None]
            Page number, starting from zero. [Default: ``None``]
            Where ``None``, returns a lazy iterable of all pages.

        Returns
        -------
        PaginatedResponse

        """
        query_hash = blake2b(query.encode(), digest_size=32).hexdigest()

        response = self.client.query_items(
            query=query,
            parameters=params,
            enable_cross_partition_query=partition is None or None,
            partition_key=partition,
            max_item_count=limit)

        paginated_response = response.by_page(query_hash)

        if page is None:
            return paginated_response

        return list(paginated_response)[page]

    def upsert(self, body: ItemType):
        return self.client.upsert_item(body=body)

    def replace(self, old_item: ItemType, new_item: ItemType):
        return self.client.replace_item(item=old_item, body=new_item)

    def delete(self, item: ItemType):
        return self.client.delete_item(item=item)
# Region preference: this server's region first, its pair as failover.
SERVER_LOCATION = getenv("SERVER_LOCATION", "UKS_00")
server_location = SERVER_LOCATION.split("_")[0]

PREFERRED_LOCATIONS = (
    ["UK South", "UK West"]
    if server_location == UK_SOUTH
    else ["UK West", "UK South"]
)

# Quieten the Azure SDK's verbose logging.
logger = logging.getLogger('azure')
logger.setLevel(logging.WARNING)

DB_KWS = {
    "url": DatabaseCredentials.host,
    "credential": {"masterKey": DatabaseCredentials.key},
    "preferred_locations": PREFERRED_LOCATIONS,
}

client = CosmosClient(**DB_KWS)
db = client.get_database_client(DatabaseCredentials.db_name)
container = db.get_container_client(DatabaseCredentials.data_collection)


async def process_head(filters: str, ordering: OrderingType,
                       arguments: QueryArguments,
                       date: str) -> QueryResponseType:

    ordering_script = format_ordering(ordering)

    query = DBQueries.exists.substitute(clause_script=filters,
                                        ordering=await ordering_script)

    logging.info(f"DB Query: {query}")
    logging.info(f"Query arguments: {arguments}")
Example #21
0
class TRECosmosDBMigrations:
    """One-off data migrations for the TRE Cosmos DB state store.

    A connection is only opened when local debugging is explicitly enabled
    (see :meth:`can_connect_to_cosmos`); otherwise the instance is created
    without ``client``/``database`` attributes.
    """

    def __init__(self):
        if (self.can_connect_to_cosmos()):
            url = os.environ['STATE_STORE_ENDPOINT']
            key = self.get_store_key()
            self.client = CosmosClient(url=url, credential=key)
            self.database = self.client.get_database_client(STATE_STORE_DATABASE)

    def can_connect_to_cosmos(self) -> bool:
        """Return True when ENABLE_LOCAL_DEBUGGING is set to a truthy value.

        Accepted (case-insensitive) truthy spellings: 'true', '1', 't'.
        """
        # Bug fix: the original membership tuple contained the *integer* 1,
        # which the lowered string could never equal, so
        # ENABLE_LOCAL_DEBUGGING=1 was silently treated as disabled.  The
        # explicit `in os.environ` ternary was redundant with the getenv
        # default and has been folded away (absent var still yields False).
        return os.getenv('ENABLE_LOCAL_DEBUGGING', 'false').lower() in ('true', '1', 't')

    def get_store_key(self) -> str:
        """Return the Cosmos DB primary master key.

        Uses the STATE_STORE_KEY environment variable when present; otherwise
        fetches the key from the Azure management plane using
        DefaultAzureCredential (requires SUBSCRIPTION_ID, RESOURCE_GROUP_NAME
        and COSMOSDB_ACCOUNT_NAME to be set).
        """
        if 'STATE_STORE_KEY' in os.environ:
            primary_master_key = os.getenv('STATE_STORE_KEY')
        else:
            credential = DefaultAzureCredential()
            cosmosdb_client = CosmosDBManagementClient(credential, subscription_id=os.environ['SUBSCRIPTION_ID'])
            database_keys = cosmosdb_client.database_accounts.list_keys(resource_group_name=os.environ['RESOURCE_GROUP_NAME'], account_name=os.environ['COSMOSDB_ACCOUNT_NAME'])
            primary_master_key = database_keys.primary_master_key

        return primary_master_key

    def renameCosmosDBFields(self, container_name, old_field_name, new_field_name):
        """Copy ``old_field_name`` to ``new_field_name`` on every item carrying it, then drop the old field."""
        container = self.database.get_container_client(container_name)

        for item in container.query_items(query='SELECT * FROM c', enable_cross_partition_query=True):
            print(json.dumps(item, indent=True))
            if old_field_name in item:
                item[new_field_name] = item[old_field_name]
                del item[old_field_name]
                container.upsert_item(item)

    def moveDeploymentsToOperations(self, resources_container_name, operations_container_name):
        """Migrate each resource's embedded ``deployment`` block into a new operation item."""
        resources_container = self.database.get_container_client(resources_container_name)

        # create operations container if needed
        self.database.create_container_if_not_exists(id=operations_container_name, partition_key=PartitionKey(path="/id"))
        operations_container = self.database.get_container_client(operations_container_name)

        for item in resources_container.query_items(query='SELECT * FROM c', enable_cross_partition_query=True):
            isActive = True
            if ("deployment" in item):
                newOperation = {
                    "id": str(uuid.uuid4()),
                    "resourceId": item["id"],
                    "status": item["deployment"]["status"],
                    "message": item["deployment"]["message"],
                    "resourceVersion": 0,
                    "createdWhen": datetime.utcnow().timestamp(),
                    "updatedWhen": datetime.utcnow().timestamp()
                }
                operations_container.create_item(newOperation)

                # deleted resources become inactive rather than being removed
                if item["deployment"]["status"] == "deleted":
                    isActive = False

                del item["deployment"]
                item["isActive"] = isActive
                resources_container.upsert_item(item)
                print(f'Moved deployment from resource id {item["id"]} to operations')

    def deleteDuplicatedSharedServices(self, resource_container_name):
        """For each known shared-service template, keep the oldest item and delete the newer duplicates."""
        resources_container = self.database.get_container_client(resource_container_name)

        template_names = ['tre-shared-service-firewall', 'tre-shared-service-nexus', 'tre-shared-service-gitea']

        for template_name in template_names:
            # ORDER BY updatedWhen ASC OFFSET 1 skips the earliest (kept) copy;
            # everything after it is a duplicate to delete.
            for item in resources_container.query_items(query=f'SELECT * FROM c WHERE c.resourceType = "shared-service" AND c.templateName = "{template_name}" \
                                                                ORDER BY c.updatedWhen ASC OFFSET 1 LIMIT 10000', enable_cross_partition_query=True):
                print(f"Deleting element {item}")
                resources_container.delete_item(item, partition_key=item["id"])

    def moveAuthInformationToProperties(self, resources_container_name):
        """Upgrade pre-0.2.9 base workspaces: rename app_id, default scope_id, and fold authInformation into properties."""
        resources_container = self.database.get_container_client(resources_container_name)

        for item in resources_container.query_items(query='SELECT * FROM c', enable_cross_partition_query=True):
            template_version = semantic_version.Version(item["templateVersion"])
            updated = False
            if (template_version < semantic_version.Version('0.2.9') and item["templateName"] == "tre-workspace-base"):

                # Rename app_id to be client_id
                if "app_id" in item["properties"]:
                    item["properties"]["client_id"] = item["properties"]["app_id"]
                    del item["properties"]["app_id"]
                    updated = True

                if "scope_id" not in item["properties"]:
                    item["properties"]["scope_id"] = item["properties"]["client_id"]
                    updated = True

                if "authInformation" in item:
                    print(f'Upgrading authInformation in workspace {item["id"]}')

                    if "app_id" in item["authInformation"]:
                        del item["authInformation"]["app_id"]

                    # merge authInformation into properties; existing property
                    # values win on key collisions
                    item["properties"] = {**item["authInformation"], **item["properties"]}
                    del item["authInformation"]
                    updated = True

                if updated:
                    resources_container.upsert_item(item)
                    print(f'Upgraded authentication for workspace id {item["id"]}')
Пример #22
0
 def __init__(self):
     """Connect to the Cosmos state store when local debugging permits it."""
     if not self.can_connect_to_cosmos():
         return
     endpoint = os.environ['STATE_STORE_ENDPOINT']
     master_key = self.get_store_key()
     self.client = CosmosClient(url=endpoint, credential=master_key)
     self.database = self.client.get_database_client(STATE_STORE_DATABASE)
Пример #23
0
 def __init__(self, api_instance=None):
     """Open a client for the batch API jobs container.

     Falls back to API_INSTANCE_NAME when no api_instance is given.
     """
     if api_instance is None:
         api_instance = API_INSTANCE_NAME
     self.api_instance = api_instance

     client = CosmosClient(COSMOS_ENDPOINT, credential=COSMOS_WRITE_KEY)
     database = client.get_database_client('camera-trap')
     self.db_jobs_client = database.get_container_client('batch_api_jobs')
Пример #24
0
# Partition to query; use None if querying across all partitions.
partition_key = 'wcs'  # use None if querying across all partitions

# Flush partial results every `save_every` items.
save_every = 20000
assert save_every > 0

# use False for when the results file will be too big to store in memory or in a single JSON.
consolidate_results = True

#%% Script

# Timestamp used to name output files (UTC, second resolution).
time_stamp = datetime.utcnow().strftime('%Y%m%d%H%M%S')

# initialize Cosmos DB client
url = os.environ['COSMOS_ENDPOINT']
key = os.environ['COSMOS_KEY']
client = CosmosClient(url, credential=key)

database = client.get_database_client('camera-trap')
container_sequences = database.get_container_client('sequences')

# execute the query
start_time = time.time()

# NOTE(review): `query` is defined earlier in the file (not visible here).
# A single-partition query avoids the cross-partition fan-out cost.
if partition_key:
    result_iterable = container_sequences.query_items(
        query=query, partition_key=partition_key)
else:
    result_iterable = container_sequences.query_items(
        query=query, enable_cross_partition_query=True)

# loop through and save the results
Пример #25
0
UK_WEST = "UKW"

# Server location code, e.g. "UKS_00" — the prefix before "_" is the region.
SERVER_LOCATION = getenv("SERVER_LOCATION", "UKS_00")
server_location = SERVER_LOCATION.split("_")[0]

# List the local region first so the SDK prefers the nearest replica.
PREFERRED_LOCATIONS = (
    ["UK South", "UK West"] if server_location == UK_SOUTH
    else ["UK West", "UK South"]
)

DB_KWS = {
    "url": instance_settings.DatabaseCredentials.host,
    "credential": {'masterKey': instance_settings.DatabaseCredentials.key},
    "preferred_locations": PREFERRED_LOCATIONS,
}

# Module-level singletons, reused across requests.
client = CosmosClient(**DB_KWS)
db = client.get_database_client(instance_settings.DatabaseCredentials.db_name)
container = db.get_container_client(
    instance_settings.DatabaseCredentials.data_collection)


def process_head(filters: str, ordering: OrderingType,
                 arguments: QueryArguments) -> QueryResponseType:
    """Build and log the EXISTS query for a HEAD request.

    NOTE(review): no return statement is visible despite the return
    annotation — the body may be truncated in this listing.
    """
    query = instance_settings.DBQueries.exists.substitute(
        clause_script=filters,
        ordering=format_ordering(ordering),
    )

    logging.info(f"DB Query: {query}")
    logging.info(f"Query arguments: {arguments}")
Пример #26
0
class CosmosDBThroughputActionsTest(BaseTest):
    """Live tests for the c7n `azure.cosmosdb-collection` throughput actions
    (replace-offer, save-throughput-state, restore-throughput-state).

    Talks to a real test Cosmos DB account whose name is derived from the
    subscription id; tearDown restores the default throughput.
    """

    def setUp(self, *args, **kwargs):
        """Build a data-plane client for the test account; reset offer state."""
        super(CosmosDBThroughputActionsTest, self).setUp(*args, **kwargs)
        self.client = local_session(Session).client(
            'azure.mgmt.cosmosdb.CosmosDBManagementClient')
        # Test account name is keyed on the last 12 chars of the subscription id.
        sub_id = local_session(Session).get_subscription_id()[-12:]
        account_name = "cctestcosmosdb%s" % sub_id
        key = CosmosDBChildResource.get_cosmos_key('test_cosmosdb',
                                                   account_name,
                                                   self.client,
                                                   readonly=False)
        # NOTE(review): `url_connection`/`auth` is the legacy azure-cosmos 3.x
        # constructor signature — confirm the pinned SDK version.
        self.data_client = CosmosClient(
            url_connection='https://%s.documents.azure.com:443/' %
            account_name,
            auth={'masterKey': key})
        # Offer modified by a test; tearDown restores it when set.
        self.offer = None

    def tearDown(self, *args, **kwargs):
        """Restore the default 400 RU/s if a test changed the offer."""
        super(CosmosDBThroughputActionsTest, self).tearDown(*args, **kwargs)
        if self.offer:
            self.offer['content']['offerThroughput'] = 400
            self.data_client.ReplaceOffer(self.offer['_self'], self.offer)

    def test_replace_offer_collection_action(self):
        """replace-offer should bump the matched collection from 400 to 500 RU/s."""
        p = self.load_policy({
            'name':
            'test-azure-cosmosdb',
            'resource':
            'azure.cosmosdb-collection',
            'filters': [{
                'type': 'value',
                'key': 'id',
                'op': 'eq',
                'value': 'cccontainer'
            }, {
                'type': 'offer',
                'key': 'content.offerThroughput',
                'op': 'eq',
                'value': 400
            }],
            'actions': [{
                'type': 'replace-offer',
                'throughput': 500
            }]
        })
        collections = p.run()
        # Remember the offer so tearDown can restore the original throughput.
        self.offer = collections[0]['c7n:offer']

        self.assertEqual(len(collections), 1)
        self._assert_offer_throughput_equals(500, collections[0]['_self'])

    def test_restore_throughput_state_updates_throughput_from_tag(self):
        """save-throughput-state then restore-throughput-state round-trips the RU/s."""

        # First policy: record the current throughput in a tag.
        p1 = self.load_policy({
            'name':
            'test-azure-cosmosdb',
            'resource':
            'azure.cosmosdb-collection',
            'filters': [{
                'type': 'value',
                'key': 'id',
                'op': 'eq',
                'value': 'cccontainer'
            }],
            'actions': [{
                'type': 'save-throughput-state',
                'state-tag': 'test-restore-throughput'
            }]
        })

        collections = p1.run()
        self.assertEqual(len(collections), 1)

        collection_offer = collections[0]['c7n:offer']
        self.offer = collection_offer

        throughput_to_restore = collection_offer['content']['offerThroughput']

        # Drift the throughput out-of-band so restore has something to undo.
        collection_offer['content'][
            'offerThroughput'] = throughput_to_restore + 100

        self.data_client.ReplaceOffer(collection_offer['_self'],
                                      collection_offer)

        self._assert_offer_throughput_equals(throughput_to_restore + 100,
                                             collections[0]['_self'])

        # Second policy: restore the throughput recorded in the tag.
        p2 = self.load_policy({
            'name':
            'test-azure-cosmosdb',
            'resource':
            'azure.cosmosdb-collection',
            'filters': [
                {
                    'type': 'value',
                    'key': 'id',
                    'op': 'eq',
                    'value': 'cccontainer'
                },
            ],
            'actions': [{
                'type': 'restore-throughput-state',
                'state-tag': 'test-restore-throughput'
            }]
        })

        collections = p2.run()

        self.assertEqual(len(collections), 1)
        self._assert_offer_throughput_equals(throughput_to_restore,
                                             collections[0]['_self'])

    def _assert_offer_throughput_equals(self, throughput, resource_self):
        """Assert the offer attached to `resource_self` has the given RU/s."""
        # Give the service time to apply the change when running live.
        self.sleep_in_live_mode()
        offers = self.data_client.ReadOffers()
        offer = next((o for o in offers if o['resource'] == resource_self),
                     None)
        self.assertIsNotNone(offer)
        self.assertEqual(throughput, offer['content']['offerThroughput'])
def send_message():
    """Post a Teams MessageCard summarizing yesterday's batch API activity.

    Reads jobs submitted since the start of yesterday (UTC) from the
    `batch_api_jobs` Cosmos DB container, aggregates image counts, countries
    and organizations per API instance, and POSTs a summary card to
    TEAMS_WEBHOOK.  Sends nothing when no images were submitted.
    """
    cosmos_client = CosmosClient(COSMOS_ENDPOINT, credential=COSMOS_READ_KEY)
    db_client = cosmos_client.get_database_client('camera-trap')
    db_jobs_client = db_client.get_container_client('batch_api_jobs')

    # "Yesterday" as a UTC date.
    yesterday = datetime.now(timezone.utc).date() - timedelta(days=1)

    # NOTE(review): there is no upper bound on job_submission_time, so jobs
    # submitted today (so far) are included as well — confirm this is intended.
    query = f'''
    SELECT *
    FROM job
    WHERE job.job_submission_time >= "{yesterday.isoformat()}T00:00:00Z"
    '''

    result_iterable = db_jobs_client.query_items(query=query,
                                                 enable_cross_partition_query=True)

    # aggregate the number of images, country and organization names info from each job
    # submitted during yesterday (UTC time)
    instance_num_images = defaultdict(lambda: defaultdict(int))
    instance_countries = defaultdict(set)
    instance_orgs = defaultdict(set)

    total_images_received = 0

    for job in result_iterable:
        api_instance = job['api_instance']
        status = job['status']
        call_params = job['call_params']

        # count completed images separately; totals include every job
        if status['request_status'] == 'completed':
            instance_num_images[api_instance]['num_images_completed'] += status.get('num_images', 0)
        instance_num_images[api_instance]['num_images_total'] += status.get('num_images', 0)
        total_images_received += status.get('num_images', 0)

        instance_countries[api_instance].add(call_params.get('country', 'unknown'))
        instance_orgs[api_instance].add(call_params.get('organization_name', 'unknown'))

    print(f'send_message, number of images received yesterday: {total_images_received}')

    if total_images_received < 1:
        print('send_message, no images submitted yesterday, not sending a summary')
        print('')
        return

    # create the card
    sections = []

    for instance_name, num_images in instance_num_images.items():
        entry = {
            'activityTitle': f'API instance: {instance_name}',
            'facts': [
                {
                    'name': 'Total images',
                    'value': '{:,}'.format(num_images['num_images_total'])
                },
                {
                    'name': 'Images completed',
                    'value': '{:,}'.format(num_images['num_images_completed'])
                },
                {
                    'name': 'Countries',
                    'value': ', '.join(sorted(list(instance_countries[instance_name])))
                },
                {
                    'name': 'Organizations',
                    'value': ', '.join(sorted(list(instance_orgs[instance_name])))
                }
            ]
        }
        sections.append(entry)

    # MessageCard payload; schema per the legacy Office 365 connector format.
    card = {
        '@type': 'MessageCard',
        '@context': 'http://schema.org/extensions',
        'themeColor': 'ffcdb2',
        'summary': 'Digest of batch API activities over the past 24 hours',
        'title': f'Camera traps batch API activities on {yesterday.strftime("%b %d, %Y")}',
        'sections': sections,
        'potentialAction': [
            {
                '@type': 'OpenUri',
                'name': 'View Batch account in Azure Portal',
                'targets': [
                    {
                        'os': 'default',
                        'uri': 'https://ms.portal.azure.com/#@microsoft.onmicrosoft.com/resource/subscriptions/74d91980-e5b4-4fd9-adb6-263b8f90ec5b/resourcegroups/camera_trap_api_rg/providers/Microsoft.Batch/batchAccounts/cameratrapssc/accountOverview'
                    }
                ]
            }
        ]
    }

    response = requests.post(TEAMS_WEBHOOK, data=json.dumps(card))
    print(f'send_message, card to send:')
    print(json.dumps(card, indent=4))
    print(f'send_message, sent summary to webhook, response code: {response.status_code}')
    print('')