def test_initializer_1():
    rql = Query({
        'product.id': 'PRD-123123123',
        'ordering': ['test1', 'test2'],
        'limit': 10,
        'offset': 4,
        'order_by': 'property'
    })
    assert rql.compile() == '?eq(product.id,PRD-123123123)&ordering(test1,test2)&limit=10' \
                            '&order_by=property&offset=4'

def filters(self, **kwargs):  # type: (**Any) -> Query
    query = Query()
    if self.limit:
        query = query.limit(self.limit)
    for key, val in kwargs.items():
        # list/tuple values become in() clauses, scalars become eq() clauses
        if isinstance(val, (list, tuple)):
            query = query.in_(key, val)
        else:
            query = query.equal(key, val)
    return query

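# Hedged usage sketch for filters(): single values turn into eq() clauses,
# lists/tuples into in() clauses, and an optional limit is taken from the
# owning object. `service` and the field names below are illustrative only,
# not part of the original code.
def example_build_filters(service):
    query = service.filters(status=['active', 'processing'], name='demo')
    # the compiled RQL contains eq(name,demo), in(status,(active,processing))
    # and, when service.limit is set, a limit=<n> clause; exact clause order
    # depends on Query.compile()
    return query.compile()
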
def process_usage(project_id=None):
    """Create UsageFiles for active Assets"""
    ConnectorConfig(file='/etc/cloudblue-connector/config-usage.json')
    mngr = UsageAutomation(project_id=project_id)
    # check that keystone works
    mngr.find_role('admin')
    # last-day usage reporting for suspended/terminated assets
    five_days_ago = datetime.utcnow() - timedelta(days=5)
    filters = Query().greater('updated', five_days_ago.isoformat()).in_(
        'status', ['suspended', 'terminated'])
    mngr.process(filters)
    # every-day usage reporting
    filters = Query().in_('status', ['active'])
    mngr.process(filters)
    return mngr.usages

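# Minimal sketch of driving process_usage() as a daily entry point (e.g. from
# cron). It relies only on the return value shown above: the list of usage
# requests collected by the automation manager.
if __name__ == '__main__':
    processed = process_usage()
    print('processed %d usage requests' % len(processed))
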
def _get_filters_query(self, filters, add_product):
    """
    :param dict|Query filters: Filters to return as query (with product.id field).
    :param bool add_product: Whether to add a product.id field to the query.
    :return: The query.
    :rtype: Query
    """
    query = copy(filters) if isinstance(filters, Query) else Query(filters)
    if add_product and self._config.products:
        query.in_('product.id', self._config.products)
    return query

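# Illustration of the two filter types _get_filters_query() accepts: a plain
# dict is wrapped into a Query, while an existing Query is copied so the
# caller's instance stays untouched. `resource` stands for any object that
# mixes in the method above and carries a configured `_config.products` list;
# the field values are illustrative.
def example_filters_query(resource):
    q1 = resource._get_filters_query({'status': 'approved'}, add_product=True)
    q2 = resource._get_filters_query(Query().equal('status', 'approved'),
                                     add_product=False)
    return q1.compile(), q2.compile()
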
def get_tier_partner_data(self, account_id=None):
    """Look up the partner's domain name in the tier-1 configuration data;
    the `partner_id` parameter holds this information."""
    filters = Query().equal('account.id', account_id)
    configs = list(Directory().list_tier_configs(filters))
    for config in configs:
        for param in config.params:
            if param.id == 'partner_id':
                return param
    return None

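# Hedged usage sketch for get_tier_partner_data(): read the domain name kept
# in the tier-1 `partner_id` parameter. `helper` is any object exposing the
# method above and the account id is illustrative; the assumption here is that
# the parameter's value attribute carries the domain name.
def example_partner_domain(helper):
    param = helper.get_tier_partner_data(account_id='VA-123-456')
    return param.value if param else None
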
def get_product_configurations(self, filters=None, config=None):
    """
    :param dict|Query filters: Filters for the request. Supported filters are:

        - ``parameter.id``
        - ``parameter.title``
        - ``parameter.scope``
        - ``marketplace.id``
        - ``marketplace.name``
        - ``item.id``
        - ``item.name``
        - ``value``

    :param Config config: Configuration to use, or None for the environment config.
    :return: A list with the product configuration parameter data.
    :rtype: List[ProductConfigurationParameter]
    """
    query = copy(filters) if isinstance(filters, Query) else Query(filters)
    text, _ = ApiClient(
        config or Config.get_instance(),
        'products/' + self.id + '/configurations' + query.compile()).get()
    return ProductConfigurationParameter.deserialize(text)

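# Hedged example of calling get_product_configurations() with one of the
# filters listed in its docstring. `product` is any object exposing the method
# above; the marketplace id is illustrative.
def example_list_configurations(product):
    filters = Query().equal('marketplace.id', 'MP-12345')
    return product.get_product_configurations(filters=filters)
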
def test_in():
    rql = Query().in_('key', ['value1', 'value2'])
    assert rql.compile() == '?in(key,(value1,value2))'

def test_offset():
    rql = Query().offset(10)
    assert rql.compile() == '?offset=10'

def test_limit():
    rql = Query().limit(10)
    assert rql.compile() == '?limit=10'

def test_lesser_equal():
    rql = Query().lesser_equal('property', 'value')
    assert rql.compile() == '?le(property,value)'

def test_empty_initializer():
    rql = Query()
    assert rql.compile() == ''

def test_greater_equal():
    rql = Query().greater_equal('property', 'value')
    assert rql.compile() == '?ge(property,value)'

def test_order_by():
    rql = Query().order_by('date')
    assert rql.compile() == '?order_by=date'

def test_ilike():
    rql = Query().ilike('product.id', 'PR-')
    assert rql.compile() == '?ilike(product.id,PR-)'

def test_select():
    rql = Query().select(['attribute'])
    assert rql.compile() == '?select(attribute)'

def test_out():
    rql = Query().out('product.id', ['PR-', 'CN-'])
    assert rql.compile() == '?out(product.id,(PR-,CN-))'

def test_not_equal():
    rql = Query().not_equal('property', 'value')
    assert rql.compile() == '?ne(property,value)'

def test_equal():
    rql = Query().equal('key', 'value')
    assert rql.compile() == '?eq(key,value)'

def test_initializer_2():
    rql = Query({'product.id': ['PRD-123123123', 'PRD-123123123']})
    assert rql.compile() == '?in(product.id,(PRD-123123123,PRD-123123123))'

def process_request(self, request):
    """Generate UsageFile for each active Asset"""
    # store each processed request for debugging
    self.usages.append(request)
    today = datetime.utcnow() - timedelta(minutes=10)
    name_format = 'Report for {asset} {date}'
    project = self.get_project(request)
    if not project:
        return
    stop_report_time = self.get_stop_report_time(request, project)
    start_report_time = self.get_start_report_time(request, project)
    self.logger.info("Start report time: %s, stop report time: %s",
                     start_report_time, stop_report_time)
    if request.status in ['suspended', 'terminated'] and not stop_report_time:
        self.logger.info(
            "%s-%s: asset usage reporting was stopped without stop report time label",
            request.id, project.id)
        return
    last_report_time, confirmed = self.get_last_report_time(request, project)
    report_time = last_report_time + timedelta(days=1)
    report_time = report_time.replace(hour=0, minute=0, second=0, microsecond=0)
    self.logger.info(
        "Last report time: %s, report time: %s, confirmed: %s",
        last_report_time, report_time, confirmed)
    if self.project_id is not None and self.project_id != project.id:
        self.logger.info(
            "project_id=%s is not the same as project.id=%s, skip it",
            self.project_id, project.id)
        return
    # check that the previous report has passed validation
    if confirmed is False:
        usage_files = UsageFileAutomation()
        try:
            report_date = last_report_time.strftime('%Y-%m-%d')
            report_name = name_format.format(asset=request.id, date=report_date)
            filters = Query().equal('name', report_name).limit(10)
            if self.config.products:
                filters.in_('product_id', self.config.products)
            found = usage_files.list(filters)
            found = [f for f in found or [] if f.status != 'deleted']
            self.logger.debug("Found usage files: %s", found)
            if found:
                if len(found) > 2:
                    raise Exception("Found multiple reports with name %s" % report_name)
                report = found[0]
                if report.status in ('processing', 'draft', 'uploading'):
                    self.logger.info(
                        "%s-%s: usage report '%s' is being processed",
                        request.id, project.id, report_name)
                    return
                if report.status in ('invalid', 'rejected'):
                    # we have to wait until the user removes the invalid report
                    self.logger.error(
                        "%s-%s: failed usage report '%s' found",
                        request.id, project.id, report_name)
                    return
                self.update_last_report_time(project, last_report_time, confirmed=True)
            else:
                report_time = last_report_time
        except ServerError:
            # this section is useless but left for future development
            raise
    if request.status in ['suspended', 'terminated']:
        if stop_report_time and last_report_time < stop_report_time <= report_time:
            self.logger.info("%s-%s: sending last report (%s)",
                             request.id, project.id, request.status)
            report_time = stop_report_time
    if start_report_time and last_report_time < start_report_time < report_time:
        last_report_time = start_report_time
    if report_time > today and self.project_id is None:
        self.logger.info("%s-%s: usage is already reported",
                         request.id, project.id)
        return
    usage_file = UsageFile(
        name=name_format.format(asset=request.id,
                                date=report_time.strftime('%Y-%m-%d')),
        product=Product(id=request.product.id),
        contract=Contract(id=request.contract.id),
        description=name_format.format(asset=request.id,
                                       date=report_time.strftime('%Y-%m-%d')),
    )
    # report for each day since the last report date
    self.logger.info("%s-%s: creating report from %s to %s",
                     request.id, project.id, last_report_time, report_time)
    items = {item.mpn: item for item in request.items}
    usage_records = self.collect_usage_records(items, project,
                                               last_report_time, report_time)
    self.submit_usage(usage_file=usage_file, usage_records=usage_records)
    if report_time > today:
        # when a project id is specified we allow sending usage for today,
        # but don't update the last report time
        return
    self.update_last_report_time(project, report_time)

def test_ordering():
    rql = Query().ordering(['property1', 'property2'])
    assert rql.compile() == '?ordering(property1,property2)'