def translate_products(data):
    """
    Translate raw candlepin product dictionaries into Product model objects.

    @param data: list of product dicts as returned by candlepin
    @return: list of populated Product instances (not saved)
    """
    products = []
    for item in data:
        product = Product()
        product.created = convert_to_datetime(item["created"])
        product.updated = convert_to_datetime(item["updated"])
        product.product_id = item["id"]
        product.name = item["name"]
        # Expecting values for "type", "arch", "name"
        for attribute in item["attributes"]:
            product.attrs[attribute["name"]] = attribute["value"]
        eng_prods = []
        for prod_cnt in item["productContent"]:
            content = prod_cnt["content"]
            eng_prods.append({
                "id": content["id"],
                "label": content["label"],
                "name": content["name"],
                "vendor": content["vendor"],
            })
        product.eng_prods = eng_prods
        # Engineering ids are just the id of each engineering product entry
        product.engineering_ids = [ep["id"] for ep in eng_prods]
        product.dependent_product_ids = item["dependentProductIds"]
        products.append(product)
    return products
def translate_pools(data):
    """
    Translate raw candlepin pool dictionaries into Pool model objects.

    @param data: list of pool dicts as returned by candlepin
    @return: list of populated Pool instances (not saved)
    """
    pools = []
    for item in data:
        pool = Pool()
        pool.uuid = item["id"]
        pool.account = item["accountNumber"]
        pool.created = convert_to_datetime(item["created"])
        pool.quantity = item["quantity"]
        pool.end_date = convert_to_datetime(item["endDate"])
        pool.start_date = convert_to_datetime(item["startDate"])
        pool.updated = convert_to_datetime(item["updated"])
        for prod_attr in item["productAttributes"]:
            pool.product_attributes[prod_attr["name"]] = prod_attr["value"]
        pool.product_id = item["productId"]
        pool.product_name = item["productName"]
        # Keep only the id/name pair of each provided product
        pool.provided_products = [
            {"id": prov["productId"], "name": prov["productName"]}
            for prov in item["providedProducts"]
        ]
        pools.append(pool)
    return pools
def create_or_update_consumer_identity(item):
    """
    Creates a new consumer identity or updates existing to match passed in item data
    @param item: dict containing needed info to construct a ConsumerIdentity object
        required keys: 'uuid', 'engineering_ids'
    @type item: dict
    @return: True on success, False on failure
    @rtype: bool
    """
    # 'in' replaces dict.has_key(), which is deprecated and removed in Python 3
    if "uuid" not in item:
        raise Exception("Missing required parameter: 'uuid'")
    if "engineering_ids" not in item:
        raise Exception("Missing required parameter: 'engineering_ids'")
    consumer_id = item["uuid"]
    engineering_ids = item["engineering_ids"]
    # Defaults used when the optional timestamp/state keys are absent
    created_date = datetime.now(tzutc())
    modified_date = datetime.now(tzutc())
    deleted = False
    deleted_date = None
    if "created_date" in item:
        created_date = convert_to_datetime(item["created_date"])
    if "modified_date" in item:
        modified_date = convert_to_datetime(item["modified_date"])
    if "deleted" in item:
        deleted = item["deleted"]
    if "deleted_date" in item:
        deleted_date = convert_to_datetime(item["deleted_date"])
    # A deleted identity must always carry a deletion timestamp
    if deleted and not deleted_date:
        deleted_date = datetime.now(tzutc())
    identity = ConsumerIdentity.objects(uuid=UUID(consumer_id)).first()
    if not identity:
        _LOG.info("Creating new ConsumerIdentity for: %s" % (consumer_id))
        identity = ConsumerIdentity(uuid=UUID(consumer_id))
    identity.engineering_ids = engineering_ids
    identity.created_date = created_date
    identity.modified_date = modified_date
    identity.deleted = deleted
    identity.deleted_date = deleted_date
    try:
        _LOG.debug("Updating ConsumerIdentity: %s" % (identity))
        identity.save(safe=True)
        return True
    except Exception as e:  # 'as' syntax works on py2.6+ and py3
        _LOG.exception(e)
        return False
def pre_save(cls, sender, document, **kwargs):
    """
    Signal hook: coerce string timestamps to datetimes and sanitize
    the attrs dict before the document is written to mongo.
    """
    for field in ["created", "updated"]:
        value = getattr(document, field)
        if isinstance(value, basestring):
            setattr(document, field, convert_to_datetime(value))
    if document.attrs:
        document.attrs = sanitize_dict_for_mongo(document.attrs)
def hydrate_updated(self, bundle):
    """
    Normalize the optional 'updated' field of an incoming bundle to a
    datetime, falling back to the current time if conversion fails.

    @param bundle: tastypie bundle whose .data may contain 'updated'
    @return: the same bundle with 'updated' normalized
    """
    # 'in' replaces dict.has_key(), which is deprecated and removed in Python 3
    if "updated" in bundle.data:
        value = utils.convert_to_datetime(bundle.data["updated"])
        if not value:
            value = get_now()
        bundle.data["updated"] = value
    return bundle
def pre_save(cls, sender, document, **kwargs):
    """
    Signal hook: normalize the document's date, sanitize facts, and
    de-duplicate the tracker list before persisting.
    """
    if isinstance(document.date, basestring):
        document.date = convert_to_datetime(document.date)
    facts = document.facts
    if facts:
        document.facts = sanitize_dict_for_mongo(facts)
    # Ensure no duplicate entries are stored for document.tracker
    tracker = document.tracker
    if tracker:
        document.tracker = list(set(tracker))
def convert_dict_to_consumer_identity(item):
    """
    Converts a dictionary to a ConsumerIdentity
    @param item: dict containing needed info to construct a ConsumerIdentity object
        required keys: 'uuid', 'engineering_ids'
    @type item: dict
    @return: instance of a consumer identity, note this instance has not yet been saved
    @rtype: splice.common.models.ConsumerIdentity
    """
    # 'in' replaces dict.has_key(), which is deprecated and removed in Python 3
    if "uuid" not in item:
        raise Exception("Missing required parameter: 'uuid'")
    if "engineering_ids" not in item:
        raise Exception("Missing required parameter: 'engineering_ids'")
    consumer_id = item["uuid"]
    engineering_ids = item["engineering_ids"]
    # Defaults used when the optional timestamp/state keys are absent
    created_date = datetime.now(tzutc())
    modified_date = datetime.now(tzutc())
    deleted = False
    deleted_date = None
    if "created_date" in item:
        created_date = convert_to_datetime(item["created_date"])
    if "modified_date" in item:
        modified_date = convert_to_datetime(item["modified_date"])
    if "deleted" in item:
        deleted = item["deleted"]
    if "deleted_date" in item:
        deleted_date = convert_to_datetime(item["deleted_date"])
    # A deleted identity must always carry a deletion timestamp
    if deleted and not deleted_date:
        deleted_date = datetime.now(tzutc())
    identity = ConsumerIdentity(uuid=UUID(consumer_id))
    identity.engineering_ids = engineering_ids
    identity.created_date = created_date
    identity.modified_date = modified_date
    identity.deleted = deleted
    identity.deleted_date = deleted_date
    return identity
def test_convert_to_datetime(self):
    """Exercise convert_to_datetime across supported ISO-8601 variants."""
    # Ensure we can handle None being passed in
    self.assertIsNone(utils.convert_to_datetime(None))

    # Full precision with explicit UTC offset
    a = '2012-09-19T19:01:55.008000+00:00'
    dt_a = utils.convert_to_datetime(a)
    self.assertEquals(dt_a.year, 2012)
    self.assertEquals(dt_a.month, 9)
    self.assertEquals(dt_a.day, 19)
    self.assertEquals(dt_a.hour, 19)
    self.assertEquals(dt_a.minute, 1)
    self.assertEquals(dt_a.microsecond, 8000)
    self.assertEquals(dt_a.second, 55)
    self.assertIsNotNone(dt_a.tzinfo)

    # No microseconds, explicit UTC offset
    b = '2012-09-19T19:01:55+00:00'
    dt_b = utils.convert_to_datetime(b)
    self.assertEquals(dt_b.year, 2012)
    self.assertEquals(dt_b.month, 9)
    self.assertEquals(dt_b.day, 19)
    self.assertEquals(dt_b.hour, 19)
    self.assertEquals(dt_b.minute, 1)
    self.assertEquals(dt_b.second, 55)
    self.assertIsNotNone(dt_b.tzinfo)

    # No offset at all; a tzinfo should still be assigned
    c = '2012-09-19T19:01:55'
    dt_c = utils.convert_to_datetime(c)
    self.assertEquals(dt_c.year, 2012)
    self.assertEquals(dt_c.month, 9)
    self.assertEquals(dt_c.day, 19)
    self.assertEquals(dt_c.hour, 19)
    self.assertEquals(dt_c.minute, 1)
    self.assertEquals(dt_c.second, 55)
    self.assertIsNotNone(dt_c.tzinfo)

    # Microseconds but no offset
    d = '2012-12-06T10:11:48.050566'
    dt_d = utils.convert_to_datetime(d)
    self.assertEquals(dt_d.year, 2012)
    self.assertEquals(dt_d.month, 12)
    # was '06': an octal-style literal (SyntaxError under Python 3)
    self.assertEquals(dt_d.day, 6)
    self.assertEquals(dt_d.hour, 10)
    self.assertEquals(dt_d.minute, 11)
    self.assertEquals(dt_d.second, 48)
    self.assertIsNotNone(dt_d.tzinfo)

    # An unparseable value must raise UnsupportedDateFormatException
    caught = False
    bad_value = 'BadValue'
    try:
        utils.convert_to_datetime(bad_value)
        self.assertTrue(False)  # Exception should be raised
    except UnsupportedDateFormatException as e:  # 'as' syntax: py2.6+/py3
        caught = True
        self.assertEquals(e.date_str, bad_value)
    # Verify the exception path actually ran (flag was previously unused)
    self.assertTrue(caught)
def instance_detail(request):
    """
    Build a detail response for a single instance on a given date,
    pulling the first matching MarketingReportData record.
    """
    data = utils.data_from_post(request)
    user = str(request.user)
    #account = Account.objects.filter(login=user)[0].account_id
    instance = data["instance"]
    date = convert_to_datetime(data["date"])
    results = MarketingReportData.objects.filter(
        instance_identifier=instance, date=date)[0]
    response_data = {
        'space_hostname': config.CONFIG.get('spacewalk', 'db_host'),
        'facts': results["facts"],
        'product_info': results["product_info"],
        'status': results["status"],
        'splice_server': results["splice_server"],
        'system_id': results["systemid"],
        'instance_identifier': results["instance_identifier"],
        'date': results["date"],
    }
    return create_response(response_data)
def post_list(self, request, **kwargs):
    """
    Accept a POST of one or more ProductUsage objects (optionally
    gzip-compressed JSON) and import them into mongo.

    @return: HttpAccepted on full success, HttpConflict listing items
             that failed to import, or HttpBadRequest on malformed input
    """
    if not request.raw_post_data:
        _LOG.info("Empty body in request")
        return http.HttpBadRequest("Empty body in request")
    try:
        raw_post_data = request.raw_post_data
        _LOG.info("ProductUsageResource::post_list() processing %s KB." %
                  (len(request.raw_post_data) / 1024.0))
        # .get() replaces the deprecated META.has_key() check
        if request.META.get("HTTP_CONTENT_ENCODING") == "gzip":
            start_unzip = time.time()
            data = StringIO.StringIO(raw_post_data)
            gzipper = gzip.GzipFile(fileobj=data)
            raw_post_data = gzipper.read()
            end_unzip = time.time()
            _LOG.info("ProductUsageResource::post_list() uncompressed %s KB to %s KB in %s seconds" % \
                      (len(request.raw_post_data) / float(1024),
                       len(raw_post_data) / float(1024),
                       end_unzip - start_unzip))
        a = time.time()
        product_usage = json.loads(raw_post_data, object_hook=json_util.object_hook)
        # A single object is accepted as well as a list of objects
        if isinstance(product_usage, dict):
            product_usage = [product_usage]
        pu_models = [ProductUsage._from_son(p) for p in product_usage]
        for pu in pu_models:
            if isinstance(pu.date, basestring):
                # We must convert from str to datetime for ReportServer to be able to process this data
                pu.date = utils.convert_to_datetime(pu.date)
        b = time.time()
        items_not_imported = self.import_hook(pu_models)
        c = time.time()
        _LOG.info("ProductUsageResource::post_list() Total Time: %s, %s seconds to convert %s KB to JSON. "
                  "%s seconds to import %s objects into mongo with %s errors." %
                  (c - a, b - a, len(raw_post_data) / 1024.0, c - b,
                   len(pu_models), items_not_imported))
        if not items_not_imported:
            return http.HttpAccepted()
        else:
            return http.HttpConflict(items_not_imported)
    except Exception as e:  # 'as' syntax works on py2.6+ and py3
        _LOG.exception("Unable to process request with %s bytes in body" % (len(raw_post_data)))
        _LOG.info("Snippet of failed request body: \n%s\n" % (raw_post_data[:8 * 1024]))
        return http.HttpBadRequest(e)
def pre_save(cls, sender, document, **kwargs):
    """Signal hook: coerce a string 'stamp' into a datetime before saving."""
    stamp = document.stamp
    if isinstance(stamp, basestring):
        document.stamp = utils.convert_to_datetime(stamp)
def pre_save(cls, sender, document, **kwargs):
    """
    Signal hook: normalize the document's date to a datetime and
    sanitize the facts dict before persisting.
    """
    if isinstance(document.date, basestring):
        document.date = convert_to_datetime(document.date)
    facts = document.facts
    if facts:
        document.facts = sanitize_dict_for_mongo(facts)
def pre_save(cls, sender, document, **kwargs):
    """Signal hook: coerce string 'created'/'updated' fields to datetimes."""
    for field in ["created", "updated"]:
        value = getattr(document, field)
        if isinstance(value, basestring):
            setattr(document, field, convert_to_datetime(value))
def import_candlepin_data(mkt_product_usage=None, checkin_interval=1, from_splice_server="NA", force_import=False):
    """
    Import marketing product usage into MarketingReportData records.

    @param mkt_product_usage: iterable of MarketingProductUsage objects;
        defaults to all objects when empty/None
    @param checkin_interval: unused here; kept for interface compatibility
    @param from_splice_server: unused here; kept for interface compatibility
    @param force_import: unused here; kept for interface compatibility
    @return: list of records that failed to save (quarantined)
    """
    # None sentinel instead of a mutable [] default argument; behavior is
    # unchanged since both are falsy and trigger the objects.all() fallback.
    if not mkt_product_usage:
        mkt_product_usage = MarketingProductUsage.objects.all()
    quarantined = []
    for pu in mkt_product_usage:
        # We must convert from str to datetime for ReportServer to be able
        # to process this data
        if isinstance(pu.date, basestring):
            pu.date = utils.convert_to_datetime(pu.date)
        if isinstance(pu.updated, basestring):
            pu.updated = utils.convert_to_datetime(pu.updated)
        if isinstance(pu.created, basestring):
            pu.created = utils.convert_to_datetime(pu.created)
        # Flatten each reported product plus its product/pool lookups into
        # one subscription dict
        subscriptions = []
        for p in pu.product_info:
            this_product = Product.objects.filter(product_id=p["product"])[0]
            this_pool = Pool.objects.filter(product_id=p["product"])[0]
            mydict = {}
            mydict["product_account"] = p["account"]
            mydict["product_id"] = p["product"]
            mydict["product_name"] = this_product.name
            mydict["product_contract"] = p["contract"]
            mydict["product_quantity"] = p["quantity"]
            mydict["pool_uuid"] = this_pool.uuid
            mydict["pool_provided_products"] = this_pool.provided_products
            mydict["pool_start"] = this_pool.start_date
            mydict["pool_end"] = this_pool.end_date
            mydict["pool_active"] = this_pool.active
            mydict["pool_quantity"] = this_pool.quantity
            mydict["pool_sla"] = p["sla"]
            mydict["pool_support"] = p["support_level"]
            subscriptions.append(mydict)
        product_info = subscriptions
        facts = pu.facts
        _LOG.info("TYPE PROVIDED PRODUCTS ")
        _LOG.info(type(product_info))
        rd = MarketingReportData(
            instance_identifier=pu.instance_identifier,
            status=pu.entitlement_status,
            date=pu.date,
            created=pu.created,
            updated=pu.updated,
            hour=pu.date.strftime(constants.hr_fmt),
            systemid=pu.facts["systemid"],
            cpu_sockets=pu.facts["cpu_dot_cpu_socket(s)"],
            facts=facts,
            environment=pu.splice_server,
            splice_server=pu.splice_server,
            product_info=product_info,
            # record_identifier makes re-imports of the same hour idempotent
            # via the NotUniqueError handling below
            record_identifier=(pu.splice_server +
                               str(pu.instance_identifier) +
                               pu.date.strftime(constants.hr_fmt)))
        try:
            rd.save(safe=True)
            _LOG.info('recording: ' + str(pu.product_info[0]["product"]))
        except NotUniqueError:
            # fixed typo in log message ("Ignorning")
            _LOG.info("Ignoring NotUniqueError for: %s" % (rd))
        except OperationError as oe:
            _LOG.info("could not import:" + str(pu) + "Exception: " + str(oe))
            quarantined.append(rd)
    _LOG.info('import complete')
    return quarantined
def hydrate_updated(self, bundle):
    """Convert the bundle's required 'updated' value to a datetime in place."""
    raw_value = bundle.data["updated"]
    bundle.data["updated"] = utils.convert_to_datetime(raw_value)
    return bundle