def normalize_time_str(time_str):
    """Parse a datetime string and normalize it to naive UTC.

    Two input formats are accepted:

    * a local-timezone ``"%Y-%m-%d %H:%M:%S"`` string, which is converted
      from local time to UTC before normalization;
    * anything else parseable as an ISO-8601 (UTC+0) timestamp.

    :param time_str: the datetime string to parse
    :return: a normalized (naive UTC) datetime object
    """
    try:
        # local time zone datetime format
        local_dt = timeutils.parse_strtime(time_str, "%Y-%m-%d %H:%M:%S")
        return timeutils.normalize_time(timeutils.local_to_utc(local_dt))
    except ValueError:
        # UTC+0 time zone datetime format or others
        return timeutils.normalize_time(timeutils.parse_isotime(time_str))
def _is_in_time(target_time, start, end):
    """Check whether *target_time* lies inside the [start, end] window.

    Either bound may be ``None``, meaning that side of the window is
    unbounded.  ``target_time`` is a string and is only parsed when a
    bound is actually present.

    :param target_time: datetime string understood by normalize_time_str
    :param start: lower-bound datetime object, or None
    :param end: upper-bound datetime object, or None
    :return: True if target_time is within the window, else False
    """
    if start is not None:
        offset = (normalize_time_str(target_time) -
                  timeutils.normalize_time(start))
        if offset.total_seconds() < 0:
            # Target is before the window opens.
            return False
    if end is not None:
        offset = (normalize_time_str(target_time) -
                  timeutils.normalize_time(end))
        if offset.total_seconds() > 0:
            # Target is after the window closes.
            return False
    return True
def setting_get_all(filters=None, marker=None, limit=None,
                    sort_key='created_at', sort_dir='desc'):
    """
    Get all settings that match zero or more filters.

    :param filters: dict of filter keys and values.
    :param marker: setting uuid after which to start page
    :param limit: maximum number of settings to return
    :param sort_key: setting attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    filters = filters or {}
    session = get_session()
    query = session.query(models.Settings).\
        options(sqlalchemy.orm.joinedload(models.Settings.alarming))

    showing_deleted = False
    if 'changes-since' in filters:
        # normalize timestamp to UTC, as sqlalchemy doesn't appear to
        # respect timezone offsets
        changes_since = timeutils.normalize_time(filters.pop('changes-since'))
        query = query.filter(models.Settings.updated_at > changes_since)
        # changes-since implies including deleted rows in the page.
        showing_deleted = True

    if 'deleted' in filters:
        deleted_filter = filters.pop('deleted')
        query = query.filter_by(deleted=deleted_filter)
        showing_deleted = deleted_filter

    # Remaining filters map onto model columns; *_min / *_max suffixes
    # become numeric range bounds on the base column name.
    for attr, value in filters.items():
        if value is None:
            continue
        column = attr
        if attr.endswith('_min') or attr.endswith('_max'):
            column = column[0:-4]
            try:
                value = int(value)
            except ValueError:
                msg = _("Unable to filter on a range "
                        "with a non-numeric value.")
                raise exception.InvalidFilterRangeValue(msg)
        if attr.endswith('_min'):
            query = query.filter(getattr(models.Settings, column) >= value)
        elif attr.endswith('_max'):
            query = query.filter(getattr(models.Settings, column) <= value)
        elif hasattr(models.Settings, column):
            query = query.filter(getattr(models.Settings, column) == value)

    marker_setting = None
    if marker is not None:
        # Resolve the marker uuid to a row so pagination can anchor on it.
        marker_setting = setting_get(marker,
                                     force_show_deleted=showing_deleted)

    query = paginate_query(query, models.Settings, limit,
                           [sort_key, 'created_at', 'id'],
                           marker=marker_setting, sort_dir=sort_dir)
    return query.all()
def _repack_server_data(self, server, req, tenant_id):
    """Enrich a raw nova server dict in place and return it.

    Adds: human-readable power state, flattened ``flavor-*`` keys,
    owner tenant name, IPs split into fixed / private-floating /
    public-floating lists, image details, and ``running_seconds``
    (time elapsed since creation).

    :param server: server dict as returned by nova (mutated in place)
    :param req: incoming request; its x-auth-token header is reused
                for the ops_api lookups
    :param tenant_id: tenant scoping the flavor lookup
    :return: the same ``server`` dict, repacked
    """
    server.update({"OS-EXT-STS:power_state":
                   self._get_power_state(server['OS-EXT-STS:power_state'])})
    auth_token = req.headers.get("x-auth-token")

    # get flavor info: flatten into "flavor-<key>" entries, dropping the
    # hypermedia 'links' entry.
    flavor_id = server['flavor']['id']
    flavor = ops_api.get_flavor(tenant_id, auth_token, flavor_id)
    if flavor is not None:
        # dict.items() instead of the Python-2-only iteritems(): identical
        # behavior on Py2 and also works on Py3.
        for k, v in flavor['flavor'].items():
            if k == 'links':
                continue
            server.update({"flavor-%s" % k: v})
        # NOTE(review): pop kept inside the guard so the raw 'flavor'
        # entry survives when the lookup fails — confirm against callers.
        server.pop('flavor')

    server_owner_id = server['tenant_id']
    # get tenant name
    tenant = ops_api.get_tenant(server_owner_id, auth_token)
    tenant_name = tenant['name']
    server.update(tenant_name=tenant_name)

    fixed_ips = []
    private_floating_ips = []
    public_floating_ips = []
    # recognize IP type for every address on the 'private' network
    for ip in server['addresses'].get('private', {}):
        is_floating_ip, ip_type = _recog_ip_type(ip['addr'])
        if is_floating_ip and ip_type == 'private':
            private_floating_ips.append(ip)
        elif is_floating_ip and ip_type == 'public':
            public_floating_ips.append(ip)
        else:
            fixed_ips.append(ip)
    server.update(fixed_ips=fixed_ips)
    server.update(private_floating_ips=private_floating_ips)
    server.update(public_floating_ips=public_floating_ips)

    # get image info: merge image attributes directly into the server dict.
    image_id = server['image']['id']
    image = ops_api.get_image(auth_token, image_id)
    if image is not None:
        server.update(image)
        server.pop('image')

    # get running time (now - created_at)
    # FIXME(hzzhoushaoyu): created_at should transform to utc+0 datetime,
    # but as list show required nova return utc+8 datetime.
    # If nova return utc+0 datetime, running time may be ERROR data.
    created_at = server['created']
    created_datetime = timeutils.normalize_time(
        timeutils.local_to_utc(
            timeutils.parse_strtime(created_at, '%Y-%m-%d %H:%M:%S')))
    running_time = timeutils.seconds_from_now(created_datetime)
    server.update({"running_seconds": running_time})
    return server
def alarming_get_all(filters=None, marker=None, limit=None,
                     sort_key='created_at', sort_dir='desc', session=None):
    """
    Get all alarmings that match zero or more filters.

    :param filters: dict of filter keys and values.
    :param marker: alarming id after which to start page
    :param limit: maximum number of alarmings to return
    :param sort_key: alarming attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    filters = filters or {}
    session = session or get_session()
    query = session.query(models.Alarming).\
        options(sqlalchemy.orm.joinedload(models.Alarming.setting))

    showing_deleted = False
    if 'changes-since' in filters:
        # normalize timestamp to UTC, as sqlalchemy doesn't appear to
        # respect timezone offsets
        changes_since = timeutils.normalize_time(filters.pop('changes-since'))
        query = query.filter(models.Alarming.updated_at > changes_since)
        # changes-since implies including deleted rows in the page.
        showing_deleted = True

    if 'deleted' in filters:
        deleted_filter = filters.pop('deleted')
        query = query.filter_by(deleted=deleted_filter)
        showing_deleted = deleted_filter

    # Simple equality filters on dedicated columns.
    if 'readed' in filters:
        query = query.filter_by(readed=filters.pop('readed'))
    if 'done' in filters:
        query = query.filter_by(done=filters.pop('done'))
    if 'setting-uuid' in filters:
        query = query.filter_by(settings_uuid=filters.pop('setting-uuid'))

    marker_alarming = None
    if marker is not None:
        # Resolve the marker id to a row so pagination can anchor on it.
        marker_alarming = alarming_get(marker,
                                       force_show_deleted=showing_deleted)

    query = paginate_query(query, models.Alarming, limit,
                           [sort_key, 'created_at', 'id'],
                           marker=marker_alarming, sort_dir=sort_dir)
    return query.all()
def request_admin_token(search_cache=True):
    """Retrieve new token as admin user from keystone.

    A still-valid token cached in thread-local storage is returned
    directly when ``search_cache`` is True; otherwise keystone is asked
    for a fresh one, which is then cached.

    :return token id upon success
    :raises ServerError when unable to communicate with keystone
    """
    local_store = local.dict_store()

    # Fast path: reuse the cached token if present and not yet expired.
    if search_cache and hasattr(local_store, "admin_token"):
        cached = local_store.admin_token
        expires = timeutils.parse_isotime(cached["expires"])
        if not timeutils.is_older_than(timeutils.normalize_time(expires), 0):
            LOG.debug(_("Get token from local store."))
            return cached

    credentials = {
        "passwordCredentials": {
            "username": CONF.get("admin_user"),
            "password": CONF.get("admin_password"),
        },
        "tenantName": CONF.get("admin_tenant_name"),
    }
    keystone_client = client.KeystonePublicClient()
    data, headers = keystone_client.response(
        "POST", "/tokens",
        headers={"content-type": "application/json"},
        body={"auth": credentials})

    try:
        token = data["access"]["token"]
        # AssertionError is deliberately caught below: an empty/falsy
        # token is treated the same as a malformed response.
        assert token
        local_store.admin_token = token
        LOG.debug(_("Request for admin token and save to local store."))
        return token
    except (AssertionError, KeyError):
        LOG.warn("Unexpected response from keystone service: %s", data)
        raise