Commit 3b5f8441 authored by Thomas Goirand's avatar Thomas Goirand
Browse files

Merge tag '1.5.0' into debian/yoga

oslo.limit 1.5.0 release

meta:version: 1.5.0
meta:diff-start: -
meta:series: yoga
meta:release-type: release
meta:pypi: yes
meta:first: yes
meta:release:Author: melanie witt <melwittt@gmail.com>
meta:release:Commit: melanie witt <melwittt@gmail.com>
meta:release:Change-Id: Ib13eb3644c35cbf1d011ccc54264f73f896cf768
meta:release:Code-Review+2: Hervé Beraud <herveberaud.pro@gmail.com>
meta:release:Code-Review+2: Elod Illes <elod.illes@est.tech>
meta:release:Workflow+1: Elod Illes <elod.illes@est.tech>
parents 2f8af95d bf9deb10
......@@ -28,7 +28,7 @@
templates:
- check-requirements
- lib-forward-testing-python3
- openstack-python3-xena-jobs
- openstack-python3-yoga-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
......
......@@ -146,7 +146,10 @@ Another usage pattern is to check a limit and usage for a given
project, outside the scope of enforcement. This may be useful in a
reporting API to be able to expose to a user the limit and usage
information that the enforcer would use to judge a resource
consumption event.
consumption event. Any limit passed to this API which is not
registered in keystone will be considered to be zero, in keeping with
the behavior of the enforcer assuming that "unregistered means no
quota."
.. note::
This should ideally not be used to provide your own enforcement of
......
......@@ -31,7 +31,7 @@ class LimitFixture(fixtures.Fixture):
As in reality, only per-project overrides need be
provided here; any unmentioned projects or
resources will take the registered limit defaults.
:type reglimits: dict
:type projlimits: dict
"""
self.reglimits = reglimits
self.projlimits = projlimits
......
......@@ -12,6 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
from collections import namedtuple
from keystoneauth1 import exceptions as ksa_exceptions
......@@ -55,31 +56,34 @@ def _get_keystone_connection():
class Enforcer(object):
def __init__(self, usage_callback):
def __init__(self, usage_callback, cache=True):
"""An object for checking usage against resource limits and requests.
:param usage_callback: A callable function that accepts a project_id
string as a parameter and calculates the current
usage of a resource.
:type usage_callback: callable function
:param cache: Whether to cache resource limits for the lifetime of this
enforcer. Defaults to True.
:type cache: boolean
"""
if not callable(usage_callback):
msg = 'usage_callback must be a callable function.'
raise ValueError(msg)
self.connection = _get_keystone_connection()
self.model = self._get_model_impl(usage_callback)
self.model = self._get_model_impl(usage_callback, cache=cache)
def _get_enforcement_model(self):
"""Query keystone for the configured enforcement model."""
return self.connection.get('/limits/model').json()['model']['name']
def _get_model_impl(self, usage_callback):
def _get_model_impl(self, usage_callback, cache=True):
"""get the enforcement model based on configured model in keystone."""
model = self._get_enforcement_model()
for impl in _MODELS:
if model == impl.name:
return impl(usage_callback)
return impl(usage_callback, cache=cache)
raise ValueError("enforcement model %s is not supported" % model)
def enforce(self, project_id, deltas):
......@@ -88,8 +92,8 @@ class Enforcer(object):
From the deltas we extract the list of resource types that need to
have limits enforced on them.
From keystone we fetch limits relating to this project_id and the
endpoint specified in the configuration.
From keystone we fetch limits relating to this project_id (if
not None) and the endpoint specified in the configuration.
Using the usage_callback specified when creating the enforcer,
we fetch the existing usage.
......@@ -103,8 +107,12 @@ class Enforcer(object):
a limit of zero, i.e. do not allow any use of a resource type
that does not have a registered limit.
Note that if a project_id of None is provided, we just compare
against the registered limits (i.e. use this for
non-project-scoped limits)
:param project_id: The project to check usage and enforce limits
against.
against (or None).
:type project_id: string
:param deltas: A dictionary containing resource names as keys and
requested resource quantities as positive integers.
......@@ -113,9 +121,11 @@ class Enforcer(object):
:type deltas: dictionary
:raises exception.ClaimExceedsLimit: when over limits
"""
if not project_id or not isinstance(project_id, str):
msg = 'project_id must be a non-empty string.'
if project_id is not None and (
not project_id or not isinstance(project_id, str)):
msg = 'project_id must be a non-empty string or None.'
raise ValueError(msg)
if not isinstance(deltas, dict) or len(deltas) == 0:
msg = 'deltas must be a non-empty dictionary.'
......@@ -140,15 +150,17 @@ class Enforcer(object):
This should *not* be used to conduct custom enforcement, but
rather only for reporting.
:param project_id: The project for which to check usage and limits.
:param project_id: The project for which to check usage and limits,
or None.
:type project_id: string
:param resources_to_check: A list of resource names to query.
:type resources_to_check: list
:returns: A dictionary of name:limit.ProjectUsage for the
requested names against the provided project.
"""
if not project_id or not isinstance(project_id, str):
msg = 'project_id must be a non-empty string.'
if project_id is not None and (
not project_id or not isinstance(project_id, str)):
msg = 'project_id must be a non-empty string or None.'
raise ValueError(msg)
msg = ('resources_to_check must be non-empty sequence of '
......@@ -167,16 +179,26 @@ class Enforcer(object):
usage = self.model.get_project_usage(project_id, resources_to_check)
return {resource: ProjectUsage(limit, usage[resource])
for resource, limit in dict(limits).items()}
for resource, limit in limits}
def get_registered_limits(self, resources_to_check):
return self.model.get_registered_limits(resources_to_check)
def get_project_limits(self, project_id, resources_to_check):
return self.model.get_project_limits(project_id,
resources_to_check)
class _FlatEnforcer(object):
name = 'flat'
def __init__(self, usage_callback):
def __init__(self, usage_callback, cache=True):
self._usage_callback = usage_callback
self._utils = _EnforcerUtils()
self._utils = _EnforcerUtils(cache=cache)
def get_registered_limits(self, resources_to_check):
return self._utils.get_registered_limits(resources_to_check)
def get_project_limits(self, project_id, resources_to_check):
return self._utils.get_project_limits(project_id, resources_to_check)
......@@ -202,9 +224,12 @@ class _StrictTwoLevelEnforcer(object):
name = 'strict-two-level'
def __init__(self, usage_callback):
def __init__(self, usage_callback, cache=True):
self._usage_callback = usage_callback
def get_registered_limits(self, resources_to_check):
raise NotImplementedError()
def get_project_limits(self, project_id, resources_to_check):
raise NotImplementedError()
......@@ -229,8 +254,13 @@ class _LimitNotFound(Exception):
class _EnforcerUtils(object):
"""Logic common used by multiple enforcers"""
def __init__(self):
def __init__(self, cache=True):
self.connection = _get_keystone_connection()
self.should_cache = cache
# {project_id: {resource_name: project_limit}}
self.plimit_cache = defaultdict(dict)
# {resource_name: registered_limit}
self.rlimit_cache = {}
# get and cache endpoint info
endpoint_id = CONF.oslo_limit.endpoint_id
......@@ -244,7 +274,7 @@ class _EnforcerUtils(object):
def enforce_limits(project_id, limits, current_usage, deltas):
"""Check that proposed usage is not over given limits
:param project_id: project being checked
:param project_id: project being checked or None
:param limits: list of (resource_name,limit) pairs
:param current_usage: dict of resource name and current usage
:param deltas: dict of resource name and proposed additional usage
......@@ -268,42 +298,73 @@ class _EnforcerUtils(object):
LOG.debug("hit limit for project: %s", over_limit_list)
raise exception.ProjectOverLimit(project_id, over_limit_list)
def get_registered_limits(self, resource_names):
    """Look up the default (registered) limit for each named resource.

    :param resource_names: list of resource_name strings
    :return: list of (resource_name, limit) pairs, in the same order as
             resource_names; a resource with no registered limit in
             keystone is reported with a limit of 0
    """
    # A list (rather than a dict) preserves the caller's ordering of
    # resource_names in the result.
    defaults = []
    for name in resource_names:
        found = self._get_registered_limit(name)
        defaults.append((name, found.default_limit if found else 0))
    return defaults
def get_project_limits(self, project_id, resource_names):
"""Get all the limits for given project a resource_name list
We will raise ClaimExceedsLimit if no limit is found to ensure that
all clients of this library react to this situation in the same way.
If a limit is not found, it will be considered to be zero
(i.e. no quota)
:param project_id:
:param project_id: project being checked or None
:param resource_names: list of resource_name strings
:return: list of (resource_name,limit) pairs
:raises exception.ClaimExceedsLimit: if no limit is found
"""
# Using a list to preserve the resource_name order
project_limits = []
missing_limits = []
for resource_name in resource_names:
try:
limit = self._get_limit(project_id, resource_name)
project_limits.append((resource_name, limit))
except _LimitNotFound:
missing_limits.append(resource_name)
if len(missing_limits) > 0:
over_limit_list = [exception.OverLimitInfo(name, 0, 0, 0)
for name in missing_limits]
raise exception.ProjectOverLimit(project_id, over_limit_list)
limit = 0
project_limits.append((resource_name, limit))
return project_limits
def _get_limit(self, project_id, resource_name):
# TODO(johngarbutt): might need to cache here
project_limit = self._get_project_limit(project_id, resource_name)
# If we are configured to cache limits, look in the cache first and use
# the cached value if there is one. Else, retrieve the limit and add it
# to the cache. Do this for both project limits and registered limits.
# Look for a project limit first.
if (project_id in self.plimit_cache and
resource_name in self.plimit_cache[project_id]):
return self.plimit_cache[project_id][resource_name].resource_limit
project_limit = (self._get_project_limit(project_id, resource_name)
if project_id is not None else None)
if self.should_cache and project_limit:
self.plimit_cache[project_id][resource_name] = project_limit
if project_limit:
return project_limit.resource_limit
# If there is no project limit, look for a registered limit.
if resource_name in self.rlimit_cache:
return self.rlimit_cache[resource_name].default_limit
registered_limit = self._get_registered_limit(resource_name)
if self.should_cache and registered_limit:
self.rlimit_cache[resource_name] = registered_limit
if registered_limit:
return registered_limit.default_limit
......
......@@ -44,6 +44,9 @@ def list_opts():
return [(_option_group,
copy.deepcopy(_options) +
loading.get_session_conf_options() +
loading.get_auth_plugin_conf_options('password'),
loading.get_auth_plugin_conf_options('v2password'),
loading.get_auth_plugin_conf_options('v3password'),
loading.get_adapter_conf_options(include_deprecated=False)
)]
......
......@@ -29,6 +29,7 @@ from oslo_config import fixture as config_fixture
from oslotest import base
from oslo_limit import exception
from oslo_limit import fixture
from oslo_limit import limit
from oslo_limit import opts
......@@ -142,13 +143,42 @@ class TestEnforcer(base.BaseTestCase):
self.assertEqual(expected, enforcer.calculate_usage(project_id,
['a', 'b']))
@mock.patch.object(limit._EnforcerUtils, "_get_project_limit")
@mock.patch.object(limit._EnforcerUtils, "_get_registered_limit")
def test_calculate_and_enforce_some_missing(self, mock_get_reglimit,
                                            mock_get_limit):
    """A resource with no registered limit is treated as limited to zero.

    Both calculate_usage() and enforce() should apply a limit of 0 to a
    resource name that keystone knows nothing about.
    """
    # Registered and project limits for a and b, c is unregistered
    reg_limits = {'a': mock.MagicMock(default_limit=10),
                  'b': mock.MagicMock(default_limit=10)}
    # Project 'bar' overrides resource b down to 6.
    prj_limits = {('bar', 'b'): mock.MagicMock(resource_limit=6)}
    mock_get_reglimit.side_effect = lambda r: reg_limits.get(r)
    mock_get_limit.side_effect = lambda p, r: prj_limits.get((p, r))
    # Regardless, we have usage for all three
    mock_usage = mock.MagicMock()
    mock_usage.return_value = {'a': 5, 'b': 5, 'c': 5}

    enforcer = limit.Enforcer(mock_usage)

    # When we calculate usage, we should expect the default limit
    # of zero for the unregistered limit
    expected = {
        'a': limit.ProjectUsage(10, 5),
        'b': limit.ProjectUsage(6, 5),
        'c': limit.ProjectUsage(0, 5),
    }
    self.assertEqual(expected,
                     enforcer.calculate_usage('bar', ['a', 'b', 'c']))

    # Make sure that if we enforce, we get the expected behavior
    # of c being considered to be zero
    self.assertRaises(exception.ProjectOverLimit,
                      enforcer.enforce, 'bar', {'a': 1, 'b': 0, 'c': 1})
def test_calculate_usage_bad_params(self):
enforcer = limit.Enforcer(mock.MagicMock())
# Non-string project_id
self.assertRaises(ValueError,
enforcer.calculate_usage,
None, ['foo'])
self.assertRaises(ValueError,
enforcer.calculate_usage,
123, ['foo'])
......@@ -168,6 +198,27 @@ class TestEnforcer(base.BaseTestCase):
enforcer.calculate_usage,
'project', ['a', 123, 'b'])
@mock.patch.object(limit._EnforcerUtils, "get_registered_limits")
def test_get_registered_limits(self, mock_get_limits):
    """Enforcer.get_registered_limits() delegates to _EnforcerUtils."""
    mock_get_limits.return_value = [("a", 1), ("b", 0), ("c", 2)]

    enforcer = limit.Enforcer(lambda: None)
    limits = enforcer.get_registered_limits(["a", "b", "c"])

    # The resource list is passed through unchanged and the utils
    # result is returned as-is.
    mock_get_limits.assert_called_once_with(["a", "b", "c"])
    self.assertEqual(mock_get_limits.return_value, limits)
@mock.patch.object(limit._EnforcerUtils, "get_project_limits")
def test_get_project_limits(self, mock_get_limits):
    """Enforcer.get_project_limits() delegates to _EnforcerUtils."""
    project_id = uuid.uuid4().hex
    mock_get_limits.return_value = [("a", 1), ("b", 0), ("c", 2)]

    enforcer = limit.Enforcer(lambda: None)
    limits = enforcer.get_project_limits(project_id, ["a", "b", "c"])

    # Both the project id and the resource list are passed through.
    mock_get_limits.assert_called_once_with(project_id, ["a", "b", "c"])
    self.assertEqual(mock_get_limits.return_value, limits)
class TestFlatEnforcer(base.BaseTestCase):
def setUp(self):
......@@ -175,6 +226,27 @@ class TestFlatEnforcer(base.BaseTestCase):
self.mock_conn = mock.MagicMock()
limit._SDK_CONNECTION = self.mock_conn
@mock.patch.object(limit._EnforcerUtils, "get_registered_limits")
def test_get_registered_limits(self, mock_get_limits):
    """_FlatEnforcer.get_registered_limits() delegates to _EnforcerUtils."""
    mock_get_limits.return_value = [("a", 1), ("b", 0), ("c", 2)]

    enforcer = limit._FlatEnforcer(lambda: None)
    limits = enforcer.get_registered_limits(["a", "b", "c"])

    # The resource list is passed through unchanged and the utils
    # result is returned as-is.
    mock_get_limits.assert_called_once_with(["a", "b", "c"])
    self.assertEqual(mock_get_limits.return_value, limits)
@mock.patch.object(limit._EnforcerUtils, "get_project_limits")
def test_get_project_limits(self, mock_get_limits):
    """_FlatEnforcer.get_project_limits() delegates to _EnforcerUtils."""
    project_id = uuid.uuid4().hex
    mock_get_limits.return_value = [("a", 1), ("b", 0), ("c", 2)]

    enforcer = limit._FlatEnforcer(lambda: None)
    limits = enforcer.get_project_limits(project_id, ["a", "b", "c"])

    # Both the project id and the resource list are passed through.
    mock_get_limits.assert_called_once_with(project_id, ["a", "b", "c"])
    self.assertEqual(mock_get_limits.return_value, limits)
@mock.patch.object(limit._EnforcerUtils, "get_project_limits")
def test_enforce(self, mock_get_limits):
mock_usage = mock.MagicMock()
......@@ -215,19 +287,25 @@ class TestFlatEnforcer(base.BaseTestCase):
self.assertEqual(0, over_a.current_usage)
self.assertEqual(2, over_a.delta)
@mock.patch.object(limit._EnforcerUtils, "get_project_limits")
def test_enforce_raises_on_missing_limit(self, mock_get_limits):
mock_usage = mock.MagicMock()
@mock.patch.object(limit._EnforcerUtils, "_get_project_limit")
@mock.patch.object(limit._EnforcerUtils, "_get_registered_limit")
def test_enforce_raises_on_missing_limit(self, mock_get_reglimit,
mock_get_limit):
def mock_usage(*a):
return {'a': 1, 'b': 1}
project_id = uuid.uuid4().hex
deltas = {"a": 0, "b": 0}
mock_get_limits.side_effect = exception.ProjectOverLimit(
project_id, [exception.OverLimitInfo("a", 0, 0, 0)])
mock_get_reglimit.return_value = None
mock_get_limit.return_value = None
enforcer = limit._FlatEnforcer(mock_usage)
self.assertRaises(exception.ProjectOverLimit, enforcer.enforce,
project_id, deltas)
self.assertRaises(exception.ProjectOverLimit, enforcer.enforce,
None, deltas)
class TestEnforcerUtils(base.BaseTestCase):
def setUp(self):
......@@ -262,6 +340,33 @@ class TestEnforcerUtils(base.BaseTestCase):
self.assertEqual(foo, reg_limit)
def test_get_registered_limits(self):
    """Defaults come back in request order, with 0 for missing limits."""
    fake_endpoint = endpoint.Endpoint()
    fake_endpoint.service_id = "service_id"
    fake_endpoint.region_id = "region_id"
    self.mock_conn.get_endpoint.return_value = fake_endpoint

    # a and c have limits, b doesn't have one
    empty_iterator = iter([])

    a = registered_limit.RegisteredLimit()
    a.resource_name = "a"
    a.default_limit = 1
    a_iterator = iter([a])

    c = registered_limit.RegisteredLimit()
    c.resource_name = "c"
    c.default_limit = 2
    c_iterator = iter([c])

    # One keystone query per requested resource name, in order.
    self.mock_conn.registered_limits.side_effect = [a_iterator,
                                                    empty_iterator,
                                                    c_iterator]

    utils = limit._EnforcerUtils()
    limits = utils.get_registered_limits(["a", "b", "c"])
    # The unregistered resource b is reported with a limit of 0.
    self.assertEqual([('a', 1), ('b', 0), ('c', 2)], limits)
def test_get_project_limits(self):
fake_endpoint = endpoint.Endpoint()
fake_endpoint.service_id = "service_id"
......@@ -291,21 +396,75 @@ class TestEnforcerUtils(base.BaseTestCase):
limits = utils.get_project_limits(project_id, ["a", "b"])
self.assertEqual([('a', 1), ('b', 2)], limits)
e = self.assertRaises(exception.ProjectOverLimit,
utils.get_project_limits,
project_id, ["c", "d"])
expected = ("Project %s is over a limit for "
"[Resource c is over limit of 0 due to current usage 0 "
"and delta 0, "
"Resource d is over limit of 0 due to current usage 0 "
"and delta 0]")
self.assertEqual(expected % project_id, str(e))
self.assertEqual(project_id, e.project_id)
self.assertEqual(2, len(e.over_limit_info_list))
over_c = e.over_limit_info_list[0]
self.assertEqual("c", over_c.resource_name)
over_d = e.over_limit_info_list[1]
self.assertEqual("d", over_d.resource_name)
self.assertEqual(0, over_d.limit)
self.assertEqual(0, over_d.current_usage)
self.assertEqual(0, over_d.delta)
limits = utils.get_project_limits(project_id, ["c", "d"])
self.assertEqual([('c', 0), ('d', 0)], limits)
def test_get_limit_cache(self, cache=True):
    """With cache=True, repeated _get_limit calls avoid keystone queries.

    Runs the same scenario for the no-cache case when called with
    cache=False (see test_get_limit_no_cache), in which every call is
    expected to hit keystone again.
    """
    # No project limit and registered limit = 5
    fix = self.useFixture(fixture.LimitFixture({'foo': 5}, {}))
    project_id = uuid.uuid4().hex
    utils = limit._EnforcerUtils(cache=cache)

    foo_limit = utils._get_limit(project_id, 'foo')
    self.assertEqual(5, foo_limit)
    self.assertEqual(1, fix.mock_conn.registered_limits.call_count)

    # Second call should be cached, so call_count for registered limits
    # should remain 1. When cache is disabled, it should increase to 2
    foo_limit = utils._get_limit(project_id, 'foo')
    self.assertEqual(5, foo_limit)
    count = 1 if cache else 2
    self.assertEqual(count, fix.mock_conn.registered_limits.call_count)

    # Add a project limit = 1
    fix.projlimits[project_id] = {'foo': 1}

    foo_limit = utils._get_limit(project_id, 'foo')
    self.assertEqual(1, foo_limit)
    # Project limits should have been queried 3 times total, once per
    # _get_limit call
    self.assertEqual(3, fix.mock_conn.limits.call_count)

    # Fourth call should be cached, so call_count for project limits should
    # remain 3. When cache is disabled, it should increase to 4
    foo_limit = utils._get_limit(project_id, 'foo')
    self.assertEqual(1, foo_limit)
    count = 3 if cache else 4
    self.assertEqual(count, fix.mock_conn.limits.call_count)
def test_get_limit_no_cache(self):
    # Re-run the caching scenario with cache=False to verify that every
    # _get_limit call queries keystone when caching is disabled.
    self.test_get_limit_cache(cache=False)
def test_get_limit(self):
    """_get_limit prefers the project limit and falls back correctly.

    With a project id, the registered limit is consulted only when no
    project limit exists; with no project id, only the registered limit
    is consulted.
    """
    utils = limit._EnforcerUtils(cache=False)
    mgpl = mock.MagicMock()
    mgrl = mock.MagicMock()
    with mock.patch.multiple(utils, _get_project_limit=mgpl,
                             _get_registered_limit=mgrl):
        # With a project, we expect the project limit to be
        # fetched. If present, we never check the registered limit.
        utils._get_limit('project', 'foo')
        mgrl.assert_not_called()
        mgpl.assert_called_once_with('project', 'foo')

        mgrl.reset_mock()
        mgpl.reset_mock()

        # With a project, we expect the project limit to be
        # fetched. If absent, we check the registered limit.
        mgpl.return_value = None
        utils._get_limit('project', 'foo')
        mgrl.assert_called_once_with('foo')
        mgpl.assert_called_once_with('project', 'foo')

        mgrl.reset_mock()
        mgpl.reset_mock()

        # With no project, we expect to get registered limit but
        # not project limit
        utils._get_limit(None, 'foo')
        mgrl.assert_called_once_with('foo')
        mgpl.assert_not_called()
---
features:
- |
``Enforcer`` objects now cache limits by default for the lifetime of the
object to provide improved performance when multiple calls of ``enforce()``
are needed. This behavior is controlled by the boolean ``cache`` keyword
argument to the ``__init__`` method.
......@@ -6,6 +6,7 @@
:maxdepth: 1
unreleased
xena
wallaby
victoria
ussuri
......
=========================
Xena Series Release Notes
=========================
.. release-notes::
:branch: stable/xena
[metadata]
name = oslo.limit
summary = Limit enforcement library to assist with quota calculation.
description-file =
description_file =
README.rst
author = OpenStack
author-email = openstack-discuss@lists.openstack.org
home-page = https://docs.openstack.org/oslo.limit/latest/
python-requires = >=3.6
author_email = openstack-discuss@lists.openstack.org
home_page = https://docs.openstack.org/oslo.limit/latest/
python_requires = >=3.6
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
......
[tox]
minversion = 3.2.0
minversion = 3.18.0
envlist = py3,pep8,docs
ignore_basepython_conflict = true
......@@ -28,7 +28,7 @@ commands = {posargs}
deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r{toxinidir}/doc/requirements.txt
whitelist_externals = rm
allowlist_externals = rm
commands =
rm -fr doc/build
sphinx-build -W --keep-going -b html doc/source doc/build/html
......@@ -37,7 +37,7 @@ commands =
commands = python setup.py test --coverage --coverage-package-name=oslo_limit --testr-args='{posargs}'
[testenv:releasenotes]
whitelist_externals = rm
allowlist_externals = rm
deps =
-r{toxinidir}/doc/requirements.txt
commands =
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment