From 13d837739d1282e4071aa1d0925593ce517fdffa Mon Sep 17 00:00:00 2001 From: "John L. Villalovos" Date: Wed, 26 Jul 2017 10:23:43 -0700 Subject: [PATCH 001/303] Update some tests to copy the dictionaries passed in When doing a test, we should not pass a dictionary to a function and then use that same dictionary to check to make sure results match. Since the called function can modify the dictionary and our test will pass with a modified dictionary. Change-Id: Ic3b29d2f05e3b2a05b9dd25a2d2d2f682c79a0f9 --- sushy/tests/unit/test_connector.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 3586175..760c1f2 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -35,19 +35,22 @@ class ConnectorMethodsTestCase(base.TestCase): @mock.patch.object(connector.Connector, '_op', autospec=True) def test_get(self, mock__op): - self.conn.get(path='fake/path', data=self.data, headers=self.headers) + self.conn.get(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', self.data, self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_post(self, mock__op): - self.conn.post(path='fake/path', data=self.data, headers=self.headers) + self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', self.data, self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_patch(self, mock__op): - self.conn.patch(path='fake/path', data=self.data, headers=self.headers) + self.conn.patch(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', self.data, self.headers) @@ -85,7 +88,7 @@ class ConnectorOpTestCase(base.TestCase): expected_headers = 
self.headers.copy() expected_headers['Content-Type'] = 'application/json' - self.conn._op('POST', path='fake/path', data=self.data, + self.conn._op('POST', path='fake/path', data=self.data.copy(), headers=self.headers) self.request.assert_called_once_with( 'POST', 'http://foo.bar:1234/fake/path', -- GitLab From c622e5ba7d7dc7dea37c14ba8b68c3be0de9abef Mon Sep 17 00:00:00 2001 From: "John L. Villalovos" Date: Wed, 26 Jul 2017 10:40:00 -0700 Subject: [PATCH 002/303] Replace HTTP numeric constants with http_client constants Replace usage of HTTP numeric constants with http_client constants. For example instead of: 400 use http_client.BAD_REQUEST Change-Id: I81b44d0be844d539da2a5ad4a9e40dbc5d903c94 --- sushy/exceptions.py | 13 ++++++++----- sushy/tests/unit/resources/test_base.py | 3 ++- sushy/tests/unit/test_connector.py | 24 +++++++++++++----------- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/sushy/exceptions.py b/sushy/exceptions.py index db07cac..3a611d8 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -15,6 +15,8 @@ import logging +from six.moves import http_client + LOG = logging.getLogger(__name__) @@ -115,15 +117,16 @@ class AccessError(HTTPError): def raise_for_response(method, url, response): """Raise a correct error class, if needed.""" - if response.status_code < 400: + if response.status_code < http_client.BAD_REQUEST: return - elif response.status_code == 404: + elif response.status_code == http_client.NOT_FOUND: raise ResourceNotFoundError(method, url, response) - elif response.status_code == 400: + elif response.status_code == http_client.BAD_REQUEST: raise BadRequestError(method, url, response) - elif response.status_code in (401, 403): + elif response.status_code in (http_client.UNAUTHORIZED, + http_client.FORBIDDEN): raise AccessError(method, url, response) - elif response.status_code >= 500: + elif response.status_code >= http_client.INTERNAL_SERVER_ERROR: raise ServerSideError(method, url, response) else: raise 
HTTPError(method, url, response) diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 96a1db4..ff4e9df 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -16,6 +16,7 @@ import copy import mock +from six.moves import http_client from sushy import exceptions from sushy.resources import base as resource_base @@ -106,7 +107,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.test_resource_collection.members_identities = ('1',) self.conn.get.side_effect = exceptions.ResourceNotFoundError( method='GET', url='http://foo.bar:8000/redfish/v1/Fakes/2', - response=mock.Mock(status_code=404)) + response=mock.Mock(status_code=http_client.NOT_FOUND)) # | WHEN & THEN | self.assertRaises(exceptions.ResourceNotFoundError, self.test_resource_collection.get_member, '2') diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 760c1f2..c523754 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -17,6 +17,7 @@ import json import mock import requests +from six.moves import http_client from sushy import connector from sushy import exceptions @@ -74,7 +75,7 @@ class ConnectorOpTestCase(base.TestCase): self.session = mock.Mock(spec=requests.Session) self.conn._session = self.session self.request = self.session.request - self.request.return_value.status_code = 200 + self.request.return_value.status_code = http_client.OK def test_ok_get(self): expected_headers = self.headers.copy() @@ -107,19 +108,19 @@ class ConnectorOpTestCase(base.TestCase): self.assertRaises(exceptions.ConnectionError, self.conn._op, 'GET') def test_unknown_http_error(self): - self.request.return_value.status_code = 409 + self.request.return_value.status_code = http_client.CONFLICT self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.HTTPError, 'unknown error') as cm: self.conn._op('GET', 
'http://foo.bar') exc = cm.exception - self.assertEqual(409, exc.status_code) + self.assertEqual(http_client.CONFLICT, exc.status_code) self.assertIsNone(exc.body) self.assertIsNone(exc.detail) def test_known_http_error(self): - self.request.return_value.status_code = 400 + self.request.return_value.status_code = http_client.BAD_REQUEST with open('sushy/tests/unit/json_samples/error.json', 'r') as f: self.request.return_value.json.return_value = json.load(f) @@ -127,36 +128,37 @@ class ConnectorOpTestCase(base.TestCase): 'A general error has occurred') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(400, exc.status_code) + self.assertEqual(http_client.BAD_REQUEST, exc.status_code) self.assertIsNotNone(exc.body) self.assertIn('A general error has occurred', exc.detail) def test_not_found_error(self): - self.request.return_value.status_code = 404 + self.request.return_value.status_code = http_client.NOT_FOUND self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.ResourceNotFoundError, 'Resource http://foo.bar not found') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(404, exc.status_code) + self.assertEqual(http_client.NOT_FOUND, exc.status_code) def test_server_error(self): - self.request.return_value.status_code = 500 + self.request.return_value.status_code = ( + http_client.INTERNAL_SERVER_ERROR) self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.ServerSideError, 'unknown error') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(500, exc.status_code) + self.assertEqual(http_client.INTERNAL_SERVER_ERROR, exc.status_code) def test_access_error(self): - self.request.return_value.status_code = 403 + self.request.return_value.status_code = http_client.FORBIDDEN self.request.return_value.json.side_effect = ValueError('no json') with 
self.assertRaisesRegex(exceptions.AccessError, 'unknown error') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(403, exc.status_code) + self.assertEqual(http_client.FORBIDDEN, exc.status_code) -- GitLab From 846d7724375860329ade7210358d662bd6815a37 Mon Sep 17 00:00:00 2001 From: Nate Potter Date: Tue, 25 Jul 2017 16:12:20 -0700 Subject: [PATCH 003/303] Add DELETE method to connector Add standard HTTP delete method to the sushy connector class to enable extensions that need to make use of deletion. Change-Id: I9f4c102665fc3467ba3e281f39f650018b5e190b --- sushy/connector.py | 12 ++++++++++++ sushy/tests/unit/test_connector.py | 15 +++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/sushy/connector.py b/sushy/connector.py index debe824..9420ccc 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -113,6 +113,18 @@ class Connector(object): """ return self._op('PATCH', path, data, headers) + def delete(self, path='', data=None, headers=None): + """HTTP DELETE method. + + :param path: Optional sub-URI path to the resource. + :param data: Optional JSON data. + :param headers: Optional dictionary of headers. + :returns: The response object from the requests library. 
+ :raises: ConnectionError + :raises: HTTPError + """ + return self._op('DELETE', path, data, headers) + def __enter__(self): return self diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 53b04ec..3586175 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -51,6 +51,13 @@ class ConnectorMethodsTestCase(base.TestCase): mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', self.data, self.headers) + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_delete(self, mock__op): + self.conn.delete(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) + mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', + self.data, self.headers) + class ConnectorOpTestCase(base.TestCase): @@ -84,6 +91,14 @@ class ConnectorOpTestCase(base.TestCase): 'POST', 'http://foo.bar:1234/fake/path', data=json.dumps(self.data), headers=expected_headers) + def test_ok_delete(self): + expected_headers = self.headers.copy() + + self.conn._op('DELETE', path='fake/path', headers=self.headers.copy()) + self.request.assert_called_once_with( + 'DELETE', 'http://foo.bar:1234/fake/path', + data=None, headers=expected_headers) + def test_connection_error(self): self.request.side_effect = requests.exceptions.ConnectionError self.assertRaises(exceptions.ConnectionError, self.conn._op, 'GET') -- GitLab From b21698a5df0c248898be85bc4424baff5141662d Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Fri, 28 Jul 2017 13:01:24 +0000 Subject: [PATCH 004/303] Updated from global requirements Change-Id: Idd263c805216a8b2a318dfe36b0795e28cd5cf62 --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 72155ce..f796a23 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,7 +7,7 @@ hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 
python-subunit>=0.0.18 # Apache-2.0/BSD sphinx>=1.6.2 # BSD -openstackdocstheme>=1.11.0 # Apache-2.0 +openstackdocstheme>=1.16.0 # Apache-2.0 oslotest>=1.10.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD -- GitLab From 7981bd325e545f0b1260490f8897ade53efb1819 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Fri, 28 Jul 2017 21:08:51 +0000 Subject: [PATCH 005/303] Update reno for stable/pike Change-Id: I06ff402d47fd56799eee54e6ec88de585f35f45a --- releasenotes/source/index.rst | 1 + releasenotes/source/pike.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/pike.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index c10c897..4205f91 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,3 +6,4 @@ :maxdepth: 1 unreleased + pike diff --git a/releasenotes/source/pike.rst b/releasenotes/source/pike.rst new file mode 100644 index 0000000..e43bfc0 --- /dev/null +++ b/releasenotes/source/pike.rst @@ -0,0 +1,6 @@ +=================================== + Pike Series Release Notes +=================================== + +.. 
release-notes:: + :branch: stable/pike -- GitLab From 23c9bad3bdfe7a1240790ccb18cb63ab5e244022 Mon Sep 17 00:00:00 2001 From: loooosy Date: Tue, 11 Jul 2017 10:43:43 +0800 Subject: [PATCH 006/303] Enable some off-by-default checks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Some of the available checks are disabled by default, like: [H106] Don’t put vim configuration in source files [H203] Use assertIs(Not)None to check for None Change-Id: I380596faf23005e03a9a45f06cade5cb05837fb5 --- sushy/tests/unit/resources/system/test_system.py | 10 +++++----- tox.ini | 4 +++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 2bf879e..b3c602f 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -222,7 +222,7 @@ class SystemTestCase(base.TestCase): self.sys_inst._parse_attributes() # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json['MemorySummary'].pop('Status') @@ -230,22 +230,22 @@ class SystemTestCase(base.TestCase): self.sys_inst._parse_attributes() # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json['MemorySummary'].pop('TotalSystemMemoryGiB') # | WHEN | self.sys_inst._parse_attributes() # | THEN | - self.assertEqual(None, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.size_gib) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json.pop('MemorySummary') # | WHEN | 
self.sys_inst._parse_attributes() # | THEN | - self.assertEqual(None, self.sys_inst.memory_summary) + self.assertIsNone(self.sys_inst.memory_summary) def test_processors(self): # check for the underneath variable value diff --git a/tox.ini b/tox.ini index c5ad819..b1d9f85 100644 --- a/tox.ini +++ b/tox.ini @@ -40,8 +40,10 @@ commands = oslo_debug_helper {posargs} [flake8] # E123, E125 skipped as they are invalid PEP-8. - show-source = True ignore = E123,E125 +# H106: Don’t put vim configuration in source files +# H203: Use assertIs(Not)None to check for None +enable-extensions=H106,H203 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build -- GitLab From 16b0d64baca6e8e3cf934917aa234b2a1539e0e9 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Wed, 13 Sep 2017 13:04:26 +0000 Subject: [PATCH 007/303] Updated from global requirements Change-Id: Id3eef1af2e5fb13af9c4d6e23dbcbdcd218af02b --- test-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index f796a23..31d53a3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,11 +7,11 @@ hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=0.0.18 # Apache-2.0/BSD sphinx>=1.6.2 # BSD -openstackdocstheme>=1.16.0 # Apache-2.0 +openstackdocstheme>=1.17.0 # Apache-2.0 oslotest>=1.10.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD testtools>=1.4.0 # MIT # releasenotes -reno!=2.3.1,>=1.8.0 # Apache-2.0 +reno>=2.5.0 # Apache-2.0 -- GitLab From 26f7523667c756caafb5a55cf0760af94ee93955 Mon Sep 17 00:00:00 2001 From: "John L. 
Villalovos" Date: Thu, 14 Sep 2017 12:16:27 -0700 Subject: [PATCH 008/303] flake8: Enable some off-by-default checks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update test-requirements.txt to use latest version of: * hacking Enable the following off-by-default checks: * [H203] Use assertIs(Not)None to check for None. * [H204] Use assert(Not)Equal to check for equality. * [H205] Use assert(Greater|Less)(Equal) for comparison. * [H210] Require ‘autospec’, ‘spec’, or ‘spec_set’ in mock.patch/mock.patch.object calls * [H904] Delay string interpolations at logging calls. No code changes were required. Change-Id: I486551c0149acd1d066f7a1a8bad857e7b961b92 --- test-requirements.txt | 2 +- tox.ini | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 31d53a3..c11b9f4 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. -hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0 +hacking>=1.0.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=0.0.18 # Apache-2.0/BSD diff --git a/tox.ini b/tox.ini index b1d9f85..693edab 100644 --- a/tox.ini +++ b/tox.ini @@ -42,8 +42,12 @@ commands = oslo_debug_helper {posargs} # E123, E125 skipped as they are invalid PEP-8. show-source = True ignore = E123,E125 -# H106: Don’t put vim configuration in source files -# H203: Use assertIs(Not)None to check for None -enable-extensions=H106,H203 +# [H106] Don’t put vim configuration in source files. +# [H203] Use assertIs(Not)None to check for None. +# [H204] Use assert(Not)Equal to check for equality. +# [H205] Use assert(Greater|Less)(Equal) for comparison. +# [H210] Require ‘autospec’, ‘spec’, or ‘spec_set’ in mock.patch/mock.patch.object calls +# [H904] Delay string interpolations at logging calls. 
+enable-extensions=H106,H203,H204,H205,H210,H904 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build -- GitLab From 53e0725b5d82a3d3912817a7fc633c976c2e5800 Mon Sep 17 00:00:00 2001 From: Anshul Jain Date: Tue, 26 Sep 2017 12:43:53 +0530 Subject: [PATCH 009/303] Add PUT method to connector Add PUT method for sushy connector class that will help extensions to make PUT requests where POST/PATCH is not supported. Change-Id: Ibdd2c819847202eac62c75eb5e73686b22d1ece4 --- sushy/connector.py | 12 ++++++++++++ sushy/tests/unit/test_connector.py | 17 +++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/sushy/connector.py b/sushy/connector.py index 9420ccc..7e48655 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -113,6 +113,18 @@ class Connector(object): """ return self._op('PATCH', path, data, headers) + def put(self, path='', data=None, headers=None): + """HTTP PUT method. + + :param path: Optional sub-URI path to the resource. + :param data: Optional JSON data. + :param headers: Optional dictionary of headers. + :returns: The response object from the requests library. + :raises: ConnectionError + :raises: HTTPError + """ + return self._op('PUT', path, data, headers) + def delete(self, path='', data=None, headers=None): """HTTP DELETE method. 
diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index c523754..c1215c8 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -55,6 +55,13 @@ class ConnectorMethodsTestCase(base.TestCase): mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', self.data, self.headers) + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_put(self, mock__op): + self.conn.put(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) + mock__op.assert_called_once_with(mock.ANY, 'PUT', 'fake/path', + self.data, self.headers) + @mock.patch.object(connector.Connector, '_op', autospec=True) def test_delete(self, mock__op): self.conn.delete(path='fake/path', data=self.data.copy(), @@ -95,6 +102,16 @@ class ConnectorOpTestCase(base.TestCase): 'POST', 'http://foo.bar:1234/fake/path', data=json.dumps(self.data), headers=expected_headers) + def test_ok_put(self): + expected_headers = self.headers.copy() + expected_headers['Content-Type'] = 'application/json' + + self.conn._op('PUT', path='fake/path', data=self.data.copy(), + headers=self.headers) + self.request.assert_called_once_with( + 'PUT', 'http://foo.bar:1234/fake/path', + data=json.dumps(self.data), headers=expected_headers) + def test_ok_delete(self): expected_headers = self.headers.copy() -- GitLab From 2aa31ddf4f90517695831379eee919bbf72d4ff7 Mon Sep 17 00:00:00 2001 From: Nate Potter Date: Fri, 29 Sep 2017 14:31:01 -0700 Subject: [PATCH 010/303] Add int_or_none adapter function Add a function that allows a value to either be translated to an int or left as None if the json value is null to account for null values without throwing an error. 
Change-Id: I03c76d01740cfb36648f09112611111a3b316467 Closes-bug: #1720443 --- sushy/resources/system/system.py | 3 ++- sushy/tests/unit/resources/system/test_system.py | 5 +++++ sushy/tests/unit/test_utils.py | 4 ++++ sushy/utils.py | 12 ++++++++++++ 4 files changed, 23 insertions(+), 1 deletion(-) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index f9eee1a..11c2f83 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -21,6 +21,7 @@ from sushy.resources import common from sushy.resources.system import constants as sys_cons from sushy.resources.system import mappings as sys_maps from sushy.resources.system import processor +from sushy import utils LOG = logging.getLogger(__name__) @@ -52,7 +53,7 @@ class MemorySummaryField(base.CompositeField): This signifies health state of memory along with its dependent resources. """ - size_gib = base.Field('TotalSystemMemoryGiB', adapter=int) + size_gib = base.Field('TotalSystemMemoryGiB', adapter=utils.int_or_none) """The size of memory of the system in GiB. 
This signifies the total installed, operating system-accessible memory diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index b3c602f..30a98eb 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -79,6 +79,11 @@ class SystemTestCase(base.TestCase): 'attribute Actions/#ComputerSystem.Reset/target', self.sys_inst._parse_attributes) + def test__parse_attributes_null_memory_capacity(self): + self.sys_inst.json['MemorySummary']['TotalSystemMemoryGiB'] = None + self.sys_inst._parse_attributes() + self.assertIsNone(self.sys_inst.memory_summary.size_gib) + def test_get__reset_action_element(self): value = self.sys_inst._get_reset_action_element() self.assertEqual("/redfish/v1/Systems/437XR1138R2/Actions/" diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index f53731a..2003276 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -35,3 +35,7 @@ class UtilsTestCase(base.TestCase): expected = ('/redfish/v1/Systems/FOO', '/redfish/v1/Systems/BAR') self.assertEqual(expected, utils.get_members_identities(members)) self.assertEqual(1, log_mock.call_count) + + def test_int_or_none(self): + self.assertEqual(1, utils.int_or_none('1')) + self.assertIsNone(None, utils.int_or_none(None)) diff --git a/sushy/utils.py b/sushy/utils.py index f1e243c..689a296 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -45,3 +45,15 @@ def get_members_identities(members): members_list.append(path.rstrip('/')) return tuple(members_list) + + +def int_or_none(x): + """Given a value x it cast as int or None + + :param x: The value to transform and return + :returns: Either None or x cast to an int + + """ + if x is None: + return None + return int(x) -- GitLab From 8fe2904a62b0f56dc3fc3fefc5a5a746911ce891 Mon Sep 17 00:00:00 2001 From: Nisha Agarwal Date: Sun, 2 Apr 2017 04:14:47 -0700 Subject: [PATCH 011/303] Adds 
EthernetInterface to the library This commit adds the EthernetInterface to the library. This returns the MAC addresses and its status as a dictionary to its caller. This has been tested on HPE Redfish hardware. Change-Id: If57184d71d244cdc6f04d3f66d56c374d4336d24 --- ...d_ethernet_interface-df308f814f0e4bce.yaml | 5 + sushy/resources/system/constants.py | 7 ++ sushy/resources/system/ethernet_interface.py | 84 ++++++++++++++ sushy/resources/system/mappings.py | 17 +++ sushy/resources/system/system.py | 21 +++- .../json_samples/ethernet_interfaces.json | 37 +++++++ .../ethernet_interfaces_collection.json | 12 ++ .../system/test_ethernet_interfaces.py | 103 ++++++++++++++++++ .../unit/resources/system/test_system.py | 25 +++++ sushy/utils.py | 21 ++++ 10 files changed, 327 insertions(+), 5 deletions(-) create mode 100644 releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml create mode 100644 sushy/resources/system/ethernet_interface.py create mode 100644 sushy/tests/unit/json_samples/ethernet_interfaces.json create mode 100644 sushy/tests/unit/json_samples/ethernet_interfaces_collection.json create mode 100644 sushy/tests/unit/resources/system/test_ethernet_interfaces.py diff --git a/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml b/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml new file mode 100644 index 0000000..6eb0328 --- /dev/null +++ b/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the "EthernetInterfaces" to the library. + It also returns the list of connected MACs. diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index f0cb2e1..e76bdb1 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -108,3 +108,10 @@ PROCESSOR_ARCH_IA_64 = 'Intel Itanium' PROCESSOR_ARCH_ARM = 'ARM' PROCESSOR_ARCH_MIPS = 'MIPS' PROCESSOR_ARCH_OEM = 'OEM-defined' + +# Health related constants. 
+HEALTH_STATE_ENABLED = 'Enabled' +HEALTH_STATE_DISABLED = 'Disabled' +HEALTH_OK = 'OK' +HEALTH_WARNING = 'Warning' +HEALTH_CRITICAL = 'Critical' diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py new file mode 100644 index 0000000..86baea7 --- /dev/null +++ b/sushy/resources/system/ethernet_interface.py @@ -0,0 +1,84 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/EthernetInterface.v1_3_0.json + +import logging + +from sushy.resources import base +from sushy.resources.system import constants as sys_cons +from sushy.resources.system import mappings as sys_map + +LOG = logging.getLogger(__name__) + + +class HealthStatusField(base.CompositeField): + state = base.MappedField( + 'State', sys_map.HEALTH_STATE_VALUE_MAP) + health = base.Field('Health') + + +class EthernetInterface(base.ResourceBase): + """This class adds the EthernetInterface resource""" + + identity = base.Field('Id', required=True) + """The Ethernet Interface identity string""" + + name = base.Field('Name') + """The name of the resource or array element""" + + description = base.Field('Description') + """Description""" + + permanent_mac_address = base.Field('PermanentMACAddress') + """This is the permanent MAC address assigned to this interface (port) """ + + mac_address = base.Field('MACAddress') + """This is the currently configured MAC address of the interface.""" 
+ + speed_mbps = base.Field('SpeedMbps') + """This is the current speed in Mbps of this interface.""" + + status = HealthStatusField("Status") + + +class EthernetInterfaceCollection(base.ResourceCollectionBase): + + _summary = None + + @property + def _resource_type(self): + return EthernetInterface + + @property + def summary(self): + """Summary MAC addresses and interfaces state + + This filters the MACs whose health is OK, + which means the MACs in both 'Enabled' and 'Disabled' States + are returned. + :returns dictionary in the format {'aa:bb:cc:dd:ee:ff': 'Enabled'} + """ + if self._summary is None: + mac_dict = {} + for eth in self.get_members(): + if eth.mac_address is not None: + if (eth.status is not None and + eth.status.health == sys_cons.HEALTH_OK): + mac_dict[eth.mac_address] = eth.status.state + self._summary = mac_dict + return self._summary + + def refresh(self): + super(EthernetInterfaceCollection, self).refresh() + self._summary = None diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index 6a7949b..fb6e5ab 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -82,3 +82,20 @@ PROCESSOR_ARCH_VALUE_MAP = { PROCESSOR_ARCH_VALUE_MAP_REV = ( utils.revert_dictionary(PROCESSOR_ARCH_VALUE_MAP)) + +HEALTH_STATE_VALUE_MAP = { + 'Enabled': sys_cons.HEALTH_STATE_ENABLED, + 'Disabled': sys_cons.HEALTH_STATE_DISABLED, +} + +HEALTH_STATE_VALUE_MAP_REV = ( + utils.revert_dictionary(HEALTH_STATE_VALUE_MAP)) + +HEALTH_VALUE_MAP = { + 'OK': sys_cons.HEALTH_OK, + 'Warning': sys_cons.HEALTH_WARNING, + 'Critical': sys_cons.HEALTH_CRITICAL +} + +HEALTH_VALUE_MAP_REV = ( + utils.revert_dictionary(HEALTH_VALUE_MAP)) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 11c2f83..7f081ae 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -19,6 +19,7 @@ from sushy import exceptions from sushy.resources import base from sushy.resources 
import common from sushy.resources.system import constants as sys_cons +from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_maps from sushy.resources.system import processor from sushy import utils @@ -118,6 +119,8 @@ class System(base.ResourceBase): _actions = ActionsField('Actions', required=True) + _ethernet_interfaces = None + def __init__(self, connector, identity, redfish_version=None): """A class representing a ComputerSystem @@ -241,11 +244,7 @@ class System(base.ResourceBase): def _get_processor_collection_path(self): """Helper function to find the ProcessorCollection path""" - processor_col = self.json.get('Processors') - if not processor_col: - raise exceptions.MissingAttributeError(attribute='Processors', - resource=self._path) - return processor_col.get('@odata.id') + return utils.get_sub_resource_path_by(self, 'Processors') @property def processors(self): @@ -264,6 +263,18 @@ class System(base.ResourceBase): def refresh(self): super(System, self).refresh() self._processors = None + self._ethernet_interfaces = None + + @property + def ethernet_interfaces(self): + if self._ethernet_interfaces is None: + self._ethernet_interfaces = ( + ethernet_interface.EthernetInterfaceCollection( + self._conn, + utils.get_sub_resource_path_by(self, "EthernetInterfaces"), + redfish_version=self.redfish_version)) + + return self._ethernet_interfaces class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/json_samples/ethernet_interfaces.json b/sushy/tests/unit/json_samples/ethernet_interfaces.json new file mode 100644 index 0000000..88f9417 --- /dev/null +++ b/sushy/tests/unit/json_samples/ethernet_interfaces.json @@ -0,0 +1,37 @@ +{ + "@odata.type": "#EthernetInterface.v1_0_2.EthernetInterface", + "Id": "1", + "Name": "Ethernet Interface", + "Description": "System NIC 1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PermanentMACAddress": "12:44:6A:3B:04:11", + "MACAddress": 
"12:44:6A:3B:04:11", + "SpeedMbps": 1000, + "FullDuplex": true, + "HostName": "web483", + "FQDN": "web483.contoso.com", + "IPv6DefaultGateway": "fe80::3ed9:2bff:fe34:600", + "NameServers": [ + "names.contoso.com" + ], + "IPv4Addresses": [{ + "Address": "192.168.0.10", + "SubnetMask": "255.255.252.0", + "AddressOrigin": "Static", + "Gateway": "192.168.0.1" + }], + "IPv6Addresses": [{ + "Address": "fe80::1ec1:deff:fe6f:1e24", + "PrefixLength": 64, + "AddressOrigin": "Static", + "AddressState": "Preferred" + }], + "VLANs": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411/VLANs" + }, + "@odata.context": "/redfish/v1/$metadata#EthernetInterface.EthernetInterface", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411" +} diff --git a/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json b/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json new file mode 100644 index 0000000..4623de0 --- /dev/null +++ b/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#EthernetInterfaceCollection.EthernetInterfaceCollection", + "Name": "Ethernet Interface Collection", + "Description": "System NICs on Contoso Servers", + "Members@odata.count": 1, + "Members": [{ + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411" + }], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#EthernetInterfaceCollection.EthernetInterfaceCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces" +} diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py new file mode 100644 index 0000000..539f7af --- /dev/null +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -0,0 +1,103 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy.resources.system import constants as sys_cons +from sushy.resources.system import ethernet_interface +from sushy.tests.unit import base + + +class EthernetInterfaceTestCase(base.TestCase): + + def setUp(self): + super(EthernetInterfaceTestCase, self).setUp() + self.conn = mock.Mock() + eth_file = 'sushy/tests/unit/json_samples/ethernet_interfaces.json' + with open(eth_file, 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" + "12446A3B0411") + self.sys_eth = ethernet_interface.EthernetInterface( + self.conn, eth_path, redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_eth._parse_attributes() + self.assertEqual('1.0.2', self.sys_eth.redfish_version) + self.assertEqual('1', self.sys_eth.identity) + self.assertEqual('Ethernet Interface', self.sys_eth.name) + self.assertEqual('System NIC 1', self.sys_eth.description) + self.assertEqual( + '12:44:6A:3B:04:11', self.sys_eth.permanent_mac_address) + self.assertEqual('12:44:6A:3B:04:11', self.sys_eth.mac_address) + self.assertEqual('Enabled', self.sys_eth.status.state) + self.assertEqual('OK', self.sys_eth.status.health) + self.assertEqual(1000, self.sys_eth.speed_mbps) + + +class EthernetInterfaceCollectionTestCase(base.TestCase): + + def setUp(self): + super(EthernetInterfaceCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces_collection.json', 'r') as f: + 
self.conn.get.return_value.json.return_value = json.loads(f.read()) + self.sys_eth_col = ethernet_interface.EthernetInterfaceCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_eth_col._parse_attributes() + self.assertEqual('1.0.2', self.sys_eth_col.redfish_version) + self.assertEqual('Ethernet Interface Collection', + self.sys_eth_col.name) + eth_path = ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411',) + self.assertEqual(eth_path, self.sys_eth_col.members_identities) + + @mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True) + def test_get_member(self, mock_eth): + self.sys_eth_col.get_member( + '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411') + mock_eth.assert_called_once_with( + self.sys_eth_col._conn, + ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411'), + redfish_version=self.sys_eth_col.redfish_version) + + @mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True) + def test_get_members(self, mock_eth): + members = self.sys_eth_col.get_members() + eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" + "12446A3B0411") + calls = [ + mock.call(self.sys_eth_col._conn, eth_path, + redfish_version=self.sys_eth_col.redfish_version), + ] + mock_eth.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) + + def test_eth_summary(self): + self.assertIsNone(self.sys_eth_col._summary) + self.conn.get.return_value.json.reset_mock() + path = 'sushy/tests/unit/json_samples/ethernet_interfaces.json' + with open(path, 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + expected_summary = {'12:44:6A:3B:04:11': sys_cons.HEALTH_STATE_ENABLED} + actual_summary = self.sys_eth_col.summary + self.assertEqual(expected_summary, actual_summary) diff --git 
a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 30a98eb..6b23711 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -19,6 +19,8 @@ import mock import sushy from sushy import exceptions +from sushy.resources.system import constants as sys_cons +from sushy.resources.system import ethernet_interface from sushy.resources.system import processor from sushy.resources.system import system from sushy.tests.unit import base @@ -58,6 +60,7 @@ class SystemTestCase(base.TestCase): self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertEqual("OK", self.sys_inst.memory_summary.health) self.assertIsNone(self.sys_inst._processors) + self.assertIsNone(self.sys_inst._ethernet_interfaces) def test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') @@ -338,6 +341,28 @@ class SystemTestCase(base.TestCase): self.sys_inst.processors.summary) self.conn.get.return_value.json.assert_not_called() + def test_ethernet_interfaces(self): + self.conn.get.return_value.json.reset_mock() + eth_coll_return_value = None + eth_return_value = None + path = ('sushy/tests/unit/json_samples/' + 'ethernet_interfaces_collection.json') + with open(path, 'r') as f: + eth_coll_return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/ethernet_interfaces.json', + 'r') as f: + eth_return_value = (json.loads(f.read())) + + self.conn.get.return_value.json.side_effect = [eth_coll_return_value, + eth_return_value] + + self.assertIsNone(self.sys_inst._ethernet_interfaces) + actual_macs = self.sys_inst.ethernet_interfaces.summary + self.assertEqual({'12:44:6A:3B:04:11': sys_cons.HEALTH_STATE_ENABLED}, + actual_macs) + self.assertIsInstance(self.sys_inst._ethernet_interfaces, + ethernet_interface.EthernetInterfaceCollection) + class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/utils.py b/sushy/utils.py index 
689a296..3bc616a 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -15,6 +15,8 @@ import logging +from sushy import exceptions + LOG = logging.getLogger(__name__) @@ -57,3 +59,22 @@ def int_or_none(x): if x is None: return None return int(x) + + +def get_sub_resource_path_by(resource, subresource_name): + """Helper function to find the subresource path + + :param resource: ResourceBase instance on which the name + gets queried upon. + :param subresource_name: name of the resource field to + fetch the '@odata.id' from. + """ + subresource_element = resource.json.get(subresource_name) + if not subresource_element: + raise exceptions.MissingAttributeError(attribute=subresource_name, + resource=resource.path) + if '@odata.id' not in subresource_element: + raise exceptions.MissingAttributeError( + attribute=(subresource_name + '/@odata.id'), + resource=resource.path) + return subresource_element['@odata.id'] -- GitLab From 3220598e39dc5d4019daa7d36aa0aef2ce62e44e Mon Sep 17 00:00:00 2001 From: Nate Potter Date: Fri, 18 Aug 2017 16:10:17 -0700 Subject: [PATCH 012/303] Add ListField class to resource base Add a class capable of handling lists of fields in resource body JSON. This is a general purpose class, and is being added for use in the rsd-lib sushy extension as there are JSON responses such as {"Initiator": [{"iSCSI": 1}, {"iSCSI": 2}]} that need to be handled. 
Change-Id: I6053967177ddb045a79f373e1cf22529d6d71a5c --- sushy/resources/base.py | 34 +++++++++++++++++++++++++ sushy/tests/unit/resources/test_base.py | 21 ++++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index a6083ba..31d06e6 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -153,6 +153,40 @@ class CompositeField(collections.Mapping, Field): return iter(self._subfields) +class ListField(Field): + """Base class for fields consisting of a list of several sub-fields.""" + + def __init__(self, *args, **kwargs): + super(ListField, self).__init__(*args, **kwargs) + self._subfields = dict(_collect_fields(self)) + + def _load(self, body, resource, nested_in=None): + """Load the field list. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new list object containing subfields. + """ + nested_in = (nested_in or []) + self._path + values = super(ListField, self)._load(body, resource) + if values is None: + return None + + # Initialize the list that will contain each field instance + instances = [] + for value in values: + instance = copy.copy(self) + for attr, field in self._subfields.items(): + # Hide the Field object behind the real value + setattr(instance, attr, field._load(value, + resource, + nested_in)) + instances.append(instance) + + return instances + + class MappedField(Field): """Field taking real value from a mapping.""" diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 96a1db4..ad7e0ed 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -138,7 +138,17 @@ TEST_JSON = { 'Field': 'field value' }, 'Mapped': 'raw' - } + }, + 'ListField': [ + { + 'String': 'a third string', + 'Integer': 1 + }, + { + 'String': 'a fourth string', + 'Integer': 2 + } + ] } @@ -155,10 
+165,16 @@ class NestedTestField(resource_base.CompositeField): non_existing = resource_base.Field('NonExisting', default=3.14) +class TestListField(resource_base.ListField): + string = resource_base.Field('String', required=True) + integer = resource_base.Field('Integer', adapter=int) + + class ComplexResource(resource_base.ResourceBase): string = resource_base.Field('String', required=True) integer = resource_base.Field('Integer', adapter=int) nested = NestedTestField('Nested') + field_list = TestListField('ListField') non_existing_nested = NestedTestField('NonExistingNested') non_existing_mapped = resource_base.MappedField('NonExistingMapped', MAPPING) @@ -181,6 +197,9 @@ class FieldTestCase(base.TestCase): self.assertEqual('field value', self.test_resource.nested.nested_field) self.assertEqual('real', self.test_resource.nested.mapped) self.assertEqual(3.14, self.test_resource.nested.non_existing) + self.assertEqual('a third string', + self.test_resource.field_list[0].string) + self.assertEqual(2, self.test_resource.field_list[1].integer) self.assertIsNone(self.test_resource.non_existing_nested) self.assertIsNone(self.test_resource.non_existing_mapped) -- GitLab From 407baf85f1c64c61567daf249c35d14e149b5eb3 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 27 Oct 2017 20:24:12 +0200 Subject: [PATCH 013/303] Initial packaging. 
--- debian/changelog | 5 ++ debian/compat | 1 + debian/control | 100 +++++++++++++++++++++++++++++++ debian/copyright | 30 ++++++++++ debian/python-sushy-doc.doc-base | 9 +++ debian/rules | 38 ++++++++++++ debian/source/format | 1 + debian/source/options | 1 + debian/watch | 4 ++ 9 files changed, 189 insertions(+) create mode 100644 debian/changelog create mode 100644 debian/compat create mode 100644 debian/control create mode 100644 debian/copyright create mode 100644 debian/python-sushy-doc.doc-base create mode 100755 debian/rules create mode 100644 debian/source/format create mode 100644 debian/source/options create mode 100644 debian/watch diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000..cb5b485 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,5 @@ +python-sushy (1.1.0-1) unstable; urgency=medium + + * Initial release. (Closes: #879968) + + -- Thomas Goirand Fri, 27 Oct 2017 20:11:38 +0200 diff --git a/debian/compat b/debian/compat new file mode 100644 index 0000000..f599e28 --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +10 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000..2a781c0 --- /dev/null +++ b/debian/control @@ -0,0 +1,100 @@ +Source: python-sushy +Section: python +Priority: optional +Maintainer: PKG OpenStack +Uploaders: + Thomas Goirand , +Build-Depends: + debhelper (>= 10), + dh-python, + openstack-pkg-tools, + python-all, + python-pbr (>= 2.0.0), + python-setuptools, + python-sphinx, + python-sphinx (>= 1.6.2), + python3-all, + python3-pbr (>= 2.0.0), + python3-setuptools, +Build-Depends-Indep: + python-coverage, + python-hacking, + python-openstackdocstheme (>= 1.16.0), + python-oslotest (>= 1.10.0), + python-requests (>= 2.14.2), + python-six, + python-testscenarios, + python-testtools, + python3-oslotest (>= 1.10.0), + python3-requests (>= 2.14.2), + python3-six, + python3-testscenarios, + python3-testtools, + subunit, + testrepository, +Standards-Version: 4.1.1 +Vcs-Browser: 
https://anonscm.debian.org/cgit/openstack/python-sushy.git/ +Vcs-Git: https://anonscm.debian.org/git/openstack/python-sushy.git +Homepage: https://docs.openstack.org/sushy + +Package: python-sushy +Architecture: all +Depends: + python-pbr (>= 2.0.0), + python-requests (>= 2.14.2), + python-six, + ${misc:Depends}, + ${python:Depends}, +Suggests: + python-sushy-doc, +Description: small library to communicate with Redfish based systems - Python 2.7 + Sushy is a Python library to communicate with Redfish based systems. The goal + of the library is to be extremely simple, small, have as few dependencies as + possible and be very conservative when dealing with BMCs by issuing just + enough requests to it (BMCs are very flaky). + . + Therefore, the scope of the library has been limited to what is supported by + the OpenStack Ironic project. As the project grows and more features from + Redfish are needed we can expand Sushy to fullfil those requirements. + . + This package contains the Python 2.7 module. + +Package: python-sushy-doc +Section: doc +Architecture: all +Depends: + ${misc:Depends}, + ${sphinxdoc:Depends}, +Description: small library to communicate with Redfish based systems - doc + Sushy is a Python library to communicate with Redfish based systems. The goal + of the library is to be extremely simple, small, have as few dependencies as + possible and be very conservative when dealing with BMCs by issuing just + enough requests to it (BMCs are very flaky). + . + Therefore, the scope of the library has been limited to what is supported by + the OpenStack Ironic project. As the project grows and more features from + Redfish are needed we can expand Sushy to fullfil those requirements. + . + This package contains the documentation. 
+ +Package: python3-sushy +Architecture: all +Depends: + python3-pbr (>= 2.0.0), + python3-requests (>= 2.14.2), + python3-six, + ${misc:Depends}, + ${python3:Depends}, +Suggests: + python-sushy-doc, +Description: small library to communicate with Redfish based systems - Python 3.x + Sushy is a Python library to communicate with Redfish based systems. The goal + of the library is to be extremely simple, small, have as few dependencies as + possible and be very conservative when dealing with BMCs by issuing just + enough requests to it (BMCs are very flaky). + . + Therefore, the scope of the library has been limited to what is supported by + the OpenStack Ironic project. As the project grows and more features from + Redfish are needed we can expand Sushy to fullfil those requirements. + . + This package contains the Python 3.x module. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000..3bea704 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,30 @@ +Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: sushy +Source: https://docs.openstack.org/sushy + +Files: * +Copyright: (c) 2014-2016, Distributed Management Task Force, Inc. (DMTF). + (c) 2010-2016, OpenStack Foundation + (c) 2017, Red Hat, Inc. + (c) 2013 Hewlett-Packard Development Company, L.P. +License: Apache-2 + +Files: debian/* +Copyright: (c) 2017, Thomas Goirand +License: Apache-2 + +License: Apache-2 + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + . + http://www.apache.org/licenses/LICENSE-2.0 + . + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + . 
+ On Debian-based systems the full text of the Apache version 2.0 license + can be found in /usr/share/common-licenses/Apache-2.0. diff --git a/debian/python-sushy-doc.doc-base b/debian/python-sushy-doc.doc-base new file mode 100644 index 0000000..8d58cc1 --- /dev/null +++ b/debian/python-sushy-doc.doc-base @@ -0,0 +1,9 @@ +Document: python-sushy-doc +Title: sushy Documentation +Author: N/A +Abstract: Sphinx documentation for sushy +Section: Programming/Python + +Format: HTML +Index: /usr/share/doc/python-sushy-doc/html/index.html +Files: /usr/share/doc/python-sushy-doc/html/* diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000..680ad8e --- /dev/null +++ b/debian/rules @@ -0,0 +1,38 @@ +#!/usr/bin/make -f + +UPSTREAM_GIT := https://github.com/openstack/sushy.git +include /usr/share/openstack-pkg-tools/pkgos.make + +%: + dh $@ --buildsystem=python_distutils --with python2,python3,sphinxdoc + +override_dh_auto_install: + pkgos-dh_auto_install + +override_dh_auto_test: +ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS))) + pkgos-dh_auto_test +endif + + +override_dh_sphinxdoc: +ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS))) + sphinx-build -b html doc/source debian/python-sushy-doc/usr/share/doc/python-sushy-doc/html + dh_sphinxdoc -O--buildsystem=python_distutils +endif + +override_dh_clean: + dh_clean -O--buildsystem=python_distutils + rm -rf build + + +# Commands not to run +override_dh_installcatalogs: +override_dh_installemacsen override_dh_installifupdown: +override_dh_installinfo override_dh_installmenu override_dh_installmime: +override_dh_installmodules override_dh_installlogcheck: +override_dh_installpam override_dh_installppp override_dh_installudev override_dh_installwm: +override_dh_installxfonts override_dh_gconf override_dh_icons override_dh_perl override_dh_usrlocal: +override_dh_installcron override_dh_installdebconf: +override_dh_installlogrotate override_dh_installgsettings: + diff --git a/debian/source/format 
b/debian/source/format new file mode 100644 index 0000000..163aaf8 --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 0000000..cb61fa5 --- /dev/null +++ b/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/debian/watch b/debian/watch new file mode 100644 index 0000000..5e22772 --- /dev/null +++ b/debian/watch @@ -0,0 +1,4 @@ +version=3 +opts=uversionmangle=s/(rc|a|b|c)/~$1/ \ +https://pypi.debian.net/sushy/sushy-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) + -- GitLab From d914897cd663c9fd487e72c41cd05934887c4ff3 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 27 Oct 2017 18:29:37 +0000 Subject: [PATCH 014/303] Fix long desc. --- debian/control | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/debian/control b/debian/control index 2a781c0..b7b625f 100644 --- a/debian/control +++ b/debian/control @@ -55,7 +55,7 @@ Description: small library to communicate with Redfish based systems - Python 2. . Therefore, the scope of the library has been limited to what is supported by the OpenStack Ironic project. As the project grows and more features from - Redfish are needed we can expand Sushy to fullfil those requirements. + Redfish are needed Sushy will expand to fulfil those requirements. . This package contains the Python 2.7 module. @@ -73,7 +73,7 @@ Description: small library to communicate with Redfish based systems - doc . Therefore, the scope of the library has been limited to what is supported by the OpenStack Ironic project. As the project grows and more features from - Redfish are needed we can expand Sushy to fullfil those requirements. + Redfish are needed Sushy will expand to fulfil those requirements. . This package contains the documentation. @@ -95,6 +95,6 @@ Description: small library to communicate with Redfish based systems - Python 3. . 
Therefore, the scope of the library has been limited to what is supported by the OpenStack Ironic project. As the project grows and more features from - Redfish are needed we can expand Sushy to fullfil those requirements. + Redfish are needed Sushy will expand to fulfil those requirements. . This package contains the Python 3.x module. -- GitLab From fb44c162ba20f99017e77666627c65b7684c9dfb Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 27 Oct 2017 18:32:03 +0000 Subject: [PATCH 015/303] duplicate sphinx b-d --- debian/control | 1 - 1 file changed, 1 deletion(-) diff --git a/debian/control b/debian/control index b7b625f..1c9ffed 100644 --- a/debian/control +++ b/debian/control @@ -11,7 +11,6 @@ Build-Depends: python-all, python-pbr (>= 2.0.0), python-setuptools, - python-sphinx, python-sphinx (>= 1.6.2), python3-all, python3-pbr (>= 2.0.0), -- GitLab From 2b39751e193c56995bbb95741afcdabc8bccdb05 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Thu, 20 Jul 2017 16:40:50 -0400 Subject: [PATCH 016/303] Refining the resource refresh Introduces a public method named ``invalidate()`` in ResourceBase and facilitates the re-fetching of the resource attributes by invoking ``refresh()`` method only when the resource is marked as stale. Nested resource/s are invalidated in case of resource refresh and are lazily reloaded only when they are accessed the next time, provided those are already initialized in contrast to recreation as earlier. ``force`` argument introduced in ``refresh()`` method which will force refresh the resource and its sub-resources, if set to True. So if the resource needs to be reloaded post its initialization, it has to be invalidated first and then refreshed: resource.invalidate() resource.refresh() or if the resource needs to be reloaded forcefully post its initialization, it can be achieved by: resource.refresh(force=True) this will also invariably reload (greedy-refresh) its sub-resources as well. 
Co-Authored-By: Dmitry Tantsur Closes-Bug: 1709039 Change-Id: I89fc69ef0569c5a56abe7631d1fa6a0e5da17b80 --- ...ine-resource-refresh-86c21ce230967251.yaml | 6 +++ sushy/resources/base.py | 53 ++++++++++++++++++- sushy/resources/system/ethernet_interface.py | 9 +++- sushy/resources/system/processor.py | 10 ++-- sushy/resources/system/system.py | 32 ++++++++--- .../unit/resources/system/test_processor.py | 2 +- .../unit/resources/system/test_system.py | 6 ++- sushy/tests/unit/resources/test_base.py | 34 +++++++++--- 8 files changed, 128 insertions(+), 24 deletions(-) create mode 100644 releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml diff --git a/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml new file mode 100644 index 0000000..9005ae5 --- /dev/null +++ b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + The library now supports reloading of the attributes by invoking + ``refresh()`` method for nested resources in contrast to recreation. + Resources can now be marked stale by invoking ``invalidate()``. diff --git a/sushy/resources/base.py b/sushy/resources/base.py index a6083ba..5221b6c 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -197,6 +197,11 @@ class ResourceBase(object): self._path = path self._json = None self.redfish_version = redfish_version + # Note(deray): Indicates if the resource holds stale data or not. + # Starting off with True and eventually gets set to False when + # attribute values are fetched. 
+ self._is_stale = True + self.refresh() def _parse_attributes(self): @@ -205,20 +210,66 @@ class ResourceBase(object): # Hide the Field object behind the real value setattr(self, attr, field._load(self.json, self)) - def refresh(self): + def refresh(self, force=False): """Refresh the resource Freshly retrieves/fetches the resource attributes and invokes ``_parse_attributes()`` method on successful retrieval. + Advised not to override this method in concrete ResourceBase classes. + Resource classes can place their refresh specific operations in + ``_do_refresh()`` method, if needed. This method represents the + template method in the paradigm of Template design pattern. + + :param force: will force refresh the resource and its sub-resources, + if set to True. :raises: ResourceNotFoundError :raises: ConnectionError :raises: HTTPError """ + # Note(deray): Don't re-fetch / invalidate the sub-resources if the + # resource is "_not_ stale" (i.e. fresh) OR _not_ forced. + if not self._is_stale and not force: + return + self._json = self._conn.get(path=self._path).json() LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, 'path': self._path, 'json': self._json}) self._parse_attributes() + self._do_refresh(force) + + # Mark it fresh + self._is_stale = False + + def _do_refresh(self, force): + """Primitive method to be overridden by refresh related activities. + + Derived classes are supposed to override this method with the + resource specific refresh operations to be performed. This is a + primitive method in the paradigm of Template design pattern. + + :param force: should force refresh the resource and its sub-resources, + if set to True. + :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + + def invalidate(self, force_refresh=False): + """Mark the resource as stale, prompting refresh() before getting used. 
+ + If ``force_refresh`` is set to True, then it invokes ``refresh()`` + on the resource. + + :param force_refresh: will invoke refresh on the resource, + if set to True. + :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + self._is_stale = True + if force_refresh: + self.refresh() @property def json(self): diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index 86baea7..9911e1f 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -79,6 +79,11 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): self._summary = mac_dict return self._summary - def refresh(self): - super(EthernetInterfaceCollection, self).refresh() + def _do_refresh(self, force=False): + """Do custom resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. + greedy-refresh not done for them unless forced by ``force`` + argument. + """ self._summary = None diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 72fa416..a4415c9 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -120,8 +120,12 @@ class ProcessorCollection(base.ResourceCollectionBase): super(ProcessorCollection, self).__init__(connector, path, redfish_version) - def refresh(self): - """Refresh the resource""" - super(ProcessorCollection, self).refresh() + def _do_refresh(self, force=False): + """Do custom resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. + greedy-refresh not done for them unless forced by ``force`` + argument. 
+ """ # Reset summary attribute self._summary = None diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 7f081ae..35d1ae4 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -248,25 +248,28 @@ class System(base.ResourceBase): @property def processors(self): - """Property to provide reference to `ProcessorCollection` instance + """Property to reference `ProcessorCollection` instance - It is calculated once when the first time it is queried. On refresh, - this property gets reset. + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. """ if self._processors is None: self._processors = processor.ProcessorCollection( self._conn, self._get_processor_collection_path(), redfish_version=self.redfish_version) + self._processors.refresh(force=False) return self._processors - def refresh(self): - super(System, self).refresh() - self._processors = None - self._ethernet_interfaces = None - @property def ethernet_interfaces(self): + """Property to reference `EthernetInterfaceCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ if self._ethernet_interfaces is None: self._ethernet_interfaces = ( ethernet_interface.EthernetInterfaceCollection( @@ -274,8 +277,21 @@ class System(base.ResourceBase): utils.get_sub_resource_path_by(self, "EthernetInterfaces"), redfish_version=self.redfish_version)) + self._ethernet_interfaces.refresh(force=False) return self._ethernet_interfaces + def _do_refresh(self, force=False): + """Do custom resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. + greedy-refresh not done for them unless forced by ``force`` + argument. 
+ """ + if self._processors is not None: + self._processors.invalidate(force) + if self._ethernet_interfaces is not None: + self._ethernet_interfaces.invalidate(force) + class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index 849c6e3..0dd2dad 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -140,7 +140,7 @@ class ProcessorCollectionTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/' 'processor_collection.json', 'r') as f: self.conn.get.return_value.json.return_value = json.loads(f.read()) - self.sys_processor_col.refresh() + self.sys_processor_col.refresh(force=True) # | WHEN & THEN | self.assertIsNone(self.sys_processor_col._summary) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 6b23711..22eac5e 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -290,10 +290,13 @@ class SystemTestCase(base.TestCase): # On refreshing the system instance... 
with open('sushy/tests/unit/json_samples/system.json', 'r') as f: self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.sys_inst.invalidate() self.sys_inst.refresh() # | WHEN & THEN | - self.assertIsNone(self.sys_inst._processors) + self.assertIsNotNone(self.sys_inst._processors) + self.assertTrue(self.sys_inst._processors._is_stale) # | GIVEN | with open('sushy/tests/unit/json_samples/processor_collection.json', @@ -302,6 +305,7 @@ class SystemTestCase(base.TestCase): # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) + self.assertFalse(self.sys_inst._processors._is_stale) def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index ff4e9df..678a4f4 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -36,13 +36,29 @@ class ResourceBaseTestCase(base.TestCase): self.conn = mock.Mock() self.base_resource = BaseResource(connector=self.conn, path='/Foo', redfish_version='1.0.2') + self.assertFalse(self.base_resource._is_stale) # refresh() is called in the constructor self.conn.reset_mock() def test_refresh(self): + self.base_resource.refresh() + self.conn.get.assert_not_called() + + def test_refresh_force(self): + self.base_resource.refresh(force=True) + self.conn.get.assert_called_once_with(path='/Foo') + + def test_invalidate(self): + self.base_resource.invalidate() + self.conn.get.assert_not_called() + self.base_resource.refresh() self.conn.get.assert_called_once_with(path='/Foo') + def test_invalidate_force_refresh(self): + self.base_resource.invalidate(force_refresh=True) + self.conn.get.assert_called_once_with(path='/Foo') + class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" @@ -187,37 +203,39 @@ class FieldTestCase(base.TestCase): def test_missing_required(self): del 
self.json['String'] - self.assertRaisesRegex(exceptions.MissingAttributeError, - 'String', self.test_resource.refresh) + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'String', self.test_resource.refresh, force=True) def test_missing_nested_required(self): del self.json['Nested']['String'] - self.assertRaisesRegex(exceptions.MissingAttributeError, - 'Nested/String', self.test_resource.refresh) + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Nested/String', self.test_resource.refresh, force=True) def test_missing_nested_required2(self): del self.json['Nested']['Object']['Field'] self.assertRaisesRegex(exceptions.MissingAttributeError, 'Nested/Object/Field', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_malformed_int(self): self.json['Integer'] = 'banana' self.assertRaisesRegex( exceptions.MalformedAttributeError, 'attribute Integer is malformed.*invalid literal for int', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_malformed_nested_int(self): self.json['Nested']['Integer'] = 'banana' self.assertRaisesRegex( exceptions.MalformedAttributeError, 'attribute Nested/Integer is malformed.*invalid literal for int', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_mapping_missing(self): self.json['Nested']['Mapped'] = 'banana' - self.test_resource.refresh() + self.test_resource.refresh(force=True) self.assertIsNone(self.test_resource.nested.mapped) -- GitLab From c98978311ea5536fd6784c4fb9c297b31eed5e89 Mon Sep 17 00:00:00 2001 From: Nisha Agarwal Date: Mon, 6 Nov 2017 23:05:52 -0800 Subject: [PATCH 017/303] Add Zuul v3 jobs in tree This patch adds the zuulV3 jobs in tree in sushy project. 
Change-Id: I42b2b69dbd5bfccad5c53a936d9a5747910f9f07 --- .../post.yaml | 15 ++ .../run.yaml | 166 ++++++++++++++++++ zuul.d/legacy-sushy-jobs.yaml | 16 ++ zuul.d/project.yaml | 14 ++ 4 files changed, 211 insertions(+) create mode 100644 playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml create mode 100644 playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml create mode 100644 zuul.d/legacy-sushy-jobs.yaml create mode 100644 zuul.d/project.yaml diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml new file mode 100644 index 0000000..e07f551 --- /dev/null +++ b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml @@ -0,0 +1,15 @@ +- hosts: primary + tasks: + + - name: Copy files from {{ ansible_user_dir }}/workspace/ on node + synchronize: + src: '{{ ansible_user_dir }}/workspace/' + dest: '{{ zuul.executor.log_root }}' + mode: pull + copy_links: true + verify_host: true + rsync_opts: + - --include=/logs/** + - --include=*/ + - --exclude=* + - --prune-empty-dirs diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml new file mode 100644 index 0000000..a243979 --- /dev/null +++ b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml @@ -0,0 +1,166 @@ +- hosts: all + name: Autoconverted job legacy-tempest-dsvm-ironic-ipa-partition-redfish-sushy-src + from old job gate-tempest-dsvm-ironic-ipa-partition-redfish-sushy-src-ubuntu-xenial + tasks: + + - name: Ensure legacy workspace directory + file: + path: '{{ ansible_user_dir }}/workspace' + state: directory + + - shell: + cmd: | + set -e + set -x + cat > clonemap.yaml << EOF + clonemap: + - name: openstack-infra/devstack-gate + dest: devstack-gate + EOF + /usr/zuul-env/bin/zuul-cloner -m 
clonemap.yaml --cache-dir /opt/git \ + git://git.openstack.org \ + openstack-infra/devstack-gate + executable: /bin/bash + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' + + - shell: + cmd: | + cat << 'EOF' >> ironic-extra-vars + export DEVSTACK_PROJECT_FROM_GIT="sushy,$DEVSTACK_PROJECT_FROM_GIT" + + EOF + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' + + - shell: + cmd: | + cat << 'EOF' >> ironic-extra-vars + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_DEPLOY_DRIVER_ISCSI_WITH_IPA=True" + # Standardize VM size for each supported ramdisk + case "tinyipa" in + 'tinyipa') + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=384" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=tinyipa" + ;; + 'tinyipa256') + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=256" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=tinyipa" + ;; + 'coreos') + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=1280" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=coreos" + ;; + # if using a ramdisk without a known good value, use the devstack + # default by not exporting any value for IRONIC_VM_SPECS_RAM + esac + + EOF + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' + + - shell: + cmd: | + cat << 'EOF' >> ironic-extra-vars + export DEVSTACK_GATE_TEMPEST_REGEX="ironic" + + EOF + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' + + - shell: + cmd: | + cat << 'EOF' >> ironic-vars-early + # use tempest plugin + if [[ "$ZUUL_BRANCH" != "master" ]] ; then + # NOTE(jroll) if this is not a patch against master, then + # fetch master to install the plugin + export DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' git+git://git.openstack.org/openstack/ironic'" + else + # on master, use the local change, so we can pick up any changes to the plugin + export 
DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' /opt/stack/new/ironic'" + fi + export TEMPEST_CONCURRENCY=1 + + EOF + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' + + - shell: + cmd: | + set -e + set -x + export PROJECTS="openstack/ironic $PROJECTS" + export PROJECTS="openstack/ironic-lib $PROJECTS" + export PROJECTS="openstack/ironic-python-agent $PROJECTS" + export PROJECTS="openstack/python-ironicclient $PROJECTS" + export PROJECTS="openstack/pyghmi $PROJECTS" + export PROJECTS="openstack/virtualbmc $PROJECTS" + export PYTHONUNBUFFERED=true + export DEVSTACK_GATE_TEMPEST=1 + export DEVSTACK_GATE_IRONIC=1 + export DEVSTACK_GATE_NEUTRON=1 + export DEVSTACK_GATE_VIRT_DRIVER=ironic + export DEVSTACK_GATE_CONFIGDRIVE=1 + export DEVSTACK_GATE_IRONIC_DRIVER=redfish + export BRANCH_OVERRIDE=default + if [ "$BRANCH_OVERRIDE" != "default" ] ; then + export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE + fi + + if [[ ! "stable/newton stable/ocata stable/pike" =~ $ZUUL_BRANCH ]] ; then + export DEVSTACK_GATE_TLSPROXY=1 + fi + + if [ "redfish" == "pxe_snmp" ] ; then + # explicitly enable pxe_snmp driver + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_ENABLED_DRIVERS=fake,pxe_snmp" + fi + + if [ "redfish" == "redfish" ] ; then + # When deploying with redfish we need to enable the "redfish" + # hardware type + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_ENABLED_HARDWARE_TYPES=redfish" + fi + + if [ "partition" == "wholedisk" ] ; then + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_TEMPEST_WHOLE_DISK_IMAGE=True" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_EPHEMERAL_DISK=0" + else + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_TEMPEST_WHOLE_DISK_IMAGE=False" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_EPHEMERAL_DISK=1" + fi + + if [ -n "" ] ; then + export DEVSTACK_GATE_IRONIC_BUILD_RAMDISK=1 + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_INSPECTOR_BUILD_RAMDISK=True" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"USE_SUBNETPOOL=False" + else + export 
DEVSTACK_GATE_IRONIC_BUILD_RAMDISK=0 + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_INSPECTOR_BUILD_RAMDISK=False" + fi + + if [ "bios" == "uefi" ] ; then + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_BOOT_MODE=uefi" + fi + + export DEVSTACK_PROJECT_FROM_GIT="" + export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_COUNT=1" + + # Ensure the ironic-vars-EARLY file exists + touch ironic-vars-early + # Pull in the EARLY variables injected by the optional builders + source ironic-vars-early + + export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin ironic git://git.openstack.org/openstack/ironic" + + # Ensure the ironic-EXTRA-vars file exists + touch ironic-extra-vars + # Pull in the EXTRA variables injected by the optional builders + source ironic-extra-vars + + cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh + ./safe-devstack-vm-gate-wrap.sh + executable: /bin/bash + chdir: '{{ ansible_user_dir }}/workspace' + environment: '{{ zuul | zuul_legacy_vars }}' diff --git a/zuul.d/legacy-sushy-jobs.yaml b/zuul.d/legacy-sushy-jobs.yaml new file mode 100644 index 0000000..8dbcd71 --- /dev/null +++ b/zuul.d/legacy-sushy-jobs.yaml @@ -0,0 +1,16 @@ +- job: + name: sushy-tempest-dsvm-ironic-ipa-partition-redfish-src + parent: legacy-dsvm-base + run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml + post-run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml + timeout: 5400 + required-projects: + - openstack-infra/devstack-gate + - openstack/ironic + - openstack/ironic-lib + - openstack/ironic-python-agent + - openstack/pyghmi + - openstack/python-ironicclient + - openstack/sushy + - openstack/tempest + - openstack/virtualbmc diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml new file mode 100644 index 0000000..7b90271 --- /dev/null +++ b/zuul.d/project.yaml @@ -0,0 +1,14 @@ +- project: + name: openstack/sushy + check: + jobs: + - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src: + irrelevant-files: + - 
^(test-|)requirements.txt$ + - ^setup.cfg$ + gate: + jobs: + - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src: + irrelevant-files: + - ^(test-|)requirements.txt$ + - ^setup.cfg$ -- GitLab From 5758dbe30ff294383d010cd69edb5519ccd990c1 Mon Sep 17 00:00:00 2001 From: Ruby Loo Date: Tue, 14 Nov 2017 13:05:27 -0500 Subject: [PATCH 018/303] zuul: clean up job definition This cleans up the zuul job definition: - moves the 'irrelevant-files' list from project.yaml to legacy-sushy-jobs.yaml so that it isn't duplicated - adds more irrelevant files to the list - removes requirements.txt from the list of irrelevant files, since we want the job to run if this file changes - reorders the job definition so that the irrelevant-files and required-projects are first. This makes it more consistent with the other ironic-related projects Change-Id: Ifea6a3d220943a5a454a803b0633664a91af5c49 --- zuul.d/legacy-sushy-jobs.yaml | 15 ++++++++++++--- zuul.d/project.yaml | 10 ++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/zuul.d/legacy-sushy-jobs.yaml b/zuul.d/legacy-sushy-jobs.yaml index 8dbcd71..d7bb6be 100644 --- a/zuul.d/legacy-sushy-jobs.yaml +++ b/zuul.d/legacy-sushy-jobs.yaml @@ -1,9 +1,15 @@ - job: name: sushy-tempest-dsvm-ironic-ipa-partition-redfish-src parent: legacy-dsvm-base - run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml - post-run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml - timeout: 5400 + irrelevant-files: + - ^test-requirements.txt$ + - ^.*\.rst$ + - ^doc/.*$ + - ^releasenotes/.*$ + - ^setup.cfg$ + - ^sushy/tests/.*$ + - ^tools/.*$ + - ^tox.ini$ required-projects: - openstack-infra/devstack-gate - openstack/ironic @@ -14,3 +20,6 @@ - openstack/sushy - openstack/tempest - openstack/virtualbmc + run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml + post-run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml + 
timeout: 5400 diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 7b90271..67eed03 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -2,13 +2,7 @@ name: openstack/sushy check: jobs: - - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src: - irrelevant-files: - - ^(test-|)requirements.txt$ - - ^setup.cfg$ + - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src gate: jobs: - - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src: - irrelevant-files: - - ^(test-|)requirements.txt$ - - ^setup.cfg$ + - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src -- GitLab From b52a8c7ef87233ea2b6d229be5eff045b665a49c Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Thu, 16 Nov 2017 11:28:47 +0000 Subject: [PATCH 019/303] Updated from global requirements Change-Id: I3aa2df3acecf20b204083a04ffd1437d4e3c4723 --- requirements.txt | 2 +- test-requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3ef27e5..ad13f8c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,4 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0 requests>=2.14.2 # Apache-2.0 -six>=1.9.0 # MIT +six>=1.10.0 # MIT diff --git a/test-requirements.txt b/test-requirements.txt index c11b9f4..c324b29 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -5,13 +5,13 @@ hacking>=1.0.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 -python-subunit>=0.0.18 # Apache-2.0/BSD +python-subunit>=1.0.0 # Apache-2.0/BSD sphinx>=1.6.2 # BSD openstackdocstheme>=1.17.0 # Apache-2.0 oslotest>=1.10.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD -testtools>=1.4.0 # MIT +testtools>=2.2.0 # MIT # releasenotes reno>=2.5.0 # Apache-2.0 -- GitLab From 2f746cbdad6ce3097f1db684036ce34c3e1bc05e Mon Sep 17 00:00:00 2001 From: Nisha Agarwal Date: Tue, 10 Oct 2017 21:43:45 -0700 Subject: [PATCH 020/303] Follow-up patch for 8fe2904a62b0f56dc3fc3fefc5a5a746911ce891 This addresses the nit comments 
left over from the parent patch 8fe2904a62b0f56dc3fc3fefc5a5a746911ce891. Adds the missing test case from the parent patch. Change-Id: Ic60235834284302331e29122417d6490a3207bf8 --- ...alth_literals_change-0e3fc0c439b765e3.yaml | 7 +++ sushy/resources/system/constants.py | 10 ++-- sushy/resources/system/ethernet_interface.py | 18 ++++--- .../system/test_ethernet_interfaces.py | 10 ++-- .../unit/resources/system/test_system.py | 8 ++- sushy/tests/unit/test_utils.py | 51 +++++++++++++++++++ sushy/utils.py | 25 ++++++--- 7 files changed, 106 insertions(+), 23 deletions(-) create mode 100644 releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml diff --git a/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml b/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml new file mode 100644 index 0000000..f4a4c1b --- /dev/null +++ b/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml @@ -0,0 +1,7 @@ +--- +other: + - | + Changes the values for the constants ``HEALTH_STATE_ENABLED``, + ``HEALTH_STATE_DISABLED``, ``HEALTH_OK``, ``HEALTH_WARNING`` + and ``HEALTH_CRITICAL``. These could be correctly used + with their mapped values in mappings.py. diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index e76bdb1..fc2ddb1 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -110,8 +110,8 @@ PROCESSOR_ARCH_MIPS = 'MIPS' PROCESSOR_ARCH_OEM = 'OEM-defined' # Health related constants. 
-HEALTH_STATE_ENABLED = 'Enabled' -HEALTH_STATE_DISABLED = 'Disabled' -HEALTH_OK = 'OK' -HEALTH_WARNING = 'Warning' -HEALTH_CRITICAL = 'Critical' +HEALTH_STATE_ENABLED = 'enabled' +HEALTH_STATE_DISABLED = 'disabled' +HEALTH_OK = 'ok' +HEALTH_WARNING = 'warning' +HEALTH_CRITICAL = 'critical' diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index 86baea7..5ef75fd 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -62,20 +62,26 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): @property def summary(self): - """Summary MAC addresses and interfaces state + """Summary of MAC addresses and interfaces state This filters the MACs whose health is OK, which means the MACs in both 'Enabled' and 'Disabled' States are returned. - :returns dictionary in the format {'aa:bb:cc:dd:ee:ff': 'Enabled'} + + :returns: dictionary in the format + {'aa:bb:cc:dd:ee:ff': 'Enabled', + 'aa:bb:aa:aa:aa:aa': 'Disabled'} """ if self._summary is None: mac_dict = {} for eth in self.get_members(): - if eth.mac_address is not None: - if (eth.status is not None and - eth.status.health == sys_cons.HEALTH_OK): - mac_dict[eth.mac_address] = eth.status.state + if (eth.mac_address is not None and eth.status is not None): + if (eth.status.health == + sys_map.HEALTH_VALUE_MAP_REV.get( + sys_cons.HEALTH_OK)): + state = sys_map.HEALTH_STATE_VALUE_MAP_REV.get( + eth.status.state) + mac_dict[eth.mac_address] = state self._summary = mac_dict return self._summary diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index 539f7af..c3cfdea 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -16,6 +16,7 @@ import mock from sushy.resources.system import constants as sys_cons from sushy.resources.system import 
ethernet_interface +from sushy.resources.system import mappings as sys_map from sushy.tests.unit import base @@ -42,7 +43,7 @@ class EthernetInterfaceTestCase(base.TestCase): self.assertEqual( '12:44:6A:3B:04:11', self.sys_eth.permanent_mac_address) self.assertEqual('12:44:6A:3B:04:11', self.sys_eth.mac_address) - self.assertEqual('Enabled', self.sys_eth.status.state) + self.assertEqual('enabled', self.sys_eth.status.state) self.assertEqual('OK', self.sys_eth.status.health) self.assertEqual(1000, self.sys_eth.speed_mbps) @@ -92,12 +93,15 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): self.assertIsInstance(members, list) self.assertEqual(1, len(members)) - def test_eth_summary(self): + def test_summary(self): self.assertIsNone(self.sys_eth_col._summary) self.conn.get.return_value.json.reset_mock() path = 'sushy/tests/unit/json_samples/ethernet_interfaces.json' with open(path, 'r') as f: self.conn.get.return_value.json.return_value = json.loads(f.read()) - expected_summary = {'12:44:6A:3B:04:11': sys_cons.HEALTH_STATE_ENABLED} + expected_summary = { + '12:44:6A:3B:04:11': + sys_map.HEALTH_STATE_VALUE_MAP_REV.get( + sys_cons.HEALTH_STATE_ENABLED)} actual_summary = self.sys_eth_col.summary self.assertEqual(expected_summary, actual_summary) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 6b23711..c0f08ca 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -21,6 +21,7 @@ import sushy from sushy import exceptions from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface +from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor from sushy.resources.system import system from sushy.tests.unit import base @@ -358,8 +359,11 @@ class SystemTestCase(base.TestCase): self.assertIsNone(self.sys_inst._ethernet_interfaces) actual_macs = 
self.sys_inst.ethernet_interfaces.summary - self.assertEqual({'12:44:6A:3B:04:11': sys_cons.HEALTH_STATE_ENABLED}, - actual_macs) + expected_macs = ( + {'12:44:6A:3B:04:11': + sys_map.HEALTH_STATE_VALUE_MAP_REV.get( + sys_cons.HEALTH_STATE_ENABLED)}) + self.assertEqual(expected_macs, actual_macs) self.assertIsInstance(self.sys_inst._ethernet_interfaces, ethernet_interface.EthernetInterfaceCollection) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 2003276..de75de4 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -13,9 +13,12 @@ # License for the specific language governing permissions and limitations # under the License. +import json import mock +from sushy import exceptions +from sushy.resources.system import system from sushy.tests.unit import base from sushy import utils @@ -39,3 +42,51 @@ class UtilsTestCase(base.TestCase): def test_int_or_none(self): self.assertEqual(1, utils.int_or_none('1')) self.assertIsNone(None, utils.int_or_none(None)) + + def setUp(self): + super(UtilsTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/system.json', 'r') as f: + system_json = json.loads(f.read()) + self.conn.get.return_value.json.return_value = system_json + + self.sys_inst = system.System(self.conn, + '/redfish/v1/Systems/437XR1138R2', + redfish_version='1.0.2') + + def test_get_sub_resource_path_by(self): + subresource_path = 'EthernetInterfaces' + expected_result = '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces' + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path) + self.assertEqual(expected_result, value) + + def test_get_sub_resource_path_by_list(self): + subresource_path = ['EthernetInterfaces'] + expected_result = '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces' + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path) + self.assertEqual(expected_result, value) + + def 
test_get_sub_resource_path_by_fails(self): + subresource_path = ['Links', 'Chassis'] + expected_result = 'attribute Links/Chassis/@odata.id is missing' + self.assertRaisesRegex( + exceptions.MissingAttributeError, + expected_result, + utils.get_sub_resource_path_by, + self.sys_inst, subresource_path) + + def test_get_sub_resource_path_by_fails_with_empty_path(self): + self.assertRaisesRegex( + ValueError, + '"subresource_name" cannot be empty', + utils.get_sub_resource_path_by, + self.sys_inst, []) + + def test_get_sub_resource_path_by_fails_with_empty_string(self): + self.assertRaisesRegex( + ValueError, + '"subresource_name" cannot be empty', + utils.get_sub_resource_path_by, + self.sys_inst, '') diff --git a/sushy/utils.py b/sushy/utils.py index 3bc616a..15519cc 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -69,12 +69,23 @@ def get_sub_resource_path_by(resource, subresource_name): :param subresource_name: name of the resource field to fetch the '@odata.id' from. """ - subresource_element = resource.json.get(subresource_name) - if not subresource_element: - raise exceptions.MissingAttributeError(attribute=subresource_name, - resource=resource.path) - if '@odata.id' not in subresource_element: + if not subresource_name: + raise ValueError('"subresource_name" cannot be empty') + + if not isinstance(subresource_name, list): + subresource_name = [subresource_name] + + body = resource.json + for path_item in subresource_name: + body = body.get(path_item, {}) + + if not body: raise exceptions.MissingAttributeError( - attribute=(subresource_name + '/@odata.id'), + attribute='/'.join(subresource_name), resource=resource.path) + + if '@odata.id' not in body: + raise exceptions.MissingAttributeError( + attribute='/'.join(subresource_name) + '/@odata.id', resource=resource.path) - return subresource_element['@odata.id'] + + return body['@odata.id'] -- GitLab From 5ebee30934fb086c69adc52d0643e913d00311e1 Mon Sep 17 00:00:00 2001 From: Andreas Jaeger Date: Sat, 2 Dec 
2017 07:33:30 +0100 Subject: [PATCH 021/303] Avoid tox_install.sh for constraints support We do not need tox_install.sh, pip can handle constraints itself and install the project correctly. Thus update tox.ini and remove the now obsolete tools/tox_install.sh file. This follows https://review.openstack.org/#/c/508061 to remove tools/tox_install.sh. Change-Id: I984feb279774895efbe587b962afeda882c55e65 --- tools/tox_install.sh | 55 -------------------------------------------- tox.ini | 8 ++++--- 2 files changed, 5 insertions(+), 58 deletions(-) delete mode 100755 tools/tox_install.sh diff --git a/tools/tox_install.sh b/tools/tox_install.sh deleted file mode 100755 index 456aadc..0000000 --- a/tools/tox_install.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash - -# Client constraint file contains this client version pin that is in conflict -# with installing the client from source. We should replace the version pin in -# the constraints file before applying it for from-source installation. - -ZUUL_CLONER=/usr/zuul-env/bin/zuul-cloner -BRANCH_NAME=master -CLIENT_NAME=sushy -requirements_installed=$(echo "import openstack_requirements" | python 2>/dev/null ; echo $?) - -set -e - -CONSTRAINTS_FILE=$1 -shift - -install_cmd="pip install" -mydir=$(mktemp -dt "$CLIENT_NAME-tox_install-XXXXXXX") -trap "rm -rf $mydir" EXIT -localfile=$mydir/upper-constraints.txt -if [[ $CONSTRAINTS_FILE != http* ]]; then - CONSTRAINTS_FILE=file://$CONSTRAINTS_FILE -fi -curl $CONSTRAINTS_FILE -k -o $localfile -install_cmd="$install_cmd -c$localfile" - -if [ $requirements_installed -eq 0 ]; then - echo "ALREADY INSTALLED" > /tmp/tox_install.txt - echo "Requirements already installed; using existing package" -elif [ -x "$ZUUL_CLONER" ]; then - echo "ZUUL CLONER" > /tmp/tox_install.txt - pushd $mydir - $ZUUL_CLONER --cache-dir \ - /opt/git \ - --branch $BRANCH_NAME \ - git://git.openstack.org \ - openstack/requirements - cd openstack/requirements - $install_cmd -e . 
- popd -else - echo "PIP HARDCODE" > /tmp/tox_install.txt - if [ -z "$REQUIREMENTS_PIP_LOCATION" ]; then - REQUIREMENTS_PIP_LOCATION="git+https://git.openstack.org/openstack/requirements@$BRANCH_NAME#egg=requirements" - fi - $install_cmd -U -e ${REQUIREMENTS_PIP_LOCATION} -fi - -# This is the main purpose of the script: Allow local installation of -# the current repo. It is listed in constraints file and thus any -# install will be constrained and we need to unconstrain it. -edit-constraints $localfile -- $CLIENT_NAME "-e file://$PWD#egg=$CLIENT_NAME" - -$install_cmd -U $* -exit $? diff --git a/tox.ini b/tox.ini index 693edab..2c937f8 100644 --- a/tox.ini +++ b/tox.ini @@ -8,9 +8,11 @@ usedevelop = True setenv = VIRTUAL_ENV={envdir} PYTHONWARNINGS=default::DeprecationWarning -install_command = - {toxinidir}/tools/tox_install.sh {env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages} -deps = -r{toxinidir}/test-requirements.txt +install_command = pip install {opts} {packages} +deps = + -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt commands = python setup.py test --slowest --testr-args='{posargs}' [testenv:pep8] -- GitLab From 6451e5023737ed8f5debb950dd1793b5dbfa6770 Mon Sep 17 00:00:00 2001 From: "John L. Villalovos" Date: Thu, 14 Dec 2017 13:27:01 -0800 Subject: [PATCH 022/303] Use the tempest plugin from openstack/ironic-tempest-plugin Use the tempest plugin from openstack/ironic-tempest-plugin as we have moved the tempest code there. Soon the tempest code will be deleted from openstack/ironic. 
Change-Id: I4989f4c581bebd8435d57c32dc3e36131a5a2c4e --- .../run.yaml | 13 +++---------- zuul.d/legacy-sushy-jobs.yaml | 1 + 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml index a243979..065c2e4 100644 --- a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml +++ b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml @@ -72,16 +72,8 @@ cmd: | cat << 'EOF' >> ironic-vars-early # use tempest plugin - if [[ "$ZUUL_BRANCH" != "master" ]] ; then - # NOTE(jroll) if this is not a patch against master, then - # fetch master to install the plugin - export DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' git+git://git.openstack.org/openstack/ironic'" - else - # on master, use the local change, so we can pick up any changes to the plugin - export DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' /opt/stack/new/ironic'" - fi - export TEMPEST_CONCURRENCY=1 - + export DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' /opt/stack/new/ironic-tempest-plugin'" + export TEMPEST_CONCURRENCY=1 EOF chdir: '{{ ansible_user_dir }}/workspace' environment: '{{ zuul | zuul_legacy_vars }}' @@ -93,6 +85,7 @@ export PROJECTS="openstack/ironic $PROJECTS" export PROJECTS="openstack/ironic-lib $PROJECTS" export PROJECTS="openstack/ironic-python-agent $PROJECTS" + export PROJECTS="openstack/ironic-tempest-plugin $PROJECTS" export PROJECTS="openstack/python-ironicclient $PROJECTS" export PROJECTS="openstack/pyghmi $PROJECTS" export PROJECTS="openstack/virtualbmc $PROJECTS" diff --git a/zuul.d/legacy-sushy-jobs.yaml b/zuul.d/legacy-sushy-jobs.yaml index d7bb6be..e8a8902 100644 --- a/zuul.d/legacy-sushy-jobs.yaml +++ b/zuul.d/legacy-sushy-jobs.yaml @@ -15,6 +15,7 @@ - openstack/ironic - openstack/ironic-lib - openstack/ironic-python-agent + - openstack/ironic-tempest-plugin - 
openstack/pyghmi - openstack/python-ironicclient - openstack/sushy -- GitLab From 84edb962f3a6652ca51f2e2caa98eb0ce36e0564 Mon Sep 17 00:00:00 2001 From: Nguyen Van Trung Date: Wed, 27 Dec 2017 15:00:08 +0700 Subject: [PATCH 023/303] Indicating the location tests directory in oslo_debug_helper According to [1], we can passing a "-t" argument to oslo_debug_helper to indicate the directory where tests are located. This will solves ImportError exception. [1] https://docs.openstack.org/developer/oslotest/features.html Change-Id: I1403ef435d17f31668da2d906c73c80cb66d4209 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2c937f8..5f05dd9 100644 --- a/tox.ini +++ b/tox.ini @@ -38,7 +38,7 @@ commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] -commands = oslo_debug_helper {posargs} +commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] # E123, E125 skipped as they are invalid PEP-8. -- GitLab From 92f79530b9d1319f89534b792b516d36a021d44c Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Wed, 3 Jan 2018 04:47:25 -0500 Subject: [PATCH 024/303] Update method docstring Update the method docstring for the the method ResourceBase.refresh() in sushy/resources/base.py This is a follow-up to patch 2b39751e193c56995bbb95741afcdabc8bccdb05 Change-Id: Ic2569184a61d4bba95ebb59c71ccb70360bee3ca --- sushy/resources/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index b36cfd7..f04cf61 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -249,9 +249,9 @@ class ResourceBase(object): Freshly retrieves/fetches the resource attributes and invokes ``_parse_attributes()`` method on successful retrieval. - Advised not to override this method in concrete ResourceBase classes. 
- Resource classes can place their refresh specific operations in - ``_do_refresh()`` method, if needed. This method represents the + It is recommended not to override this method in concrete ResourceBase + classes. Resource classes can place their refresh specific operations + in ``_do_refresh()`` method, if needed. This method represents the template method in the paradigm of Template design pattern. :param force: will force refresh the resource and its sub-resources, -- GitLab From 171a018aec859d59255c1db89b143332539afd67 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Tue, 16 Jan 2018 04:35:19 +0000 Subject: [PATCH 025/303] Updated from global requirements Change-Id: I140e8ecc3852b96616779105776fadae62fca18d --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index c324b29..179c311 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,7 +8,7 @@ coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD sphinx>=1.6.2 # BSD openstackdocstheme>=1.17.0 # Apache-2.0 -oslotest>=1.10.0 # Apache-2.0 +oslotest>=3.2.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT -- GitLab From aaa9c6a9fbd4b57fbe6efe2ace845918fa985139 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Tue, 16 Jan 2018 04:35:22 +0000 Subject: [PATCH 026/303] Updated from global requirements Change-Id: I4b6e879fb45e10029d7d763d47c96a6361527e7a --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index c324b29..179c311 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,7 +8,7 @@ coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD sphinx>=1.6.2 # BSD openstackdocstheme>=1.17.0 # Apache-2.0 -oslotest>=1.10.0 # Apache-2.0 +oslotest>=3.2.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD 
testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT -- GitLab From b4cb4aaaa895246c03e2078ea31965dffadd7c71 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Fri, 5 Jan 2018 04:48:04 -0500 Subject: [PATCH 027/303] Update documentation related to 'refresh()' Update the documentation with the relevant changes pertaining to the new behaviour of ResourceBase.refresh() [0] method. Also, removed CPU and memory information from 'Missing Features' portion as those are available now. [0] Commit patch 2b39751e193c56995bbb95741afcdabc8bccdb05 Change-Id: Ifb81e0c58d70097eda1f65c9dbec13803056111c --- doc/source/reference/index.rst | 2 +- doc/source/reference/usage.rst | 24 ++++++++++++++++++++---- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index a0259ed..d07e4f9 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -22,7 +22,7 @@ Missing Features These are some features that sushy is presently missing. * Collect sensor data (Health state, temperature, fans etc...) -* System inspection (Number of CPUs, memory and disk size) +* System disk size * Serial console Sushy Python API Reference diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index 827ce9d..eccc27d 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -47,6 +47,14 @@ Creating and using a sushy system object sys_inst = sys_col.get_member(sys_col.members_identities[0]) # Refresh the system collection object + # + # In order to reload a resource post its initialization it has to be marked + # as stale (i.e. invoking 'invalidate()') first and then 'refresh()' has to + # be called. This will only reload the resource w/o reloading/refreshing its + # sub-resources (lazy-refresh of sub-resources). + # Note that calling 'refresh()' only, i.e. w/o calling 'invalidate()' first, + # will be a no-op wrt resource reload in this case. 
+ sys_col.invalidate() sys_col.refresh() @@ -59,8 +67,15 @@ Creating and using a sushy system object # Get a list of allowed reset values print(sys_inst.get_allowed_reset_system_values()) - # Refresh the system object - sys_inst.refresh() + # Refresh the system object (with all its sub-resources) + # + # Alternatively, this is the other way of reloading a resource object: + # The resource can be reloaded w/o the need of marking it stale + # (i.e. not invoking 'invalidate()'). It is achieved when the "force" + # argument of 'refresh()' method is set to True. Do note that the + # sub-resources of the resource being reloaded will also get reloaded + # (greedy-refresh of sub-resources) when this mode is adopted. + sys_inst.refresh(force=True) # Get the current power state print(sys_inst.power_state) @@ -126,6 +141,7 @@ Creating and using a sushy manager object mgr_inst = mgr_col.get_member(mgr_col.members_identities[0]) # Refresh the manager collection object + mgr_col.invalidate() mgr_col.refresh() @@ -147,8 +163,8 @@ Creating and using a sushy manager object # Reset the manager mgr_inst.reset_manager(sushy.RESET_MANAGER_FORCE_RESTART) - # Refresh the manager object - mgr_inst.refresh() + # Refresh the manager object (with all its sub-resources) + mgr_inst.refresh(force=True) If you do not have any real baremetal machine that supports the Redfish -- GitLab From 9610c06a179be10b19939b4285f23657b2925245 Mon Sep 17 00:00:00 2001 From: Yusef Shaban Date: Tue, 6 Jun 2017 15:56:48 -0700 Subject: [PATCH 028/303] Implement Redfish Sessions Implement Redfish Sessions because some vendor implementations have disabled basic auth and require a Redfish Session to access resources. This was done by creating the following objects: SessionService, SessionCollection and Session, BasicAuth, SessionAuth, SessionOrBasicAuth Session state is managed internally by ourselves. Unit tests have been updated accordingly. 
Closes-Bug: 1695972 Co-Authored-By: Debayan Ray Change-Id: I6623538383388caf1115a7c100b0f97e73df69d9 --- doc/source/reference/index.rst | 1 + doc/source/reference/usage.rst | 92 +++++ releasenotes/notes/sessions.yml | 6 + sushy/auth.py | 241 +++++++++++++ sushy/connector.py | 45 ++- sushy/exceptions.py | 5 + sushy/main.py | 47 ++- sushy/resources/sessionservice/__init__.py | 0 sushy/resources/sessionservice/session.py | 76 ++++ .../sessionservice/sessionservice.py | 128 +++++++ sushy/tests/unit/json_samples/session.json | 11 + .../unit/json_samples/session_collection.json | 12 + .../session_creation_headers.json | 18 + .../unit/json_samples/session_error.json | 17 + .../unit/json_samples/session_service.json | 18 + .../unit/resources/sessionservice/__init__.py | 0 .../resources/sessionservice/test_session.py | 100 ++++++ .../sessionservice/test_sessionservice.py | 175 ++++++++++ sushy/tests/unit/test_auth.py | 325 ++++++++++++++++++ sushy/tests/unit/test_connector.py | 91 ++++- sushy/tests/unit/test_main.py | 39 ++- 21 files changed, 1426 insertions(+), 21 deletions(-) create mode 100644 releasenotes/notes/sessions.yml create mode 100644 sushy/auth.py create mode 100644 sushy/resources/sessionservice/__init__.py create mode 100644 sushy/resources/sessionservice/session.py create mode 100644 sushy/resources/sessionservice/sessionservice.py create mode 100644 sushy/tests/unit/json_samples/session.json create mode 100644 sushy/tests/unit/json_samples/session_collection.json create mode 100644 sushy/tests/unit/json_samples/session_creation_headers.json create mode 100644 sushy/tests/unit/json_samples/session_error.json create mode 100644 sushy/tests/unit/json_samples/session_service.json create mode 100644 sushy/tests/unit/resources/sessionservice/__init__.py create mode 100644 sushy/tests/unit/resources/sessionservice/test_session.py create mode 100644 sushy/tests/unit/resources/sessionservice/test_sessionservice.py create mode 100644 sushy/tests/unit/test_auth.py 
diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index a0259ed..4a2594f 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -10,6 +10,7 @@ Features * Systems power management (Both soft and hard; Including NMI injection) * Changing systems boot device, frequency (Once or permanently) and mode (UEFI or BIOS) +* SessionManagement .. toctree:: :maxdepth: 2 diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index 827ce9d..a9004d7 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -5,6 +5,53 @@ Using Sushy To use sushy in a project: +----------------------------------------- +Specifying an authentication type +----------------------------------------- + +There are three authentication objects. By default we use SessionOrBasicAuth. +Authentication Modes: +auth.SessionOrBasicAuth: Use session based authentication. If we are unable +to create a session we will fallback to basic authentication. +auth.BasicAuth: Use basic authentication only. +auth.SessionAuth: Use session based authentication only. + +.. code-block:: python + + import logging + + import sushy + from sushy import auth + + # Enable logging at DEBUG level + LOG = logging.getLogger('sushy') + LOG.setLevel(logging.DEBUG) + LOG.addHandler(logging.StreamHandler()) + + basic_auth = auth.BasicAuth(username='foo', password='bar') + session_auth = auth.SessionAuth(username='foo', password='bar') + session_or_basic_auth = auth.SessionOrBasicAuth(username='foo', + password='bar') + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=basic_auth) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=session_auth) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=session_or_basic_auth) + + # It is important to note that you can + # call sushy without supplying an + # authentication object. 
In that case we + # will use the SessionOrBasicAuth authentication + # object in an attempt to connect to all different + # types of redfish servers. + s = sushy.Sushy('http://localhost:8000/redfish/v1', + username='foo', + password='bar') + ---------------------------------------- Creating and using a sushy system object ---------------------------------------- @@ -150,6 +197,51 @@ Creating and using a sushy manager object # Refresh the manager object mgr_inst.refresh() +------------------------------------------------- +Creating and using a sushy session service object +------------------------------------------------- + +.. code-block:: python + + import logging + + import sushy + + # Enable logging at DEBUG level + LOG = logging.getLogger('sushy') + LOG.setLevel(logging.DEBUG) + LOG.addHandler(logging.StreamHandler()) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + username='foo', password='bar') + + # Instantiate a SessionService object + sess_serv = s.get_session_service() + + # Get SessionCollection + sess_col = sess_serv.sessions + + # Print the ID of the sessions available in the collection + print(sess_col.members_identities) + + # Get a list of systems objects available in the collection + sess_col_insts = sess_col.get_members() + + # Instantiate a session object, same as getting it directly + sess_inst = sess_col.get_member(sess_col.members_identities[0]) + # Getting it directly + sess_inst = s.get_session(sess_col.members_identities[0]) + + # Delete the session + sess_inst.delete() + + # Create a new session + session_key, session_id = sess_serv.create_session( + username='foo', password='bar') + + # Delete a session + sess_serv.close_session(sess_col.members_identities[0]) + If you do not have any real baremetal machine that supports the Redfish protocol you can look at the :ref:`contributing` page to learn how to diff --git a/releasenotes/notes/sessions.yml b/releasenotes/notes/sessions.yml new file mode 100644 index 0000000..294ab47 --- 
/dev/null +++ b/releasenotes/notes/sessions.yml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds "SessionService" and "Sessions" to the library. + - | + Adds the abillity to specify authentication type on creation of root sushy object. diff --git a/sushy/auth.py b/sushy/auth.py new file mode 100644 index 0000000..22e7a48 --- /dev/null +++ b/sushy/auth.py @@ -0,0 +1,241 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Sushy Redfish Authentication Modes + +import abc +import logging + +import six + +from sushy import exceptions + +LOG = logging.getLogger(__name__) + + +@six.add_metaclass(abc.ABCMeta) +class AuthBase(object): + + def __init__(self, username=None, password=None): + """A class representing a base Sushy authentication mechanism + + :param username: User account with admin/server-profile + access privilege. + :param password: User account password. + """ + self._username = username + self._password = password + self._root_resource = None + self._connector = None + + def set_context(self, root_resource, connector): + """Set the context of the authentication object. + + :param root_resource: Root sushy object + :param connector: Connector for http connections + """ + self._root_resource = root_resource + self._connector = connector + self._connector.set_auth(self) + + def authenticate(self): + """Perform authentication. 
+ + :raises: RuntimeError + """ + if self._root_resource is None or self._connector is None: + raise RuntimeError('_root_resource / _connector is missing. ' + 'Forgot to call set_context()?') + self._do_authenticate() + + @abc.abstractmethod + def _do_authenticate(self): + """Method to establish a session to a Redfish controller. + + Needs to be implemented by extending auth class, + because each authentication type will authenticate in its own way. + """ + + @abc.abstractmethod + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + + def __enter__(self): + """Allow object to be called with the 'with' statement.""" + return self + + def __exit__(self, exception_type, exception_value, traceback): + """Allow object to be called with the 'with' statement. + + Allow object to be called with the 'with' statement but + also ensure we call close method on exit. + """ + self.close() + + +class BasicAuth(AuthBase): + """Basic Authentication class. + + This is a class used to encapsulate a basic authentication session. + + :param username: User account with admin/server-profile + access privilege. + :param password: User account password. + """ + + def _do_authenticate(self): + """Attempts to establish a Basic Authentication Session. + + """ + self._connector.set_http_basic_auth(self._username, self._password) + + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + return False + + +class SessionAuth(AuthBase): + """Session Authentication class. + + This is a class used to encapsulate a redfish session. + """ + + def __init__(self, username=None, password=None): + """A class representing a Session Authentication object. + + :param username: User account with admin/server-profile access + privilege. + :param password: User account password. 
+ """ + self._session_key = None + """Our Sessions Key""" + self._session_resource_id = None + """Our Sessions Unique Resource ID or URL""" + + super(SessionAuth, self).__init__(username, + password) + + def get_session_key(self): + """Returns the session key. + + :returns: The session key. + """ + return self._session_key + + def get_session_resource_id(self): + """Returns the session resource id. + + :returns: The session resource id. + """ + return self._session_resource_id + + def _do_authenticate(self): + """Establish a redfish session. + + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + session_service = self._root_resource.get_session_service() + session_auth_token, session_uri = ( + session_service.create_session(self._username, + self._password)) + self._session_key = session_auth_token + self._session_resource_id = session_uri + self._connector.set_http_session_auth(session_auth_token) + + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + return True + + def refresh_session(self): + """Method to refresh a session to a Redfish controller. + + This method is called to create a new session after + a session that has already been established + has timed-out or expired. + + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + self.reset_session_attrs() + self._do_authenticate() + + def close(self): + """Close the Redfish Session. + + Attempts to close an established RedfishSession by + deleting it from the remote Redfish controller. 
+ """ + if self._session_resource_id is not None: + try: + self._connector.delete(self._session_resource_id) + except (exceptions.AccessError, + exceptions.ServerSideError) as exc: + LOG.warning('Received exception "%(exception)s" while ' + 'attempting to delete the active session: ' + '%(session_id)s', + {'exception': exc, + 'session_id': self._session_resource_id}) + self.reset_session_attrs() + + def reset_session_attrs(self): + """Reset active session related attributes.""" + self._session_key = None + self._session_resource_id = None + + +class SessionOrBasicAuth(SessionAuth): + + def __init__(self, username=None, password=None): + super(SessionOrBasicAuth, self).__init__(username, password) + self.basic_auth = BasicAuth(username=username, password=password) + + def _do_authenticate(self): + """Establish a RedfishSession. + + We will attempt to establish a redfish session. If we are unable + to establish one, fallback to basic authentication. + """ + try: + # Attempt session based authentication + super(SessionOrBasicAuth, self)._do_authenticate() + except exceptions.SushyError as e: + LOG.debug('Received exception "%(exception)s" while ' + 'attempting to establish a session. ' + 'Falling back to basic authentication.', + {'exception': e}) + + # Fall back to basic authentication + self.reset_session_attrs() + self.basic_auth.set_context(self._root_resource, self._connector) + self.basic_auth.authenticate() + + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + return (self._session_key is not None and + self._session_resource_id is not None) + + def refresh_session(self): + """Method to refresh a session to a Redfish controller. + + This method is called to create a new RedfishSession + if we have previously established a RedfishSession and + the previous session has timed-out or expired. + If we did not previously have an established session, + we simply return our BasicAuthentication requests.Session. 
+ """ + if self.can_refresh_session(): + super(SessionOrBasicAuth, self).refresh_session() diff --git a/sushy/connector.py b/sushy/connector.py index 7e48655..84474d4 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -26,12 +26,24 @@ LOG = logging.getLogger(__name__) class Connector(object): - def __init__(self, url, username=None, password=None, verify=True): + def __init__(self, url, verify=True): self._url = url + self._verify = verify self._session = requests.Session() - self._session.verify = verify - if username and password: - self._session.auth = (username, password) + self._session.verify = self._verify + + def set_auth(self, auth): + """Sets the authentication mechanism for our connector.""" + self._auth = auth + + def set_http_basic_auth(self, username, password): + """Sets the http basic authentication information.""" + self._session.auth = (username, password) + + def set_http_session_auth(self, session_auth_token): + """Sets the session authentication information.""" + self._session.auth = None + self._session.headers.update({'X-Auth-Token': session_auth_token}) def close(self): """Close this connector and the associated HTTP session.""" @@ -49,11 +61,12 @@ class Connector(object): :raises: ConnectionError :raises: HTTPError """ + json_data = None if headers is None: headers = {} if data is not None: - data = json.dumps(data) + json_data = json.dumps(data) headers['Content-Type'] = 'application/json' url = parse.urljoin(self._url, path) @@ -62,14 +75,30 @@ class Connector(object): LOG.debug('HTTP request: %(method)s %(url)s; ' 'headers: %(headers)s; body: %(data)s', {'method': method, 'url': url, 'headers': headers, - 'data': data}) + 'data': json_data}) try: - response = self._session.request(method, url, data=data, + response = self._session.request(method, url, + data=json_data, headers=headers) except requests.ConnectionError as e: raise exceptions.ConnectionError(url=url, error=e) + # If we received an AccessError, and we + # previously 
established a redfish session + # there is a chance that the session has timed-out. + # Attempt to re-establish a session. + try: + exceptions.raise_for_response(method, url, response) + except exceptions.AccessError: + if self._auth.can_refresh_session(): + self._auth.refresh_session() + LOG.debug("Authentication refreshed successfully, " + "retrying the call.") + response = self._session.request(method, url, + data=json_data, + headers=headers) + else: + raise - exceptions.raise_for_response(method, url, response) LOG.debug('HTTP response for %(method)s %(url)s: ' 'status code: %(code)s', {'method': method, 'url': url, diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 3a611d8..21531c7 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -115,6 +115,11 @@ class AccessError(HTTPError): pass +class MissingXAuthToken(HTTPError): + message = ('No X-Auth-Token returned from remote host when ' + 'attempting to establish a session. Error: %(error)s') + + def raise_for_response(method, url, response): """Raise a correct error class, if needed.""" if response.status_code < http_client.BAD_REQUEST: diff --git a/sushy/main.py b/sushy/main.py index a5e2520..bff147c 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -12,12 +12,18 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+import logging +from sushy import auth as sushy_auth from sushy import connector from sushy.resources import base from sushy.resources.manager import manager +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +LOG = logging.getLogger(__name__) + class Sushy(base.ResourceBase): @@ -36,8 +42,13 @@ class Sushy(base.ResourceBase): _managers_path = base.Field(['Managers', '@odata.id'], required=True) """ManagerCollection path""" + _session_service_path = base.Field(['SessionService', '@odata.id'], + required=True) + """SessionService path""" + def __init__(self, base_url, username=None, password=None, - root_prefix='/redfish/v1/', verify=True): + root_prefix='/redfish/v1/', verify=True, + auth=None): """A class representing a RootService :param base_url: The base URL to the Redfish controller. It @@ -54,11 +65,24 @@ class Sushy(base.ResourceBase): the driver will ignore verifying the SSL certificate; if it's a path the driver will use the specified certificate or one of the certificates in the directory. Defaults to True. + :param auth: An authentication mechanism to utilize. 
""" self._root_prefix = root_prefix + if (auth is not None and (password is not None or + username is not None)): + msg = ('Username or Password were provided to Sushy ' + 'when an authentication mechanism was specified.') + raise ValueError(msg) + else: + auth = sushy_auth.SessionOrBasicAuth(username=username, + password=password) + super(Sushy, self).__init__( - connector.Connector(base_url, username, password, verify), + connector.Connector(base_url, verify), path=self._root_prefix) + self._auth = auth + self._auth.set_context(self, self._conn) + self._auth.authenticate() def _parse_attributes(self): super(Sushy, self)._parse_attributes() @@ -101,3 +125,22 @@ class Sushy(base.ResourceBase): """ return manager.Manager(self._conn, identity, redfish_version=self.redfish_version) + + def get_session_service(self): + """Get the SessionService object + + :raises: MissingAttributeError, if the collection attribue is not found + :returns: as SessionCollection object + """ + return sessionservice.SessionService( + self._conn, self._session_service_path, + redfish_version=self.redfish_version) + + def get_session(self, identity): + """Given the identity return a Session object + + :param identity: The identity of the session resource + :returns: The Session object + """ + return session.Session(self._conn, identity, + redfish_version=self.redfish_version) diff --git a/sushy/resources/sessionservice/__init__.py b/sushy/resources/sessionservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/sessionservice/session.py b/sushy/resources/sessionservice/session.py new file mode 100644 index 0000000..753cbde --- /dev/null +++ b/sushy/resources/sessionservice/session.py @@ -0,0 +1,76 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from sushy.resources import base + +LOG = logging.getLogger(__name__) + + +class Session(base.ResourceBase): + + description = base.Field('Description') + """The session service description""" + + identity = base.Field('Id', required=True) + """The session service identify string""" + + name = base.Field('Name', required=True) + """The session service name""" + + username = base.Field('UserName') + """The UserName for the account for this session.""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a Session + + :param connector: A Connector instance + :param identity: The identity of the Session resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + """ + super(Session, self).__init__(connector, identity, redfish_version) + + def delete(self): + """Method for deleting a Session. + + :raises: ServerSideError + """ + self._conn.delete(self.path) + + +class SessionCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The session collection name""" + + description = base.Field('Description') + """The session collection description""" + + @property + def _resource_type(self): + return Session + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a SessionCollection + + :param connector: A Connector instance + :param identity: The identity of the Session resource + :param redfish_version: The version of RedFish. 
Used to construct + the object according to schema of given version. + """ + super(SessionCollection, self).__init__( + connector, identity, redfish_version) diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py new file mode 100644 index 0000000..6de40d0 --- /dev/null +++ b/sushy/resources/sessionservice/sessionservice.py @@ -0,0 +1,128 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources.sessionservice import session + +LOG = logging.getLogger(__name__) + + +class SessionService(base.ResourceBase): + + description = base.Field('Description') + """The session service description""" + + identity = base.Field('Id', required=True) + """The session service identify string""" + + name = base.Field('Name', required=True) + """The session service name""" + + service_enabled = base.Field('ServiceEnabled') + """Tells us if session service is enabled""" + + _sessions = None # ref to SessionCollection instance + + session_timeout = base.Field('SessionTimeout') + """The session service timeout""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a SessionService + + :param connector: A Connector instance + :param identity: The identity of the SessionService resource + :param redfish_version: The version of RedFish. 
Used to construct + the object according to schema of given version. + """ + try: + super(SessionService, self).__init__( + connector, identity, redfish_version) + except exceptions.AccessError as ae: + LOG.warning('Received access error "%(ae)s". ' + 'Unable to refresh SessionService.', + {'ae': ae}) + + def _get_sessions_collection_path(self): + """Helper function to find the SessionCollections path""" + sessions_col = self.json.get('Sessions') + if not sessions_col: + raise exceptions.MissingAttributeError( + attribute='Sessions', resource=self._path) + return sessions_col.get('@odata.id') + + @property + def sessions(self): + """Property to provide reference to the `SessionCollection` instance + + It is calculated once when the first time it is queried. On refresh, + this property gets reset. + """ + if self._sessions is None: + self._sessions = session.SessionCollection( + self._conn, self._get_sessions_collection_path(), + redfish_version=self.redfish_version) + + self._sessions.refresh(force=False) + return self._sessions + + def _do_refresh(self, force=False): + """Do custom resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. + greedy-refresh not done for them unless forced by ``force`` + argument. + """ + if self._sessions is not None: + self._sessions.invalidate(force) + + def close_session(self, session_uri): + """This function is for closing a session based on its id. + + :raises: ServerSideError + """ + self._conn.delete(session_uri) + + def create_session(self, username, password): + """This function will try to create a session. 
+ + :returns: A session key and uri in the form of a tuple + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + try: + target_uri = self._get_sessions_collection_path() + except Exception: + # Defaulting to /Sessions + target_uri = self.path + '/Sessions' + + data = {'UserName': username, 'Password': password} + headers = {'X-Auth-Token': None} + + rsp = self._conn.post(target_uri, data=data, headers=headers) + session_key = rsp.headers.get('X-Auth-Token') + if session_key is None: + raise exceptions.MissingXAuthToken( + method='POST', url=target_uri, response=rsp) + + session_uri = rsp.headers.get('Location') + if session_uri is None: + LOG.warning("Received X-Auth-Token but NO session uri.") + + return session_key, session_uri diff --git a/sushy/tests/unit/json_samples/session.json b/sushy/tests/unit/json_samples/session.json new file mode 100644 index 0000000..61508b7 --- /dev/null +++ b/sushy/tests/unit/json_samples/session.json @@ -0,0 +1,11 @@ +{ + "@odata.type": "#Session.v1_0_2.Session", + "Id": "1234567890ABCDEF", + "Name": "User Session", + "Description": "Manager User Session", + "UserName": "Administrator", + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#Session.Session", + "@odata.id": "/redfish/v1/SessionService/Sessions/1234567890ABCDEF", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} diff --git a/sushy/tests/unit/json_samples/session_collection.json b/sushy/tests/unit/json_samples/session_collection.json new file mode 100644 index 0000000..30090e1 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#SessionCollection.SessionCollection", + "Name": "Session Collection", + "Members@odata.count": 1, + "@odata.id": "/redfish/v1/SessionService/Sessions", + "@odata.context": "/redfish/v1/$metadata#SessionService/Sessions/$entity", + "Members": [ + { + "@odata.id": "/redfish/v1/SessionService/Sessions/104f9d68f58abb85" + } + ] +} diff --git a/sushy/tests/unit/json_samples/session_creation_headers.json b/sushy/tests/unit/json_samples/session_creation_headers.json new file mode 100644 index 0000000..e2d3b38 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_creation_headers.json @@ -0,0 +1,18 @@ +{ + "Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; connect-src 'self'; img-src 'self'; frame-src 'self'; font-src 'self'; object-src 'self'; style-src 'self' 'unsafe-inline'", + "ETag": "'W/\"7dc5e2b9\"'", + "Cache-Control": "max-age=0, no-cache, no-store, must-revalidate", + "Location": "/redfish/v1/SessionService/Sessions/151edd65d41c0b89", + "Connection": "Keep-Alive", + "X-XSS-Protection": "1; mode=block", + "X-Auth-Token": "adc530e2016a0ea98c76c087f0e4b76f", + "Expires": "0", + "X-Frame-Options": "SAMEORIGIN", + "Content-Length": "392", + "X-Content-Type-Options": "nosniff", + "Content-Type": "application/json;charset=utf-8", + "OData-Version": "4.0", + "Keep-Alive": "timeout=1, max=32", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "Date": "Tue, 06 Jun 2017 17:07:48 GMT" +} diff --git a/sushy/tests/unit/json_samples/session_error.json b/sushy/tests/unit/json_samples/session_error.json new file mode 100644 index 0000000..9ac70d9 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_error.json @@ -0,0 +1,17 @@ 
+{ + "error": { + "code": "Base.1.0.GeneralError", + "message": "A general error has occurred. See ExtendedInfo for more information.", + "@Message.ExtendedInfo": [ + { + "@odata.type": "/redfish/v1/$metadata#MessageRegistry.1.0.0.MessageRegistry", + "MessageId": "Base.1.0.NoValidSession", + "RelatedProperties": [], + "Message": "There is no valid session established with the implementation.", + "MessageArgs": [], + "Severity": "Critical", + "Resolution": "Establish as session before attempting any operations." + } + ] + } +} diff --git a/sushy/tests/unit/json_samples/session_service.json b/sushy/tests/unit/json_samples/session_service.json new file mode 100644 index 0000000..ab28afa --- /dev/null +++ b/sushy/tests/unit/json_samples/session_service.json @@ -0,0 +1,18 @@ +{ + "@odata.type": "#SessionService.v1_0_2.SessionService", + "Id": "SessionService", + "Name": "Session Service", + "Description": "Session Service", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "SessionTimeout": 30, + "Sessions": { + "@odata.id": "/redfish/v1/SessionService/Sessions" + }, + "@odata.context": "/redfish/v1/$metadata#SessionService", + "@odata.id": "/redfish/v1/SessionService", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/resources/sessionservice/__init__.py b/sushy/tests/unit/resources/sessionservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py new file mode 100644 index 0000000..4e8ec4b --- /dev/null +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -0,0 +1,100 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +import mock + +from sushy import exceptions +from sushy.resources.sessionservice import session +from sushy.tests.unit import base + + +class SessionTestCase(base.TestCase): + + def setUp(self): + super(SessionTestCase, self).setUp() + self.conn = mock.Mock() + self.auth = mock.Mock() + with open('sushy/tests/unit/json_samples/session.json', 'r') as f: + sample_json = json.loads(f.read()) + self.conn.get.return_value.json.return_value = sample_json + self.auth._session_key = 'fake_x_auth_token' + self.auth._session_uri = sample_json['@odata.id'] + self.conn._auth = self.auth + + self.sess_inst = session.Session( + self.conn, '/redfish/v1/SessionService/Sessions/1234567890ABCDEF', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sess_inst._parse_attributes() + self.assertEqual('1.0.2', self.sess_inst.redfish_version) + self.assertEqual('1234567890ABCDEF', self.sess_inst.identity) + self.assertEqual('User Session', self.sess_inst.name) + exp_path = '/redfish/v1/SessionService/Sessions/1234567890ABCDEF' + self.assertEqual(exp_path, self.sess_inst.path) + + def test__parse_attributes_missing_identity(self): + self.sess_inst.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.sess_inst._parse_attributes) + + def test_session_close(self): + session_key = self.sess_inst._conn._auth._session_key + session_uri = 
self.sess_inst._conn._auth._session_uri + self.assertEqual(session_key, 'fake_x_auth_token') + self.assertEqual(session_uri, self.sess_inst.path) + self.sess_inst.delete() + self.sess_inst._conn.delete.assert_called_with(session_uri) + + +class SessionCollectionTestCase(base.TestCase): + + def setUp(self): + super(SessionCollectionTestCase, self).setUp() + self.conn = mock.Mock() + js_f = 'sushy/tests/unit/json_samples/session_collection.json' + with open(js_f, 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.sess_col = session.SessionCollection( + self.conn, '/redfish/v1/SessionService/Sessions', + redfish_version='1.0.2') + + def test__parse_attributes(self): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + self.sess_col._parse_attributes() + self.assertEqual('1.0.2', self.sess_col.redfish_version) + self.assertEqual('Session Collection', self.sess_col.name) + self.assertEqual((path,), self.sess_col.members_identities) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_member(self, mock_session): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + self.sess_col.get_member(path) + mock_session.assert_called_once_with( + self.sess_col._conn, path, + redfish_version=self.sess_col.redfish_version) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_members(self, mock_session): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + members = self.sess_col.get_members() + mock_session.assert_called_once_with( + self.sess_col._conn, path, + redfish_version=self.sess_col.redfish_version) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py new file mode 100644 index 0000000..6ae2066 --- /dev/null +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -0,0 +1,175 @@ 
+# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy import exceptions +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice +from sushy.tests.unit import base + + +class SessionServiceTestCase(base.TestCase): + + def setUp(self): + super(SessionServiceTestCase, self).setUp() + self.conn = mock.Mock() + js_f = 'sushy/tests/unit/json_samples/session_service.json' + with open(js_f, 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.sess_serv_inst = sessionservice.SessionService( + self.conn, '/redfish/v1/SessionService', + redfish_version='1.0.2') + + @mock.patch.object(sessionservice, 'LOG', autospec=True) + def test__init_throws_exception(self, mock_LOG): + self.conn.get.return_value.json.reset_mock() + self.conn.get.return_value.json.side_effect = ( + exceptions.AccessError( + 'GET', 'any_url', mock.MagicMock())) + sessionservice.SessionService( + self.conn, '/redfish/v1/SessionService', redfish_version='1.0.2') + self.assertTrue(mock_LOG.warning.called) + + def test__parse_attributes(self): + self.sess_serv_inst._parse_attributes() + exp_path = '/redfish/v1/SessionService' + self.assertEqual('1.0.2', self.sess_serv_inst.redfish_version) + self.assertEqual('SessionService', self.sess_serv_inst.identity) + self.assertEqual('Session Service', self.sess_serv_inst.name) + 
self.assertEqual(30, self.sess_serv_inst.session_timeout) + self.assertEqual(exp_path, self.sess_serv_inst.path) + self.assertIsNone(self.sess_serv_inst._sessions) + + def test__parse_attributes_missing_timeout(self): + self.sess_serv_inst.json.pop('SessionTimeout') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute SessionTimeout', + self.sess_serv_inst._parse_attributes()) + + def test__get_sessions_collection_path(self): + self.sess_serv_inst.json.pop('Sessions') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Sessions', + self.sess_serv_inst._get_sessions_collection_path) + + @mock.patch.object(session, 'SessionCollection', autospec=True) + def test_session_collection(self, mock_sess_col): + self.sess_serv_inst.sessions + mock_sess_col.assert_called_once_with( + self.sess_serv_inst._conn, + '/redfish/v1/SessionService/Sessions', + redfish_version=self.sess_serv_inst.redfish_version) + + def test_create_session(self): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json', 'r') as f: + self.conn.post.return_value.headers = json.loads(f.read()) + + session_key, session_uri = ( + self.sess_serv_inst.create_session('foo', 'secret')) + self.assertEqual('adc530e2016a0ea98c76c087f0e4b76f', session_key) + self.assertEqual( + '/redfish/v1/SessionService/Sessions/151edd65d41c0b89', + session_uri) + + def test_create_session_unknown_path(self): + del self.sess_serv_inst.json['Sessions'] + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json', 'r') as f: + self.conn.post.return_value.headers = json.loads(f.read()) + + session_key, session_uri = ( + self.sess_serv_inst.create_session('foo', 'secret')) + self.assertEqual('adc530e2016a0ea98c76c087f0e4b76f', session_key) + self.assertEqual( + '/redfish/v1/SessionService/Sessions/151edd65d41c0b89', + session_uri) + uri = self.sess_serv_inst.path + '/Sessions' + data = {'UserName': 'foo', 'Password': 'secret'} + headers = 
{'X-Auth-Token': None} + self.conn.post.assert_called_once_with(uri, + data=data, + headers=headers) + + def test_create_session_missing_x_auth_token(self): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json', 'r') as f: + self.conn.post.return_value.headers = json.loads(f.read()) + + self.conn.post.return_value.headers.pop('X-Auth-Token') + self.assertRaisesRegex( + exceptions.MissingXAuthToken, 'No X-Auth-Token returned', + self.sess_serv_inst.create_session, 'foo', 'bar') + + @mock.patch.object(sessionservice, 'LOG', autospec=True) + def test_create_session_missing_location(self, mock_LOG): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json', 'r') as f: + self.conn.post.return_value.headers = json.loads(f.read()) + + self.conn.post.return_value.headers.pop('Location') + self.sess_serv_inst.create_session('foo', 'bar') + self.assertTrue(mock_LOG.warning.called) + + def _setUp_sessions(self): + self.conn.get.return_value.json.reset_mock() + successive_return_values = [] + with open('sushy/tests/unit/json_samples/session.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + self.conn.get.return_value.json.side_effect = successive_return_values + + def test_sessions(self): + # check for the underneath variable value + self.assertIsNone(self.sess_serv_inst._sessions) + # | GIVEN | + self._setUp_sessions() + # | WHEN | + actual_sessions = self.sess_serv_inst.sessions + # | THEN | + self.assertIsInstance(actual_sessions, session.SessionCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_sessions, self.sess_serv_inst.sessions) + self.conn.get.return_value.json.assert_not_called() + + def test_sessions_on_refresh(self): + # | GIVEN | + self._setUp_sessions() + # | WHEN & THEN | + 
self.assertIsInstance(self.sess_serv_inst.sessions, + session.SessionCollection) + + self.conn.get.return_value.json.side_effect = None + # On refreshing the sess_serv_inst instance... + with open('sushy/tests/unit/json_samples/session.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + self.sess_serv_inst.refresh(force=True) + + # | WHEN & THEN | + self.assertIsNotNone(self.sess_serv_inst._sessions) + self.assertFalse(self.sess_serv_inst._sessions._is_stale) + + def test_close_session(self): + self.sess_serv_inst.close_session('session/identity') + self.conn.delete.assert_called_once_with('session/identity') diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py new file mode 100644 index 0000000..9e5d661 --- /dev/null +++ b/sushy/tests/unit/test_auth.py @@ -0,0 +1,325 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import mock + +from sushy import auth +from sushy import connector +from sushy import exceptions +from sushy import main +from sushy.tests.unit import base + + +class BasicAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', autospec=True) + def setUp(self, mock_connector, mock_root): + super(BasicAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.base_auth = auth.BasicAuth(self.username, + self.password) + self.conn = mock_connector.return_value + self.root = mock_root.return_value + + def test_init(self): + self.assertEqual(self.username, + self.base_auth._username) + self.assertEqual(self.password, + self.base_auth._password) + self.assertIsNone(self.base_auth._root_resource) + self.assertIsNone(self.base_auth._connector) + + def test_set_context(self): + self.base_auth.set_context(self.root, self.conn) + self.assertEqual(self.base_auth._root_resource, + self.root) + self.assertEqual(self.base_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.base_auth.authenticate) + + def test__do_authenticate(self): + self.base_auth.set_context(self.root, self.conn) + self.base_auth.authenticate() + self.conn.set_http_basic_auth.assert_called_once_with(self.username, + self.password) + + def test_can_refresh_session(self): + self.assertFalse(self.base_auth.can_refresh_session()) + + +class SessionAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', autospec=True) + def setUp(self, mock_connector, mock_root): + super(SessionAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.sess_key = 'TestingKey' + self.sess_uri = ('https://testing:8000/redfish/v1/' + 'SessionService/Sessions/testing') + self.sess_auth = auth.SessionAuth(self.username, + self.password) + 
self.conn = mock_connector.return_value + self.root = mock_root.return_value + + def test_init(self): + self.assertEqual(self.username, + self.sess_auth._username) + self.assertEqual(self.password, + self.sess_auth._password) + self.assertIsNone(self.sess_auth._root_resource) + self.assertIsNone(self.sess_auth._connector) + self.assertIsNone(self.sess_auth._session_key) + self.assertIsNone(self.sess_auth._session_resource_id) + + def test_get_session_key(self): + self.sess_auth._session_key = self.sess_key + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + + def test_get_session_resource_id(self): + self.sess_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + + def test_reset_session_attrs(self): + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.sess_auth.reset_session_attrs() + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + + def test_set_context(self): + self.sess_auth.set_context(self.root, self.conn) + self.assertEqual(self.sess_auth._root_resource, + self.root) + self.assertEqual(self.sess_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.sess_auth.authenticate) + + def test__do_authenticate(self): + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.authenticate() + self.assertEqual(self.sess_uri, + 
self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_can_refresh_session(self): + self.assertTrue(self.sess_auth.can_refresh_session()) + + def test_refresh(self): + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.refresh_session() + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_close_do_nothing(self): + self.sess_auth._session_key = None + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + self.conn.delete.assert_not_called() + + def test_close(self): + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + self.conn.delete.assert_called_once_with(self.sess_uri) + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + + @mock.patch.object(auth, 'LOG', autospec=True) + def test_close_fail(self, mock_LOG): + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.conn.delete.side_effect = ( + exceptions.ServerSideError( + 'DELETE', 'any_url', mock.MagicMock())) + + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + + self.assertTrue(mock_LOG.warning.called) + self.assertIsNone(self.sess_auth.get_session_resource_id()) + 
self.assertIsNone(self.sess_auth.get_session_key()) + + +class SessionOrBasicAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', autospec=True) + def setUp(self, mock_connector, mock_root): + super(SessionOrBasicAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.sess_key = 'TestingKey' + self.sess_uri = ('https://testing:8000/redfish/v1/' + 'SessionService/Sessions/testing') + self.conn = mock_connector.return_value + self.root = mock_root.return_value + self.sess_basic_auth = auth.SessionOrBasicAuth(self.username, + self.password) + + def test_init(self): + self.assertEqual(self.username, + self.sess_basic_auth._username) + self.assertEqual(self.password, + self.sess_basic_auth._password) + self.assertIsNone(self.sess_basic_auth._root_resource) + self.assertIsNone(self.sess_basic_auth._connector) + self.assertIsNone(self.sess_basic_auth._session_key) + self.assertIsNone(self.sess_basic_auth._session_resource_id) + + def test_get_session_key(self): + self.sess_basic_auth._session_key = self.sess_key + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + + def test_get_session_resource_id(self): + self.sess_basic_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + + def test_reset_session_attrs(self): + self.sess_basic_auth._session_key = self.sess_key + self.sess_basic_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.sess_basic_auth.reset_session_attrs() + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + + def test_set_context(self): + self.sess_basic_auth.set_context(self.root, self.conn) + 
self.assertEqual(self.sess_basic_auth._root_resource, + self.root) + self.assertEqual(self.sess_basic_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.sess_basic_auth.authenticate) + + def test__do_authenticate(self): + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test__do_authenticate_for_basic_auth(self): + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.side_effect = exceptions.SushyError + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.conn.set_http_basic_auth.assert_called_once_with( + self.username, self.password) + + def test_can_refresh_session(self): + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + + self.assertTrue(self.sess_basic_auth.can_refresh_session()) + + def test_refresh_no_previous_session(self): + 
self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.refresh_session() + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.conn.set_http_session_auth.assert_not_called() + self.conn.set_http_basic_auth.assert_not_called() + + def test_refresh_previous_session_exists(self): + self.sess_basic_auth._session_key = 'ThisisFirstKey' + test_url = ('https://testing:8000/redfish/v1/SessionService' + '/Sessions/testingfirst') + self.sess_basic_auth._session_resource_id = test_url + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.refresh_session() + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_close_do_nothing(self): + self.conn.delete.assert_not_called() + + def test_close(self): + self.sess_basic_auth._session_key = self.sess_key + self.sess_basic_auth._session_resource_id = self.sess_uri + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.close() + self.conn.delete.assert_called_once_with(self.sess_uri) + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index c1215c8..ebbf8eb 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -19,6 +19,7 @@ import mock import requests from six.moves import http_client +from sushy import 
auth as sushy_auth from sushy import connector from sushy import exceptions from sushy.tests.unit import base @@ -26,11 +27,13 @@ from sushy.tests.unit import base class ConnectorMethodsTestCase(base.TestCase): - def setUp(self): + @mock.patch.object(sushy_auth, 'SessionOrBasicAuth', autospec=True) + def setUp(self, mock_auth): + mock_auth.get_session_key.return_value = None super(ConnectorMethodsTestCase, self).setUp() self.conn = connector.Connector( - 'http://foo.bar:1234', username='user', - password='pass', verify=True) + 'http://foo.bar:1234', verify=True) + self.conn._auth = mock_auth self.data = {'fake': 'data'} self.headers = {'X-Fake': 'header'} @@ -69,14 +72,39 @@ class ConnectorMethodsTestCase(base.TestCase): mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', self.data, self.headers) + def test_set_auth(self): + mock_auth = mock.MagicMock() + self.conn.set_auth(mock_auth) + self.assertEqual(mock_auth, self.conn._auth) + + def test_set_http_basic_auth(self): + self.conn.set_http_basic_auth('foo', 'secret') + self.assertEqual(('foo', 'secret'), self.conn._session.auth) + + def test_set_http_session_auth(self): + self.conn.set_http_session_auth('hash-token') + self.assertTrue('X-Auth-Token' in self.conn._session.headers) + self.assertEqual( + 'hash-token', self.conn._session.headers['X-Auth-Token']) + + def test_close(self): + session = mock.Mock(spec=requests.Session) + self.conn._session = session + self.conn.close() + session.close.assert_called_once_with() + class ConnectorOpTestCase(base.TestCase): - def setUp(self): + @mock.patch.object(sushy_auth, 'SessionOrBasicAuth', autospec=True) + def setUp(self, mock_auth): + mock_auth.get_session_key.return_value = None + mock_auth._session_key = None + self.auth = mock_auth super(ConnectorOpTestCase, self).setUp() self.conn = connector.Connector( - 'http://foo.bar:1234', username='user', - password='pass', verify=True) + 'http://foo.bar:1234', verify=True) + self.conn._auth = mock_auth 
self.data = {'fake': 'data'} self.headers = {'X-Fake': 'header'} self.session = mock.Mock(spec=requests.Session) @@ -120,6 +148,55 @@ class ConnectorOpTestCase(base.TestCase): 'DELETE', 'http://foo.bar:1234/fake/path', data=None, headers=expected_headers) + def test_ok_post_with_session(self): + self.conn._session.headers = {} + self.conn._session.headers['X-Auth-Token'] = 'asdf1234' + expected_headers = self.headers.copy() + expected_headers['Content-Type'] = 'application/json' + + self.conn._op('POST', path='fake/path', data=self.data, + headers=self.headers) + self.request.assert_called_once_with( + 'POST', 'http://foo.bar:1234/fake/path', + data=json.dumps(self.data), headers=expected_headers) + self.assertEqual(self.conn._session.headers, + {'X-Auth-Token': 'asdf1234'}) + + def test_timed_out_session_unable_to_create_session(self): + self.conn._auth.can_refresh_session.return_value = False + expected_headers = self.headers.copy() + expected_headers['Content-Type'] = 'application/json' + self.conn._session = self.session + self.request = self.session.request + self.request.return_value.status_code = http_client.FORBIDDEN + self.request.return_value.json.side_effect = ValueError('no json') + with self.assertRaisesRegex(exceptions.AccessError, + 'unknown error') as ae: + self.conn._op('POST', path='fake/path', data=self.data, + headers=self.headers) + exc = ae.exception + self.assertEqual(http_client.FORBIDDEN, exc.status_code) + + def test_timed_out_session_re_established(self): + self.auth._session_key = 'asdf1234' + self.auth.get_session_key.return_value = 'asdf1234' + self.conn._auth = self.auth + self.session = mock.Mock(spec=requests.Session) + self.conn._session = self.session + self.request = self.session.request + first_expected_headers = self.headers.copy() + first_expected_headers['Content-Type'] = 'application/json' + first_response = mock.Mock() + first_response.status_code = http_client.FORBIDDEN + second_response = mock.Mock() + 
second_response.status_code = http_client.OK + second_response.json = {'Test': 'Testing'} + self.request.side_effect = [first_response, second_response] + response = self.conn._op('POST', path='fake/path', data=self.data, + headers=self.headers) + self.auth.refresh_session.assert_called_with() + self.assertEqual(response.json, second_response.json) + def test_connection_error(self): self.request.side_effect = requests.exceptions.ConnectionError self.assertRaises(exceptions.ConnectionError, self.conn._op, 'GET') @@ -171,6 +248,8 @@ class ConnectorOpTestCase(base.TestCase): self.assertEqual(http_client.INTERNAL_SERVER_ERROR, exc.status_code) def test_access_error(self): + self.conn._auth.can_refresh_session.return_value = False + self.request.return_value.status_code = http_client.FORBIDDEN self.request.return_value.json.side_effect = ValueError('no json') diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 6f300be..95a65c1 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -17,27 +17,34 @@ import json import mock +from sushy import auth from sushy import connector from sushy import main from sushy.resources.manager import manager +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system from sushy.tests.unit import base class MainTestCase(base.TestCase): + @mock.patch.object(auth, 'SessionOrBasicAuth', autospec=True) @mock.patch.object(connector, 'Connector', autospec=True) - def setUp(self, mock_connector): + @mock.patch.object(sessionservice, 'SessionService', autospec=True) + def setUp(self, mock_session_service, mock_connector, mock_auth): super(MainTestCase, self).setUp() self.conn = mock.Mock() + self.sess_serv = mock.Mock() + self.sess_serv.create_session.return_value = (None, None) + mock_session_service.return_value = self.sess_serv mock_connector.return_value = self.conn with 
open('sushy/tests/unit/json_samples/root.json', 'r') as f: self.conn.get.return_value.json.return_value = json.loads(f.read()) - self.root = main.Sushy( - 'http://foo.bar:1234', username='foo', password='bar', - verify=True) + self.root = main.Sushy('http://foo.bar:1234', + verify=True, auth=mock_auth) mock_connector.assert_called_once_with( - 'http://foo.bar:1234', 'foo', 'bar', True) + 'http://foo.bar:1234', True) def test__parse_attributes(self): self.root._parse_attributes() @@ -48,6 +55,14 @@ class MainTestCase(base.TestCase): self.root.uuid) self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) + self.assertEqual('/redfish/v1/SessionService', + self.root._session_service_path) + + @mock.patch.object(connector, 'Connector', autospec=True) + def test__init_throws_exception(self, mock_Connector): + self.assertRaises( + ValueError, main.Sushy, 'http://foo.bar:1234', + 'foo', 'bar', auth=mock.MagicMock()) @mock.patch.object(system, 'SystemCollection', autospec=True) def test_get_system_collection(self, mock_system_collection): @@ -76,3 +91,17 @@ class MainTestCase(base.TestCase): Manager_mock.assert_called_once_with( self.root._conn, 'fake-manager-id', redfish_version=self.root.redfish_version) + + @mock.patch.object(sessionservice, 'SessionService', autospec=True) + def test_get_sessionservice(self, mock_sess_serv): + self.root.get_session_service() + mock_sess_serv.assert_called_once_with( + self.root._conn, '/redfish/v1/SessionService', + redfish_version=self.root.redfish_version) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_session(self, mock_sess): + self.root.get_session('asdf') + mock_sess.assert_called_once_with( + self.root._conn, 'asdf', + redfish_version=self.root.redfish_version) -- GitLab From 90527433f387e2aa21af3a3852bd6cdf0712d3dc Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Thu, 18 Jan 2018 03:35:11 +0000 Subject: [PATCH 
029/303] Updated from global requirements Change-Id: I8b309a9b0f32d9364e18f8b4492edb524daaaf7a --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 179c311..fd3ffa3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,7 @@ hacking>=1.0.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -sphinx>=1.6.2 # BSD +sphinx!=1.6.6,>=1.6.2 # BSD openstackdocstheme>=1.17.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD -- GitLab From 15f00a0347900afb85d62ee8d20ac2623db3609a Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Thu, 18 Jan 2018 03:36:58 -0500 Subject: [PATCH 030/303] Add support to accept custom connector object There are libraries which use Sushy to connect to Redfish based systems. At times there arises some need for those extension libraries to pass on their custom made Connector objects which can extend the functionality of Sushy connector. For instance, refer [0]. Hence, adding the support to accept custom connector object in Sushy's constructor. 
[0] https://github.com/openstack/proliantutils/blob/master/proliantutils/redfish/connector.py Change-Id: I220b44b2c5a65b0ee410097594a5a87dd87e4753 --- ...dd-custom-connector-support-0a49c6649d5f7eaf.yaml | 5 +++++ sushy/main.py | 7 ++++--- sushy/tests/unit/test_main.py | 12 ++++++++++++ 3 files changed, 21 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml diff --git a/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml b/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml new file mode 100644 index 0000000..881de7c --- /dev/null +++ b/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the ability to specify user-defined connector object on creation + of a root Sushy instance. diff --git a/sushy/main.py b/sushy/main.py index bff147c..ffe29be 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -15,7 +15,7 @@ import logging from sushy import auth as sushy_auth -from sushy import connector +from sushy import connector as sushy_connector from sushy.resources import base from sushy.resources.manager import manager from sushy.resources.sessionservice import session @@ -48,7 +48,7 @@ class Sushy(base.ResourceBase): def __init__(self, base_url, username=None, password=None, root_prefix='/redfish/v1/', verify=True, - auth=None): + auth=None, connector=None): """A class representing a RootService :param base_url: The base URL to the Redfish controller. It @@ -66,6 +66,7 @@ class Sushy(base.ResourceBase): a path the driver will use the specified certificate or one of the certificates in the directory. Defaults to True. :param auth: An authentication mechanism to utilize. + :param connector: A user-defined connector object. Defaults to None. 
""" self._root_prefix = root_prefix if (auth is not None and (password is not None or @@ -78,7 +79,7 @@ class Sushy(base.ResourceBase): password=password) super(Sushy, self).__init__( - connector.Connector(base_url, verify), + connector or sushy_connector.Connector(base_url, verify), path=self._root_prefix) self._auth = auth self._auth.set_context(self, self._conn) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 95a65c1..293e930 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -64,6 +64,18 @@ class MainTestCase(base.TestCase): ValueError, main.Sushy, 'http://foo.bar:1234', 'foo', 'bar', auth=mock.MagicMock()) + @mock.patch.object(connector, 'Connector', autospec=True) + def test_custom_connector(self, mock_Sushy_Connector): + connector_mock = mock.MagicMock() + with open('sushy/tests/unit/json_samples/root.json', 'r') as f: + connector_mock.get.return_value.json.return_value = ( + json.loads(f.read())) + main.Sushy('http://foo.bar:1234', 'foo', 'bar', + connector=connector_mock) + self.assertTrue(connector_mock.post.called) + self.assertTrue(connector_mock.get.called) + self.assertFalse(mock_Sushy_Connector.called) + @mock.patch.object(system, 'SystemCollection', autospec=True) def test_get_system_collection(self, mock_system_collection): self.root.get_system_collection() -- GitLab From 7b6f37f73a83bf92a458489ba2052e499abfa340 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 18 Jan 2018 18:10:26 +0100 Subject: [PATCH 031/303] Restore interface of Connector It was changed in a backwards incompatible way. This change corrects it and issues a deprecation warning on using old arguments. 
Change-Id: I991ddb50818fd84a4e707204c86d50d3a2b553c8 --- sushy/connector.py | 8 +++++++- sushy/tests/unit/test_connector.py | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/sushy/connector.py b/sushy/connector.py index 84474d4..d507f68 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -26,11 +26,17 @@ LOG = logging.getLogger(__name__) class Connector(object): - def __init__(self, url, verify=True): + def __init__(self, url, username=None, password=None, verify=True): self._url = url self._verify = verify self._session = requests.Session() self._session.verify = self._verify + if username or password: + LOG.warning('Passing username and password to Connector is ' + 'deprecated. Authentication is passed through ' + 'set_auth now, support for these arguments will ' + 'be removed in the future') + self.set_http_basic_auth(username, password) def set_auth(self, auth): """Sets the authentication mechanism for our connector.""" diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index ebbf8eb..a46ecfa 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -37,6 +37,12 @@ class ConnectorMethodsTestCase(base.TestCase): self.data = {'fake': 'data'} self.headers = {'X-Fake': 'header'} + def test_init_with_credentials(self): + conn = connector.Connector('http://foo.bar:1234', + username='admin', + password='password') + self.assertEqual(conn._session.auth, ('admin', 'password')) + @mock.patch.object(connector.Connector, '_op', autospec=True) def test_get(self, mock__op): self.conn.get(path='fake/path', data=self.data.copy(), -- GitLab From ecb9814aed477f14b474ef76ac68237c0db9413d Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 18 Jan 2018 18:01:56 +0100 Subject: [PATCH 032/303] Restore the default semantics of refresh() It's fair to expect refresh() to actually refresh the resource, and that's how it works in the previous release. 
This change flips the default value of the newly introduced "force" argument to keep this behavior. Change-Id: Ib50d7e52b76899bb6eb1ef1c5096bcb6f7a477a6 --- doc/source/reference/usage.rst | 26 ++++++++----------- ...ine-resource-refresh-86c21ce230967251.yaml | 8 +++--- sushy/resources/base.py | 7 ++--- .../unit/resources/system/test_system.py | 2 +- sushy/tests/unit/resources/test_base.py | 8 +++--- 5 files changed, 24 insertions(+), 27 deletions(-) diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index e59376b..66ac552 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -95,13 +95,7 @@ Creating and using a sushy system object # Refresh the system collection object # - # In order to reload a resource post its initialization it has to be marked - # as stale (i.e. invoking 'invalidate()') first and then 'refresh()' has to - # be called. This will only reload the resource w/o reloading/refreshing its - # sub-resources (lazy-refresh of sub-resources). - # Note that calling 'refresh()' only, i.e. w/o calling 'invalidate()' first, - # will be a no-op wrt resource reload in this case. - sys_col.invalidate() + # See below for more options on how to refresh resources. sys_col.refresh() @@ -115,14 +109,16 @@ Creating and using a sushy system object print(sys_inst.get_allowed_reset_system_values()) # Refresh the system object (with all its sub-resources) - # - # Alternatively, this is the other way of reloading a resource object: - # The resource can be reloaded w/o the need of marking it stale - # (i.e. not invoking 'invalidate()'). It is achieved when the "force" - # argument of 'refresh()' method is set to True. Do note that the - # sub-resources of the resource being reloaded will also get reloaded - # (greedy-refresh of sub-resources) when this mode is adopted. 
- sys_inst.refresh(force=True) + sys_inst.refresh() + + # Alternatively, you can only refresh the resource if it is stale by passing + # force=False: + sys_inst.refresh(force=False) + + # A resource can be marked stale by calling invalidate. Note that its + # subresources won't be marked as stale, and thus they won't be refreshed by + # a call to refresh(force=False) + sys_inst.invalidate() # Get the current power state print(sys_inst.power_state) diff --git a/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml index 9005ae5..f09eeac 100644 --- a/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml +++ b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml @@ -1,6 +1,6 @@ --- -fixes: +features: - | - The library now supports reloading of the attributes by invoking - ``refresh()`` method for nested resources in contrast to recreation. - Resources can now be marked stale by invoking ``invalidate()``. + New ``force`` argument to the ``refresh`` method on resources can be set to + ``False`` to prevent refreshing of resources that are not stale. Resources + can be marked as stale by calling a new ``invalidate`` method. diff --git a/sushy/resources/base.py b/sushy/resources/base.py index f04cf61..43d053f 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -244,7 +244,7 @@ class ResourceBase(object): # Hide the Field object behind the real value setattr(self, attr, field._load(self.json, self)) - def refresh(self, force=False): + def refresh(self, force=True): """Refresh the resource Freshly retrieves/fetches the resource attributes and invokes @@ -254,8 +254,9 @@ class ResourceBase(object): in ``_do_refresh()`` method, if needed. This method represents the template method in the paradigm of Template design pattern. - :param force: will force refresh the resource and its sub-resources, - if set to True. 
+ :param force: if set to False, will only refresh if the resource is + marked as stale, otherwise neither it nor its subresources will + be refreshed. :raises: ResourceNotFoundError :raises: ConnectionError :raises: HTTPError diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index f5ed6b3..20007bf 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -293,7 +293,7 @@ class SystemTestCase(base.TestCase): self.conn.get.return_value.json.return_value = json.loads(f.read()) self.sys_inst.invalidate() - self.sys_inst.refresh() + self.sys_inst.refresh(force=False) # | WHEN & THEN | self.assertIsNotNone(self.sys_inst._processors) diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 9353cf7..8f92ba7 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -40,19 +40,19 @@ class ResourceBaseTestCase(base.TestCase): # refresh() is called in the constructor self.conn.reset_mock() - def test_refresh(self): - self.base_resource.refresh() + def test_refresh_no_force(self): + self.base_resource.refresh(force=False) self.conn.get.assert_not_called() def test_refresh_force(self): - self.base_resource.refresh(force=True) + self.base_resource.refresh() self.conn.get.assert_called_once_with(path='/Foo') def test_invalidate(self): self.base_resource.invalidate() self.conn.get.assert_not_called() - self.base_resource.refresh() + self.base_resource.refresh(force=False) self.conn.get.assert_called_once_with(path='/Foo') def test_invalidate_force_refresh(self): -- GitLab From bbe729c53d33e46eff9a80332f7eaa0d52fca6c6 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 18 Jan 2018 20:26:13 +0000 Subject: [PATCH 033/303] Update .gitreview for stable/queens Change-Id: I30df267ed7a49b37c1b57eb6bc99ceee7d9c36d5 --- .gitreview | 1 + 1 file changed, 1 insertion(+) 
diff --git a/.gitreview b/.gitreview index 6ec9ed8..6279292 100644 --- a/.gitreview +++ b/.gitreview @@ -2,3 +2,4 @@ host=review.openstack.org port=29418 project=openstack/sushy.git +defaultbranch=stable/queens -- GitLab From e2beefce05955f02938b638e6a1076cd809b19fb Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 18 Jan 2018 20:26:16 +0000 Subject: [PATCH 034/303] Update UPPER_CONSTRAINTS_FILE for stable/queens Change-Id: Ie85e5316ceb09836fff535c56f6ff41afaa22791 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 5f05dd9..a458c85 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ setenv = PYTHONWARNINGS=default::DeprecationWarning install_command = pip install {opts} {packages} deps = - -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} + -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt?h=stable/queens} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = python setup.py test --slowest --testr-args='{posargs}' -- GitLab From a5294e19cb1cdae384d8e0a6fb07e52bc57a9f44 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 18 Jan 2018 20:26:19 +0000 Subject: [PATCH 035/303] Update reno for stable/queens Change-Id: I005f8953d365eb4412b456ef10d5815238ab01c7 --- releasenotes/source/index.rst | 1 + releasenotes/source/queens.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/queens.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index 4205f91..6e99bf6 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,4 +6,5 @@ :maxdepth: 1 unreleased + queens pike diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst new file mode 100644 index 0000000..36ac616 --- /dev/null +++ b/releasenotes/source/queens.rst @@ -0,0 +1,6 @@ 
+=================================== + Queens Series Release Notes +=================================== + +.. release-notes:: + :branch: stable/queens -- GitLab From 8bfa4efeb4eee27d6b90b93b35f59f23a9fc34e1 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Wed, 24 Jan 2018 01:36:47 +0000 Subject: [PATCH 036/303] Updated from global requirements Change-Id: I6dc688846675dd2cd73ceddfb4ade84e412b386e --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index fd3ffa3..55ac42e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,7 +7,7 @@ hacking>=1.0.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD sphinx!=1.6.6,>=1.6.2 # BSD -openstackdocstheme>=1.17.0 # Apache-2.0 +openstackdocstheme>=1.18.1 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD -- GitLab From d03a70f70075879645525a9c30e96ab84d1e45e5 Mon Sep 17 00:00:00 2001 From: Yusef Shaban Date: Fri, 19 Jan 2018 12:33:13 -0700 Subject: [PATCH 037/303] Restores sushy session functionality. This patch restores sushy session functionality to the SessionOrBasicAuth by moving can_refresh_session to its parent SessionAuth. Also fixed verify being ignored. 
Closes-Bug: 1744378 Change-Id: Ia9ce0fe965f13e2d7f7634957306fa4ce6746b39 --- sushy/auth.py | 8 ++------ sushy/main.py | 4 ++-- sushy/tests/unit/test_auth.py | 8 ++++++++ sushy/tests/unit/test_main.py | 2 +- 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/sushy/auth.py b/sushy/auth.py index 22e7a48..db65fc2 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -156,7 +156,8 @@ class SessionAuth(AuthBase): def can_refresh_session(self): """Method to assert if session based refresh can be done.""" - return True + return (self._session_key is not None and + self._session_resource_id is not None) def refresh_session(self): """Method to refresh a session to a Redfish controller. @@ -223,11 +224,6 @@ class SessionOrBasicAuth(SessionAuth): self.basic_auth.set_context(self._root_resource, self._connector) self.basic_auth.authenticate() - def can_refresh_session(self): - """Method to assert if session based refresh can be done.""" - return (self._session_key is not None and - self._session_resource_id is not None) - def refresh_session(self): """Method to refresh a session to a Redfish controller. 
diff --git a/sushy/main.py b/sushy/main.py index ffe29be..ec17822 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -74,12 +74,12 @@ class Sushy(base.ResourceBase): msg = ('Username or Password were provided to Sushy ' 'when an authentication mechanism was specified.') raise ValueError(msg) - else: + if auth is None: auth = sushy_auth.SessionOrBasicAuth(username=username, password=password) super(Sushy, self).__init__( - connector or sushy_connector.Connector(base_url, verify), + connector or sushy_connector.Connector(base_url, verify=verify), path=self._root_prefix) self._auth = auth self._auth.set_context(self, self._conn) diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py index 9e5d661..eb0405d 100644 --- a/sushy/tests/unit/test_auth.py +++ b/sushy/tests/unit/test_auth.py @@ -138,6 +138,14 @@ class SessionAuthTestCase(base.TestCase): self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test_can_refresh_session(self): + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.authenticate() + self.assertTrue(self.sess_auth.can_refresh_session()) def test_refresh(self): diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 293e930..362cbb4 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -44,7 +44,7 @@ class MainTestCase(base.TestCase): self.root = main.Sushy('http://foo.bar:1234', verify=True, auth=mock_auth) mock_connector.assert_called_once_with( - 'http://foo.bar:1234', True) + 'http://foo.bar:1234', verify=True) def test__parse_attributes(self): self.root._parse_attributes() -- GitLab From 89505c0c06de4e6972ced2b183bb896d643ad2c1 Mon Sep 17 00:00:00 2001 From: "James E. 
Blair" Date: Wed, 24 Jan 2018 16:37:41 -0800 Subject: [PATCH 038/303] Zuul: Remove project name Zuul no longer requires the project-name for in-repo configuration. Omitting it makes forking or renaming projects easier. Change-Id: I398fac925a844025027994d808f9d958089ec0a6 --- zuul.d/project.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 67eed03..c86dfa3 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -1,5 +1,4 @@ - project: - name: openstack/sushy check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src -- GitLab From 86933af0b1b6650a9cd34af90af85e5cc339bfa2 Mon Sep 17 00:00:00 2001 From: "James E. Blair" Date: Wed, 24 Jan 2018 16:37:41 -0800 Subject: [PATCH 039/303] Zuul: Remove project name Zuul no longer requires the project-name for in-repo configuration. Omitting it makes forking or renaming projects easier. Change-Id: Ide4c7852b384d0c981bf3ad0a7dd928d17b5974b --- zuul.d/project.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 67eed03..c86dfa3 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -1,5 +1,4 @@ - project: - name: openstack/sushy check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src -- GitLab From d817543ec4100503c07059bf70cfe4d719bdeacf Mon Sep 17 00:00:00 2001 From: Yusef Shaban Date: Fri, 19 Jan 2018 12:33:13 -0700 Subject: [PATCH 040/303] Restores sushy session functionality. This patch restores sushy session functionality to the SessionOrBasicAuth by moving can_refresh_session to its parent SessionAuth. Also fixed verify being ignored. 
Closes-Bug: 1744378 Change-Id: Ia9ce0fe965f13e2d7f7634957306fa4ce6746b39 (cherry picked from commit d03a70f70075879645525a9c30e96ab84d1e45e5) --- sushy/auth.py | 8 ++------ sushy/main.py | 4 ++-- sushy/tests/unit/test_auth.py | 8 ++++++++ sushy/tests/unit/test_main.py | 2 +- 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/sushy/auth.py b/sushy/auth.py index 22e7a48..db65fc2 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -156,7 +156,8 @@ class SessionAuth(AuthBase): def can_refresh_session(self): """Method to assert if session based refresh can be done.""" - return True + return (self._session_key is not None and + self._session_resource_id is not None) def refresh_session(self): """Method to refresh a session to a Redfish controller. @@ -223,11 +224,6 @@ class SessionOrBasicAuth(SessionAuth): self.basic_auth.set_context(self._root_resource, self._connector) self.basic_auth.authenticate() - def can_refresh_session(self): - """Method to assert if session based refresh can be done.""" - return (self._session_key is not None and - self._session_resource_id is not None) - def refresh_session(self): """Method to refresh a session to a Redfish controller. 
diff --git a/sushy/main.py b/sushy/main.py index ffe29be..ec17822 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -74,12 +74,12 @@ class Sushy(base.ResourceBase): msg = ('Username or Password were provided to Sushy ' 'when an authentication mechanism was specified.') raise ValueError(msg) - else: + if auth is None: auth = sushy_auth.SessionOrBasicAuth(username=username, password=password) super(Sushy, self).__init__( - connector or sushy_connector.Connector(base_url, verify), + connector or sushy_connector.Connector(base_url, verify=verify), path=self._root_prefix) self._auth = auth self._auth.set_context(self, self._conn) diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py index 9e5d661..eb0405d 100644 --- a/sushy/tests/unit/test_auth.py +++ b/sushy/tests/unit/test_auth.py @@ -138,6 +138,14 @@ class SessionAuthTestCase(base.TestCase): self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) def test_can_refresh_session(self): + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.authenticate() + self.assertTrue(self.sess_auth.can_refresh_session()) def test_refresh(self): diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 293e930..362cbb4 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -44,7 +44,7 @@ class MainTestCase(base.TestCase): self.root = main.Sushy('http://foo.bar:1234', verify=True, auth=mock_auth) mock_connector.assert_called_once_with( - 'http://foo.bar:1234', True) + 'http://foo.bar:1234', verify=True) def test__parse_attributes(self): self.root._parse_attributes() -- GitLab From aff7dda8770cc303a3dbcda477ad633509d819df Mon Sep 17 00:00:00 2001 From: melissaml Date: Tue, 6 Feb 2018 00:58:23 +0800 Subject: [PATCH 041/303] fix error url Change-Id: 
Icf3e095e7f035f040a1d6ebaa76f3eff59ded1ad --- HACKING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HACKING.rst b/HACKING.rst index c001f18..9660e99 100644 --- a/HACKING.rst +++ b/HACKING.rst @@ -1,4 +1,4 @@ Sushy Style Commandments ======================== -Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ +Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ -- GitLab From e694292116b8a796ad9dee0fa12e8cad3da393b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Mon, 12 Feb 2018 10:27:40 +0100 Subject: [PATCH 042/303] d/control: Set Vcs-* to salsa.debian.org --- debian/changelog | 6 ++++++ debian/control | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index cb5b485..f128bbd 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (1.1.0-2) UNRELEASED; urgency=medium + + * d/control: Set Vcs-* to salsa.debian.org + + -- Ondřej Nový Mon, 12 Feb 2018 10:27:40 +0100 + python-sushy (1.1.0-1) unstable; urgency=medium * Initial release. 
(Closes: #879968) diff --git a/debian/control b/debian/control index 1c9ffed..4d46f31 100644 --- a/debian/control +++ b/debian/control @@ -32,8 +32,8 @@ Build-Depends-Indep: subunit, testrepository, Standards-Version: 4.1.1 -Vcs-Browser: https://anonscm.debian.org/cgit/openstack/python-sushy.git/ -Vcs-Git: https://anonscm.debian.org/git/openstack/python-sushy.git +Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy +Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy Package: python-sushy -- GitLab From fc1d68fe367f4b788043385f82dfdef1e6777031 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Tue, 13 Feb 2018 14:41:52 +0100 Subject: [PATCH 043/303] d/copyright: Use https in Format --- debian/changelog | 1 + debian/copyright | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index f128bbd..f10a7f9 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,7 @@ python-sushy (1.1.0-2) UNRELEASED; urgency=medium * d/control: Set Vcs-* to salsa.debian.org + * d/copyright: Use https in Format -- Ondřej Nový Mon, 12 Feb 2018 10:27:40 +0100 diff --git a/debian/copyright b/debian/copyright index 3bea704..c773807 100644 --- a/debian/copyright +++ b/debian/copyright @@ -1,4 +1,4 @@ -Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: sushy Source: https://docs.openstack.org/sushy -- GitLab From 3cf5a30d37301d026ea7a3e18b6d61aab63283e9 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 18 Feb 2018 21:49:20 +0100 Subject: [PATCH 044/303] Now packaging 1.3.1 --- debian/changelog | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index f10a7f9..671bd08 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,9 +1,13 @@ -python-sushy (1.1.0-2) 
UNRELEASED; urgency=medium +python-sushy (1.3.1-1) UNRELEASED; urgency=medium + [ Ondřej Nový ] * d/control: Set Vcs-* to salsa.debian.org * d/copyright: Use https in Format - -- Ondřej Nový Mon, 12 Feb 2018 10:27:40 +0100 + [ Thomas Goirand ] + * New upstream release. + + -- Thomas Goirand Sun, 18 Feb 2018 21:49:06 +0100 python-sushy (1.1.0-1) unstable; urgency=medium -- GitLab From b37c574f90f6052d83c406c3438096450028f1ae Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 18 Feb 2018 21:51:29 +0100 Subject: [PATCH 045/303] Fixed (build-)depends for this release. --- debian/changelog | 1 + debian/control | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/debian/changelog b/debian/changelog index 671bd08..2687af8 100644 --- a/debian/changelog +++ b/debian/changelog @@ -6,6 +6,7 @@ python-sushy (1.3.1-1) UNRELEASED; urgency=medium [ Thomas Goirand ] * New upstream release. + * Fixed (build-)depends for this release. -- Thomas Goirand Sun, 18 Feb 2018 21:49:06 +0100 diff --git a/debian/control b/debian/control index 4d46f31..e4fb0e5 100644 --- a/debian/control +++ b/debian/control @@ -18,17 +18,17 @@ Build-Depends: Build-Depends-Indep: python-coverage, python-hacking, - python-openstackdocstheme (>= 1.16.0), - python-oslotest (>= 1.10.0), + python-openstackdocstheme (>= 1.17.0), + python-oslotest (>= 1:3.2.0), python-requests (>= 2.14.2), python-six, python-testscenarios, - python-testtools, - python3-oslotest (>= 1.10.0), + python-testtools (>= 2.2.0), + python3-oslotest (>= 1:3.2.0), python3-requests (>= 2.14.2), python3-six, python3-testscenarios, - python3-testtools, + python3-testtools (>= 2.2.0), subunit, testrepository, Standards-Version: 4.1.1 -- GitLab From 4062b986de8b14e5a9c4b5fc304b9c66c13c304f Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 18 Feb 2018 20:52:15 +0000 Subject: [PATCH 046/303] Releasing to experimental. 
--- debian/changelog | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 2687af8..ca7cb21 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,4 +1,4 @@ -python-sushy (1.3.1-1) UNRELEASED; urgency=medium +python-sushy (1.3.1-1) experimental; urgency=medium [ Ondřej Nový ] * d/control: Set Vcs-* to salsa.debian.org @@ -8,7 +8,7 @@ python-sushy (1.3.1-1) UNRELEASED; urgency=medium * New upstream release. * Fixed (build-)depends for this release. - -- Thomas Goirand Sun, 18 Feb 2018 21:49:06 +0100 + -- Thomas Goirand Sun, 18 Feb 2018 20:52:04 +0000 python-sushy (1.1.0-1) unstable; urgency=medium -- GitLab From 96eff9a3bdd23f4afee1e60e8736c82340dbba38 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 18 Feb 2018 20:52:38 +0000 Subject: [PATCH 047/303] Standards-Version: 4.1.3 --- debian/changelog | 1 + debian/control | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index ca7cb21..bf29ebe 100644 --- a/debian/changelog +++ b/debian/changelog @@ -7,6 +7,7 @@ python-sushy (1.3.1-1) experimental; urgency=medium [ Thomas Goirand ] * New upstream release. * Fixed (build-)depends for this release. + * Standards-Version is now 4.1.3. 
-- Thomas Goirand Sun, 18 Feb 2018 20:52:04 +0000 diff --git a/debian/control b/debian/control index e4fb0e5..bc93f61 100644 --- a/debian/control +++ b/debian/control @@ -31,7 +31,7 @@ Build-Depends-Indep: python3-testtools (>= 2.2.0), subunit, testrepository, -Standards-Version: 4.1.1 +Standards-Version: 4.1.3 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy -- GitLab From 31a02de0dd0d0dc73368e32f141c39772c208ed4 Mon Sep 17 00:00:00 2001 From: wangqi Date: Sat, 24 Feb 2018 11:32:53 +0800 Subject: [PATCH 048/303] Replace curly quotes with straight quotes Change-Id: Id0321f85a40bbb95fb54829688316a28e7fb9da8 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 5f05dd9..5d65172 100644 --- a/tox.ini +++ b/tox.ini @@ -48,7 +48,7 @@ ignore = E123,E125 # [H203] Use assertIs(Not)None to check for None. # [H204] Use assert(Not)Equal to check for equality. # [H205] Use assert(Greater|Less)(Equal) for comparison. -# [H210] Require ‘autospec’, ‘spec’, or ‘spec_set’ in mock.patch/mock.patch.object calls +# [H210] Require 'autospec', 'spec', or 'spec_set' in mock.patch/mock.patch.object calls # [H904] Delay string interpolations at logging calls. enable-extensions=H106,H203,H204,H205,H210,H904 builtins = _ -- GitLab From e5dafd5d443378b00bfe1c4441e0693168fb4063 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 25 Feb 2018 22:56:36 +0000 Subject: [PATCH 049/303] Releasing to unstable. --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index bf29ebe..a222148 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (1.3.1-2) unstable; urgency=medium + + * Uploading to unstable. 
+ + -- Thomas Goirand Sun, 25 Feb 2018 22:56:36 +0000 + python-sushy (1.3.1-1) experimental; urgency=medium [ Ondřej Nový ] -- GitLab From 311736b9d3629e65d761dde36a05285e094c6b6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Tue, 27 Feb 2018 16:40:03 +0100 Subject: [PATCH 050/303] d/control: Add trailing tilde to min version depend to allow backports --- debian/changelog | 7 +++++++ debian/control | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index a222148..f38c014 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +python-sushy (1.3.1-3) UNRELEASED; urgency=medium + + * d/control: Add trailing tilde to min version depend to allow + backports + + -- Ondřej Nový Tue, 27 Feb 2018 16:40:03 +0100 + python-sushy (1.3.1-2) unstable; urgency=medium * Uploading to unstable. diff --git a/debian/control b/debian/control index bc93f61..fd81074 100644 --- a/debian/control +++ b/debian/control @@ -5,7 +5,7 @@ Maintainer: PKG OpenStack Uploaders: Thomas Goirand , Build-Depends: - debhelper (>= 10), + debhelper (>= 10~), dh-python, openstack-pkg-tools, python-all, -- GitLab From 8c12c2505c488d4e1974b496dc8308e3fb2ce662 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Wed, 14 Mar 2018 07:29:48 +0000 Subject: [PATCH 051/303] Mark Systems/Managers/SessionService optional Apparently in Redfish standard the root resource doesn't enforce any of the depicted top level attributes to be mandatory. Till now all those attributes are mandatory parameters for Sushy object creation. Therefore, to keep Sushy aligned to standard Redfish this patch marks them as optional. 
Change-Id: Ic7750b48e5f5f3b5976f0a020d68c7d3197eaa8d Closes-Bug: #1754514 --- .../notes/bug-1754514-ca6ebe16c4e4b3b0.yaml | 5 ++++ sushy/main.py | 23 +++++++++++---- .../unit/json_samples/bare_minimum_root.json | 11 ++++++++ sushy/tests/unit/test_main.py | 28 +++++++++++++++++++ 4 files changed, 62 insertions(+), 5 deletions(-) create mode 100644 releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml create mode 100644 sushy/tests/unit/json_samples/bare_minimum_root.json diff --git a/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml b/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml new file mode 100644 index 0000000..e8e6f8a --- /dev/null +++ b/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml @@ -0,0 +1,5 @@ +--- +critical: + - | + Fixes authentication failure when SessionService attribute is + not present in the root resource. diff --git a/sushy/main.py b/sushy/main.py index ec17822..b0a4334 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -16,6 +16,7 @@ import logging from sushy import auth as sushy_auth from sushy import connector as sushy_connector +from sushy import exceptions from sushy.resources import base from sushy.resources.manager import manager from sushy.resources.sessionservice import session @@ -36,14 +37,13 @@ class Sushy(base.ResourceBase): uuid = base.Field('UUID') """The Redfish root service UUID""" - _systems_path = base.Field(['Systems', '@odata.id'], required=True) + _systems_path = base.Field(['Systems', '@odata.id']) """SystemCollection path""" - _managers_path = base.Field(['Managers', '@odata.id'], required=True) + _managers_path = base.Field(['Managers', '@odata.id']) """ManagerCollection path""" - _session_service_path = base.Field(['SessionService', '@odata.id'], - required=True) + _session_service_path = base.Field(['SessionService', '@odata.id']) """SessionService path""" def __init__(self, base_url, username=None, password=None, @@ -96,6 +96,10 @@ class Sushy(base.ResourceBase): not found :returns: a SystemCollection 
object """ + if not self._systems_path: + raise exceptions.MissingAttributeError( + attribute='Systems/@odata.id', resource=self._path) + return system.SystemCollection(self._conn, self._systems_path, redfish_version=self.redfish_version) @@ -115,6 +119,10 @@ class Sushy(base.ResourceBase): not found :returns: a ManagerCollection object """ + if not self._managers_path: + raise exceptions.MissingAttributeError( + attribute='Managers/@odata.id', resource=self._path) + return manager.ManagerCollection(self._conn, self._managers_path, redfish_version=self.redfish_version) @@ -130,9 +138,14 @@ class Sushy(base.ResourceBase): def get_session_service(self): """Get the SessionService object - :raises: MissingAttributeError, if the collection attribue is not found + :raises: MissingAttributeError, if the collection attribute is + not found :returns: as SessionCollection object """ + if not self._session_service_path: + raise exceptions.MissingAttributeError( + attribute='SessionService/@odata.id', resource=self._path) + return sessionservice.SessionService( self._conn, self._session_service_path, redfish_version=self.redfish_version) diff --git a/sushy/tests/unit/json_samples/bare_minimum_root.json b/sushy/tests/unit/json_samples/bare_minimum_root.json new file mode 100644 index 0000000..1f55814 --- /dev/null +++ b/sushy/tests/unit/json_samples/bare_minimum_root.json @@ -0,0 +1,11 @@ +{ + "@odata.type": "#ServiceRoot.v1_0_2.ServiceRoot", + "Id": "RootService", + "Name": "Root Service", + "RedfishVersion": "1.0.2", + "UUID": "92384634-2938-2342-8820-489239905423", + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#ServiceRoot", + "@odata.id": "/redfish/v1/", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 362cbb4..8b1a969 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -19,6 +19,7 @@ import mock from sushy import auth from sushy import connector +from sushy import exceptions from sushy import main from sushy.resources.manager import manager from sushy.resources.sessionservice import session @@ -117,3 +118,30 @@ class MainTestCase(base.TestCase): mock_sess.assert_called_once_with( self.root._conn, 'asdf', redfish_version=self.root.redfish_version) + + +class BareMinimumMainTestCase(base.TestCase): + + def setUp(self): + super(BareMinimumMainTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/' + 'bare_minimum_root.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + self.root = main.Sushy('http://foo.bar:1234', verify=True, + auth=mock.MagicMock(), connector=self.conn) + + def test_get_system_collection_when_systems_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Systems/@odata.id', self.root.get_system_collection) + + def test_get_manager_collection_when_managers_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Managers/@odata.id', self.root.get_manager_collection) + + def test_get_session_service_when_sessionservice_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'SessionService/@odata.id', self.root.get_session_service) -- GitLab From 569b6dcd1a6636dddc04a93a9c406c1db8343cf3 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Thu, 15 Mar 2018 09:34:22 +0000 Subject: [PATCH 052/303] Updated from global requirements Change-Id: If255cdbd97f076b1f9703956808e5b67dfceb14d --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 55ac42e..10d8240 100644 --- 
a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,7 @@ hacking>=1.0.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -sphinx!=1.6.6,>=1.6.2 # BSD +sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD openstackdocstheme>=1.18.1 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 testrepository>=0.0.18 # Apache-2.0/BSD -- GitLab From 6983511582ed91db3255ae7ede932b82b9a80b66 Mon Sep 17 00:00:00 2001 From: jinxingfang Date: Thu, 8 Mar 2018 10:37:04 +0800 Subject: [PATCH 053/303] Add system status field Add the system status field to show the system status which have not been composed to a node. Change-Id: I490c3e177d5cfadf633c99810da3793784e764f4 --- .../notes/add-system-status-field-41b3f2a8c4b85f38.yaml | 4 ++++ sushy/resources/system/system.py | 9 +++++++++ sushy/tests/unit/resources/system/test_system.py | 3 +++ 3 files changed, 16 insertions(+) create mode 100644 releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml diff --git a/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml b/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml new file mode 100644 index 0000000..c18c7b0 --- /dev/null +++ b/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the system status field to show the system status. 
diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 35d1ae4..8c84bde 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -62,6 +62,12 @@ class MemorySummaryField(base.CompositeField): """ +class StatusField(base.CompositeField): + state = base.Field('State') + health = base.Field('Health') + health_rollup = base.Field('HealthRollup') + + class System(base.ResourceBase): asset_tag = base.Field('AssetTag') @@ -105,6 +111,9 @@ class System(base.ResourceBase): sku = base.Field('SKU') """The system stock-keeping unit""" + status = StatusField('Status') + """The system status""" + # TODO(lucasagomes): Create mappings for the system_type system_type = base.Field('SystemType') """The system type""" diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 20007bf..44b0e7d 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -56,6 +56,9 @@ class SystemTestCase(base.TestCase): self.assertEqual('Physical', self.sys_inst.system_type) self.assertEqual('38947555-7742-3448-3784-823347823834', self.sys_inst.uuid) + self.assertEqual('Enabled', self.sys_inst.status.state) + self.assertEqual('OK', self.sys_inst.status.health) + self.assertEqual('OK', self.sys_inst.status.health_rollup) self.assertEqual(sushy.SYSTEM_POWER_STATE_ON, self.sys_inst.power_state) self.assertEqual(96, self.sys_inst.memory_summary.size_gib) -- GitLab From 589d544177cf9c57f2d15f97893f4c93af8ee223 Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Thu, 22 Mar 2018 18:20:37 -0400 Subject: [PATCH 054/303] add lower-constraints job Create a tox environment for running the unit tests against the lower bounds of the dependencies. Create a lower-constraints.txt to be used to enforce the lower bounds in those tests. Add openstack-tox-lower-constraints job to the zuul configuration. 
See http://lists.openstack.org/pipermail/openstack-dev/2018-March/128352.html for more details. Change-Id: Id863789398c112317bb5cf380fd82751b39ba0ce Depends-On: https://review.openstack.org/555034 Signed-off-by: Doug Hellmann --- lower-constraints.txt | 43 +++++++++++++++++++++++++++++++++++++++++++ tox.ini | 7 +++++++ zuul.d/project.yaml | 2 ++ 3 files changed, 52 insertions(+) create mode 100644 lower-constraints.txt diff --git a/lower-constraints.txt b/lower-constraints.txt new file mode 100644 index 0000000..fc86940 --- /dev/null +++ b/lower-constraints.txt @@ -0,0 +1,43 @@ +alabaster==0.7.10 +appdirs==1.3.0 +Babel==2.3.4 +coverage==4.0 +docutils==0.11 +dulwich==0.15.0 +extras==1.0.0 +fixtures==3.0.0 +flake8==2.5.5 +hacking==1.0.0 +imagesize==0.7.1 +iso8601==0.1.11 +Jinja2==2.10 +keystoneauth1==3.4.0 +linecache2==1.0.0 +MarkupSafe==1.0 +mccabe==0.2.1 +mock==2.0.0 +mox3==0.20.0 +openstackdocstheme==1.18.1 +os-client-config==1.28.0 +oslotest==3.2.0 +pbr==2.0.0 +pep8==1.5.7 +pyflakes==0.8.1 +Pygments==2.2.0 +python-mimeparse==1.6.0 +python-subunit==1.0.0 +pytz==2013.6 +PyYAML==3.12 +reno==2.5.0 +requests==2.14.2 +requestsexceptions==1.2.0 +six==1.10.0 +snowballstemmer==1.2.1 +Sphinx==1.6.5 +sphinxcontrib-websupport==1.0.1 +stevedore==1.20.0 +testrepository==0.0.18 +testscenarios==0.4 +testtools==2.2.0 +traceback2==1.4.0 +unittest2==1.1.0 diff --git a/tox.ini b/tox.ini index 5d65172..551fd23 100644 --- a/tox.ini +++ b/tox.ini @@ -53,3 +53,10 @@ ignore = E123,E125 enable-extensions=H106,H203,H204,H205,H210,H904 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build + +[testenv:lower-constraints] +basepython = python3 +deps = + -c{toxinidir}/lower-constraints.txt + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index c86dfa3..f619715 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -2,6 +2,8 @@ check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src + - 
openstack-tox-lower-constraints gate: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src + - openstack-tox-lower-constraints -- GitLab From 49c66bbdc8b380e1f17764c131b39378544a834f Mon Sep 17 00:00:00 2001 From: Lin Yang Date: Sat, 3 Mar 2018 12:58:43 -0800 Subject: [PATCH 055/303] Fix wrong message of invalid boot "enabled" parameter Previously, it return allowed boot "target" values by mistake. So fixed it by using allowed boot "enabled" values as expected. Closes-Bug: #1758202 Change-Id: Id6a7665c3d2d18ee5cefde41b7b575bc1b3413a4 --- sushy/resources/system/system.py | 2 +- sushy/tests/unit/resources/system/test_system.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 35d1ae4..08ee315 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -214,7 +214,7 @@ class System(base.ResourceBase): if enabled not in sys_maps.BOOT_SOURCE_ENABLED_MAP_REV: raise exceptions.InvalidParameterValueError( parameter='enabled', value=enabled, - valid_values=list(sys_maps.BOOT_SOURCE_TARGET_MAP_REV)) + valid_values=list(sys_maps.BOOT_SOURCE_ENABLED_MAP_REV)) data = { 'Boot': { diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 20007bf..9841651 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -213,10 +213,14 @@ class SystemTestCase(base.TestCase): 'invalid-target') def test_set_system_boot_source_invalid_enabled(self): - self.assertRaises(exceptions.InvalidParameterValueError, - self.sys_inst.set_system_boot_source, - sushy.BOOT_SOURCE_TARGET_HDD, - enabled='invalid-enabled') + with self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + '"enabled" value.*{0}'.format( + list(sys_map.BOOT_SOURCE_ENABLED_MAP_REV))): + + self.sys_inst.set_system_boot_source( + sushy.BOOT_SOURCE_TARGET_HDD, + 
enabled='invalid-enabled') def test__get_processor_collection_path_missing_processors_attr(self): self.sys_inst._json.pop('Processors') -- GitLab From 998909f4aeb46c957f97ab8c97e68f068248aceb Mon Sep 17 00:00:00 2001 From: Julia Kreger Date: Mon, 26 Mar 2018 12:21:23 -0700 Subject: [PATCH 056/303] Update Launchpad references to Storyboard Change-Id: Ie3ea54e982e375b5c763cfa028aa81ab0c24ff54 --- CONTRIBUTING.rst | 4 ++-- README.rst | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 3765c1b..2190b38 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -12,6 +12,6 @@ submitted for review via the Gerrit tool: Pull requests submitted through GitHub will be ignored. -Bugs should be filed on Launchpad, not GitHub: +Bugs should be filed in StoryBoard, not GitHub: - https://bugs.launchpad.net/sushy + https://storyboard.openstack.org/#!/project/960 diff --git a/README.rst b/README.rst index c404680..f8414a8 100644 --- a/README.rst +++ b/README.rst @@ -16,6 +16,6 @@ needed we can expand Sushy to fullfil those requirements. * Documentation: https://docs.openstack.org/sushy/latest/ * Usage: https://docs.openstack.org/sushy/latest/reference/usage.html * Source: https://git.openstack.org/cgit/openstack/sushy -* Bugs: https://bugs.launchpad.net/sushy +* Bugs: https://storyboard.openstack.org/#!/project/960 .. _Redfish: http://www.dmtf.org/standards/redfish -- GitLab From 43ea0c0bd86663501d930c58c7eae8d93821cb4a Mon Sep 17 00:00:00 2001 From: "ya.wang" Date: Wed, 21 Mar 2018 17:13:50 +0800 Subject: [PATCH 057/303] Add processor ProcessorId field and Status field Add ProcessorId field to show basic information of the processor. Add Status field to show the processor status. 
Change-Id: Ic90e422d7a86cb14a874ef68d7d7b92406af0a9a --- ...cessor-id-and-status-b81d4c6e6c14c25f.yaml | 4 ++ sushy/resources/system/processor.py | 39 +++++++++++++++++++ sushy/tests/unit/json_samples/processor.json | 5 ++- .../unit/resources/system/test_processor.py | 17 ++++++++ 4 files changed, 63 insertions(+), 2 deletions(-) create mode 100644 releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml diff --git a/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml b/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml new file mode 100644 index 0000000..76cdb0e --- /dev/null +++ b/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the processor status and id fields to the ``Processor`` class. diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index a4415c9..b094c26 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -24,6 +24,39 @@ ProcessorSummary = collections.namedtuple('ProcessorSummary', LOG = logging.getLogger(__name__) +class ProcessorIdField(base.CompositeField): + + effective_family = base.Field('EffectiveFamily') + """The processor effective family""" + + effective_model = base.Field('EffectiveModel') + """The processor effective model""" + + identification_registers = base.Field('IdentificationRegisters') + """The processor identification registers""" + + microcode_info = base.Field('MicrocodeInfo') + """The processor microcode info""" + + step = base.Field('Step') + """The processor stepping""" + + vendor_id = base.Field('VendorID') + """The processor vendor id""" + + +class StatusField(base.CompositeField): + + health = base.Field('Health') + """The processor health""" + + health_rollup = base.Field('HealthRollup') + """The processor health rollup""" + + state = base.Field('State') + """The processor state""" + + class Processor(base.ResourceBase): identity = 
base.Field('Id', required=True) @@ -53,6 +86,12 @@ class Processor(base.ResourceBase): max_speed_mhz = base.Field('MaxSpeedMHz', adapter=int) """The maximum clock speed of the processor in MHz.""" + processor_id = ProcessorIdField('ProcessorId') + """The processor id""" + + status = StatusField('Status') + """The processor status""" + total_cores = base.Field('TotalCores', adapter=int) """The total number of cores contained in this processor""" diff --git a/sushy/tests/unit/json_samples/processor.json b/sushy/tests/unit/json_samples/processor.json index 19ce642..7b6d47f 100644 --- a/sushy/tests/unit/json_samples/processor.json +++ b/sushy/tests/unit/json_samples/processor.json @@ -7,7 +7,7 @@ "InstructionSet": "x86-64", "Manufacturer": "Intel(R) Corporation", "Model": "Multi-Core Intel(R) Xeon(R) processor 7xxx Series", - "ProcessorID": { + "ProcessorId": { "VendorID": "GenuineIntel", "IdentificationRegisters": "0x34AC34DC8901274A", "EffectiveFamily": "0x42", @@ -20,7 +20,8 @@ "TotalThreads": 16, "Status": { "State": "Enabled", - "Health": "OK" + "Health": "OK", + "HealthRollup": "OK" }, "@odata.context": "/redfish/v1/$metadata#Systems/Members/437XR1138R2/Processors/Members/$entity", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Processors/CPU1", diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index 0dd2dad..b35ca19 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -46,9 +46,26 @@ class ProcessorTestCase(base.TestCase): self.sys_processor.manufacturer) self.assertEqual('Multi-Core Intel(R) Xeon(R) processor 7xxx Series', self.sys_processor.model) + self.assertEqual('0x42', + self.sys_processor.processor_id.effective_family) + self.assertEqual('0x61', + self.sys_processor.processor_id.effective_model) + self.assertEqual('0x34AC34DC8901274A', + self.sys_processor.processor_id. 
+ identification_registers) + self.assertEqual('0x429943', + self.sys_processor.processor_id.microcode_info) + self.assertEqual('0x1', + self.sys_processor.processor_id.step) + self.assertEqual('GenuineIntel', + self.sys_processor.processor_id.vendor_id) + self.assertEqual(3700, self.sys_processor.max_speed_mhz) self.assertEqual(8, self.sys_processor.total_cores) self.assertEqual(16, self.sys_processor.total_threads) + self.assertEqual('Enabled', self.sys_processor.status.state) + self.assertEqual('OK', self.sys_processor.status.health) + self.assertEqual('OK', self.sys_processor.status.health_rollup) class ProcessorCollectionTestCase(base.TestCase): -- GitLab From ecaa3c8062163502a3307e9b4892531222c7026e Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Thu, 12 Apr 2018 17:27:16 +0000 Subject: [PATCH 058/303] Add storage volume It adds the storage volume resource of Redfish standard schema and also provides the attribute ``max_size_bytes`` which gives largest logical disk size available in bytes among all disk volumes. 
Co-Authored-By: Nisha Agarwal Partial-Bug: 1751143 Change-Id: I38724275967cb01a0e905ef2a10c5b99dde482f2 --- sushy/resources/system/storage/__init__.py | 0 sushy/resources/system/storage/volume.py | 57 ++++++++ sushy/tests/unit/json_samples/volume.json | 44 +++++++ sushy/tests/unit/json_samples/volume2.json | 41 ++++++ sushy/tests/unit/json_samples/volume3.json | 41 ++++++ .../unit/json_samples/volume_collection.json | 21 +++ .../unit/resources/system/storage/__init__.py | 0 .../resources/system/storage/test_volume.py | 123 ++++++++++++++++++ sushy/tests/unit/test_utils.py | 6 + sushy/utils.py | 17 +++ 10 files changed, 350 insertions(+) create mode 100644 sushy/resources/system/storage/__init__.py create mode 100644 sushy/resources/system/storage/volume.py create mode 100644 sushy/tests/unit/json_samples/volume.json create mode 100644 sushy/tests/unit/json_samples/volume2.json create mode 100644 sushy/tests/unit/json_samples/volume3.json create mode 100644 sushy/tests/unit/json_samples/volume_collection.json create mode 100644 sushy/tests/unit/resources/system/storage/__init__.py create mode 100644 sushy/tests/unit/resources/system/storage/test_volume.py diff --git a/sushy/resources/system/storage/__init__.py b/sushy/resources/system/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py new file mode 100644 index 0000000..6e6c558 --- /dev/null +++ b/sushy/resources/system/storage/volume.py @@ -0,0 +1,57 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Volume.v1_0_3.json + +import logging + +from sushy.resources import base +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class Volume(base.ResourceBase): + """This class adds the Storage Volume resource""" + + identity = base.Field('Id', required=True) + """The Volume identity string""" + + name = base.Field('Name') + """The name of the resource""" + + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size in bytes of this Volume.""" + + +class VolumeCollection(base.ResourceCollectionBase): + """This class represents the Storage Volume collection""" + + _max_size_bytes = None + + @property + def _resource_type(self): + return Volume + + @property + def max_size_bytes(self): + """Max size available in bytes among all Volumes of this collection.""" + if self._max_size_bytes is None: + self._max_size_bytes = ( + utils.max_safe([vol.capacity_bytes + for vol in self.get_members()])) + return self._max_size_bytes + + def _do_refresh(self, force=False): + # invalidate the attribute + self._max_size_bytes = None diff --git a/sushy/tests/unit/json_samples/volume.json b/sushy/tests/unit/json_samples/volume.json new file mode 100644 index 0000000..f19b528 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume.json @@ -0,0 +1,44 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "1", + "Name": "Virtual Disk 1", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "Mirrored", + "CapacityBytes": 899527000000, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "38f1818b-111e-463a-aa19-fa54f792e468" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + 
"@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume2.json b/sushy/tests/unit/json_samples/volume2.json new file mode 100644 index 0000000..a1804f1 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume2.json @@ -0,0 +1,41 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "2", + "Name": "Virtual Disk 2", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "NonRedundant", + "CapacityBytes": 107374182400, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "0324c96c-8031-4f5e-886c-50cd90aca854" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2", 
+ "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume3.json b/sushy/tests/unit/json_samples/volume3.json new file mode 100644 index 0000000..d5b6b86 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume3.json @@ -0,0 +1,41 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "3", + "Name": "Virtual Disk 3", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "NonRedundant", + "CapacityBytes": 1073741824000, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "eb179a30-6f87-4fdb-8f92-639eb7aaabcb" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume_collection.json b/sushy/tests/unit/json_samples/volume_collection.json new file mode 100644 index 0000000..0643e68 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume_collection.json @@ -0,0 +1,21 @@ +{ + "@odata.type": "#VolumeCollection.VolumeCollection", + "Name": "Storage Volume Collection", + "Description": "Storage Volume Collection", + "Members@odata.count": 3, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3" + } + ], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#VolumeCollection.VolumeCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/resources/system/storage/__init__.py b/sushy/tests/unit/resources/system/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py new file mode 100644 index 0000000..a595f74 --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -0,0 +1,123 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy.resources.system.storage import volume +from sushy.tests.unit import base + + +class VolumeTestCase(base.TestCase): + + def setUp(self): + super(VolumeTestCase, self).setUp() + self.conn = mock.Mock() + volume_file = 'sushy/tests/unit/json_samples/volume.json' + with open(volume_file, 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.stor_volume = volume.Volume( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_volume._parse_attributes() + self.assertEqual('1.0.2', self.stor_volume.redfish_version) + self.assertEqual('1', self.stor_volume.identity) + self.assertEqual('Virtual Disk 1', self.stor_volume.name) + self.assertEqual(899527000000, self.stor_volume.capacity_bytes) + + +class VolumeCollectionTestCase(base.TestCase): + + def setUp(self): + super(VolumeCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'volume_collection.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + self.stor_vol_col = volume.VolumeCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_vol_col._parse_attributes() + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3'), + self.stor_vol_col.members_identities) + + @mock.patch.object(volume, 'Volume', autospec=True) + def test_get_member(self, Volume_mock): + self.stor_vol_col.get_member( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1') + Volume_mock.assert_called_once_with( + self.stor_vol_col._conn, + 
'/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + redfish_version=self.stor_vol_col.redfish_version) + + @mock.patch.object(volume, 'Volume', autospec=True) + def test_get_members(self, Volume_mock): + members = self.stor_vol_col.get_members() + calls = [ + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + redfish_version=self.stor_vol_col.redfish_version), + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', + redfish_version=self.stor_vol_col.redfish_version), + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3', + redfish_version=self.stor_vol_col.redfish_version), + ] + Volume_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(3, len(members)) + + def test_max_size_bytes(self): + self.assertIsNone(self.stor_vol_col._max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/volume.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + with open('sushy/tests/unit/json_samples/volume2.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + with open('sushy/tests/unit/json_samples/volume3.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_size_bytes_after_refresh(self): + self.stor_vol_col.refresh() + self.assertIsNone(self.stor_vol_col._max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with 
open('sushy/tests/unit/json_samples/volume.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + with open('sushy/tests/unit/json_samples/volume2.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + with open('sushy/tests/unit/json_samples/volume3.json', 'r') as f: + successive_return_values.append(json.loads(f.read())) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index de75de4..115bfed 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -90,3 +90,9 @@ class UtilsTestCase(base.TestCase): '"subresource_name" cannot be empty', utils.get_sub_resource_path_by, self.sys_inst, '') + + def test_max_safe(self): + self.assertEqual(10, utils.max_safe([1, 3, 2, 8, 5, 10, 6])) + self.assertEqual(821, utils.max_safe([15, 300, 270, None, 821, None])) + self.assertEqual(0, utils.max_safe([])) + self.assertIsNone(utils.max_safe([], default=None)) diff --git a/sushy/utils.py b/sushy/utils.py index 15519cc..0f687d0 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -89,3 +89,20 @@ def get_sub_resource_path_by(resource, subresource_name): resource=resource.path) return body['@odata.id'] + + +def max_safe(iterable, default=0): + """Helper wrapper over builtin max() function. + + This function is just a wrapper over builtin max() w/o ``key`` argument. + The ``default`` argument specifies an object to return if the provided + ``iterable`` is empty. Also it filters out the None type values. + :param iterable: an iterable + :param default: 0 by default + """ + + try: + return max([x for x in iterable if x is not None]) + except ValueError: + # TypeError is not caught here as that should be thrown. 
+ return default -- GitLab From 5cc1a6fd4ba716d4c72be7da9649900f4b154c0b Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Mon, 7 May 2018 15:18:53 +0200 Subject: [PATCH 059/303] Do not run functional (API) tests in the CI These tests exercise Ironic API with the fake driver, thus they provide no coverage for sushy and can be excluded. Change-Id: I9bff52c29b763478934e0068e3280f508ed1141f --- .../run.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml index 065c2e4..cae77b8 100644 --- a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml +++ b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml @@ -62,7 +62,7 @@ - shell: cmd: | cat << 'EOF' >> ironic-extra-vars - export DEVSTACK_GATE_TEMPEST_REGEX="ironic" + export DEVSTACK_GATE_TEMPEST_REGEX="ironic_tempest_plugin.tests.scenario" EOF chdir: '{{ ansible_user_dir }}/workspace' -- GitLab From fc012cb706540c5f72709ebdc4d284021c8e3141 Mon Sep 17 00:00:00 2001 From: ghanshyam Date: Wed, 9 May 2018 01:12:55 +0000 Subject: [PATCH 060/303] Gate fix: Cap hacking to avoid gate failure hacking is not capped in g-r and it is in blacklist for requirement as hacking new version can break the gate jobs. Hacking can break gate jobs because of various reasons: - There might be new rule addition in hacking - Some rules becomes default from non-default - Updates in pycodestyle etc That was the main reason it was not added in g-r auto sync also. Most of the project maintained the compatible and cap the hacking version in test-requirements.txt and update to new version when project is ready. Bumping new version might need code fix also on project side depends on what new in that version. 
If project does not have cap the hacking version then, there is possibility of gate failure whenever new hacking version is released by QA team. Example of such failure in recent release of hacking 1.1.0 - http://lists.openstack.org/pipermail/openstack-dev/2018-May/130282.html Also fixes the lower-constraints file to allow the CI to pass. Change-Id: I7d61303c145784c86d95ccc5cd58f4ced376a713 --- lower-constraints.txt | 2 +- test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index fc86940..dc38ac7 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -33,7 +33,7 @@ requests==2.14.2 requestsexceptions==1.2.0 six==1.10.0 snowballstemmer==1.2.1 -Sphinx==1.6.5 +Sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 stevedore==1.20.0 testrepository==0.0.18 diff --git a/test-requirements.txt b/test-requirements.txt index 10d8240..6573de0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. -hacking>=1.0.0 # Apache-2.0 +hacking>=1.0.0,<1.1.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -- GitLab From 0b9497dfa6add5c6bdf46903da17b0464400ccaa Mon Sep 17 00:00:00 2001 From: Nate Potter Date: Wed, 19 Jul 2017 16:13:43 -0700 Subject: [PATCH 061/303] Create mappings for System Indicator LED Add mappings and constants for the Indicator LED value in the System class. 
Change-Id: I5ecad5d81a6ca1487fab2002d2bc8b57736ab4ef --- .../indicator-led-mappings-e7b34da03f6abb06.yaml | 5 +++++ sushy/resources/system/constants.py | 14 ++++++++++++++ sushy/resources/system/mappings.py | 7 +++++++ sushy/resources/system/system.py | 4 ++-- sushy/tests/unit/resources/system/test_system.py | 3 ++- 5 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml diff --git a/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml b/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml new file mode 100644 index 0000000..e5b6c82 --- /dev/null +++ b/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds mappings and constants for possible values of the Indicator LED + value in the System class. diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index fc2ddb1..9e80b6a 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -44,6 +44,20 @@ SYSTEM_POWER_STATE_POWERING_OFF = 'powering off' """A temporary state between On and Off. 
The power off action can take time while the OS is in the shutdown process""" +# Indicator LED Constants + +SYSTEM_INDICATOR_LED_LIT = 'Lit' +"""The Indicator LED is lit""" + +SYSTEM_INDICATOR_LED_BLINKING = 'Blinking' +"""The Indicator LED is blinking""" + +SYSTEM_INDICATOR_LED_OFF = 'Off' +"""The Indicator LED is off""" + +SYSTEM_INDICATOR_LED_UNKNOWN = 'Unknown' +"""The state of the Indicator LED cannot be determine""" + # Boot source target constants BOOT_SOURCE_TARGET_NONE = 'none' diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index fb6e5ab..a275eb0 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -39,6 +39,13 @@ SYSTEM_POWER_STATE_MAP = { SYSTEM_POWER_STATE_MAP_REV = utils.revert_dictionary(SYSTEM_POWER_STATE_MAP) +SYSTEM_INDICATOR_LED_MAP = { + 'Lit': sys_cons.SYSTEM_INDICATOR_LED_LIT, + 'Blinking': sys_cons.SYSTEM_INDICATOR_LED_BLINKING, + 'Off': sys_cons.SYSTEM_INDICATOR_LED_OFF, + 'Unknown': sys_cons.SYSTEM_INDICATOR_LED_UNKNOWN, +} + BOOT_SOURCE_TARGET_MAP = { 'None': sys_cons.BOOT_SOURCE_TARGET_NONE, 'Pxe': sys_cons.BOOT_SOURCE_TARGET_PXE, diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index fbf2c55..b2a53b2 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -88,8 +88,8 @@ class System(base.ResourceBase): identity = base.Field('Id', required=True) """The system identity string""" - # TODO(lucasagomes): Create mappings for the indicator_led - indicator_led = base.Field('IndicatorLED') + indicator_led = base.MappedField('IndicatorLED', + sys_maps.SYSTEM_INDICATOR_LED_MAP) """Whether the indicator LED is lit or off""" manufacturer = base.Field('Manufacturer') diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index c31a521..1ec4faa 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ 
b/sushy/tests/unit/resources/system/test_system.py @@ -47,7 +47,8 @@ class SystemTestCase(base.TestCase): self.assertEqual('Web Front End node', self.sys_inst.description) self.assertEqual('web483', self.sys_inst.hostname) self.assertEqual('437XR1138R2', self.sys_inst.identity) - self.assertEqual('Off', self.sys_inst.indicator_led) + self.assertEqual(sushy.SYSTEM_INDICATOR_LED_OFF, + self.sys_inst.indicator_led) self.assertEqual('Contoso', self.sys_inst.manufacturer) self.assertEqual('WebFrontEnd483', self.sys_inst.name) self.assertEqual('224071-J23', self.sys_inst.part_number) -- GitLab From 1b10e1c4006982c163dae25eabab268f992e1dd0 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Fri, 25 May 2018 10:07:11 +0300 Subject: [PATCH 062/303] tox: Use default Python 3 version, remove pypy When running tests locally, use only default Python version 3 so that when Python version is updated, this will use the latest py3. Remove pypy environment as this implementation is not being used. This does not affect Zuul configuration, only developer's local. 
Change-Id: I9eb0834f3d39f33b05ab638a744d798f1eda0e08 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 551fd23..f5ccd96 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] minversion = 2.0 -envlist = py36,py35,py27,pypy,pep8 +envlist = py3,py27,pep8 skipsdist = True [testenv] -- GitLab From d223fffa67c1cea59d2b4b837faab6410768be74 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Wed, 6 Jun 2018 19:56:47 +0300 Subject: [PATCH 063/303] Add reusable ActionField Also refactors existing ResetActionField to inherit from ActionField Change-Id: Id7501f0cd622d143bf1565bac21cd703c52a1ced --- sushy/resources/common.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 24322cf..839c97d 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -13,8 +13,10 @@ from sushy.resources import base -class ResetActionField(base.CompositeField): +class ActionField(base.CompositeField): + target_uri = base.Field('target', required=True) + + +class ResetActionField(ActionField): allowed_values = base.Field('ResetType@Redfish.AllowableValues', adapter=list) - - target_uri = base.Field('target', required=True) -- GitLab From fcb590e64a2e097d4d1c707d675bf0a3fd463341 Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Wed, 6 Jun 2018 15:27:01 -0400 Subject: [PATCH 064/303] fix tox python3 overrides We want to default to running all tox environments under python 3, so set the basepython value in each environment. We do not want to specify a minor version number, because we do not want to have to update the file every time we upgrade python. We do not want to set the override once in testenv, because that breaks the more specific versions used in default environments like py35 and py36. 
Change-Id: Ic74f736e58df3db16b991dd229c9c77b3d13023f Signed-off-by: Doug Hellmann --- tox.ini | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tox.ini b/tox.ini index f5ccd96..0531d12 100644 --- a/tox.ini +++ b/tox.ini @@ -16,12 +16,15 @@ deps = commands = python setup.py test --slowest --testr-args='{posargs}' [testenv:pep8] +basepython = python3 commands = flake8 {posargs} [testenv:venv] +basepython = python3 commands = {posargs} [testenv:cover] +basepython = python3 # After running this target, visit sushy/cover/index.html # in your browser, to see a nicer presentation report with annotated # HTML listings detailing missed lines. @@ -31,13 +34,16 @@ commands = coverage erase coverage html --omit=*test* [testenv:docs] +basepython = python3 commands = python setup.py build_sphinx [testenv:releasenotes] +basepython = python3 commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] +basepython = python3 commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] -- GitLab From 31cdd0f951bcd6c8a9b2d3f9628468aef5e7883b Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 20 Jun 2018 18:59:31 +0200 Subject: [PATCH 065/303] Change BootSourceOverrideMode from BIOS to Legacy Because this is what DMTF schema [1] requires. 1. https://redfish.dmtf.org/schemas/ComputerSystem.v1_5_0.json Co-Authored-By: Richard G. 
Pioso Story: 2002635 Task: 22277 Change-Id: Ib158b2ea974a9022893a2fb3400be4017e3ff7b2 --- .../notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml | 6 ++++++ sushy/resources/system/mappings.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml diff --git a/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml b/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml new file mode 100644 index 0000000..c3aa85e --- /dev/null +++ b/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes malformed value of the ``BootSourceOverrideMode`` element which goes + against the Redfish schema and causes some of the boot mode calls to + fail. diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index a275eb0..995ba89 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -65,7 +65,7 @@ BOOT_SOURCE_TARGET_MAP = { BOOT_SOURCE_TARGET_MAP_REV = utils.revert_dictionary(BOOT_SOURCE_TARGET_MAP) BOOT_SOURCE_MODE_MAP = { - 'BIOS': sys_cons.BOOT_SOURCE_MODE_BIOS, + 'Legacy': sys_cons.BOOT_SOURCE_MODE_BIOS, 'UEFI': sys_cons.BOOT_SOURCE_MODE_UEFI, } -- GitLab From fb44452cdd10f443079d282e1c267b98f3ba61d7 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Fri, 25 May 2018 13:29:10 +0300 Subject: [PATCH 066/303] Introduce BIOS API Add `system` resource to have reference to `bios`. Support patching via @Redfish.Settings settings object resource using new SettingsField. SettingsField is a reusable field that corresponds to @Redfish.Settings in other resources. This SettingsField corresponds to Settings.v1_0_0 field schema, newer versions introduce additional fields that will be added later. Also at least iDRAC does not support anything newer than Settings.v1_0_0 field schema now. 
In the unit tests, settings.json contains imaginary values for Message, Severity, MessageArgs and Resolution fields as could not find existing samples holding these fields. @Redfish.Settings is a special field (annotation) that does not appear as a property in any schemas of Redfish resources. Theoretically it can be present in any resource. Practically this field is added to resources that support future state of the resource where immediate application of updates is not possible and need to restart system or a service for the values to take effect. So far it has been identified that resources that can have this field are: BIOS, Ethernet Interface and Storage. If @Redfish.Settings is present on the resource, then need to use URI in its subfield SettingsObject to update resource. If it is not present on the resource, then can update the resource directly using the resource's URI. Change-Id: Ib2ef3d6380975ec7b964ebb634575e2b86d51991 Task: 12508 Story: 2001791 --- .../notes/add-bios-bf69ac56c4ae8f50.yaml | 4 + sushy/resources/common.py | 7 + sushy/resources/settings.py | 101 +++++++++++ sushy/resources/system/bios.py | 162 ++++++++++++++++++ sushy/resources/system/system.py | 22 +++ sushy/tests/unit/json_samples/bios.json | 45 +++++ .../unit/json_samples/bios_settings.json | 21 +++ sushy/tests/unit/json_samples/settings.json | 22 +++ .../tests/unit/resources/system/test_bios.py | 154 +++++++++++++++++ .../unit/resources/system/test_system.py | 14 ++ sushy/tests/unit/resources/test_settings.py | 74 ++++++++ 11 files changed, 626 insertions(+) create mode 100644 releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml create mode 100644 sushy/resources/settings.py create mode 100644 sushy/resources/system/bios.py create mode 100644 sushy/tests/unit/json_samples/bios.json create mode 100644 sushy/tests/unit/json_samples/bios_settings.json create mode 100644 sushy/tests/unit/json_samples/settings.json create mode 100644 sushy/tests/unit/resources/system/test_bios.py create mode 
100644 sushy/tests/unit/resources/test_settings.py diff --git a/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml b/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml new file mode 100644 index 0000000..d82ef15 --- /dev/null +++ b/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds support for the BIOS resource to the library. diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 839c97d..17cfe41 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -20,3 +20,10 @@ class ActionField(base.CompositeField): class ResetActionField(ActionField): allowed_values = base.Field('ResetType@Redfish.AllowableValues', adapter=list) + + +class IdRefField(base.CompositeField): + """Reference to the resource for updating settings""" + + resource_uri = base.Field('@odata.id') + """The unique identifier for a resource""" diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py new file mode 100644 index 0000000..29254fd --- /dev/null +++ b/sushy/resources/settings.py @@ -0,0 +1,101 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/Settings.v1_0_0.json + + +from sushy.resources import base +from sushy.resources import common + + +class MessageListField(base.ListField): + """List of messages with details of settings update status""" + + message_id = base.Field('MessageId', required=True) + """The key for this message which can be used + to look up the message in a message registry + """ + + message = base.Field('Message') + """Human readable message, if provided""" + + severity = base.Field('Severity') + """Severity of the error""" + + resolution = base.Field('Resolution') + """Used to provide suggestions on how to resolve + the situation that caused the error + """ + + _related_properties = base.Field('RelatedProperties') + """List of properties described by the message""" + + message_args = base.Field('MessageArgs') + """List of message substitution arguments for the message + referenced by `message_id` from the message registry + """ + + +class SettingsField(base.CompositeField): + """The settings of a resource + + Represents the future state and configuration of the resource. The + field is added to resources that support future state and + configuration. + + This field includes several properties to help clients monitor when + the resource is consumed by the service and determine the results of + applying the values, which may or may not have been successful. 
+ """ + + def __init__(self): + super(SettingsField, self).__init__(path="@Redfish.Settings") + + time = base.Field('Time') + """Indicates the time the settings were applied to the server""" + + _etag = base.Field('ETag') + """The ETag of the resource to which the settings were applied, + after the application + """ + + _settings_object_idref = common.IdRefField("SettingsObject") + """Reference to the resource the client may PUT/PATCH in order + to change this resource + """ + + messages = MessageListField("Messages") + """Represents the results of the last time the values of the Settings + resource were applied to the server""" + + def commit(self, connector, value, etag=None): + """Commits new settings values + + The new values will be applied when the system or a service + restarts. + + :param connector: A Connector instance + :param value: Value representing JSON whose structure is specific + to each resource and the caller must format it correctly + :param etag: Optional ETag of resource version to update. If + this ETag is provided and it does not match on server, then + the new values will not be committed + """ + + connector.patch(self.resource_uri, + data=value, + headers={'If-Match': etag} if etag else None) + + @property + def resource_uri(self): + return self._settings_object_idref.resource_uri diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py new file mode 100644 index 0000000..3ccfeb9 --- /dev/null +++ b/sushy/resources/system/bios.py @@ -0,0 +1,162 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Bios.v1_0_3.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources import settings + +LOG = logging.getLogger(__name__) + + +class ActionsField(base.CompositeField): + change_password = common.ActionField('#Bios.ChangePassword') + reset_bios = common.ActionField('#Bios.ResetBios') + + +class Bios(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The Bios resource identity string""" + + name = base.Field('Name') + """The name of the resource""" + + description = base.Field('Description') + """Human-readable description of the BIOS resource""" + + attribute_registry = base.Field('AttributeRegistry') + """The Resource ID of the Attribute Registry + for the BIOS Attributes resource + """ + + _settings = settings.SettingsField() + """Results of last BIOS attribute update""" + + attributes = base.Field('Attributes') + """Vendor-specific key-value dict of effective BIOS attributes + + Attributes cannot be updated directly. 
+ To update use :py:func:`~set_attribute` or :py:func:`~set_attributes` + """ + + _actions = ActionsField('Actions') + + _etag = base.Field('@odata.etag') + + _pending_settings_resource = None + + @property + def pending_attributes(self): + """Pending BIOS attributes + + BIOS attributes that have been comitted to the system, + but for them to take effect system restart is necessary + """ + + if not self._pending_settings_resource: + self._pending_settings_resource = Bios( + self._conn, + self._settings.resource_uri, + redfish_version=self.redfish_version) + self._pending_settings_resource.refresh(force=False) + return self._pending_settings_resource.attributes + + def set_attribute(self, key, value): + """Update an attribute + + Attribute update is not immediate but requires system restart. + Committed attributes can be checked at :py:attr:`~pending_attributes` + property + + :param key: Attribute name + :param value: Attribute value + """ + self.set_attributes({key: value}) + + def set_attributes(self, value): + """Update many attributes at once + + Attribute update is not immediate but requires system restart. 
+ Committed attributes can be checked at :py:attr:`~pending_attributes` + property + + :param value: Key-value pairs for attribute name and value + """ + self._settings.commit(self._conn, + {'Attributes': value}, + self._etag) + if self._pending_settings_resource: + self._pending_settings_resource.invalidate() + + def _get_reset_bios_action_element(self): + actions = self._actions + + if not actions: + raise exceptions.MissingAttributeError(attribute="Actions", + resource=self._path) + + reset_bios_action = actions.reset_bios + + if not reset_bios_action: + raise exceptions.MissingActionError(action='#Bios.ResetBios', + resource=self._path) + return reset_bios_action + + def _get_change_password_element(self): + actions = self._actions + + if not actions: + raise exceptions.MissingAttributeError(attribute="Actions", + resource=self._path) + + change_password_action = actions.change_password + + if not change_password_action: + raise exceptions.MissingActionError(action='#Bios.ChangePassword', + resource=self._path) + return change_password_action + + def reset_bios(self): + """Reset the BIOS attributes to default""" + + target_uri = self._get_reset_bios_action_element().target_uri + + LOG.debug('Resetting BIOS attributes %s ...', self.identity) + self._conn.post(target_uri) + LOG.info('BIOS attributes %s is being reset', self.identity) + + def change_password(self, new_password, old_password, password_name): + """Change BIOS password""" + + target_uri = self._get_change_password_element().target_uri + + LOG.debug('Changing BIOS password %s ...', self.identity) + self._conn.post(target_uri, data={'NewPassword': new_password, + 'OldPassword': old_password, + 'PasswordName': password_name}) + LOG.info('BIOS password %s is being changed', self.identity) + + def _do_refresh(self, force=False): + """Do custom resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. 
+ greedy-refresh not done for them unless forced by ``force`` + argument. + """ + if self._pending_settings_resource is not None: + self._pending_settings_resource.invalidate(force) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index b2a53b2..b976da2 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -18,6 +18,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources.system import bios from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_maps @@ -130,6 +131,8 @@ class System(base.ResourceBase): _ethernet_interfaces = None + _bios = None + def __init__(self, connector, identity, redfish_version=None): """A class representing a ComputerSystem @@ -289,6 +292,23 @@ class System(base.ResourceBase): self._ethernet_interfaces.refresh(force=False) return self._ethernet_interfaces + @property + def bios(self): + """Property to reference `Bios` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. 
+ """ + if self._bios is None: + self._bios = bios.Bios( + self._conn, + utils.get_sub_resource_path_by(self, 'Bios'), + redfish_version=self.redfish_version) + + self._bios.refresh(force=False) + return self._bios + def _do_refresh(self, force=False): """Do custom resource specific refresh activities @@ -300,6 +320,8 @@ class System(base.ResourceBase): self._processors.invalidate(force) if self._ethernet_interfaces is not None: self._ethernet_interfaces.invalidate(force) + if self._bios is not None: + self._bios.invalidate(force) class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json new file mode 100644 index 0000000..7625af9 --- /dev/null +++ b/sushy/tests/unit/json_samples/bios.json @@ -0,0 +1,45 @@ +{ + "@odata.type": "#Bios.v1_0_0.Bios", + "Id": "BIOS", + "Name": "BIOS Configuration Current Settings", + "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", + "Attributes": { + "AdminPhone": "", + "BootMode": "Uefi", + "EmbeddedSata": "Raid", + "NicBoot1": "NetworkBoot", + "NicBoot2": "Disabled", + "PowerProfile": "MaxPerf", + "ProcCoreDisable": 0, + "ProcHyperthreading": "Enabled", + "ProcTurboMode": "Enabled", + "UsbControl": "UsbEnabled" + }, + "@Redfish.Settings": { + "@odata.type": "#Settings.v1_0_0.Settings", + "ETag": "9234ac83b9700123cc32", + "Messages": [ + { + "MessageId": "Base.1.0.SettingsFailed", + "RelatedProperties": [ + "#/Attributes/ProcTurboMode" + ] + } + ], + "SettingsObject": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" + }, + "Time": "2016-03-07T14:44.30-05:00" + }, + "Actions": { + "#Bios.ResetBios": { + "target": "/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios" + }, + "#Bios.ChangePassword": { + "target": "/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ChangePassword" + } + }, + "@odata.etag": "123", + "@odata.context": "/redfish/v1/$metadata#Bios.Bios", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS" 
+} diff --git a/sushy/tests/unit/json_samples/bios_settings.json b/sushy/tests/unit/json_samples/bios_settings.json new file mode 100644 index 0000000..b7c7772 --- /dev/null +++ b/sushy/tests/unit/json_samples/bios_settings.json @@ -0,0 +1,21 @@ +{ + "@odata.type": "#Bios.v1_0_0.Bios", + "Id": "Settings", + "Name": "BIOS Configuration Pending Settings", + "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", + "Attributes": { + "AdminPhone": "(404) 555-1212", + "BootMode": "Uefi", + "EmbeddedSata": "Ahci", + "NicBoot1": "NetworkBoot", + "NicBoot2": "NetworkBoot", + "PowerProfile": "MaxPerf", + "ProcCoreDisable": 0, + "ProcHyperthreading": "Enabled", + "ProcTurboMode": "Disabled", + "UsbControl": "UsbEnabled" + }, + "@odata.context": "/redfish/v1/$metadata#Bios.Bios", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json new file mode 100644 index 0000000..8119885 --- /dev/null +++ b/sushy/tests/unit/json_samples/settings.json @@ -0,0 +1,22 @@ +{ + "@Redfish.Settings": { + "@odata.type": "#Settings.v1_0_0.Settings", + "ETag": "9234ac83b9700123cc32", + "Messages": [{ + "MessageId": "Base.1.0.SettingsFailed", + "Message": "Settings update failed due to invalid value", + "Severity": "High", + "Resolution": "Fix the value and try again", + "MessageArgs": [ + "arg1" + ], + "RelatedProperties": [ + "#/Attributes/ProcTurboMode" + ] + }], + "SettingsObject": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" + }, + "Time": "2016-03-07T14:44.30-05:00" + } +} diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py new file mode 100644 index 0000000..1497c07 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -0,0 +1,154 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json + +import mock + +from sushy import exceptions +from sushy.resources.system import bios +from sushy.tests.unit import base + + +class BiosTestCase(base.TestCase): + + def setUp(self): + super(BiosTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/bios.json', 'r') as f: + bios_json = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/bios_settings.json', + 'r') as f: + bios_settings_json = json.loads(f.read()) + + self.conn.get.return_value.json.side_effect = [ + bios_json, + bios_settings_json, + bios_settings_json] + + self.sys_bios = bios.Bios( + self.conn, '/redfish/v1/Systems/437XR1138R2/BIOS', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_bios._parse_attributes() + self.assertEqual('1.0.2', self.sys_bios.redfish_version) + self.assertEqual('BIOS', self.sys_bios.identity) + self.assertEqual('BIOS Configuration Current Settings', + self.sys_bios.name) + self.assertIsNone(self.sys_bios.description) + self.assertEqual('123', self.sys_bios._etag) + self.assertEqual('BiosAttributeRegistryP89.v1_0_0', + self.sys_bios.attribute_registry) + self.assertEqual('', self.sys_bios.attributes['AdminPhone']) + self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) + self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) + # testing here if settings subfield parsed by checking ETag, + # other settings fields tested in specific settings test + self.assertEqual('9234ac83b9700123cc32', + self.sys_bios._settings._etag) + self.assertEqual('(404) 555-1212', + self.sys_bios.pending_attributes['AdminPhone']) + + def test_set_attribute(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}}, + headers={'If-Match': '123'}) + + def test_set_attribute_on_refresh(self): + # make it to instantiate pending 
attributes + self.sys_bios.pending_attributes + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') + self.assertTrue(self.sys_bios._pending_settings_resource._is_stale) + # make it to refresh pending attributes on next retrieval + self.sys_bios.pending_attributes + self.assertFalse(self.sys_bios._pending_settings_resource._is_stale) + + def test_set_attributes(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}}, + headers={'If-Match': '123'}) + + def test_set_attributes_on_refresh(self): + # make it to instantiate pending attributes + self.sys_bios.pending_attributes + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}) + self.assertTrue(self.sys_bios._pending_settings_resource._is_stale) + # make it to refresh pending attributes on next retrieval + self.sys_bios.pending_attributes + self.assertFalse(self.sys_bios._pending_settings_resource._is_stale) + + def test__get_reset_bios_action_element(self): + value = self.sys_bios._get_reset_bios_action_element() + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Actions/' + 'Bios.ResetBios', + value.target_uri) + + def test_reset_bios_missing_action(self): + self.sys_bios._actions.reset_bios = None + self.assertRaisesRegex( + exceptions.MissingActionError, '#Bios.ResetBios', + self.sys_bios.reset_bios) + + def test__parse_attributes_missing_reset_bios_target(self): + self.sys_bios.json['Actions']['#Bios.ResetBios'].pop( + 'target') + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'attribute Actions/#Bios.ResetBios/target', + self.sys_bios._parse_attributes) + + def test_reset_bios(self): + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_called_once_with( + 
'/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + + def test__get_change_password_element(self): + value = self.sys_bios._get_change_password_element() + self.assertEqual("/redfish/v1/Systems/437XR1138R2/BIOS/Actions/" + "Bios.ChangePassword", + value.target_uri) + + def test_change_password_missing_action(self): + self.sys_bios._actions.change_password = None + self.assertRaisesRegex( + exceptions.MissingActionError, '#Bios.ChangePassword', + self.sys_bios.change_password, 'newpassword', + 'oldpassword', + 'adminpassword') + + def test__parse_attributes_missing_change_password_target(self): + self.sys_bios.json['Actions']['#Bios.ChangePassword'].pop( + 'target') + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'attribute Actions/#Bios.ChangePassword/target', + self.sys_bios._parse_attributes) + + def test_change_password(self): + self.sys_bios.change_password('newpassword', + 'oldpassword', + 'adminpassword') + self.sys_bios._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ChangePassword', + data={'OldPassword': 'oldpassword', + 'NewPassword': 'newpassword', + 'PasswordName': 'adminpassword'}) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 1ec4faa..774e60c 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -19,6 +19,7 @@ import mock import sushy from sushy import exceptions +from sushy.resources.system import bios from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_map @@ -66,6 +67,7 @@ class SystemTestCase(base.TestCase): self.assertEqual("OK", self.sys_inst.memory_summary.health) self.assertIsNone(self.sys_inst._processors) self.assertIsNone(self.sys_inst._ethernet_interfaces) + self.assertIsNone(self.sys_inst._bios) def 
test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') @@ -379,6 +381,18 @@ class SystemTestCase(base.TestCase): self.assertIsInstance(self.sys_inst._ethernet_interfaces, ethernet_interface.EthernetInterfaceCollection) + def test_bios(self): + self.conn.get.return_value.json.reset_mock() + bios_return_value = None + with open('sushy/tests/unit/json_samples/bios.json', 'r') as f: + bios_return_value = json.loads(f.read()) + self.conn.get.return_value.json.side_effect = [bios_return_value] + + self.assertIsNone(self.sys_inst._bios) + self.assertIsInstance(self.sys_inst.bios, bios.Bios) + self.assertEqual('BIOS Configuration Current Settings', + self.sys_inst.bios.name) + class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py new file mode 100644 index 0000000..fd212c2 --- /dev/null +++ b/sushy/tests/unit/resources/test_settings.py @@ -0,0 +1,74 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +import mock + +from sushy.resources import settings +from sushy.tests.unit import base + + +class SettingsFieldTestCase(base.TestCase): + + def setUp(self): + super(SettingsFieldTestCase, self).setUp() + with open('sushy/tests/unit/json_samples/settings.json', + 'r') as f: + self.json = json.loads(f.read()) + + self.settings = settings.SettingsField() + + def test__load(self): + instance = self.settings._load(self.json, mock.Mock()) + + self.assertEqual('9234ac83b9700123cc32', + instance._etag) + self.assertEqual('2016-03-07T14:44.30-05:00', + instance.time) + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + instance._settings_object_idref.resource_uri) + self.assertEqual('Base.1.0.SettingsFailed', + instance.messages[0].message_id) + self.assertEqual('Settings update failed due to invalid value', + instance.messages[0].message) + self.assertEqual('High', + instance.messages[0].severity) + self.assertEqual('Fix the value and try again', + instance.messages[0].resolution) + self.assertEqual('arg1', + instance.messages[0].message_args[0]) + self.assertEqual('#/Attributes/ProcTurboMode', + instance.messages[0]._related_properties[0]) + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + instance._settings_object_idref.resource_uri) + + def test_commit(self): + conn = mock.Mock() + instance = self.settings._load(self.json, conn) + instance.commit(conn, {'Attributes': {'key': 'value'}}) + conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'key': 'value'}}, headers=None) + + def test_commit_with_etag(self): + conn = mock.Mock() + instance = self.settings._load(self.json, conn) + instance.commit(conn, + {'Attributes': {'key': 'value'}}, + '123') + conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'key': 'value'}}, + headers={'If-Match': '123'}) -- GitLab From e96cb4e7a4e919f43af5886c3d3caf0ec493a977 Mon 
Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Tue, 26 Jun 2018 14:01:46 +0300 Subject: [PATCH 067/303] Hide Attribute Registry property in Bios Make Attribute Registry private until we decide how to expose this in a user-friendly way. Change-Id: I45ced95546460cfa3b82e8fcf8170bd2c0614544 --- sushy/resources/system/bios.py | 2 +- sushy/tests/unit/resources/system/test_bios.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 3ccfeb9..5a4a856 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -39,7 +39,7 @@ class Bios(base.ResourceBase): description = base.Field('Description') """Human-readable description of the BIOS resource""" - attribute_registry = base.Field('AttributeRegistry') + _attribute_registry = base.Field('AttributeRegistry') """The Resource ID of the Attribute Registry for the BIOS Attributes resource """ diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 1497c07..f11dea3 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -50,7 +50,7 @@ class BiosTestCase(base.TestCase): self.assertIsNone(self.sys_bios.description) self.assertEqual('123', self.sys_bios._etag) self.assertEqual('BiosAttributeRegistryP89.v1_0_0', - self.sys_bios.attribute_registry) + self.sys_bios._attribute_registry) self.assertEqual('', self.sys_bios.attributes['AdminPhone']) self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) -- GitLab From 1831b87d83c00e0b598d8f2a192f20a301a1ddec Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Thu, 28 Jun 2018 17:42:15 +0300 Subject: [PATCH 068/303] Remove etag from Bios Previously implemented etag will not work as intended. The property @odata.etag will never be populated for the BIOS resource. The etag value should come from HTTP headers.
At the moment sushy does not support retrieving ETag from headers, so this is removed for now. Change-Id: I7a6ebaac3d4f9a8a722aad32dfaec69153e7bd3a --- sushy/resources/settings.py | 6 ++---- sushy/resources/system/bios.py | 5 +---- sushy/tests/unit/resources/system/test_bios.py | 7 ++----- sushy/tests/unit/resources/test_settings.py | 13 +------------ 4 files changed, 6 insertions(+), 25 deletions(-) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 29254fd..fb3cb2f 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -78,7 +78,7 @@ class SettingsField(base.CompositeField): """Represents the results of the last time the values of the Settings resource were applied to the server""" - def commit(self, connector, value, etag=None): + def commit(self, connector, value): """Commits new settings values The new values will be applied when the system or a service @@ -92,9 +92,7 @@ class SettingsField(base.CompositeField): the new values will not be committed """ - connector.patch(self.resource_uri, - data=value, - headers={'If-Match': etag} if etag else None) + connector.patch(self.resource_uri, data=value) @property def resource_uri(self): diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 3ccfeb9..2386138 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -56,8 +56,6 @@ class Bios(base.ResourceBase): _actions = ActionsField('Actions') - _etag = base.Field('@odata.etag') - _pending_settings_resource = None @property @@ -98,8 +96,7 @@ class Bios(base.ResourceBase): :param value: Key-value pairs for attribute name and value """ self._settings.commit(self._conn, - {'Attributes': value}, - self._etag) + {'Attributes': value}) if self._pending_settings_resource: self._pending_settings_resource.invalidate() diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 1497c07..95577ec 100644 --- 
a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -48,7 +48,6 @@ class BiosTestCase(base.TestCase): self.assertEqual('BIOS Configuration Current Settings', self.sys_bios.name) self.assertIsNone(self.sys_bios.description) - self.assertEqual('123', self.sys_bios._etag) self.assertEqual('BiosAttributeRegistryP89.v1_0_0', self.sys_bios.attribute_registry) self.assertEqual('', self.sys_bios.attributes['AdminPhone']) @@ -65,8 +64,7 @@ class BiosTestCase(base.TestCase): self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') self.sys_bios._conn.patch.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'ProcTurboMode': 'Disabled'}}, - headers={'If-Match': '123'}) + data={'Attributes': {'ProcTurboMode': 'Disabled'}}) def test_set_attribute_on_refresh(self): # make it to instantiate pending attributes @@ -83,8 +81,7 @@ class BiosTestCase(base.TestCase): self.sys_bios._conn.patch.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', data={'Attributes': {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}}, - headers={'If-Match': '123'}) + 'UsbControl': 'UsbDisabled'}}) def test_set_attributes_on_refresh(self): # make it to instantiate pending attributes diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index fd212c2..e4d8830 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -60,15 +60,4 @@ class SettingsFieldTestCase(base.TestCase): instance.commit(conn, {'Attributes': {'key': 'value'}}) conn.patch.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'key': 'value'}}, headers=None) - - def test_commit_with_etag(self): - conn = mock.Mock() - instance = self.settings._load(self.json, conn) - instance.commit(conn, - {'Attributes': {'key': 'value'}}, - '123') - conn.patch.assert_called_once_with( - 
'/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'key': 'value'}}, - headers={'If-Match': '123'}) + data={'Attributes': {'key': 'value'}}) -- GitLab From 33c4635ff3223c0dd296f00735a3801f3ee6ce92 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Tue, 10 Jul 2018 16:32:25 +0300 Subject: [PATCH 069/303] Cleanup unittest file loading Followup patch to update how test files are loaded across the whole project: - don't specify 'r' when opening a file as it is default mode - use json.load(file) instead of json.loads(string) - in some cases use file path inline instead of defining new var - use loops to load several related files to avoid repetitive code Change-Id: Ib4a66a4d97e7025849e3f84fc489fa1fa98af327 --- .../unit/resources/manager/test_manager.py | 8 +-- .../resources/sessionservice/test_session.py | 10 +-- .../sessionservice/test_sessionservice.py | 29 +++++---- .../resources/system/storage/test_volume.py | 33 +++++----- .../tests/unit/resources/system/test_bios.py | 9 ++- .../system/test_ethernet_interfaces.py | 16 ++--- .../unit/resources/system/test_processor.py | 21 ++++--- .../unit/resources/system/test_system.py | 62 +++++++++---------- sushy/tests/unit/resources/test_settings.py | 5 +- sushy/tests/unit/test_connector.py | 2 +- sushy/tests/unit/test_main.py | 12 ++-- sushy/tests/unit/test_utils.py | 4 +- 12 files changed, 104 insertions(+), 107 deletions(-) diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 1e78be7..d150709 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -25,8 +25,8 @@ class ManagerTestCase(base.TestCase): def setUp(self): super(ManagerTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/manager.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/manager.json') as
f: + self.conn.get.return_value.json.return_value = json.load(f) self.manager = manager.Manager(self.conn, '/redfish/v1/Managers/BMC', redfish_version='1.0.2') @@ -213,8 +213,8 @@ class ManagerCollectionTestCase(base.TestCase): super(ManagerCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'manager_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'manager_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.managers = manager.ManagerCollection( self.conn, '/redfish/v1/Managers', redfish_version='1.0.2') diff --git a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py index 4e8ec4b..4cc7419 100644 --- a/sushy/tests/unit/resources/sessionservice/test_session.py +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -27,8 +27,8 @@ class SessionTestCase(base.TestCase): super(SessionTestCase, self).setUp() self.conn = mock.Mock() self.auth = mock.Mock() - with open('sushy/tests/unit/json_samples/session.json', 'r') as f: - sample_json = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/session.json') as f: + sample_json = json.load(f) self.conn.get.return_value.json.return_value = sample_json self.auth._session_key = 'fake_x_auth_token' self.auth._session_uri = sample_json['@odata.id'] @@ -66,9 +66,9 @@ class SessionCollectionTestCase(base.TestCase): def setUp(self): super(SessionCollectionTestCase, self).setUp() self.conn = mock.Mock() - js_f = 'sushy/tests/unit/json_samples/session_collection.json' - with open(js_f, 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'session_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sess_col = session.SessionCollection( self.conn, '/redfish/v1/SessionService/Sessions', diff --git 
a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index 6ae2066..eacaf09 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -28,9 +28,8 @@ class SessionServiceTestCase(base.TestCase): def setUp(self): super(SessionServiceTestCase, self).setUp() self.conn = mock.Mock() - js_f = 'sushy/tests/unit/json_samples/session_service.json' - with open(js_f, 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/session_service.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sess_serv_inst = sessionservice.SessionService( self.conn, '/redfish/v1/SessionService', @@ -78,8 +77,8 @@ class SessionServiceTestCase(base.TestCase): def test_create_session(self): with open('sushy/tests/unit/json_samples/' - 'session_creation_headers.json', 'r') as f: - self.conn.post.return_value.headers = json.loads(f.read()) + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) session_key, session_uri = ( self.sess_serv_inst.create_session('foo', 'secret')) @@ -91,8 +90,8 @@ class SessionServiceTestCase(base.TestCase): def test_create_session_unknown_path(self): del self.sess_serv_inst.json['Sessions'] with open('sushy/tests/unit/json_samples/' - 'session_creation_headers.json', 'r') as f: - self.conn.post.return_value.headers = json.loads(f.read()) + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) session_key, session_uri = ( self.sess_serv_inst.create_session('foo', 'secret')) @@ -109,8 +108,8 @@ class SessionServiceTestCase(base.TestCase): def test_create_session_missing_x_auth_token(self): with open('sushy/tests/unit/json_samples/' - 'session_creation_headers.json', 'r') as f: - self.conn.post.return_value.headers = 
json.loads(f.read()) + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) self.conn.post.return_value.headers.pop('X-Auth-Token') self.assertRaisesRegex( @@ -120,8 +119,8 @@ class SessionServiceTestCase(base.TestCase): @mock.patch.object(sessionservice, 'LOG', autospec=True) def test_create_session_missing_location(self, mock_LOG): with open('sushy/tests/unit/json_samples/' - 'session_creation_headers.json', 'r') as f: - self.conn.post.return_value.headers = json.loads(f.read()) + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) self.conn.post.return_value.headers.pop('Location') self.sess_serv_inst.create_session('foo', 'bar') @@ -130,8 +129,8 @@ class SessionServiceTestCase(base.TestCase): def _setUp_sessions(self): self.conn.get.return_value.json.reset_mock() successive_return_values = [] - with open('sushy/tests/unit/json_samples/session.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + with open('sushy/tests/unit/json_samples/session.json') as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values def test_sessions(self): @@ -162,8 +161,8 @@ class SessionServiceTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = None # On refreshing the sess_serv_inst instance... 
- with open('sushy/tests/unit/json_samples/session.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/session.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sess_serv_inst.refresh(force=True) # | WHEN & THEN | diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index a595f74..9d3d0db 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -23,9 +23,8 @@ class VolumeTestCase(base.TestCase): def setUp(self): super(VolumeTestCase, self).setUp() self.conn = mock.Mock() - volume_file = 'sushy/tests/unit/json_samples/volume.json' - with open(volume_file, 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/volume.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.stor_volume = volume.Volume( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', @@ -45,8 +44,8 @@ class VolumeCollectionTestCase(base.TestCase): super(VolumeCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'volume_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.stor_vol_col = volume.VolumeCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', redfish_version='1.0.2') @@ -91,12 +90,12 @@ class VolumeCollectionTestCase(base.TestCase): self.conn.get.return_value.json.reset_mock() successive_return_values = [] - with open('sushy/tests/unit/json_samples/volume.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/volume2.json', 'r') as f: - 
successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/volume3.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) @@ -112,12 +111,12 @@ class VolumeCollectionTestCase(base.TestCase): self.conn.get.return_value.json.reset_mock() successive_return_values = [] - with open('sushy/tests/unit/json_samples/volume.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/volume2.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/volume3.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 0371005..63c4f53 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -26,11 +26,10 @@ class BiosTestCase(base.TestCase): def setUp(self): super(BiosTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/bios.json', 'r') as f: - bios_json = json.loads(f.read()) - with 
open('sushy/tests/unit/json_samples/bios_settings.json', - 'r') as f: - bios_settings_json = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/bios.json') as f: + bios_json = json.load(f) + with open('sushy/tests/unit/json_samples/bios_settings.json') as f: + bios_settings_json = json.load(f) self.conn.get.return_value.json.side_effect = [ bios_json, diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index c3cfdea..a5944d6 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -25,9 +25,9 @@ class EthernetInterfaceTestCase(base.TestCase): def setUp(self): super(EthernetInterfaceTestCase, self).setUp() self.conn = mock.Mock() - eth_file = 'sushy/tests/unit/json_samples/ethernet_interfaces.json' - with open(eth_file, 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" "12446A3B0411") @@ -54,8 +54,8 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): super(EthernetInterfaceCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'ethernet_interfaces_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'ethernet_interfaces_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_eth_col = ethernet_interface.EthernetInterfaceCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces', redfish_version='1.0.2') @@ -96,9 +96,9 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): def test_summary(self): self.assertIsNone(self.sys_eth_col._summary) 
self.conn.get.return_value.json.reset_mock() - path = 'sushy/tests/unit/json_samples/ethernet_interfaces.json' - with open(path, 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) expected_summary = { '12:44:6A:3B:04:11': sys_map.HEALTH_STATE_VALUE_MAP_REV.get( diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index b35ca19..fee022d 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -26,8 +26,8 @@ class ProcessorTestCase(base.TestCase): def setUp(self): super(ProcessorTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/processor.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_processor = processor.Processor( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', @@ -74,8 +74,8 @@ class ProcessorCollectionTestCase(base.TestCase): super(ProcessorCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'processor_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_processor_col = processor.ProcessorCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors', redfish_version='1.0.2') @@ -115,10 +115,11 @@ class ProcessorCollectionTestCase(base.TestCase): def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() successive_return_values = [] - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - 
successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/processor2.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/processor.json', + 'sushy/tests/unit/json_samples/processor2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values @@ -155,8 +156,8 @@ class ProcessorCollectionTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = None # On refreshing the sys_processor_col instance... with open('sushy/tests/unit/json_samples/' - 'processor_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_processor_col.refresh(force=True) # | WHEN & THEN | diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 774e60c..b0b5c28 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -33,8 +33,8 @@ class SystemTestCase(base.TestCase): def setUp(self): super(SystemTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/system.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_inst = system.System( self.conn, '/redfish/v1/Systems/437XR1138R2', @@ -271,9 +271,9 @@ class SystemTestCase(base.TestCase): self.assertIsNone(self.sys_inst._processors) # | GIVEN | self.conn.get.return_value.json.reset_mock() - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) 
+ with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN | actual_processors = self.sys_inst.processors # | THEN | @@ -291,16 +291,16 @@ class SystemTestCase(base.TestCase): def test_processors_on_refresh(self): # | GIVEN | - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) # On refreshing the system instance... - with open('sushy/tests/unit/json_samples/system.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_inst.invalidate() self.sys_inst.refresh(force=False) @@ -310,9 +310,9 @@ class SystemTestCase(base.TestCase): self.assertTrue(self.sys_inst._processors._is_stale) # | GIVEN | - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) @@ -320,18 +320,19 @@ class SystemTestCase(base.TestCase): def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + 
self.conn.get.return_value.json.return_value = json.load(f) # fetch processors for the first time self.sys_inst.processors successive_return_values = [] - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/processor2.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/processor.json', + 'sushy/tests/unit/json_samples/processor2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values @@ -360,13 +361,12 @@ class SystemTestCase(base.TestCase): self.conn.get.return_value.json.reset_mock() eth_coll_return_value = None eth_return_value = None - path = ('sushy/tests/unit/json_samples/' - 'ethernet_interfaces_collection.json') - with open(path, 'r') as f: - eth_coll_return_value = json.loads(f.read()) - with open('sushy/tests/unit/json_samples/ethernet_interfaces.json', - 'r') as f: - eth_return_value = (json.loads(f.read())) + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces_collection.json') as f: + eth_coll_return_value = json.load(f) + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + eth_return_value = json.load(f) self.conn.get.return_value.json.side_effect = [eth_coll_return_value, eth_return_value] @@ -384,8 +384,8 @@ class SystemTestCase(base.TestCase): def test_bios(self): self.conn.get.return_value.json.reset_mock() bios_return_value = None - with open('sushy/tests/unit/json_samples/bios.json', 'r') as f: - bios_return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/bios.json') as f: + bios_return_value = json.load(f) self.conn.get.return_value.json.side_effect = [bios_return_value] self.assertIsNone(self.sys_inst._bios) @@ -400,8 +400,8 @@ class 
SystemCollectionTestCase(base.TestCase): super(SystemCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'system_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'system_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.sys_col = system.SystemCollection( self.conn, '/redfish/v1/Systems', redfish_version='1.0.2') diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index e4d8830..a2edec8 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -24,9 +24,8 @@ class SettingsFieldTestCase(base.TestCase): def setUp(self): super(SettingsFieldTestCase, self).setUp() - with open('sushy/tests/unit/json_samples/settings.json', - 'r') as f: - self.json = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/settings.json') as f: + self.json = json.load(f) self.settings = settings.SettingsField() diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index a46ecfa..a519c06 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -221,7 +221,7 @@ class ConnectorOpTestCase(base.TestCase): def test_known_http_error(self): self.request.return_value.status_code = http_client.BAD_REQUEST - with open('sushy/tests/unit/json_samples/error.json', 'r') as f: + with open('sushy/tests/unit/json_samples/error.json') as f: self.request.return_value.json.return_value = json.load(f) with self.assertRaisesRegex(exceptions.BadRequestError, diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 8b1a969..1d186bd 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -40,8 +40,8 @@ class MainTestCase(base.TestCase): self.sess_serv.create_session.return_value = (None, None) mock_session_service.return_value = self.sess_serv 
mock_connector.return_value = self.conn - with open('sushy/tests/unit/json_samples/root.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/root.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.root = main.Sushy('http://foo.bar:1234', verify=True, auth=mock_auth) mock_connector.assert_called_once_with( @@ -68,9 +68,9 @@ class MainTestCase(base.TestCase): @mock.patch.object(connector, 'Connector', autospec=True) def test_custom_connector(self, mock_Sushy_Connector): connector_mock = mock.MagicMock() - with open('sushy/tests/unit/json_samples/root.json', 'r') as f: + with open('sushy/tests/unit/json_samples/root.json') as f: connector_mock.get.return_value.json.return_value = ( - json.loads(f.read())) + json.load(f)) main.Sushy('http://foo.bar:1234', 'foo', 'bar', connector=connector_mock) self.assertTrue(connector_mock.post.called) @@ -126,8 +126,8 @@ class BareMinimumMainTestCase(base.TestCase): super(BareMinimumMainTestCase, self).setUp() self.conn = mock.MagicMock() with open('sushy/tests/unit/json_samples/' - 'bare_minimum_root.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'bare_minimum_root.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.root = main.Sushy('http://foo.bar:1234', verify=True, auth=mock.MagicMock(), connector=self.conn) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 115bfed..c7253b8 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -46,8 +46,8 @@ class UtilsTestCase(base.TestCase): def setUp(self): super(UtilsTestCase, self).setUp() self.conn = mock.MagicMock() - with open('sushy/tests/unit/json_samples/system.json', 'r') as f: - system_json = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/system.json') as f: + system_json = json.load(f) 
self.conn.get.return_value.json.return_value = system_json self.sys_inst = system.System(self.conn, -- GitLab From 9a0823ae0c5d3ad3aa757017945b81feb9d51d08 Mon Sep 17 00:00:00 2001 From: Ha Manh Dong Date: Thu, 19 Jul 2018 09:58:27 +0700 Subject: [PATCH 070/303] Switch to use stestr for unit test According to Openstack summit session [1], stestr is maintained project to which all Openstack projects should migrate. So we should switch to stestr. [1] https://etherpad.openstack.org/p/YVR-python-pti Change-Id: Ifb9683e3c4f2146322050cca44820475bf67039f --- .gitignore | 2 +- .stestr.conf | 3 +++ .testr.conf | 7 ------- lower-constraints.txt | 2 +- test-requirements.txt | 2 +- tox.ini | 12 ++++++++---- 6 files changed, 14 insertions(+), 14 deletions(-) create mode 100644 .stestr.conf delete mode 100644 .testr.conf diff --git a/.gitignore b/.gitignore index 82d3306..0c088d0 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,7 @@ cover/ !.coveragerc .tox nosetests.xml -.testrepository +.stestr/ .venv # Translations diff --git a/.stestr.conf b/.stestr.conf new file mode 100644 index 0000000..beace55 --- /dev/null +++ b/.stestr.conf @@ -0,0 +1,3 @@ +[DEFAULT] +test_path=./sushy/tests +top_dir=. diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 6d83b3c..0000000 --- a/.testr.conf +++ /dev/null @@ -1,7 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ - OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ - OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \ - ${PYTHON:-python} -m subunit.run discover -t ./ . 
$LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/lower-constraints.txt b/lower-constraints.txt index dc38ac7..2d10fe4 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -36,7 +36,7 @@ snowballstemmer==1.2.1 Sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 stevedore==1.20.0 -testrepository==0.0.18 +stestr==2.0.0 testscenarios==0.4 testtools==2.2.0 traceback2==1.4.0 diff --git a/test-requirements.txt b/test-requirements.txt index 6573de0..1d8472a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -9,7 +9,7 @@ python-subunit>=1.0.0 # Apache-2.0/BSD sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD openstackdocstheme>=1.18.1 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 -testrepository>=0.0.18 # Apache-2.0/BSD +stestr>=2.0.0 # Apache-2.0 testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT diff --git a/tox.ini b/tox.ini index 0531d12..f2a14fe 100644 --- a/tox.ini +++ b/tox.ini @@ -13,7 +13,7 @@ deps = -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt -commands = python setup.py test --slowest --testr-args='{posargs}' +commands = stestr run --slowest {posargs} [testenv:pep8] basepython = python3 @@ -25,13 +25,17 @@ commands = {posargs} [testenv:cover] basepython = python3 +setenv = + {[testenv]setenv} + PYTHON=coverage run --source sushy --parallel-mode # After running this target, visit sushy/cover/index.html # in your browser, to see a nicer presentation report with annotated # HTML listings detailing missed lines. 
commands = coverage erase - python setup.py test --coverage --testr-args='{posargs}' - coverage report --omit=*test* - coverage html --omit=*test* + stestr run {posargs} + coverage combine + coverage html -d cover + coverage xml -o cover/coverage.xml [testenv:docs] basepython = python3 -- GitLab From fdeb8b8d44b2af50b80977676dbb65cd0dc4b2cf Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Mon, 16 Apr 2018 07:29:48 +0000 Subject: [PATCH 071/303] Add storage disk drive Adds storage disk drive resource of Redfish standard schema and also provides the attribute ``drives_max_size_bytes`` on storage which gives largest physical disk drive size available in bytes among all physical disk drives. Story: 1668487 Task: 23040 Partial-Bug: 1751143 Change-Id: Id7a5eb9af07730f11727c8b71c6852e080d75357 --- sushy/resources/base.py | 6 +- sushy/resources/system/storage/drive.py | 34 +++++++ sushy/resources/system/storage/storage.py | 71 ++++++++++++++ sushy/tests/unit/json_samples/drive.json | 46 +++++++++ sushy/tests/unit/json_samples/drive2.json | 51 ++++++++++ sushy/tests/unit/json_samples/drive3.json | 54 +++++++++++ sushy/tests/unit/json_samples/storage.json | 74 +++++++++++++++ .../resources/system/storage/test_drive.py | 40 ++++++++ .../resources/system/storage/test_storage.py | 93 +++++++++++++++++++ 9 files changed, 466 insertions(+), 3 deletions(-) create mode 100644 sushy/resources/system/storage/drive.py create mode 100644 sushy/resources/system/storage/storage.py create mode 100644 sushy/tests/unit/json_samples/drive.json create mode 100644 sushy/tests/unit/json_samples/drive2.json create mode 100644 sushy/tests/unit/json_samples/drive3.json create mode 100644 sushy/tests/unit/json_samples/storage.json create mode 100644 sushy/tests/unit/resources/system/storage/test_drive.py create mode 100644 sushy/tests/unit/resources/system/storage/test_storage.py diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 43d053f..d370443 100644 --- 
a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -224,7 +224,7 @@ class ResourceBase(object): time from here (constructor). :param connector: A Connector instance :param path: sub-URI path to the resource. - :param redfish_version: The version of RedFish. Used to construct + :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. """ self._conn = connector @@ -328,11 +328,11 @@ class ResourceCollectionBase(ResourceBase): def __init__(self, connector, path, redfish_version=None): """A class representing the base of any Redfish resource collection - It gets inherited ``ResourceBase`` and invokes the base class + It gets inherited from ``ResourceBase`` and invokes the base class constructor. :param connector: A Connector instance :param path: sub-URI path to the resource collection. - :param redfish_version: The version of RedFish. Used to construct + :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. """ super(ResourceCollectionBase, self).__init__(connector, path, diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py new file mode 100644 index 0000000..a6d0f92 --- /dev/null +++ b/sushy/resources/system/storage/drive.py @@ -0,0 +1,34 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/Drive.v1_3_0.json + +import logging + +from sushy.resources import base +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class Drive(base.ResourceBase): + """This class represents a disk drive or other physical storage medium.""" + + identity = base.Field('Id', required=True) + """The Drive identity string""" + + name = base.Field('Name') + """The name of the resource""" + + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size in bytes of this Drive""" diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py new file mode 100644 index 0000000..5fe16d6 --- /dev/null +++ b/sushy/resources/system/storage/storage.py @@ -0,0 +1,71 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Storage.v1_4_0.json + +import logging + +from sushy.resources import base +from sushy.resources.system.storage import drive +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class Storage(base.ResourceBase): + """This class represents resources that represent a storage subsystem. + + A storage subsystem represents a set of storage controllers (physical or + virtual) and the resources such as drives and volumes that can be accessed + from that subsystem. 
+ """ + + identity = base.Field('Id', required=True) + """The Storage identity string""" + + name = base.Field('Name') + """The name of the resource""" + + drives_identities = base.Field('Drives', + adapter=utils.get_members_identities) + """A tuple with the drive identities""" + + _drives_max_size_bytes = None + + def get_drive(self, drive_identity): + """Given the drive identity return a ``Drive`` object + + :param identity: The identity of the ``Drive`` + :returns: The ``Drive`` object + :raises: ResourceNotFoundError + """ + return drive.Drive(self._conn, drive_identity, + redfish_version=self.redfish_version) + + @property + def drives_max_size_bytes(self): + """Max size available in bytes among all Drives of this collection.""" + if self._drives_max_size_bytes is None: + self._drives_max_size_bytes = ( + utils.max_safe(self.get_drive(drive_id).capacity_bytes + for drive_id in self.drives_identities)) + return self._drives_max_size_bytes + + def _do_refresh(self, force=False): + """Do resource specific refresh activities + + On refresh, all sub-resources are marked as stale, i.e. + greedy-refresh not done for them unless forced by ``force`` + argument. 
+ """ + self._drives_max_size_bytes = None diff --git a/sushy/tests/unit/json_samples/drive.json b/sushy/tests/unit/json_samples/drive.json new file mode 100644 index 0000000..a3f1184 --- /dev/null +++ b/sushy/tests/unit/json_samples/drive.json @@ -0,0 +1,46 @@ +{ + "@odata.type": "#Drive.v1_2_0.Drive", + "Id": "32ADF365C6C1B7BD", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234570", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "32ADF365C6C1B7BD" + } + ], + "HotspareType": "Global", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links" + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD/Actions/Drive.SecureErase" + } + }, + "@odata.context": "/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/drive2.json b/sushy/tests/unit/json_samples/drive2.json new file mode 100644 index 0000000..ab05b6f --- /dev/null +++ b/sushy/tests/unit/json_samples/drive2.json @@ -0,0 +1,51 @@ +{ + "@odata.type": "#Drive.v1_2_0.Drive", + "Id": "35D38F11ACEF7BD3", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234567", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "35D38F11ACEF7BD3" + } + ], + "HotspareType": "None", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links", + "Volumes": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1" + } + ] + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3/Actions/Drive.SecureErase" + } + }, + "@odata.context": "/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/drive3.json b/sushy/tests/unit/json_samples/drive3.json new file mode 100644 index 0000000..9a4faf9 --- /dev/null +++ b/sushy/tests/unit/json_samples/drive3.json @@ -0,0 +1,54 @@ +{ + "@odata.type": "#Drive.v1_2_0.Drive", + "Id": "3D58ECBC375FD9F2", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234568", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "32ADF365C6C1B7BD" + } + ], + "HotspareType": "None", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links", + "Volumes": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3" + } + ] + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2/Actions/Drive.SecureErase" + } + }, + "@odata.context": "/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/storage.json b/sushy/tests/unit/json_samples/storage.json new file mode 100644 index 0000000..e272cce --- /dev/null +++ b/sushy/tests/unit/json_samples/storage.json @@ -0,0 +1,74 @@ +{ + "@odata.type": "#Storage.v1_3_0.Storage", + "Id": "1", + "Name": "Local Storage Controller", + "Description": "Integrated RAID Controller", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "StorageControllers": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1#/StorageControllers/0", + "@odata.type": "#Storage.v1_3_0.StorageController", + "MemberId": "0", + "Name": "Contoso Integrated RAID", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "345C59DBD970859C" + } + ], + "Manufacturer": "Contoso", + "Model": "12Gbs Integrated RAID", + "SerialNumber": "2M220100SL", + "PartNumber": "CT18754", + "SpeedGbps": 12, + "FirmwareVersion": "1.0.0.7", + "SupportedControllerProtocols": [ + "PCIe" + ], + "SupportedDeviceProtocols": [ + "SAS", + "SATA" + ] + } + ], + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ], + "Volumes": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes" + }, + "Links": { + "@odata.type": "#Storage.v1_0_0.Storage" + }, + "Actions": { + "@odata.type": "#Storage.v1_0_0.Actions", + "#Storage.SetEncryptionKey": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Actions/Storage.SetEncryptionKey" + } + }, + 
"@odata.context": "/redfish/v1/$metadata#Storage.Storage", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py new file mode 100644 index 0000000..30c89c4 --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json + +import mock + +from sushy.resources.system.storage import drive +from sushy.tests.unit import base + + +class DriveTestCase(base.TestCase): + + def setUp(self): + super(DriveTestCase, self).setUp() + self.conn = mock.Mock() + drive_file = 'sushy/tests/unit/json_samples/drive.json' + with open(drive_file, 'r') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.stor_drive = drive.Drive( + self.conn, + '/redfish/v1/Systems/437XR1138/Storage/1/Drives/32ADF365C6C1B7BD', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_drive._parse_attributes() + self.assertEqual('1.0.2', self.stor_drive.redfish_version) + self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) + self.assertEqual('Drive Sample', self.stor_drive.name) + self.assertEqual(899527000000, self.stor_drive.capacity_bytes) diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py new file mode 100644 index 0000000..6013320 --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json + +import mock + +from sushy.resources.system.storage import drive +from sushy.resources.system.storage import storage +from sushy.tests.unit import base + + +STORAGE_DRIVE_FILE_NAMES = [ + 'sushy/tests/unit/json_samples/drive.json', + 'sushy/tests/unit/json_samples/drive2.json', + 'sushy/tests/unit/json_samples/drive3.json' +] + + +class StorageTestCase(base.TestCase): + + def setUp(self): + super(StorageTestCase, self).setUp() + self.conn = mock.Mock() + storage_file = 'sushy/tests/unit/json_samples/storage.json' + with open(storage_file, 'r') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.storage = storage.Storage( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.storage._parse_attributes() + self.assertEqual('1.0.2', self.storage.redfish_version) + self.assertEqual('1', self.storage.identity) + self.assertEqual('Local Storage Controller', self.storage.name) + self.assertEqual( + ('/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa + ), self.storage.drives_identities) + + def test_get_drive(self): + # | WHEN | + actual_drive = self.storage.get_drive( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/' + '35D38F11ACEF7BD3') + # | THEN | + self.assertIsInstance(actual_drive, drive.Drive) + self.assertTrue(self.conn.get.return_value.json.called) + + def test_drives_max_size_bytes(self): + self.assertIsNone(self.storage._drives_max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname, 'r') as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_drives_max_size_bytes_after_refresh(self): + self.storage.refresh() + self.assertIsNone(self.storage._drives_max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname, 'r') as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) -- GitLab From d44059483cbcb14a9c38bff25ec3013721e6e56c Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Mon, 16 Apr 2018 10:51:58 +0000 Subject: [PATCH 072/303] Add system simple storage resource support Adds the simple storage resource of Redfish standard schema. This new resource represents the properties of a storage controller and its directly-attached devices. This patch introduces the property ``max_size_bytes`` of SimpleStorageCollection resource to expose the size of the largest storage size available among all directly attached devices available to the System. Also brought in the common 'Status' (comprising of Health, HealthRollup and State sub-fields) field and refactored the code base to use that field across all the Redfish resources. 
Story: 1668487 Task: 23041 Change-Id: I512c2507bf78f4a9cf1e2525fd685836387a7581 --- .../add-simple-storage-915464811737bb05.yaml | 5 + sushy/__init__.py | 1 + sushy/resources/common.py | 18 ++- sushy/resources/constants.py | 32 +++++ sushy/resources/mappings.py | 36 ++++++ sushy/resources/system/constants.py | 7 -- sushy/resources/system/ethernet_interface.py | 27 ++-- sushy/resources/system/mappings.py | 17 --- sushy/resources/system/processor.py | 15 +-- sushy/resources/system/simple_storage.py | 86 +++++++++++++ sushy/resources/system/system.py | 8 +- .../unit/json_samples/simple_storage.json | 59 +++++++++ .../simple_storage_collection.json | 13 ++ .../resources/system/storage/test_drive.py | 3 +- .../resources/system/storage/test_storage.py | 3 +- .../system/test_ethernet_interfaces.py | 12 +- .../unit/resources/system/test_processor.py | 9 +- .../resources/system/test_simple_storage.py | 115 ++++++++++++++++++ .../unit/resources/system/test_system.py | 13 +- 19 files changed, 394 insertions(+), 85 deletions(-) create mode 100644 releasenotes/notes/add-simple-storage-915464811737bb05.yaml create mode 100644 sushy/resources/constants.py create mode 100644 sushy/resources/mappings.py create mode 100644 sushy/resources/system/simple_storage.py create mode 100644 sushy/tests/unit/json_samples/simple_storage.json create mode 100644 sushy/tests/unit/json_samples/simple_storage_collection.json create mode 100644 sushy/tests/unit/resources/system/test_simple_storage.py diff --git a/releasenotes/notes/add-simple-storage-915464811737bb05.yaml b/releasenotes/notes/add-simple-storage-915464811737bb05.yaml new file mode 100644 index 0000000..b98823b --- /dev/null +++ b/releasenotes/notes/add-simple-storage-915464811737bb05.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the "SimpleStorage" to the library. It also provides the max size + available (in bytes) among all its directly attached devices. 
diff --git a/sushy/__init__.py b/sushy/__init__.py index afc6c8a..913dd05 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -17,6 +17,7 @@ import logging import pbr.version from sushy.main import Sushy +from sushy.resources.constants import * # noqa from sushy.resources.system.constants import * # noqa from sushy.resources.manager.constants import * # noqa diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 17cfe41..9e6e634 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -11,6 +11,7 @@ # under the License. from sushy.resources import base +from sushy.resources import mappings as res_maps class ActionField(base.CompositeField): @@ -23,7 +24,22 @@ class ResetActionField(ActionField): class IdRefField(base.CompositeField): - """Reference to the resource for updating settings""" + """Reference to the resource odata identity field.""" resource_uri = base.Field('@odata.id') """The unique identifier for a resource""" + + +class StatusField(base.CompositeField): + """This Field describes the status of a resource and its children. + + This field shall contain any state or health properties of a resource. + """ + health = base.MappedField('Health', res_maps.HEALTH_VALUE_MAP) + """Represents health of resource w/o considering its dependent resources""" + + health_rollup = base.MappedField('HealthRollup', res_maps.HEALTH_VALUE_MAP) + """Represents health state of resource and its dependent resources""" + + state = base.MappedField('State', res_maps.STATE_VALUE_MAP) + """Indicates the known state of the resource, such as if it is enabled.""" diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py new file mode 100644 index 0000000..19276bc --- /dev/null +++ b/sushy/resources/constants.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values comes from the Redfish System json-schema 1.0.0: +# http://redfish.dmtf.org/schemas/v1/Resource.json + +# Health related constants. +HEALTH_OK = 'ok' +HEALTH_WARNING = 'warning' +HEALTH_CRITICAL = 'critical' + +# State related constants. +STATE_ENABLED = 'enabled' +STATE_DISABLED = 'disabled' +STATE_STANDBYOFFLINE = 'standby offline' +STATE_STANDBYSPARE = 'standby spare' +STATE_INTEST = 'in test' +STATE_STARTING = 'starting' +STATE_ABSENT = 'absent' +STATE_UNAVAILABLEOFFLINE = 'unavailable offline' +STATE_DEFERRING = 'deferring' +STATE_QUIESCED = 'quiesced' +STATE_UPDATING = 'updating' diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py new file mode 100644 index 0000000..26588f2 --- /dev/null +++ b/sushy/resources/mappings.py @@ -0,0 +1,36 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources import constants as res_cons +from sushy import utils + + +STATE_VALUE_MAP = { + 'Enabled': res_cons.STATE_ENABLED, + 'Disabled': res_cons.STATE_DISABLED, + 'Absent': res_cons.STATE_ABSENT, +} + +STATE_VALUE_MAP_REV = ( + utils.revert_dictionary(STATE_VALUE_MAP)) + +HEALTH_VALUE_MAP = { + 'OK': res_cons.HEALTH_OK, + 'Warning': res_cons.HEALTH_WARNING, + 'Critical': res_cons.HEALTH_CRITICAL +} + +HEALTH_VALUE_MAP_REV = ( + utils.revert_dictionary(HEALTH_VALUE_MAP)) diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 9e80b6a..06b07b0 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -122,10 +122,3 @@ PROCESSOR_ARCH_IA_64 = 'Intel Itanium' PROCESSOR_ARCH_ARM = 'ARM' PROCESSOR_ARCH_MIPS = 'MIPS' PROCESSOR_ARCH_OEM = 'OEM-defined' - -# Health related constants. -HEALTH_STATE_ENABLED = 'enabled' -HEALTH_STATE_DISABLED = 'disabled' -HEALTH_OK = 'ok' -HEALTH_WARNING = 'warning' -HEALTH_CRITICAL = 'critical' diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index bdd4c93..b89f2ed 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -16,18 +16,12 @@ import logging from sushy.resources import base -from sushy.resources.system import constants as sys_cons -from sushy.resources.system import mappings as sys_map +from sushy.resources import common +from sushy.resources import constants as res_cons LOG = logging.getLogger(__name__) -class HealthStatusField(base.CompositeField): - state = base.MappedField( - 'State', sys_map.HEALTH_STATE_VALUE_MAP) - health = base.Field('Health') - - class EthernetInterface(base.ResourceBase): """This class adds the EthernetInterface resource""" @@ -49,7 +43,8 @@ class EthernetInterface(base.ResourceBase): speed_mbps = base.Field('SpeedMbps') """This is the current speed in Mbps of this interface.""" - status = 
HealthStatusField("Status") + status = common.StatusField("Status") + """Describes the status and health of this interface.""" class EthernetInterfaceCollection(base.ResourceCollectionBase): @@ -69,19 +64,15 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): are returned. :returns: dictionary in the format - {'aa:bb:cc:dd:ee:ff': 'Enabled', - 'aa:bb:aa:aa:aa:aa': 'Disabled'} + {'aa:bb:cc:dd:ee:ff': sushy.STATE_ENABLED, + 'aa:bb:aa:aa:aa:aa': sushy.STATE_DISABLED} """ if self._summary is None: mac_dict = {} for eth in self.get_members(): - if (eth.mac_address is not None and eth.status is not None): - if (eth.status.health == - sys_map.HEALTH_VALUE_MAP_REV.get( - sys_cons.HEALTH_OK)): - state = sys_map.HEALTH_STATE_VALUE_MAP_REV.get( - eth.status.state) - mac_dict[eth.mac_address] = state + if eth.mac_address is not None and eth.status is not None: + if eth.status.health == res_cons.HEALTH_OK: + mac_dict[eth.mac_address] = eth.status.state self._summary = mac_dict return self._summary diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index 995ba89..c9a3244 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -89,20 +89,3 @@ PROCESSOR_ARCH_VALUE_MAP = { PROCESSOR_ARCH_VALUE_MAP_REV = ( utils.revert_dictionary(PROCESSOR_ARCH_VALUE_MAP)) - -HEALTH_STATE_VALUE_MAP = { - 'Enabled': sys_cons.HEALTH_STATE_ENABLED, - 'Disabled': sys_cons.HEALTH_STATE_DISABLED, -} - -HEALTH_STATE_VALUE_MAP_REV = ( - utils.revert_dictionary(HEALTH_STATE_VALUE_MAP)) - -HEALTH_VALUE_MAP = { - 'OK': sys_cons.HEALTH_OK, - 'Warning': sys_cons.HEALTH_WARNING, - 'Critical': sys_cons.HEALTH_CRITICAL -} - -HEALTH_VALUE_MAP_REV = ( - utils.revert_dictionary(HEALTH_VALUE_MAP)) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index b094c26..1ee39e3 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -16,6 +16,7 @@ import collections 
import logging from sushy.resources import base +from sushy.resources import common from sushy.resources.system import mappings as sys_maps # Representation of Summary of Processor information @@ -45,18 +46,6 @@ class ProcessorIdField(base.CompositeField): """The processor vendor id""" -class StatusField(base.CompositeField): - - health = base.Field('Health') - """The processor health""" - - health_rollup = base.Field('HealthRollup') - """The processor health rollup""" - - state = base.Field('State') - """The processor state""" - - class Processor(base.ResourceBase): identity = base.Field('Id', required=True) @@ -89,7 +78,7 @@ class Processor(base.ResourceBase): processor_id = ProcessorIdField('ProcessorId') """The processor id""" - status = StatusField('Status') + status = common.StatusField('Status') """The processor status""" total_cores = base.Field('TotalCores', adapter=int) diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py new file mode 100644 index 0000000..f615400 --- /dev/null +++ b/sushy/resources/system/simple_storage.py @@ -0,0 +1,86 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/SimpleStorage.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources import constants as res_cons +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class DeviceListField(base.ListField): + """The storage device/s associated with SimpleStorage.""" + + name = base.Field('Name', required=True) + """The name of the storage device""" + + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size of the storage device.""" + + status = common.StatusField('Status') + """Describes the status and health of a storage device.""" + + +class SimpleStorage(base.ResourceBase): + """This class represents a simple storage. + + It represents the properties of a storage controller and its + directly-attached devices. A storage device can be a disk drive or optical + media device. + """ + + identity = base.Field('Id', required=True) + """The SimpleStorage identity string""" + + name = base.Field('Name') + """The name of the resource""" + + devices = DeviceListField('Devices', default=[]) + """The storage devices associated with this resource.""" + + +class SimpleStorageCollection(base.ResourceCollectionBase): + """Represents a collection of simple storage associated with system.""" + + _max_size_bytes = None + + @property + def _resource_type(self): + return SimpleStorage + + @property + def max_size_bytes(self): + """Max size available (in bytes) among all enabled device resources. + + It returns the cached value until it (or its parent resource) is + refreshed. 
+ """ + if self._max_size_bytes is None: + self._max_size_bytes = ( + utils.max_safe(device.capacity_bytes + for simpl_stor in self.get_members() + for device in simpl_stor.devices + if (device.status.state == + res_cons.STATE_ENABLED))) + return self._max_size_bytes + + def _do_refresh(self, force=False): + # Note(deray): undefine the attribute here for fresh creation in + # subsequent calls to it's exposed property. + self._max_size_bytes = None diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index b976da2..be109bc 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -63,12 +63,6 @@ class MemorySummaryField(base.CompositeField): """ -class StatusField(base.CompositeField): - state = base.Field('State') - health = base.Field('Health') - health_rollup = base.Field('HealthRollup') - - class System(base.ResourceBase): asset_tag = base.Field('AssetTag') @@ -112,7 +106,7 @@ class System(base.ResourceBase): sku = base.Field('SKU') """The system stock-keeping unit""" - status = StatusField('Status') + status = common.StatusField('Status') """The system status""" # TODO(lucasagomes): Create mappings for the system_type diff --git a/sushy/tests/unit/json_samples/simple_storage.json b/sushy/tests/unit/json_samples/simple_storage.json new file mode 100644 index 0000000..3e55a46 --- /dev/null +++ b/sushy/tests/unit/json_samples/simple_storage.json @@ -0,0 +1,59 @@ +{ + "@odata.type": "#SimpleStorage.v1_2_0.SimpleStorage", + "Id": "1", + "Name": "Simple Storage Controller", + "Description": "System SATA", + "UefiDevicePath": "Acpi(PNP0A03,0)/Pci(1F|1)/Ata(Primary,Master)/HD(Part3, Sig00110011)", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK", + "HealthRollup": "Warning" + }, + "Devices": [ + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 1", + "Manufacturer": "Contoso", + "Model": "3000GT8", + "CapacityBytes": 8000000000000, + "Status": { + 
"@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 2", + "Manufacturer": "Contoso", + "Model": "3000GT7", + "CapacityBytes": 4000000000000, + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "Critical" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 3", + "CapacityBytes": 9000000000000, + "Status": { + "@odata.type": "#Resource.Status", + "State": "Absent" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 4", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Absent" + } + } + ], + "@odata.context": "/redfish/v1/$metadata#SimpleStorage.SimpleStorage", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/simple_storage_collection.json b/sushy/tests/unit/json_samples/simple_storage_collection.json new file mode 100644 index 0000000..cb352af --- /dev/null +++ b/sushy/tests/unit/json_samples/simple_storage_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#SimpleStorageCollection.SimpleStorageCollection", + "Name": "Simple Storage Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage/1" + } + ], + "@odata.context": "/redfish/v1/$metadata#SimpleStorageCollection.SimpleStorageCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index 30c89c4..8d3edb0 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -23,8 +23,7 @@ class DriveTestCase(base.TestCase): def setUp(self): super(DriveTestCase, self).setUp() self.conn = mock.Mock() - drive_file = 'sushy/tests/unit/json_samples/drive.json' - with open(drive_file, 'r') as f: + with open('sushy/tests/unit/json_samples/drive.json') as f: self.conn.get.return_value.json.return_value = json.load(f) self.stor_drive = drive.Drive( diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 6013320..cdda80e 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -31,8 +31,7 @@ class StorageTestCase(base.TestCase): def setUp(self): super(StorageTestCase, self).setUp() self.conn = mock.Mock() - storage_file = 'sushy/tests/unit/json_samples/storage.json' - with open(storage_file, 'r') as f: + with open('sushy/tests/unit/json_samples/storage.json') as f: self.conn.get.return_value.json.return_value = json.load(f) self.storage = storage.Storage( diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index a5944d6..ebb5710 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -14,9 +14,8 @@ import json import mock -from sushy.resources.system import constants as sys_cons +from sushy.resources import constants as res_cons from sushy.resources.system import ethernet_interface -from sushy.resources.system import mappings as sys_map from sushy.tests.unit import base @@ -43,8 +42,8 @@ class 
EthernetInterfaceTestCase(base.TestCase): self.assertEqual( '12:44:6A:3B:04:11', self.sys_eth.permanent_mac_address) self.assertEqual('12:44:6A:3B:04:11', self.sys_eth.mac_address) - self.assertEqual('enabled', self.sys_eth.status.state) - self.assertEqual('OK', self.sys_eth.status.health) + self.assertEqual(res_cons.STATE_ENABLED, self.sys_eth.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_eth.status.health) self.assertEqual(1000, self.sys_eth.speed_mbps) @@ -99,9 +98,6 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/' 'ethernet_interfaces.json') as f: self.conn.get.return_value.json.return_value = json.load(f) - expected_summary = { - '12:44:6A:3B:04:11': - sys_map.HEALTH_STATE_VALUE_MAP_REV.get( - sys_cons.HEALTH_STATE_ENABLED)} + expected_summary = {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED} actual_summary = self.sys_eth_col.summary self.assertEqual(expected_summary, actual_summary) diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index fee022d..f06d4aa 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -17,6 +17,7 @@ import json import mock import sushy +from sushy.resources import constants as res_cons from sushy.resources.system import processor from sushy.tests.unit import base @@ -63,9 +64,11 @@ class ProcessorTestCase(base.TestCase): self.assertEqual(3700, self.sys_processor.max_speed_mhz) self.assertEqual(8, self.sys_processor.total_cores) self.assertEqual(16, self.sys_processor.total_threads) - self.assertEqual('Enabled', self.sys_processor.status.state) - self.assertEqual('OK', self.sys_processor.status.health) - self.assertEqual('OK', self.sys_processor.status.health_rollup) + self.assertEqual(res_cons.STATE_ENABLED, + self.sys_processor.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_processor.status.health) + 
self.assertEqual(res_cons.HEALTH_OK, + self.sys_processor.status.health_rollup) class ProcessorCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py new file mode 100644 index 0000000..e1716af --- /dev/null +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -0,0 +1,115 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy.resources import constants as res_cons +from sushy.resources.system import simple_storage +from sushy.tests.unit import base + + +class SimpleStorageTestCase(base.TestCase): + + def setUp(self): + super(SimpleStorageTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.simpl_stor = simple_storage.SimpleStorage( + self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.simpl_stor._parse_attributes() + self.assertEqual('1.0.2', self.simpl_stor.redfish_version) + self.assertEqual('1', self.simpl_stor.identity) + self.assertEqual('Simple Storage Controller', self.simpl_stor.name) + self.assertEqual(8000000000000, + self.simpl_stor.devices[0].capacity_bytes) + self.assertEqual(4000000000000, + self.simpl_stor.devices[1].capacity_bytes) + self.assertEqual(res_cons.STATE_ENABLED, + 
self.simpl_stor.devices[0].status.state) + self.assertEqual(res_cons.STATE_ABSENT, + self.simpl_stor.devices[2].status.state) + self.assertEqual(res_cons.HEALTH_OK, + self.simpl_stor.devices[0].status.health) + + +class SimpleStorageCollectionTestCase(base.TestCase): + + def setUp(self): + super(SimpleStorageCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.simpl_stor_col = simple_storage.SimpleStorageCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.simpl_stor_col._parse_attributes() + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1',), + self.simpl_stor_col.members_identities) + + @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) + def test_get_member(self, SimpleStorage_mock): + self.simpl_stor_col.get_member( + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1') + SimpleStorage_mock.assert_called_once_with( + self.simpl_stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + redfish_version=self.simpl_stor_col.redfish_version) + + @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) + def test_get_members(self, SimpleStorage_mock): + members = self.simpl_stor_col.get_members() + calls = [ + mock.call(self.simpl_stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + redfish_version=self.simpl_stor_col.redfish_version), + ] + SimpleStorage_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) + + def test_max_size_bytes(self): + self.assertIsNone(self.simpl_stor_col._max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + 
self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_size_bytes_after_refresh(self): + self.simpl_stor_col.refresh() + self.assertIsNone(self.simpl_stor_col._max_size_bytes) + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index b0b5c28..5bcd93f 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -19,8 +19,8 @@ import mock import sushy from sushy import exceptions +from sushy.resources import constants as res_cons from sushy.resources.system import bios -from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor @@ -58,9 +58,10 @@ class SystemTestCase(base.TestCase): self.assertEqual('Physical', self.sys_inst.system_type) self.assertEqual('38947555-7742-3448-3784-823347823834', self.sys_inst.uuid) - self.assertEqual('Enabled', self.sys_inst.status.state) - self.assertEqual('OK', self.sys_inst.status.health) - self.assertEqual('OK', self.sys_inst.status.health_rollup) + self.assertEqual(res_cons.STATE_ENABLED, self.sys_inst.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_inst.status.health) + self.assertEqual(res_cons.HEALTH_OK, + self.sys_inst.status.health_rollup) self.assertEqual(sushy.SYSTEM_POWER_STATE_ON, self.sys_inst.power_state) 
self.assertEqual(96, self.sys_inst.memory_summary.size_gib) @@ -374,9 +375,7 @@ class SystemTestCase(base.TestCase): self.assertIsNone(self.sys_inst._ethernet_interfaces) actual_macs = self.sys_inst.ethernet_interfaces.summary expected_macs = ( - {'12:44:6A:3B:04:11': - sys_map.HEALTH_STATE_VALUE_MAP_REV.get( - sys_cons.HEALTH_STATE_ENABLED)}) + {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED}) self.assertEqual(expected_macs, actual_macs) self.assertIsInstance(self.sys_inst._ethernet_interfaces, ethernet_interface.EthernetInterfaceCollection) -- GitLab From e12309f4d2b554acab63a3174ea67410e5cb72b6 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 19 Jul 2018 20:40:57 +0000 Subject: [PATCH 073/303] Update reno for stable/rocky Change-Id: I8ba2c40ee2a147de028ae5de67cd4165670781ed --- releasenotes/source/index.rst | 1 + releasenotes/source/rocky.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/rocky.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index 6e99bf6..4c1d79a 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,5 +6,6 @@ :maxdepth: 1 unreleased + rocky queens pike diff --git a/releasenotes/source/rocky.rst b/releasenotes/source/rocky.rst new file mode 100644 index 0000000..40dd517 --- /dev/null +++ b/releasenotes/source/rocky.rst @@ -0,0 +1,6 @@ +=================================== + Rocky Series Release Notes +=================================== + +.. release-notes:: + :branch: stable/rocky -- GitLab From 893ccec796b6c161a0d9db28e90ceed75f311b67 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Thu, 26 Jul 2018 11:38:47 +0300 Subject: [PATCH 074/303] Add DictionaryField class to resource base Add `DictionaryField` class capable of handling dictionaries in resource body JSON. This is necessary to be able to parse resources like Message Registry: "Messages": { "Success": { "Message": "Completed successfully", "Severity": "OK", [...] 
}, "Failure": { "Message": "Failed", "Severity": "Critical", [...] } } Story: 2001791 Task: 23226 Change-Id: I8a0b6fe2dda4c244be4b7a0abaf601f533dd30f9 --- sushy/resources/base.py | 33 +++++++++++++++++++++++++ sushy/tests/unit/resources/test_base.py | 17 ++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index d370443..1ad5147 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -187,6 +187,39 @@ class ListField(Field): return instances +class DictionaryField(Field): + """Base class for fields consisting of dictionary of several sub-fields.""" + + def __init__(self, *args, **kwargs): + super(DictionaryField, self).__init__(*args, **kwargs) + self._subfields = dict(_collect_fields(self)) + + def _load(self, body, resource, nested_in=None): + """Load the dictionary. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new dictionary object containing subfields. 
+ """ + nested_in = (nested_in or []) + self._path + values = super(DictionaryField, self)._load(body, resource) + if values is None: + return None + + instances = {} + for key, value in values.items(): + instance_value = copy.copy(self) + for attr, field in self._subfields.items(): + # Hide the Field object behind the real value + setattr(instance_value, attr, field._load(value, + resource, + nested_in)) + instances[key] = instance_value + + return instances + + class MappedField(Field): """Field taking real value from a mapping.""" diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 8f92ba7..9509f02 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -165,7 +165,11 @@ TEST_JSON = { 'String': 'a fourth string', 'Integer': 2 } - ] + ], + 'Dictionary': { + 'key1': {'property_a': 'value1', 'property_b': 'value2'}, + 'key2': {'property_a': 'value3', 'property_b': 'value4'} + } } @@ -187,11 +191,17 @@ class TestListField(resource_base.ListField): integer = resource_base.Field('Integer', adapter=int) +class TestDictionaryField(resource_base.DictionaryField): + property_a = resource_base.Field('property_a') + property_b = resource_base.Field('property_b') + + class ComplexResource(resource_base.ResourceBase): string = resource_base.Field('String', required=True) integer = resource_base.Field('Integer', adapter=int) nested = NestedTestField('Nested') field_list = TestListField('ListField') + dictionary = TestDictionaryField('Dictionary') non_existing_nested = NestedTestField('NonExistingNested') non_existing_mapped = resource_base.MappedField('NonExistingMapped', MAPPING) @@ -217,6 +227,11 @@ class FieldTestCase(base.TestCase): self.assertEqual('a third string', self.test_resource.field_list[0].string) self.assertEqual(2, self.test_resource.field_list[1].integer) + self.assertEqual(2, len(self.test_resource.dictionary)) + self.assertEqual('value1', + 
self.test_resource.dictionary['key1'].property_a) + self.assertEqual('value4', + self.test_resource.dictionary['key2'].property_b) self.assertIsNone(self.test_resource.non_existing_nested) self.assertIsNone(self.test_resource.non_existing_mapped) -- GitLab From 6bddd338edac3d83fb6438e12bf4767c3f9fb48c Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Thu, 26 Jul 2018 16:59:21 +0300 Subject: [PATCH 075/303] Add Redfish Message Registry resource This is one of the first patches to support Redfish Message Registry. It delivers only Redfish Message Registry parsing to sushy fields. Any other processing will be added in followup patches. Redfish Message Registry will be used by sushy internally, as of this writing it is not intended to be exposed to sushy users. Change-Id: I9a1735230a8328fd8365e375889c6ab066c3df16 Story: 2001791 Task: 23062 --- sushy/resources/constants.py | 12 ++- sushy/resources/mappings.py | 11 +++ sushy/resources/registry/__init__.py | 0 sushy/resources/registry/messageregistry.py | 80 +++++++++++++++++++ .../unit/json_samples/message_registry.json | 40 ++++++++++ .../tests/unit/resources/registry/__init__.py | 0 .../registry/test_messageregistry.py | 67 ++++++++++++++++ 7 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 sushy/resources/registry/__init__.py create mode 100644 sushy/resources/registry/messageregistry.py create mode 100644 sushy/tests/unit/json_samples/message_registry.json create mode 100644 sushy/tests/unit/resources/registry/__init__.py create mode 100644 sushy/tests/unit/resources/registry/test_messageregistry.py diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index 19276bc..ea8a279 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -11,7 +11,8 @@ # under the License. 
# Values comes from the Redfish System json-schema 1.0.0: -# http://redfish.dmtf.org/schemas/v1/Resource.json +# http://redfish.dmtf.org/schemas/v1/Resource.json or +# https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json # Health related constants. HEALTH_OK = 'ok' @@ -30,3 +31,12 @@ STATE_UNAVAILABLEOFFLINE = 'unavailable offline' STATE_DEFERRING = 'deferring' STATE_QUIESCED = 'quiesced' STATE_UPDATING = 'updating' + +# Message Registry message parameter type related constants. +PARAMTYPE_STRING = 'string' +PARAMTYPE_NUMBER = 'number' + +# Severity related constants +SEVERITY_OK = 'ok' +SEVERITY_WARNING = 'warning' +SEVERITY_CRITICAL = 'critical' diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index 26588f2..c081982 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -34,3 +34,14 @@ HEALTH_VALUE_MAP = { HEALTH_VALUE_MAP_REV = ( utils.revert_dictionary(HEALTH_VALUE_MAP)) + +PARAMTYPE_MAP = { + 'string': res_cons.PARAMTYPE_STRING, + 'number': res_cons.PARAMTYPE_NUMBER +} + +SEVERITY_VALUE_MAP = { + 'OK': res_cons.SEVERITY_OK, + 'Warning': res_cons.SEVERITY_WARNING, + 'Critical': res_cons.SEVERITY_CRITICAL +} diff --git a/sushy/resources/registry/__init__.py b/sushy/resources/registry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/registry/messageregistry.py b/sushy/resources/registry/messageregistry.py new file mode 100644 index 0000000..a5d80b5 --- /dev/null +++ b/sushy/resources/registry/messageregistry.py @@ -0,0 +1,80 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json + + +from sushy.resources import base +from sushy.resources import mappings as res_maps + + +class MessageDictionaryField(base.DictionaryField): + + description = base.Field('Description', required=True) + """Indicates how and when the message is returned by the Redfish service""" + + message = base.Field('Message', required=True) + """Template text of the message + + Template can include placeholders for message arguments in form + % where denotes a position passed from MessageArgs. + """ + + number_of_args = base.Field('NumberOfArgs', required=True) + """Number of arguments to be expected to be passed in as MessageArgs + for this message + """ + + param_types = base.Field('ParamTypes', + adapter=lambda x: + [res_maps.PARAMTYPE_MAP[v] for v in x]) + """Mapped MessageArg types, in order, for the message""" + + resolution = base.Field('Resolution', required=True) + """Suggestions on how to resolve the situation that caused the error""" + + severity = base.MappedField('Severity', + res_maps.SEVERITY_VALUE_MAP, + required=True) + """Mapped severity of the message""" + + +class MessageRegistry(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The Message registry identity string""" + + name = base.Field('Name', required=True) + """The name of the message registry""" + + description = base.Field('Description') + """Human-readable description of the message registry""" + + language = base.Field('Language', required=True) + """RFC 5646 compliant language code for the registry""" + + owning_entity = base.Field('OwningEntity', required=True) + """Organization or company that publishes this registry""" + + registry_prefix = base.Field('RegistryPrefix', required=True) + """Prefix used in messageIDs which uniquely identifies all of + 
the messages in this registry as belonging to this registry + """ + + registry_version = base.Field('RegistryVersion', required=True) + """Message registry version which is used in the middle portion + of a messageID + """ + + messages = MessageDictionaryField('Messages') + """List of messages in this registry""" diff --git a/sushy/tests/unit/json_samples/message_registry.json b/sushy/tests/unit/json_samples/message_registry.json new file mode 100644 index 0000000..8b612a2 --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry.json @@ -0,0 +1,40 @@ +{ + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Test.1.0.0", + "Name": "Test Message Registry", + "Language": "en", + "Description": "This registry defines messages for sushy testing", + "RegistryPrefix": "Test", + "RegistryVersion": "1.0.0", + "OwningEntity": "sushy", + "Messages": { + "Success": { + "Description": "Everything OK", + "Message": "Everything done successfully.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "Failed": { + "Description": "Nothing is OK", + "Message": "The property %1 broke everything.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Panic" + }, + "TooBig": { + "Description": "Value too big", + "Message": "Property's %1 value cannot be greater than %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Try again" + } + } +} diff --git a/sushy/tests/unit/resources/registry/__init__.py b/sushy/tests/unit/resources/registry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/registry/test_messageregistry.py b/sushy/tests/unit/resources/registry/test_messageregistry.py new file mode 100644 index 0000000..59e2fc0 --- /dev/null +++ b/sushy/tests/unit/resources/registry/test_messageregistry.py @@ -0,0 +1,67 @@ +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import json + +import mock + +from sushy.resources import constants as res_cons +from sushy.resources.registry import messageregistry +from sushy.tests.unit import base + + +class MessageRegistryTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.registry = messageregistry.MessageRegistry( + self.conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.registry._parse_attributes() + self.assertEqual('Test.1.0.0', self.registry.identity) + self.assertEqual('Test Message Registry', self.registry.name) + self.assertEqual('en', self.registry.language) + self.assertEqual('This registry defines messages for sushy testing', + self.registry.description) + self.assertEqual('Test', self.registry.registry_prefix) + self.assertEqual('1.0.0', self.registry.registry_version) + self.assertEqual('sushy', self.registry.owning_entity) + self.assertEqual(3, len(self.registry.messages)) + self.assertEqual('Everything OK', + self.registry.messages['Success'].description) + self.assertEqual('Everything done successfully.', + self.registry.messages['Success'].message) + self.assertEqual(res_cons.SEVERITY_OK, + self.registry.messages['Success'].severity) + self.assertEqual(0, 
self.registry.messages['Success'].number_of_args) + self.assertEqual(2, len(self.registry.messages['TooBig'].param_types)) + self.assertEqual(res_cons.PARAMTYPE_STRING, + self.registry.messages['TooBig'].param_types[0]) + self.assertEqual(res_cons.PARAMTYPE_NUMBER, + self.registry.messages['TooBig'].param_types[1]) + self.assertEqual('Panic', self.registry.messages['Failed'].resolution) + + def test__parse_attribtues_unknown_param_type(self): + self.registry.json['Messages']['Failed']['ParamTypes'] = \ + ['unknown_type'] + self.assertRaisesRegex(KeyError, + 'unknown_type', + self.registry._parse_attributes) -- GitLab From b8b3a205d060e8ccbbb3861454fa1a97be1a7620 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Fri, 3 Aug 2018 06:03:08 +0200 Subject: [PATCH 076/303] d/control: Use team+openstack@tracker.debian.org as maintainer --- debian/changelog | 1 + debian/control | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index f38c014..0fbc621 100644 --- a/debian/changelog +++ b/debian/changelog @@ -2,6 +2,7 @@ python-sushy (1.3.1-3) UNRELEASED; urgency=medium * d/control: Add trailing tilde to min version depend to allow backports + * d/control: Use team+openstack@tracker.debian.org as maintainer -- Ondřej Nový Tue, 27 Feb 2018 16:40:03 +0100 diff --git a/debian/control b/debian/control index fd81074..bc29bbe 100644 --- a/debian/control +++ b/debian/control @@ -1,7 +1,7 @@ Source: python-sushy Section: python Priority: optional -Maintainer: PKG OpenStack +Maintainer: Debian OpenStack Uploaders: Thomas Goirand , Build-Depends: -- GitLab From 9becfd45ea0039438cdf10c641f8f1a4b104de37 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Mon, 13 Aug 2018 17:04:52 +0300 Subject: [PATCH 077/303] Cleanup docstring for removed etag param Missed this in I7a6ebaac3d4f9a8a722aad32dfaec69153e7bd3a Change-Id: Iad087e6624d5420a5e3009c0c4bd5aac3ea7162d --- sushy/resources/settings.py | 3 --- 1 file changed, 3 
deletions(-) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index fb3cb2f..172cbc4 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -87,9 +87,6 @@ class SettingsField(base.CompositeField): :param connector: A Connector instance :param value: Value representing JSON whose structure is specific to each resource and the caller must format it correctly - :param etag: Optional ETag of resource version to update. If - this ETag is provided and it does not match on server, then - the new values will not be committed """ connector.patch(self.resource_uri, data=value) -- GitLab From 06ce65ff3a5095bba4e65824c166bb057b1c30c5 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Tue, 17 Apr 2018 09:59:30 +0000 Subject: [PATCH 078/303] Add system storage resource support Adds the system storage resource of Redfish standard schema. This new resource represents the properties of a storage subsystem that represents a set of storage resources consisting of storage controllers (physical or virtual), volumes and physical disk drives. The storage volumes are built from physical disks managed by the controllers. This patch introduces the properties ``max_volume_size_bytes`` and ``max_drive_size_bytes`` from the StorageCollection resource to expose the sizes of the largest storage devices (volumes and drives) available to the System. 
Story: 1668487 Task: 12506 Change-Id: If2b5114a04e2de91d3f126cd8a9af3c9c62837df --- .../notes/add-storage-da766d3dbf9fb385.yaml | 6 + sushy/resources/system/storage/storage.py | 89 ++++++- .../unit/json_samples/storage_collection.json | 13 + .../resources/system/storage/test_storage.py | 229 +++++++++++++++++- .../resources/system/test_simple_storage.py | 10 +- 5 files changed, 331 insertions(+), 16 deletions(-) create mode 100644 releasenotes/notes/add-storage-da766d3dbf9fb385.yaml create mode 100644 sushy/tests/unit/json_samples/storage_collection.json diff --git a/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml b/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml new file mode 100644 index 0000000..8bf426f --- /dev/null +++ b/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds the Storage resource to the library. It also provides the + max size available (in bytes) of drives and volumes that can be + accessed from storage. diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 5fe16d6..5a35c88 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -17,13 +17,15 @@ import logging from sushy.resources import base from sushy.resources.system.storage import drive +from sushy.resources.system.storage import volume from sushy import utils + LOG = logging.getLogger(__name__) class Storage(base.ResourceBase): - """This class represents resources that represent a storage subsystem. + """This class represents the storage subsystem resources. 
A storage subsystem represents a set of storage controllers (physical or virtual) and the resources such as drives and volumes that can be accessed @@ -41,6 +43,8 @@ class Storage(base.ResourceBase): """A tuple with the drive identities""" _drives_max_size_bytes = None + _drives = None + _volumes = None # reference to VolumeCollection instance def get_drive(self, drive_identity): """Given the drive identity return a ``Drive`` object @@ -52,20 +56,89 @@ class Storage(base.ResourceBase): return drive.Drive(self._conn, drive_identity, redfish_version=self.redfish_version) + @property + def drives(self): + """Return a list of `Drive` objects present in the storage resource. + + It is set once when the first time it is queried. On subsequent + invocations, it returns a cached list of `Drives` objects until it is + marked stale. + + :returns: A list of `Drive` objects + :raises: ResourceNotFoundError + """ + if self._drives is None: + self._drives = [ + self.get_drive(id_) for id_ in self.drives_identities] + return self._drives + @property def drives_max_size_bytes(self): """Max size available in bytes among all Drives of this collection.""" if self._drives_max_size_bytes is None: self._drives_max_size_bytes = ( - utils.max_safe(self.get_drive(drive_id).capacity_bytes - for drive_id in self.drives_identities)) + utils.max_safe(drv.capacity_bytes for drv in self.drives)) return self._drives_max_size_bytes - def _do_refresh(self, force=False): - """Do resource specific refresh activities + @property + def volumes(self): + """Property to reference `VolumeCollection` instance - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done at that + point). Here only the actual refresh of the sub-resource happens, + if resource is stale. 
""" + if self._volumes is None: + self._volumes = volume.VolumeCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), + redfish_version=self.redfish_version) + + self._volumes.refresh(force=False) + return self._volumes + + def _do_refresh(self, force=False): + """Do resource specific refresh activities.""" + # Note(deray): undefine the attribute here for fresh evaluation in + # subsequent calls to it's exposed property. self._drives_max_size_bytes = None + self._drives = None + # invalidate the nested resource + if self._volumes is not None: + self._volumes.invalidate(force) + + +class StorageCollection(base.ResourceCollectionBase): + """This class represents the collection of Storage resources""" + + _max_drive_size_bytes = None + _max_volume_size_bytes = None + + @property + def _resource_type(self): + return Storage + + @property + def max_drive_size_bytes(self): + """Max size available (in bytes) among all device resources.""" + if self._max_drive_size_bytes is None: + self._max_drive_size_bytes = max( + storage_.drives_max_size_bytes + for storage_ in self.get_members()) + return self._max_drive_size_bytes + + @property + def max_volume_size_bytes(self): + """Max size available (in bytes) among all Volumes under this.""" + if self._max_volume_size_bytes is None: + self._max_volume_size_bytes = max( + storage_.volumes.max_size_bytes + for storage_ in self.get_members()) + return self._max_volume_size_bytes + + def _do_refresh(self, force=False): + """Do resource specific refresh activities""" + # Note(deray): undefine the attributes here for fresh evaluation in + # subsequent calls to their exposed properties. 
+ self._max_drive_size_bytes = None + self._max_volume_size_bytes = None diff --git a/sushy/tests/unit/json_samples/storage_collection.json b/sushy/tests/unit/json_samples/storage_collection.json new file mode 100644 index 0000000..16155fd --- /dev/null +++ b/sushy/tests/unit/json_samples/storage_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#StorageCollection.StorageCollection", + "Name": "Storage Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1" + } + ], + "@odata.context": "/redfish/v1/$metadata#StorageCollection.StorageCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index cdda80e..2f9ce54 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -16,6 +16,7 @@ import mock from sushy.resources.system.storage import drive from sushy.resources.system.storage import storage +from sushy.resources.system.storage import volume from sushy.tests.unit import base @@ -25,6 +26,13 @@ STORAGE_DRIVE_FILE_NAMES = [ 'sushy/tests/unit/json_samples/drive3.json' ] +STORAGE_VOLUME_FILE_NAMES = [ + 'sushy/tests/unit/json_samples/volume_collection.json', + 'sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json' +] + class StorageTestCase(base.TestCase): @@ -59,6 +67,58 @@ class StorageTestCase(base.TestCase): self.assertIsInstance(actual_drive, drive.Drive) self.assertTrue(self.conn.get.return_value.json.called) + @mock.patch.object(drive, 'Drive', autospec=True) + def test_drives(self, 
Drive_mock): + # | WHEN | + all_drives = self.storage.drives + # | THEN | + calls = [ + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa + redfish_version=self.storage.redfish_version), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa + redfish_version=self.storage.redfish_version), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa + redfish_version=self.storage.redfish_version), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa + redfish_version=self.storage.redfish_version) + ] + Drive_mock.assert_has_calls(calls) + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + self.assertIsInstance(all_drives[0], drive.Drive.__class__) + + # returning cached value + Drive_mock.reset_mock() + # | WHEN | + all_drives = self.storage.drives + # | THEN | + self.assertFalse(Drive_mock.called) + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + self.assertIsInstance(all_drives[0], drive.Drive.__class__) + + def test_drives_after_refresh(self): + self.storage.refresh() + self.assertIsNone(self.storage._drives) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + all_drives = self.storage.drives + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + for drv in all_drives: + self.assertIsInstance(drv, drive.Drive) + def test_drives_max_size_bytes(self): self.assertIsNone(self.storage._drives_max_size_bytes) self.conn.get.return_value.json.reset_mock() @@ -66,7 +126,7 @@ class StorageTestCase(base.TestCase): successive_return_values = [] # repeating the 3rd one to provide mock data for 4th iteration. for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: - with open(fname, 'r') as f: + with open(fname) as f: successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values @@ -85,8 +145,173 @@ class StorageTestCase(base.TestCase): successive_return_values = [] # repeating the 3rd one to provide mock data for 4th iteration. 
for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: - with open(fname, 'r') as f: + with open(fname) as f: successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + + def test_volumes(self): + # check for the underneath variable value + self.assertIsNone(self.storage._volumes) + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_volumes = self.storage.volumes + # | THEN | + self.assertIsInstance(actual_volumes, + volume.VolumeCollection) + self.conn.get.return_value.json.assert_called_once_with() + + def test_volumes_cached(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # invoke it once + actual_volumes = self.storage.volumes + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_volumes, + self.storage.volumes) + self.conn.get.return_value.json.assert_not_called() + + def test_volumes_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.storage.volumes, + volume.VolumeCollection) + + # On refreshing the system instance... 
+ with open('sushy/tests/unit/json_samples/storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.storage.invalidate() + self.storage.refresh(force=False) + + # | WHEN & THEN | + self.assertIsNotNone(self.storage._volumes) + self.assertTrue(self.storage._volumes._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.storage.volumes, + volume.VolumeCollection) + self.assertFalse(self.storage._volumes._is_stale) + + +class StorageCollectionTestCase(base.TestCase): + + def setUp(self): + super(StorageCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.stor_col = storage.StorageCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_col._parse_attributes() + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/Storage/1',), + self.stor_col.members_identities) + + @mock.patch.object(storage, 'Storage', autospec=True) + def test_get_member(self, Storage_mock): + self.stor_col.get_member( + '/redfish/v1/Systems/437XR1138R2/Storage/1') + Storage_mock.assert_called_once_with( + self.stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1', + redfish_version=self.stor_col.redfish_version) + + @mock.patch.object(storage, 'Storage', autospec=True) + def test_get_members(self, Storage_mock): + members = self.stor_col.get_members() + Storage_mock.assert_called_once_with( + self.stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1', + redfish_version=self.stor_col.redfish_version) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) + + def test_max_drive_size_bytes(self): + 
self.assertIsNone(self.stor_col._max_drive_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_drive_size_bytes_after_refresh(self): + self.stor_col.refresh(force=False) + self.assertIsNone(self.stor_col._max_drive_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + + def test_max_volume_size_bytes(self): + self.assertIsNone(self.stor_col._max_volume_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_volume_size_bytes_after_refresh(self): + self.stor_col.refresh(force=False) + self.assertIsNone(self.stor_col._max_volume_size_bytes) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index e1716af..07145ae 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -79,12 +79,10 @@ class SimpleStorageCollectionTestCase(base.TestCase): @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) def test_get_members(self, SimpleStorage_mock): members = self.simpl_stor_col.get_members() - calls = [ - mock.call(self.simpl_stor_col._conn, - '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', - redfish_version=self.simpl_stor_col.redfish_version), - ] - SimpleStorage_mock.assert_has_calls(calls) + SimpleStorage_mock.assert_called_once_with( + self.simpl_stor_col._conn, + 
'/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + redfish_version=self.simpl_stor_col.redfish_version) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) -- GitLab From e5fe316096ada0c81e00671be6dcbca81d1bec4f Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Thu, 16 Aug 2018 09:44:24 -0400 Subject: [PATCH 079/303] import zuul job settings from project-config This is a mechanically generated patch to complete step 1 of moving the zuul job settings out of project-config and into each project repository. Because there will be a separate patch on each branch, the branch specifiers for branch-specific jobs have been removed. Because this patch is generated by a script, there may be some cosmetic changes to the layout of the YAML file(s) as the contents are normalized. See the python3-first goal document for details: https://governance.openstack.org/tc/goals/stein/python3-first.html Change-Id: Ia580e8552855fa8c57902959d7a1682cccb3fa28 Story: #2002586 Task: #24302 --- zuul.d/project.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index f619715..731d8a3 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -1,9 +1,17 @@ - project: + templates: + - openstack-python-jobs + - openstack-python35-jobs + - check-requirements + - publish-openstack-sphinx-docs + - release-notes-jobs check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src - openstack-tox-lower-constraints + - openstack-tox-cover gate: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src - openstack-tox-lower-constraints + - openstack-tox-cover -- GitLab From e37550c98d5ed15c2f9549f8705aec5412ed5790 Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Thu, 16 Aug 2018 09:44:50 -0400 Subject: [PATCH 080/303] switch documentation job to new PTI This is a mechanically generated patch to switch the documentation jobs to use the new PTI versions of the jobs as part of the python3-first goal. 
See the python3-first goal document for details: https://governance.openstack.org/tc/goals/stein/python3-first.html Change-Id: I892761c155d6660065ae1e7da8c857066b3dbef2 Story: #2002586 Task: #24302 --- zuul.d/project.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 731d8a3..bd56818 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -3,8 +3,8 @@ - openstack-python-jobs - openstack-python35-jobs - check-requirements - - publish-openstack-sphinx-docs - - release-notes-jobs + - publish-openstack-docs-pti + - release-notes-jobs-python3 check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src -- GitLab From 03b5846975f1c2122d2d7f7593efdb03f17536f0 Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Thu, 16 Aug 2018 09:44:56 -0400 Subject: [PATCH 081/303] add python 3.6 unit test job This is a mechanically generated patch to add a unit test job running under Python 3.6 as part of the python3-first goal. See the python3-first goal document for details: https://governance.openstack.org/tc/goals/stein/python3-first.html Change-Id: I8e0b47aaccd9ecb4d6629364c530425cbbf66b1d Story: #2002586 Task: #24302 --- zuul.d/project.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index bd56818..8fbb78a 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -2,6 +2,7 @@ templates: - openstack-python-jobs - openstack-python35-jobs + - openstack-python36-jobs - check-requirements - publish-openstack-docs-pti - release-notes-jobs-python3 -- GitLab From 8338d8b4cdc12a041a147bcb8336a622550cd3d4 Mon Sep 17 00:00:00 2001 From: Aleksandra Bezborodova Date: Mon, 28 May 2018 20:06:58 +0300 Subject: [PATCH 082/303] Add a virtual media resource Describe virtual media resources in Python classes close to Redfish schema. 
Story: 1526753 Task: 12509 Co-Authored-By: Ilya Etingof Change-Id: Ifccd43036378a2808f6c89d4e15307b54e00ed6f --- doc/source/reference/index.rst | 1 + doc/source/reference/usage.rst | 39 +++++++ ...irtual-media-support-f522fbec4420341c.yaml | 4 + sushy/resources/manager/constants.py | 14 +++ sushy/resources/manager/manager.py | 20 ++++ sushy/resources/manager/mappings.py | 14 +++ sushy/resources/manager/virtual_media.py | 108 ++++++++++++++++++ .../unit/json_samples/virtual_media.json | 24 ++++ .../virtual_media_collection.json | 15 +++ .../unit/resources/manager/test_manager.py | 60 ++++++++++ .../resources/manager/test_virtual_media.py | 82 +++++++++++++ 11 files changed, 381 insertions(+) create mode 100644 releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml create mode 100644 sushy/resources/manager/virtual_media.py create mode 100644 sushy/tests/unit/json_samples/virtual_media.json create mode 100644 sushy/tests/unit/json_samples/virtual_media_collection.json create mode 100644 sushy/tests/unit/resources/manager/test_virtual_media.py diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index 2347d15..bf26477 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -10,6 +10,7 @@ Features * Systems power management (Both soft and hard; Including NMI injection) * Changing systems boot device, frequency (Once or permanently) and mode (UEFI or BIOS) +* Virtual media management * SessionManagement .. 
toctree:: diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index 66ac552..7854843 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -209,6 +209,45 @@ Creating and using a sushy manager object # Refresh the manager object (with all its sub-resources) mgr_inst.refresh(force=True) + + # Using Virtual Media + + # Instantiate a VirtualMediaCollection object + virtmedia_col = mgr_inst.virtual_media + + # Print the ID of the VirtualMedia available in the collection + print(virtmedia_col.members_identities) + + # Get a list of VirtualMedia objects available in the collection + virtmedia_insts = virtmedia_col.get_members() + + # Instantiate a VirtualMedia object + virtmedia_inst = virtmedia_col.get_member( + virtmedia_col.members_identities[0]) + + + # Print out some of the VirtualMedia properties + print(virtmedia_inst.name, + virtmedia_inst.media_types) + + # Insert virtual media (invalidates virtmedia_inst contents) + virtmedia_inst.insert_media('https://www.dmtf.org/freeImages/Sardine.img') + + # Refresh the resource to load actual contents + virtmedia_inst.refresh() + + # Print out some of the VirtualMedia properties + print(virtmedia_inst.image, + virtmedia_inst.image_path, + virtmedia_inst.inserted, + virtmedia_inst.write_protected) + + # ... Boot the system off the virtual media... + + # Eject virtual media (invalidates virtmedia_inst contents) + virtmedia_inst.eject_media() + + ------------------------------------------------- Creating and using a sushy session service object ------------------------------------------------- diff --git a/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml b/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml new file mode 100644 index 0000000..88ecfaf --- /dev/null +++ b/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds support for the virtual media resource to the library. 
diff --git a/sushy/resources/manager/constants.py b/sushy/resources/manager/constants.py index a1b3a9f..69941c8 100644 --- a/sushy/resources/manager/constants.py +++ b/sushy/resources/manager/constants.py @@ -76,3 +76,17 @@ COMMAND_SHELL_IPMI = 'command shell ipmi' COMMAND_SHELL_OEM = 'command shell oem' """Command Shell connection using an OEM-specific protocol""" + +# Virtual Media Type constants + +VIRTUAL_MEDIA_CD = 'cd' +VIRTUAL_MEDIA_DVD = 'dvd' +VIRTUAL_MEDIA_FLOPPY = 'floppy' +VIRTUAL_MEDIA_USBSTICK = 'usb' + +# Connected Via constants + +CONNECTED_VIA_APPLET = 'applet' +CONNECTED_VIA_NOT_CONNECTED = 'not_connected' +CONNECTED_VIA_OEM = 'oem' +CONNECTED_VIA_URI = 'uri' diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index d027947..9568408 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -16,6 +16,9 @@ from sushy import exceptions from sushy.resources import base from sushy.resources import common from sushy.resources.manager import mappings as mgr_maps +from sushy.resources.manager import virtual_media +from sushy import utils + LOG = logging.getLogger(__name__) @@ -74,6 +77,8 @@ class Manager(base.ResourceBase): _actions = ActionsField('Actions', required=True) + _virtual_media = None + def __init__(self, connector, identity, redfish_version=None): """A class representing a Manager @@ -84,6 +89,10 @@ class Manager(base.ResourceBase): """ super(Manager, self).__init__(connector, identity, redfish_version) + def _do_refresh(self, force=False): + if self._virtual_media is not None: + self._virtual_media.invalidate(force) + def get_supported_graphical_console_types(self): """Get the supported values for Graphical Console connection types. 
@@ -178,6 +187,17 @@ class Manager(base.ResourceBase): self._conn.post(target_uri, data={'ResetType': value}) LOG.info('The Manager %s is being reset', self.identity) + @property + def virtual_media(self): + if self._virtual_media is None: + self._virtual_media = virtual_media.VirtualMediaCollection( + self._conn, + utils.get_sub_resource_path_by(self, 'VirtualMedia'), + redfish_version=self.redfish_version) + + self._virtual_media.refresh(force=False) + return self._virtual_media + class ManagerCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/manager/mappings.py b/sushy/resources/manager/mappings.py index 451dc6e..c8ea5cb 100644 --- a/sushy/resources/manager/mappings.py +++ b/sushy/resources/manager/mappings.py @@ -59,3 +59,17 @@ COMMAND_SHELL_VALUE_MAP = { COMMAND_SHELL_VALUE_MAP_REV = ( utils.revert_dictionary(COMMAND_SHELL_VALUE_MAP)) + +MEDIA_TYPE_MAP = { + 'CD': mgr_cons.VIRTUAL_MEDIA_CD, + 'DVD': mgr_cons.VIRTUAL_MEDIA_DVD, + 'Floppy': mgr_cons.VIRTUAL_MEDIA_FLOPPY, + 'USBStick': mgr_cons.VIRTUAL_MEDIA_USBSTICK +} + +CONNECTED_VIA_MAP = { + "Applet": mgr_cons.CONNECTED_VIA_APPLET, + "NotConnected": mgr_cons.CONNECTED_VIA_NOT_CONNECTED, + "Oem": mgr_cons.CONNECTED_VIA_OEM, + "URI": mgr_cons.CONNECTED_VIA_URI +} diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py new file mode 100644 index 0000000..d84d86f --- /dev/null +++ b/sushy/resources/manager/virtual_media.py @@ -0,0 +1,108 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/VirtualMedia.v1_2_0.json + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources.manager import mappings as mgr_maps + + +class ActionsField(base.CompositeField): + + insert_media = common.ActionField("#VirtualMedia.InsertMedia") + eject_media = common.ActionField("#VirtualMedia.EjectMedia") + + +class VirtualMedia(base.ResourceBase): + + identity = base.Field('Id', required=True) + """Virtual Media resource identity string""" + + name = base.Field('Name', required=True) + """The name of resource""" + + image = base.Field('Image') + """A URI providing the location of the selected image""" + + image_name = base.Field('ImageName') + """The image name""" + + inserted = base.Field('Inserted') + """Indicates if virtual media is inserted in the virtual device""" + + write_protected = base.Field('WriteProtected') + """Indicates the media is write protected""" + + media_types = base.MappedField('MediaTypes', mgr_maps.MEDIA_TYPE_MAP) + """This is the media types supported as virtual media""" + + connected_via = base.MappedField('ConnectedVia', + mgr_maps.CONNECTED_VIA_MAP) + """Current virtual media connection methods + + Applet: Connected to a client application + NotConnected: No current connection + Oem: Connected via an OEM-defined method + URI: Connected to a URI location + """ + + _actions = ActionsField('Actions') + """Insert/eject action fot virtual media""" + + def _get_insert_media_element(self): + insert_media = self._actions.insert_media + if not insert_media: + raise exceptions.MissingActionError( + action='#VirtualMedia.InsertMedia', resource=self._path) + return insert_media + + def _get_eject_media_element(self): + eject_media = self._actions.eject_media + if not eject_media: + raise 
exceptions.MissingActionError( + action='#VirtualMedia.EjectMedia', resource=self._path) + return eject_media + + def insert_media(self, image, inserted=True, write_protected=False): + """Attach remote media to virtual media + + :param image: a URI providing the location of the selected image + :param inserted: specify if the image is to be treated as inserted upon + completion of the action. + :param write_protected: indicates the media is write protected + """ + target_uri = self._get_insert_media_element().target_uri + self._conn.post(target_uri, data={"Image": image, "Inserted": inserted, + "WriteProtected": write_protected}) + self.invalidate() + + def eject_media(self): + """Detach remote media from virtual media + + After ejecting media inserted will be False and image_name will be + empty. + """ + + target_uri = self._get_eject_media_element().target_uri + self._conn.post(target_uri) + self.invalidate() + + +class VirtualMediaCollection(base.ResourceCollectionBase): + """A collection of virtual media attached to a Manager""" + + @property + def _resource_type(self): + return VirtualMedia diff --git a/sushy/tests/unit/json_samples/virtual_media.json b/sushy/tests/unit/json_samples/virtual_media.json new file mode 100644 index 0000000..e072872 --- /dev/null +++ b/sushy/tests/unit/json_samples/virtual_media.json @@ -0,0 +1,24 @@ +{ + "@odata.type": "#VirtualMedia.v1_1_0.VirtualMedia", + "Id": "Floppy1", + "Name": "Virtual Removable Media", + "MediaTypes": "Floppy", + "Actions": { + "#VirtualMedia.EjectMedia": { + "target": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions/VirtualMedia.EjectMedia", + "title": "Mock Eject Media" + }, + "#VirtualMedia.InsertMedia": { + "target": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions/VirtualMedia.InsertMedia", + "title": "Mock Insert Media" + } + }, + "Image": "https://www.dmtf.org/freeImages/Sardine.img", + "ImageName": "Sardine2.1.43.35.6a", + "ConnectedVia": "URI", + "Inserted": true, + "WriteProtected": 
false, + "@odata.context": "/redfish/v1/$metadata#VirtualMedia.VirtualMedia", + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/virtual_media_collection.json b/sushy/tests/unit/json_samples/virtual_media_collection.json new file mode 100644 index 0000000..5052d82 --- /dev/null +++ b/sushy/tests/unit/json_samples/virtual_media_collection.json @@ -0,0 +1,15 @@ +{ + "@odata.type": "#VirtualMediaCollection.VirtualMediaCollection", + "Name": "Virtual Media Services", + "Description": "Redfish-BMC Virtual Media Service Settings", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1" + } + ], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#VirtualMediaCollection.VirtualMediaCollection", + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 1e78be7..19fe393 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -17,6 +17,7 @@ import mock import sushy from sushy import exceptions from sushy.resources.manager import manager +from sushy.resources.manager import virtual_media from sushy.tests.unit import base @@ -53,6 +54,7 @@ class ManagerTestCase(base.TestCase): self.assertEqual(sushy.MANAGER_TYPE_BMC, self.manager.manager_type) self.assertEqual('58893887-8974-2487-2389-841168418919', self.manager.uuid) + self.assertIsNone(self.manager._virtual_media) def test_get_supported_graphical_console_types(self): # | GIVEN | @@ -206,6 +208,64 @@ class ManagerTestCase(base.TestCase): self.assertRaises(exceptions.InvalidParameterValueError, self.manager.reset_manager, 'invalid-value') + def test_virtual_media(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + virtual_media_collection_return_value = json.load(f) + + with open('sushy/tests/unit/json_samples/' + 'virtual_media.json') as f: + virtual_media_return_value = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + virtual_media_collection_return_value, virtual_media_return_value] + + # | WHEN | + actual_virtual_media = self.manager.virtual_media + + # | THEN | + self.assertIsInstance(actual_virtual_media, + virtual_media.VirtualMediaCollection) + self.assertEqual(actual_virtual_media.name, 'Virtual Media Services') + + member = actual_virtual_media.get_member('Floppy1') + + self.assertEqual(member.image_name, "Sardine2.1.43.35.6a") + self.assertTrue(member.inserted) + self.assertFalse(member.write_protected) + + def test_virtual_media_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + 
self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + self.assertIsInstance(self.manager.virtual_media, + virtual_media.VirtualMediaCollection) + + # On refreshing the manager instance... + with open('sushy/tests/unit/json_samples/manager.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.manager.invalidate() + self.manager.refresh(force=False) + + # | WHEN & THEN | + self.assertIsNotNone(self.manager._virtual_media) + self.assertTrue(self.manager._virtual_media._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + self.assertIsInstance(self.manager.virtual_media, + virtual_media.VirtualMediaCollection) + self.assertFalse(self.manager._virtual_media._is_stale) + class ManagerCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py new file mode 100644 index 0000000..22eb9dd --- /dev/null +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -0,0 +1,82 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json + +import mock + +from sushy import exceptions +from sushy.resources.manager import virtual_media +from sushy.tests.unit import base + + +class VirtualMediaTestCase(base.TestCase): + + def setUp(self): + super(VirtualMediaTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'virtual_media.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.sys_virtual_media = virtual_media.VirtualMedia( + self.conn, '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1', + redfish_version='1.0.2') + + def test__parse_atrtributes(self): + self.sys_virtual_media._parse_attributes() + self.assertEqual('Virtual Removable Media', + self.sys_virtual_media.name) + self.assertEqual('Floppy1', self.sys_virtual_media.identity) + self.assertEqual('https://www.dmtf.org/freeImages/Sardine.img', + self.sys_virtual_media.image) + self.assertEqual('Sardine2.1.43.35.6a', + self.sys_virtual_media.image_name) + self.assertEqual('uri', self.sys_virtual_media.connected_via) + self.assertEqual('floppy', + self.sys_virtual_media.media_types) + self.assertEqual(True, self.sys_virtual_media.inserted) + self.assertEqual(False, self.sys_virtual_media.write_protected) + + def test_insert_media_none(self): + self.sys_virtual_media._actions.insert_media = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.InsertMedia', + self.sys_virtual_media.insert_media, + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + + def test_insert_media(self): + self.assertFalse(self.sys_virtual_media._is_stale) + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.post.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.InsertMedia"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False} + ) + 
self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_none(self): + self.sys_virtual_media._actions.eject_media = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.EjectMedia', + self.sys_virtual_media.eject_media) + + def test_eject_media(self): + self.assertFalse(self.sys_virtual_media._is_stale) + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.post.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia")) + self.assertTrue(self.sys_virtual_media._is_stale) -- GitLab From 3ed9aa95c8094d332ecaa48b80f7e6f83fa6a847 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Mon, 27 Aug 2018 09:15:00 +0000 Subject: [PATCH 083/303] Caching ResourceCollectionBase::get_members() Right now each time 'get_members()' method gets called a new list is created with fresh instantiation of element type resources. This ought to be optimized by returning the cached result in case of repetitive invocations and should the need be to fetch the refreshed result it would ideally reinstantiate the member resources of the collection. Also, in case of any member resource element within collection is being marked as stale, this handles properly to return the refresh'ed resource if that is accessed through this 'get_members()' method of the referenced resource collection instance: res_collection_inst.get_members()[any_index].some_attr to access the current (or live) value of 'some_attr' w/o the need of recreating (read re-instantiating) the constituent element resource instance. 
Change-Id: I7885143baa430d4e088f99febd6f8bc1a4f99aea --- sushy/resources/base.py | 19 +++++++++++++++- sushy/resources/system/ethernet_interface.py | 1 + sushy/resources/system/processor.py | 1 + sushy/resources/system/simple_storage.py | 1 + sushy/resources/system/storage/volume.py | 1 + sushy/tests/unit/resources/test_base.py | 23 ++++++++++++++++++-- 6 files changed, 43 insertions(+), 3 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index d370443..a6bbcca 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -325,6 +325,8 @@ class ResourceCollectionBase(ResourceBase): adapter=utils.get_members_identities) """A tuple with the members identities""" + _members = None # caching variable + def __init__(self, connector, path, redfish_version=None): """A class representing the base of any Redfish resource collection @@ -365,4 +367,19 @@ class ResourceCollectionBase(ResourceBase): :returns: A list of ``_resource_type`` objects """ - return [self.get_member(id_) for id_ in self.members_identities] + if self._members is None: + self._members = [self.get_member(id_) + for id_ in self.members_identities] + + for m in self._members: + m.refresh(force=False) + return self._members + + def _do_refresh(self, force=False): + """Do refresh related activities. + + Undefine the `_members` attribute here for fresh evaluation in + subsequent calls to `get_members()` method. Other similar activities + can also follow in future, if needed. + """ + self._members = None diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index b89f2ed..8e95ad7 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -83,4 +83,5 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): greedy-refresh not done for them unless forced by ``force`` argument. 
""" + super(EthernetInterfaceCollection, self)._do_refresh(force) self._summary = None diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 1ee39e3..7d1ba55 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -155,5 +155,6 @@ class ProcessorCollection(base.ResourceCollectionBase): greedy-refresh not done for them unless forced by ``force`` argument. """ + super(ProcessorCollection, self)._do_refresh(force) # Reset summary attribute self._summary = None diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py index f615400..4fc0aec 100644 --- a/sushy/resources/system/simple_storage.py +++ b/sushy/resources/system/simple_storage.py @@ -81,6 +81,7 @@ class SimpleStorageCollection(base.ResourceCollectionBase): return self._max_size_bytes def _do_refresh(self, force=False): + super(SimpleStorageCollection, self)._do_refresh(force) # Note(deray): undefine the attribute here for fresh creation in # subsequent calls to it's exposed property. 
self._max_size_bytes = None diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 6e6c558..2188819 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -53,5 +53,6 @@ class VolumeCollection(base.ResourceCollectionBase): return self._max_size_bytes def _do_refresh(self, force=False): + super(VolumeCollection, self)._do_refresh(force) # invalidate the attribute self._max_size_bytes = None diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 8f92ba7..32db00e 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -129,10 +129,9 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.test_resource_collection.get_member, '2') self.conn.get.assert_called_once_with(path='Fakes/2') - def test_get_members(self): + def _validate_get_members_result(self, member_ids): # | GIVEN | # setting some valid member paths - member_ids = ('1', '2') self.test_resource_collection.members_identities = member_ids # | WHEN | result = self.test_resource_collection.get_members() @@ -143,6 +142,26 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.assertTrue(val.identity in member_ids) self.assertEqual('1.0.x', val.redfish_version) + return result + + def test_get_members(self): + self._validate_get_members_result(('1', '2')) + + def test_get_members_on_refresh(self): + self._validate_get_members_result(('1', '2')) + + # Now emulating the resource invalidate and refresh action! 
+ self.test_resource_collection.invalidate() + self.assertTrue(self.test_resource_collection._is_stale) + self.test_resource_collection.refresh(force=False) + + self._validate_get_members_result(('3', '4')) + self.assertFalse(self.test_resource_collection._is_stale) + + def test_get_members_caching(self): + result = self._validate_get_members_result(('1', '2')) + self.assertIs(result, self.test_resource_collection.get_members()) + TEST_JSON = { 'String': 'a string', -- GitLab From fdc3e99169535cc44d4d7744cf1eea799db1ffeb Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Fri, 20 Apr 2018 10:18:41 +0000 Subject: [PATCH 084/303] Add storage and simple_storage attr to system Adds the support to get SimpleStorage and Storage collection from System resource via `simple_storage` and `storage` properties respectively. Story: 1668487 Task: 23042 Change-Id: I3a79f2afe6c838636df554ee468f8f2e0cf0859e --- ...attributes-to-system-16e81f9b15b1897d.yaml | 12 ++ sushy/resources/system/system.py | 69 ++++++++++- sushy/tests/unit/json_samples/system.json | 3 + .../unit/resources/system/test_system.py | 116 ++++++++++++++++++ sushy/utils.py | 2 +- 5 files changed, 199 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml diff --git a/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml b/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml new file mode 100644 index 0000000..4af67c9 --- /dev/null +++ b/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml @@ -0,0 +1,12 @@ +--- +features: + - | + Exposes the ``simple_storage`` and ``storage`` properties from system + resource in sushy. + + * ``simple_storage`` property indicates a collection of storage + controllers and their directly-attached devices associated with the + system. 
+ * ``storage`` property refers to a collection of storage subsystem + associated with system. Resources such as drives and volumes can be + accessed from that subsystem. diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index be109bc..14064cd 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -23,6 +23,8 @@ from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_maps from sushy.resources.system import processor +from sushy.resources.system import simple_storage as sys_simple_storage +from sushy.resources.system.storage import storage as sys_storage from sushy import utils @@ -119,14 +121,23 @@ class System(base.ResourceBase): memory_summary = MemorySummaryField('MemorySummary') """The summary info of memory of the system in general detail""" - _processors = None # ref to ProcessorCollection instance - _actions = ActionsField('Actions', required=True) + # reference to ProcessorCollection instance + _processors = None + + # reference to EthernetInterfaceCollection instance _ethernet_interfaces = None + # reference to BIOS instance _bios = None + # reference to SimpleStorageCollection instance + _simple_storage = None + + # reference to StorageCollection instance + _storage = None + def __init__(self, connector, identity, redfish_version=None): """A class representing a ComputerSystem @@ -303,6 +314,56 @@ class System(base.ResourceBase): self._bios.refresh(force=False) return self._bios + @property + def simple_storage(self): + """A collection of simple storage associated with system. + + This returns a reference to `SimpleStorageCollection` instance. + SimpleStorage represents the properties of a storage controller and its + directly-attached devices. + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). 
+ Here the actual refresh of the sub-resource happens, if stale. + + :raises: MissingAttributeError if 'SimpleStorage/@odata.id' field + is missing. + :returns: `SimpleStorageCollection` instance + """ + if self._simple_storage is None: + self._simple_storage = sys_simple_storage.SimpleStorageCollection( + self._conn, + utils.get_sub_resource_path_by(self, "SimpleStorage"), + redfish_version=self.redfish_version) + + self._simple_storage.refresh(force=False) + return self._simple_storage + + @property + def storage(self): + """A collection of storage subsystems associated with system. + + This returns a reference to `StorageCollection` instance. + A storage subsystem represents a set of storage controllers (physical + or virtual) and the resources such as drives and volumes that can be + accessed from that subsystem. + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + + :raises: MissingAttributeError if 'Storage/@odata.id' field + is missing. 
+ :returns: `StorageCollection` instance + """ + if self._storage is None: + self._storage = sys_storage.StorageCollection( + self._conn, utils.get_sub_resource_path_by(self, "Storage"), + redfish_version=self.redfish_version) + + self._storage.refresh(force=False) + return self._storage + def _do_refresh(self, force=False): """Do custom resource specific refresh activities @@ -316,6 +377,10 @@ class System(base.ResourceBase): self._ethernet_interfaces.invalidate(force) if self._bios is not None: self._bios.invalidate(force) + if self._simple_storage is not None: + self._simple_storage.invalidate(force) + if self._storage is not None: + self._storage.invalidate(force) class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/json_samples/system.json b/sushy/tests/unit/json_samples/system.json index 5dd7da6..c331d02 100644 --- a/sushy/tests/unit/json_samples/system.json +++ b/sushy/tests/unit/json_samples/system.json @@ -93,6 +93,9 @@ "SimpleStorage": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage" }, + "Storage": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage" + }, "LogServices": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/LogServices" }, diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 5bcd93f..4642d9a 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -24,6 +24,8 @@ from sushy.resources.system import bios from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor +from sushy.resources.system import simple_storage +from sushy.resources.system.storage import storage from sushy.resources.system import system from sushy.tests.unit import base @@ -392,6 +394,120 @@ class SystemTestCase(base.TestCase): self.assertEqual('BIOS Configuration Current Settings', self.sys_inst.bios.name) + 
def test_simple_storage_for_missing_attr(self): + self.sys_inst.json.pop('SimpleStorage') + with self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute SimpleStorage'): + self.sys_inst.simple_storage + + def test_simple_storage(self): + # check for the underneath variable value + self.assertIsNone(self.sys_inst._simple_storage) + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_simple_storage = self.sys_inst.simple_storage + # | THEN | + self.assertIsInstance(actual_simple_storage, + simple_storage.SimpleStorageCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_simple_storage, + self.sys_inst.simple_storage) + self.conn.get.return_value.json.assert_not_called() + + def test_simple_storage_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + + # On refreshing the system instance... 
+ with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.sys_inst.invalidate() + self.sys_inst.refresh(force=False) + + # | WHEN & THEN | + self.assertIsNotNone(self.sys_inst._simple_storage) + self.assertTrue(self.sys_inst._simple_storage._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + self.assertFalse(self.sys_inst._simple_storage._is_stale) + + def test_storage_for_missing_attr(self): + self.sys_inst.json.pop('Storage') + with self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Storage'): + self.sys_inst.storage + + def test_storage(self): + # check for the underneath variable value + self.assertIsNone(self.sys_inst._storage) + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_storage = self.sys_inst.storage + # | THEN | + self.assertIsInstance(actual_storage, storage.StorageCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_storage, self.sys_inst.storage) + self.conn.get.return_value.json.assert_not_called() + + def test_storage_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.storage, + storage.StorageCollection) + + # On refreshing the system instance... 
+ with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.sys_inst.invalidate() + self.sys_inst.refresh(force=False) + + # | WHEN & THEN | + self.assertIsNotNone(self.sys_inst._storage) + self.assertTrue(self.sys_inst._storage._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.storage, + storage.StorageCollection) + self.assertFalse(self.sys_inst._storage._is_stale) + class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/utils.py b/sushy/utils.py index 0f687d0..5da076c 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -102,7 +102,7 @@ def max_safe(iterable, default=0): """ try: - return max([x for x in iterable if x is not None]) + return max(x for x in iterable if x is not None) except ValueError: # TypeError is not caught here as that should be thrown. 
return default -- GitLab From a0fc71f60a2c2fedbdfa2265ad80755cd59f1a8f Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Sat, 1 Sep 2018 17:09:54 +0300 Subject: [PATCH 085/303] Cleanup names for message registry For consistency: - renamed files from messageregistry to message_registry - renamed PARAMTYPE_MAP to PARAMTYPE_VALUE_MAP Followup to I9a1735230a8328fd8365e375889c6ab066c3df16 Change-Id: I2ad1e46c1deb0028bb8f78480e2660f072903f06 --- sushy/resources/mappings.py | 2 +- .../registry/{messageregistry.py => message_registry.py} | 3 ++- .../{test_messageregistry.py => test_message_registry.py} | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) rename sushy/resources/registry/{messageregistry.py => message_registry.py} (96%) rename sushy/tests/unit/resources/registry/{test_messageregistry.py => test_message_registry.py} (96%) diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index c081982..8afad7e 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -35,7 +35,7 @@ HEALTH_VALUE_MAP = { HEALTH_VALUE_MAP_REV = ( utils.revert_dictionary(HEALTH_VALUE_MAP)) -PARAMTYPE_MAP = { +PARAMTYPE_VALUE_MAP = { 'string': res_cons.PARAMTYPE_STRING, 'number': res_cons.PARAMTYPE_NUMBER } diff --git a/sushy/resources/registry/messageregistry.py b/sushy/resources/registry/message_registry.py similarity index 96% rename from sushy/resources/registry/messageregistry.py rename to sushy/resources/registry/message_registry.py index a5d80b5..d09a9ea 100644 --- a/sushy/resources/registry/messageregistry.py +++ b/sushy/resources/registry/message_registry.py @@ -37,7 +37,8 @@ class MessageDictionaryField(base.DictionaryField): param_types = base.Field('ParamTypes', adapter=lambda x: - [res_maps.PARAMTYPE_MAP[v] for v in x]) + [res_maps.PARAMTYPE_VALUE_MAP[v] + for v in x]) """Mapped MessageArg types, in order, for the message""" resolution = base.Field('Resolution', required=True) diff --git 
a/sushy/tests/unit/resources/registry/test_messageregistry.py b/sushy/tests/unit/resources/registry/test_message_registry.py similarity index 96% rename from sushy/tests/unit/resources/registry/test_messageregistry.py rename to sushy/tests/unit/resources/registry/test_message_registry.py index 59e2fc0..8669d44 100644 --- a/sushy/tests/unit/resources/registry/test_messageregistry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -18,7 +18,7 @@ import json import mock from sushy.resources import constants as res_cons -from sushy.resources.registry import messageregistry +from sushy.resources.registry import message_registry from sushy.tests.unit import base @@ -30,7 +30,7 @@ class MessageRegistryTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/message_registry.json') as f: self.conn.get.return_value.json.return_value = json.load(f) - self.registry = messageregistry.MessageRegistry( + self.registry = message_registry.MessageRegistry( self.conn, '/redfish/v1/Registries/Test', redfish_version='1.0.2') -- GitLab From dac8bc3498b4d2e24e12a20c5aaec7f20fa6df17 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Mon, 30 Jul 2018 16:50:40 +0300 Subject: [PATCH 086/303] Add Message Registry File resource This is the second patch to support Redfish Message Registry. It delivers Redfish Message Registry File and Collection parsing to sushy fields. Any other processing (e.g., loading files) will be added in followup patches. It adds link to Registries resource from the service root. Currently exposing Message Registry File resource to sushy users is not intended. 
Change-Id: I3953840cf0145407d5915ee8d0a8a5f909301cc2 Story: 2001791 Task: 23062 --- sushy/main.py | 19 +++++ .../registry/message_registry_file.py | 75 +++++++++++++++++++ .../json_samples/message_registry_file.json | 18 +++++ .../message_registry_file_collection.json | 12 +++ sushy/tests/unit/json_samples/root.json | 3 + .../registry/test_message_registry_file.py | 74 ++++++++++++++++++ sushy/tests/unit/test_main.py | 14 ++++ 7 files changed, 215 insertions(+) create mode 100644 sushy/resources/registry/message_registry_file.py create mode 100644 sushy/tests/unit/json_samples/message_registry_file.json create mode 100644 sushy/tests/unit/json_samples/message_registry_file_collection.json create mode 100644 sushy/tests/unit/resources/registry/test_message_registry_file.py diff --git a/sushy/main.py b/sushy/main.py index b0a4334..b751658 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -19,6 +19,7 @@ from sushy import connector as sushy_connector from sushy import exceptions from sushy.resources import base from sushy.resources.manager import manager +from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system @@ -46,6 +47,9 @@ class Sushy(base.ResourceBase): _session_service_path = base.Field(['SessionService', '@odata.id']) """SessionService path""" + _registries_path = base.Field(['Registries', '@odata.id']) + """Registries path""" + def __init__(self, base_url, username=None, password=None, root_prefix='/redfish/v1/', verify=True, auth=None, connector=None): @@ -158,3 +162,18 @@ class Sushy(base.ResourceBase): """ return session.Session(self._conn, identity, redfish_version=self.redfish_version) + + def _get_registry_collection(self): + """Get MessageRegistryFileCollection object + + This resource is optional and can be empty. 
+ + :returns: MessageRegistryFileCollection object + or None if Registries not provided + """ + + if self._registries_path: + return message_registry_file.MessageRegistryFileCollection( + self._conn, + self._registries_path, + redfish_version=self.redfish_version) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py new file mode 100644 index 0000000..460e3d2 --- /dev/null +++ b/sushy/resources/registry/message_registry_file.py @@ -0,0 +1,75 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# https://redfish.dmtf.org/schemas/v1/MessageRegistryFileCollection.json +# https://redfish.dmtf.org/schemas/v1/MessageRegistryFile.v1_1_0.json + + +from sushy.resources import base + + +class LocationListField(base.ListField): + """Location for each registry file of languages supported + + There are 3 options where the file can be hosted: + + * locally as a single file, + * locally as a part of archive (zip or other), + * publicly on the Internet. 
+ """ + + language = base.Field('Language') + """File's RFC5646 language code or the string 'default'""" + + uri = base.Field('Uri') + """Location URI for co-located registry file with the Redfish service""" + + archive_uri = base.Field('ArchiveUri') + """Location URI for archive file""" + + archive_file = base.Field('ArchiveFile') + """File name for registry if using archive_uri""" + + publication_uri = base.Field('PublicationUri') + """Location URI of publicly available schema""" + + +class MessageRegistryFile(base.ResourceBase): + + identity = base.Field('Id', required=True) + """Identity of Message Registry file resource""" + + description = base.Field('Description') + """Description of Message Registry file resource""" + + name = base.Field('Name', required=True) + """Name of Message Registry file resource""" + + languages = base.Field('Languages', required=True) + """List of RFC 5646 language codes supported by this resource""" + + registry = base.Field('Registry', required=True) + """Prefix for MessageId used for messages from this resource + + This attribute is in form Registry_name.Major_version.Minor_version + """ + + location = LocationListField('Location', required=True) + """List of locations of Registry files for each supported language""" + + +class MessageRegistryFileCollection(base.ResourceCollectionBase): + """Collection of Message Registry Files""" + + @property + def _resource_type(self): + return MessageRegistryFile diff --git a/sushy/tests/unit/json_samples/message_registry_file.json b/sushy/tests/unit/json_samples/message_registry_file.json new file mode 100644 index 0000000..5242dc6 --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry_file.json @@ -0,0 +1,18 @@ +{ + "@odata.type": "#MessageRegistryFile.v1_1_0.MessageRegistryFile", + "Id": "Test", + "Name": "Test Message Registry File", + "Description": "Message Registry file for testing", + "Languages": ["en"], + "Registry": "Test.1.0", + "Location": [ + {"Language": "default", 
+ "Uri": "/redfish/v1/Registries/Test/Test.1.0.json", + "ArchiveUri": "/redfish/v1/Registries/Archive.zip", + "ArchiveFile": "Test.1.0.json", + "PublicationUri": "https://example.com/Registries/Test.1.0.json" + } + ], + "@odata.context": "/redfish/v1/$metadata#MessageRegistryFile.MessageRegistryFile", + "@odata.id": "/redfish/v1/Registries/Test" +} diff --git a/sushy/tests/unit/json_samples/message_registry_file_collection.json b/sushy/tests/unit/json_samples/message_registry_file_collection.json new file mode 100644 index 0000000..87905ab --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry_file_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#MessageRegistryFileCollection.MessageRegistryFileCollection", + "Name": "Message Registry Test Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Registries/Test" + } + ], + "@odata.context": "/redfish/v1/$metadata#MessageRegistryFileCollection.MessageRegistryFileCollection", + "@odata.id": "/redfish/v1/Registries" +} diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index 29709d5..ae17280 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -30,6 +30,9 @@ "@odata.id": "/redfish/v1/SessionService/Sessions" } }, + "Registries": { + "@odata.id": "/redfish/v1/Registries" + }, "Oem": {}, "@odata.context": "/redfish/v1/$metadata#ServiceRoot", "@odata.id": "/redfish/v1/", diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py new file mode 100644 index 0000000..9c63e5d --- /dev/null +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -0,0 +1,74 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import json +import mock + +from sushy.resources.registry import message_registry_file +from sushy.tests.unit import base + + +class MessageRegistryFileTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryFileTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'message_registry_file.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.reg_file = message_registry_file.MessageRegistryFile( + self.conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.reg_file._parse_attributes() + self.assertEqual('Test', self.reg_file.identity) + self.assertEqual('Test Message Registry File', self.reg_file.name) + self.assertEqual('Message Registry file for testing', + self.reg_file.description) + self.assertEqual('en', self.reg_file.languages[0]) + self.assertEqual('Test.1.0', self.reg_file.registry) + self.assertEqual('default', self.reg_file.location[0].language) + self.assertEqual('/redfish/v1/Registries/Test/Test.1.0.json', + self.reg_file.location[0].uri) + self.assertEqual('https://example.com/Registries/Test.1.0.json', + self.reg_file.location[0].publication_uri) + self.assertEqual('/redfish/v1/Registries/Archive.zip', + self.reg_file.location[0].archive_uri) + self.assertEqual('Test.1.0.json', + self.reg_file.location[0].archive_file) + + +class MessageRegistryFileCollectionTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryFileCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with 
open('sushy/tests/unit/json_samples/' + 'message_registry_file_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.reg_file_col =\ + message_registry_file.MessageRegistryFileCollection( + self.conn, '/redfish/v1/Registries', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.reg_file_col._parse_attributes() + self.assertEqual('1.0.2', self.reg_file_col.redfish_version) + self.assertEqual('Message Registry Test Collection', + self.reg_file_col.name) + self.assertEqual(('/redfish/v1/Registries/Test',), + self.reg_file_col.members_identities) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 1d186bd..b0d9db2 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -22,6 +22,7 @@ from sushy import connector from sushy import exceptions from sushy import main from sushy.resources.manager import manager +from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system @@ -119,6 +120,16 @@ class MainTestCase(base.TestCase): self.root._conn, 'asdf', redfish_version=self.root.redfish_version) + @mock.patch.object(message_registry_file, + 'MessageRegistryFileCollection', + autospec=True) + def test__get_registry_collection( + self, MessageRegistryFileCollection_mock): + self.root._get_registry_collection() + MessageRegistryFileCollection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Registries', + redfish_version=self.root.redfish_version) + class BareMinimumMainTestCase(base.TestCase): @@ -145,3 +156,6 @@ class BareMinimumMainTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MissingAttributeError, 'SessionService/@odata.id', self.root.get_session_service) + + def test__get_registry_collection_when_registries_attr_absent(self): + self.assertIsNone(self.root._get_registry_collection()) -- 
GitLab From 9e8396a13c250faa56a5944cccd7db8a0e4f313b Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Mon, 3 Sep 2018 09:37:00 +0000 Subject: [PATCH 087/303] Fix for MediaTypes in virtual media According to Redfish schema the value type of `MediaTypes` is `array`[1]. This is a fix to accomodate that and along with that some more fixes as: - to maintain consistency across mapping variables, renamed the following mapping variables as: MEDIA_TYPE_VALUE_MAP (from MEDIA_TYPE_MAP) and CONNECTED_VIA_VALUE_MAP (from CONNECTED_VIA_MAP) - In manager test case, invoke get_member() on the empirical identity of virtual media resource. [1] As a reference search for `MediaTypes` in http://redfish.dmtf.org/schemas/v1/VirtualMedia.v1_2_0.json Change-Id: Ie370b4dde8196ae5e9b2a245d37c737296f54f16 --- sushy/resources/manager/constants.py | 2 +- sushy/resources/manager/mappings.py | 4 ++-- sushy/resources/manager/virtual_media.py | 10 +++++++--- sushy/tests/unit/json_samples/virtual_media.json | 5 ++++- sushy/tests/unit/resources/manager/test_manager.py | 3 ++- .../tests/unit/resources/manager/test_virtual_media.py | 7 +++++-- 6 files changed, 21 insertions(+), 10 deletions(-) diff --git a/sushy/resources/manager/constants.py b/sushy/resources/manager/constants.py index 69941c8..9c1cf18 100644 --- a/sushy/resources/manager/constants.py +++ b/sushy/resources/manager/constants.py @@ -77,7 +77,7 @@ COMMAND_SHELL_IPMI = 'command shell ipmi' COMMAND_SHELL_OEM = 'command shell oem' """Command Shell connection using an OEM-specific protocol""" -# Virtual Media Type constants +# Supported Virtual Media Type constants VIRTUAL_MEDIA_CD = 'cd' VIRTUAL_MEDIA_DVD = 'dvd' diff --git a/sushy/resources/manager/mappings.py b/sushy/resources/manager/mappings.py index c8ea5cb..a544115 100644 --- a/sushy/resources/manager/mappings.py +++ b/sushy/resources/manager/mappings.py @@ -60,14 +60,14 @@ COMMAND_SHELL_VALUE_MAP = { COMMAND_SHELL_VALUE_MAP_REV = ( utils.revert_dictionary(COMMAND_SHELL_VALUE_MAP)) 
-MEDIA_TYPE_MAP = { +MEDIA_TYPE_VALUE_MAP = { 'CD': mgr_cons.VIRTUAL_MEDIA_CD, 'DVD': mgr_cons.VIRTUAL_MEDIA_DVD, 'Floppy': mgr_cons.VIRTUAL_MEDIA_FLOPPY, 'USBStick': mgr_cons.VIRTUAL_MEDIA_USBSTICK } -CONNECTED_VIA_MAP = { +CONNECTED_VIA_VALUE_MAP = { "Applet": mgr_cons.CONNECTED_VIA_APPLET, "NotConnected": mgr_cons.CONNECTED_VIA_NOT_CONNECTED, "Oem": mgr_cons.CONNECTED_VIA_OEM, diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index d84d86f..6c05d32 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -45,11 +45,15 @@ class VirtualMedia(base.ResourceBase): write_protected = base.Field('WriteProtected') """Indicates the media is write protected""" - media_types = base.MappedField('MediaTypes', mgr_maps.MEDIA_TYPE_MAP) - """This is the media types supported as virtual media""" + media_types = base.Field( + 'MediaTypes', adapter=( + lambda x: [mgr_maps.MEDIA_TYPE_VALUE_MAP[v] for v in x + if v in mgr_maps.MEDIA_TYPE_VALUE_MAP]), + default=[]) + """List of supported media types as virtual media""" connected_via = base.MappedField('ConnectedVia', - mgr_maps.CONNECTED_VIA_MAP) + mgr_maps.CONNECTED_VIA_VALUE_MAP) """Current virtual media connection methods Applet: Connected to a client application diff --git a/sushy/tests/unit/json_samples/virtual_media.json b/sushy/tests/unit/json_samples/virtual_media.json index e072872..d4e7e55 100644 --- a/sushy/tests/unit/json_samples/virtual_media.json +++ b/sushy/tests/unit/json_samples/virtual_media.json @@ -2,7 +2,10 @@ "@odata.type": "#VirtualMedia.v1_1_0.VirtualMedia", "Id": "Floppy1", "Name": "Virtual Removable Media", - "MediaTypes": "Floppy", + "MediaTypes": [ + "Floppy", + "USBStick" + ], "Actions": { "#VirtualMedia.EjectMedia": { "target": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions/VirtualMedia.EjectMedia", diff --git a/sushy/tests/unit/resources/manager/test_manager.py 
b/sushy/tests/unit/resources/manager/test_manager.py index e1a093e..7276209 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -229,7 +229,8 @@ class ManagerTestCase(base.TestCase): virtual_media.VirtualMediaCollection) self.assertEqual(actual_virtual_media.name, 'Virtual Media Services') - member = actual_virtual_media.get_member('Floppy1') + member = actual_virtual_media.get_member( + '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1') self.assertEqual(member.image_name, "Sardine2.1.43.35.6a") self.assertTrue(member.inserted) diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 22eb9dd..c9245f6 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -16,6 +16,7 @@ import json import mock +import sushy from sushy import exceptions from sushy.resources.manager import virtual_media from sushy.tests.unit import base @@ -42,8 +43,10 @@ class VirtualMediaTestCase(base.TestCase): self.sys_virtual_media.image) self.assertEqual('Sardine2.1.43.35.6a', self.sys_virtual_media.image_name) - self.assertEqual('uri', self.sys_virtual_media.connected_via) - self.assertEqual('floppy', + self.assertEqual(sushy.CONNECTED_VIA_URI, + self.sys_virtual_media.connected_via) + self.assertEqual([sushy.VIRTUAL_MEDIA_FLOPPY, + sushy.VIRTUAL_MEDIA_USBSTICK], self.sys_virtual_media.media_types) self.assertEqual(True, self.sys_virtual_media.inserted) self.assertEqual(False, self.sys_virtual_media.write_protected) -- GitLab From 3d76b76abe365f4bbd5e5530a8564beb991bd227 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 7 Sep 2018 09:26:23 +0200 Subject: [PATCH 088/303] Return sizes of storage devices Properties implemented in all storage resources to return sizes of all available storage devices. 
Change-Id: Ieb374f8cabb0418bb2680fdab690446346fc354f --- sushy/resources/system/simple_storage.py | 34 +++++--- sushy/resources/system/storage/storage.py | 85 ++++++++++++++----- sushy/resources/system/storage/volume.py | 34 ++++++-- .../resources/system/storage/test_storage.py | 42 +++++++-- .../resources/system/storage/test_volume.py | 4 +- .../resources/system/test_simple_storage.py | 14 ++- 6 files changed, 160 insertions(+), 53 deletions(-) diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py index 4fc0aec..e73caf3 100644 --- a/sushy/resources/system/simple_storage.py +++ b/sushy/resources/system/simple_storage.py @@ -58,30 +58,40 @@ class SimpleStorage(base.ResourceBase): class SimpleStorageCollection(base.ResourceCollectionBase): """Represents a collection of simple storage associated with system.""" - _max_size_bytes = None + _disks_sizes_bytes = None @property def _resource_type(self): return SimpleStorage + @property + def disks_sizes_bytes(self): + """Sizes of each Disk in bytes in SimpleStorage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. + """ + if self._disks_sizes_bytes is None: + self._disks_sizes_bytes = sorted( + device.capacity_bytes + for simpl_stor in self.get_members() + for device in simpl_stor.devices + if device.status.state == res_cons.STATE_ENABLED + ) + + return self._disks_sizes_bytes + @property def max_size_bytes(self): - """Max size available (in bytes) among all enabled device resources. + """Max size available (in bytes) among all enabled Disk resources. - It returns the cached value until it (or its parent resource) is + Returns the cached value until it (or its parent resource) is refreshed. 
""" - if self._max_size_bytes is None: - self._max_size_bytes = ( - utils.max_safe(device.capacity_bytes - for simpl_stor in self.get_members() - for device in simpl_stor.devices - if (device.status.state == - res_cons.STATE_ENABLED))) - return self._max_size_bytes + return utils.max_safe(self.disks_sizes_bytes) def _do_refresh(self, force=False): super(SimpleStorageCollection, self)._do_refresh(force) # Note(deray): undefine the attribute here for fresh creation in # subsequent calls to it's exposed property. - self._max_size_bytes = None + self._disks_sizes_bytes = None diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 5a35c88..7743469 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -42,7 +42,7 @@ class Storage(base.ResourceBase): adapter=utils.get_members_identities) """A tuple with the drive identities""" - _drives_max_size_bytes = None + _drives_sizes_bytes = None _drives = None _volumes = None # reference to VolumeCollection instance @@ -72,13 +72,22 @@ class Storage(base.ResourceBase): self.get_drive(id_) for id_ in self.drives_identities] return self._drives + @property + def drives_sizes_bytes(self): + """Sizes of all Drives in bytes in Storage resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. 
+ """ + if self._drives_sizes_bytes is None: + self._drives_sizes_bytes = sorted( + drv.capacity_bytes for drv in self.drives) + return self._drives_sizes_bytes + @property def drives_max_size_bytes(self): """Max size available in bytes among all Drives of this collection.""" - if self._drives_max_size_bytes is None: - self._drives_max_size_bytes = ( - utils.max_safe(drv.capacity_bytes for drv in self.drives)) - return self._drives_max_size_bytes + return utils.max_safe(self.drives_sizes_bytes) @property def volumes(self): @@ -101,7 +110,7 @@ class Storage(base.ResourceBase): """Do resource specific refresh activities.""" # Note(deray): undefine the attribute here for fresh evaluation in # subsequent calls to it's exposed property. - self._drives_max_size_bytes = None + self._drives_sizes_bytes = None self._drives = None # invalidate the nested resource if self._volumes is not None: @@ -111,34 +120,66 @@ class Storage(base.ResourceBase): class StorageCollection(base.ResourceCollectionBase): """This class represents the collection of Storage resources""" - _max_drive_size_bytes = None - _max_volume_size_bytes = None + _drives_sizes_bytes = None + _volumes_sizes_bytes = None @property def _resource_type(self): return Storage + @property + def drives_sizes_bytes(self): + """Sizes of each Drive in bytes in Storage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. 
+ """ + if self._drives_sizes_bytes is None: + self._drives_sizes_bytes = sorted( + drive_size + for storage_ in self.get_members() + for drive_size in storage_.drives_sizes_bytes + ) + + return self._drives_sizes_bytes + @property def max_drive_size_bytes(self): - """Max size available (in bytes) among all device resources.""" - if self._max_drive_size_bytes is None: - self._max_drive_size_bytes = max( - storage_.drives_max_size_bytes - for storage_ in self.get_members()) - return self._max_drive_size_bytes + """Max size available (in bytes) among all Drive resources. + + Returns the cached value until it (or its parent resource) is + refreshed. + """ + return utils.max_safe(self.drives_sizes_bytes) + + @property + def volumes_sizes_bytes(self): + """Sizes of each Volume in bytes in Storage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. + """ + if self._volumes_sizes_bytes is None: + self._volumes_sizes_bytes = sorted( + volume_size + for storage_ in self.get_members() + for volume_size in storage_.volumes.volumes_sizes_bytes) + + return self._volumes_sizes_bytes @property def max_volume_size_bytes(self): - """Max size available (in bytes) among all Volumes under this.""" - if self._max_volume_size_bytes is None: - self._max_volume_size_bytes = max( - storage_.volumes.max_size_bytes - for storage_ in self.get_members()) - return self._max_volume_size_bytes + """Max size available (in bytes) among all Volume resources. + + Returns the cached value until it (or its parent resource) is + refreshed. + """ + return utils.max_safe(self.volumes_sizes_bytes) def _do_refresh(self, force=False): """Do resource specific refresh activities""" + super(StorageCollection, self)._do_refresh(force) # Note(deray): undefine the attributes here for fresh evaluation in # subsequent calls to their exposed properties. 
- self._max_drive_size_bytes = None - self._max_volume_size_bytes = None + self._drives_sizes_bytes = None + self._volumes_sizes_bytes = None diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 2188819..2a40009 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -37,22 +37,38 @@ class Volume(base.ResourceBase): class VolumeCollection(base.ResourceCollectionBase): """This class represents the Storage Volume collection""" - _max_size_bytes = None + _volumes_sizes_bytes = None @property def _resource_type(self): return Volume @property - def max_size_bytes(self): - """Max size available in bytes among all Volumes of this collection.""" - if self._max_size_bytes is None: - self._max_size_bytes = ( - utils.max_safe([vol.capacity_bytes - for vol in self.get_members()])) - return self._max_size_bytes + def volumes_sizes_bytes(self): + """Sizes of all Volumes in bytes in VolumeCollection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. + """ + if self._volumes_sizes_bytes is None: + self._volumes_sizes_bytes = sorted( + vol.capacity_bytes + for vol in self.get_members()) + return self._volumes_sizes_bytes + + @property + def max_volume_size_bytes(self): + """Max size available (in bytes) among all Volume resources. + + Returns the cached value until it (or its parent resource) is + refreshed. 
+ """ + return utils.max_safe(self.volumes_sizes_bytes) + + # NOTE(etingof): for backward compatibility + max_size_bytes = max_volume_size_bytes def _do_refresh(self, force=False): super(VolumeCollection, self)._do_refresh(force) # invalidate the attribute - self._max_size_bytes = None + self._volumes_sizes_bytes = None diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 2f9ce54..4ccdcb7 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -120,7 +120,7 @@ class StorageTestCase(base.TestCase): self.assertIsInstance(drv, drive.Drive) def test_drives_max_size_bytes(self): - self.assertIsNone(self.storage._drives_max_size_bytes) + self.assertIsNone(self.storage._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -139,7 +139,7 @@ class StorageTestCase(base.TestCase): def test_drives_max_size_bytes_after_refresh(self): self.storage.refresh() - self.assertIsNone(self.storage._drives_max_size_bytes) + self.assertIsNone(self.storage._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -244,8 +244,23 @@ class StorageCollectionTestCase(base.TestCase): self.assertIsInstance(members, list) self.assertEqual(1, len(members)) + def test_drives_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual([899527000000, 899527000000, 899527000000, + 899527000000], self.stor_col.drives_sizes_bytes) + def test_max_drive_size_bytes(self): - self.assertIsNone(self.stor_col._max_drive_size_bytes) + self.assertIsNone(self.stor_col._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -266,7 +281,7 @@ class StorageCollectionTestCase(base.TestCase): def test_max_drive_size_bytes_after_refresh(self): self.stor_col.refresh(force=False) - self.assertIsNone(self.stor_col._max_drive_size_bytes) + self.assertIsNone(self.stor_col._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -280,8 +295,23 @@ class StorageCollectionTestCase(base.TestCase): self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + def test_volumes_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual([107374182400, 899527000000, 1073741824000], + self.stor_col.volumes_sizes_bytes) + def test_max_volume_size_bytes(self): - self.assertIsNone(self.stor_col._max_volume_size_bytes) + self.assertIsNone(self.stor_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -302,7 +332,7 @@ class StorageCollectionTestCase(base.TestCase): def test_max_volume_size_bytes_after_refresh(self): self.stor_col.refresh(force=False) - self.assertIsNone(self.stor_col._max_volume_size_bytes) + self.assertIsNone(self.stor_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 9d3d0db..a15117c 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -86,7 +86,7 @@ class VolumeCollectionTestCase(base.TestCase): self.assertEqual(3, len(members)) def test_max_size_bytes(self): - self.assertIsNone(self.stor_vol_col._max_size_bytes) + self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -107,7 +107,7 @@ class VolumeCollectionTestCase(base.TestCase): def test_max_size_bytes_after_refresh(self): self.stor_vol_col.refresh() - self.assertIsNone(self.stor_vol_col._max_size_bytes) + self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index 07145ae..4fe0060 100644 --- 
a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -86,8 +86,18 @@ class SimpleStorageCollectionTestCase(base.TestCase): self.assertIsInstance(members, list) self.assertEqual(1, len(members)) + def test_disks_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.assertEqual([4000000000000, 8000000000000], + self.simpl_stor_col.disks_sizes_bytes) + def test_max_size_bytes(self): - self.assertIsNone(self.simpl_stor_col._max_size_bytes) + self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes) self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' @@ -103,7 +113,7 @@ class SimpleStorageCollectionTestCase(base.TestCase): def test_max_size_bytes_after_refresh(self): self.simpl_stor_col.refresh() - self.assertIsNone(self.simpl_stor_col._max_size_bytes) + self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes) self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' -- GitLab From ec6e6c1df1aa3ea9d10adf163b6750f63ba42e78 Mon Sep 17 00:00:00 2001 From: Andreas Jaeger Date: Sat, 22 Sep 2018 18:02:02 +0200 Subject: [PATCH 089/303] Use templates for cover and lower-constraints Small cleanups: * Use openstack-tox-cover template, this runs the cover job in the check queue only. Remove individual cover jobs. * Use openstack-lower-constraints-jobs template, remove individual jobs. 
* Sort list of templates Change-Id: I1d5b7401f880fa071d9e6312eb7d414050c9ece4 --- zuul.d/project.yaml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 8fbb78a..ef8474b 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -1,18 +1,16 @@ - project: templates: + - check-requirements + - openstack-cover-jobs + - openstack-lower-constraints-jobs - openstack-python-jobs - openstack-python35-jobs - openstack-python36-jobs - - check-requirements - publish-openstack-docs-pti - release-notes-jobs-python3 check: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src - - openstack-tox-lower-constraints - - openstack-tox-cover gate: jobs: - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src - - openstack-tox-lower-constraints - - openstack-tox-cover -- GitLab From 7ee490cc0a3606b25cdd962c8f50e9d45bebee9c Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Tue, 25 Sep 2018 07:13:40 +0000 Subject: [PATCH 090/303] Omit tests from code coverage run Fine-tune the coverage results by: - Omitting the tests folder from the coverage run. - Added report generation of coverage on the console while tox target run. - Removed and consolidated duplicate switches from run/report commands inside .coveragerc file, which was already present. 
Change-Id: I5c44e73768487eaefebf95f1dd026ba99b90a04c --- .coveragerc | 4 ++++ tox.ini | 5 +++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.coveragerc b/.coveragerc index 3532cb5..0affbcc 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,9 +1,13 @@ [run] branch = True source = sushy +omit = + *tests* [report] ignore_errors = True +omit = + *tests* [html] directory = cover diff --git a/tox.ini b/tox.ini index f2a14fe..3e8e90b 100644 --- a/tox.ini +++ b/tox.ini @@ -27,14 +27,15 @@ commands = {posargs} basepython = python3 setenv = {[testenv]setenv} - PYTHON=coverage run --source sushy --parallel-mode + PYTHON=coverage run --parallel-mode # After running this target, visit sushy/cover/index.html # in your browser, to see a nicer presentation report with annotated # HTML listings detailing missed lines. commands = coverage erase stestr run {posargs} coverage combine - coverage html -d cover + coverage report + coverage html coverage xml -o cover/coverage.xml [testenv:docs] -- GitLab From 0c6623d01225b4de12179e522def1ca5375d4109 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Tue, 14 Aug 2018 12:57:18 +0300 Subject: [PATCH 091/303] Update to use mapped field for Settings Message Severity field Although the JSON schema does not have this field as enum, its description says it is defined as Status field from Redfish specification. Clarified with DMTF that 'Status section' denotes Health enum in [1]. MessageRegistry->Message->Severity was introduced in I9a1735230a8328fd8365e375889c6ab066c3df16 with the same mapping and it has the same JSON schema as this Settings->Message->Severity. This change makes Settings->Message->Severity field of the same type as MessageRegistry Message Severity field that facilitates their comparison and error handling. MessageRegistry's Message and Settings's Message are similar, but different field types. 
[1] https://redfish.dmtf.org/schemas/v1/Resource.json Change-Id: I1595b1705e2d77f5f4e822ebd87bc6959ee6cf53 Story: 2001791 Task: 19767 --- sushy/resources/settings.py | 4 +++- sushy/tests/unit/json_samples/settings.json | 2 +- sushy/tests/unit/resources/test_settings.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 172cbc4..5f68365 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -16,6 +16,7 @@ from sushy.resources import base from sushy.resources import common +from sushy.resources import mappings as res_maps class MessageListField(base.ListField): @@ -29,7 +30,8 @@ class MessageListField(base.ListField): message = base.Field('Message') """Human readable message, if provided""" - severity = base.Field('Severity') + severity = base.MappedField('Severity', + res_maps.SEVERITY_VALUE_MAP) """Severity of the error""" resolution = base.Field('Resolution') diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 8119885..afb86f0 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -5,7 +5,7 @@ "Messages": [{ "MessageId": "Base.1.0.SettingsFailed", "Message": "Settings update failed due to invalid value", - "Severity": "High", + "Severity": "Critical", "Resolution": "Fix the value and try again", "MessageArgs": [ "arg1" diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index a2edec8..287cf26 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -16,6 +16,7 @@ import json import mock +from sushy.resources import constants as res_cons from sushy.resources import settings from sushy.tests.unit import base @@ -42,7 +43,7 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0].message_id) self.assertEqual('Settings update failed due to 
invalid value', instance.messages[0].message) - self.assertEqual('High', + self.assertEqual(res_cons.SEVERITY_CRITICAL, instance.messages[0].severity) self.assertEqual('Fix the value and try again', instance.messages[0].resolution) -- GitLab From 660c98fdcfcb6f07139f7c89ef69d147aeb37996 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 1 Oct 2018 11:20:17 +0200 Subject: [PATCH 092/303] Removed Python 2 support, not needed anymore in Debian. --- debian/changelog | 8 ++++++-- debian/control | 38 ++++---------------------------------- debian/rules | 25 +++++++++---------------- 3 files changed, 19 insertions(+), 52 deletions(-) diff --git a/debian/changelog b/debian/changelog index 0fbc621..310c500 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,10 +1,14 @@ -python-sushy (1.3.1-3) UNRELEASED; urgency=medium +python-sushy (1.3.1-3) unstable; urgency=medium + [ Ondřej Nový ] * d/control: Add trailing tilde to min version depend to allow backports * d/control: Use team+openstack@tracker.debian.org as maintainer - -- Ondřej Nový Tue, 27 Feb 2018 16:40:03 +0100 + [ Thomas Goirand ] + * Removed Python 2 support, not needed anymore in Debian. 
+ + -- Thomas Goirand Mon, 01 Oct 2018 11:19:49 +0200 python-sushy (1.3.1-2) unstable; urgency=medium diff --git a/debian/control b/debian/control index bc29bbe..851a2ff 100644 --- a/debian/control +++ b/debian/control @@ -8,22 +8,14 @@ Build-Depends: debhelper (>= 10~), dh-python, openstack-pkg-tools, - python-all, - python-pbr (>= 2.0.0), - python-setuptools, - python-sphinx (>= 1.6.2), + python3-sphinx (>= 1.6.2), python3-all, python3-pbr (>= 2.0.0), python3-setuptools, Build-Depends-Indep: - python-coverage, - python-hacking, - python-openstackdocstheme (>= 1.17.0), - python-oslotest (>= 1:3.2.0), - python-requests (>= 2.14.2), - python-six, - python-testscenarios, - python-testtools (>= 2.2.0), + python3-coverage, + python3-hacking, + python3-openstackdocstheme (>= 1.17.0), python3-oslotest (>= 1:3.2.0), python3-requests (>= 2.14.2), python3-six, @@ -36,28 +28,6 @@ Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy -Package: python-sushy -Architecture: all -Depends: - python-pbr (>= 2.0.0), - python-requests (>= 2.14.2), - python-six, - ${misc:Depends}, - ${python:Depends}, -Suggests: - python-sushy-doc, -Description: small library to communicate with Redfish based systems - Python 2.7 - Sushy is a Python library to communicate with Redfish based systems. The goal - of the library is to be extremely simple, small, have as few dependencies as - possible and be very conservative when dealing with BMCs by issuing just - enough requests to it (BMCs are very flaky). - . - Therefore, the scope of the library has been limited to what is supported by - the OpenStack Ironic project. As the project grows and more features from - Redfish are needed Sushy will expand to fulfil those requirements. - . - This package contains the Python 2.7 module. 
- Package: python-sushy-doc Section: doc Architecture: all diff --git a/debian/rules b/debian/rules index 680ad8e..292b154 100755 --- a/debian/rules +++ b/debian/rules @@ -4,35 +4,28 @@ UPSTREAM_GIT := https://github.com/openstack/sushy.git include /usr/share/openstack-pkg-tools/pkgos.make %: - dh $@ --buildsystem=python_distutils --with python2,python3,sphinxdoc + dh $@ --buildsystem=python_distutils --with python3,sphinxdoc + +override_dh_auto_clean: + echo "Do nothing..." + +override_dh_auto_build: + echo "Do nothing..." override_dh_auto_install: pkgos-dh_auto_install override_dh_auto_test: ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS))) - pkgos-dh_auto_test + pkgos-dh_auto_test --no-py2 endif - override_dh_sphinxdoc: ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS))) - sphinx-build -b html doc/source debian/python-sushy-doc/usr/share/doc/python-sushy-doc/html + PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source debian/python-sushy-doc/usr/share/doc/python-sushy-doc/html dh_sphinxdoc -O--buildsystem=python_distutils endif override_dh_clean: dh_clean -O--buildsystem=python_distutils rm -rf build - - -# Commands not to run -override_dh_installcatalogs: -override_dh_installemacsen override_dh_installifupdown: -override_dh_installinfo override_dh_installmenu override_dh_installmime: -override_dh_installmodules override_dh_installlogcheck: -override_dh_installpam override_dh_installppp override_dh_installudev override_dh_installwm: -override_dh_installxfonts override_dh_gconf override_dh_icons override_dh_perl override_dh_usrlocal: -override_dh_installcron override_dh_installdebconf: -override_dh_installlogrotate override_dh_installgsettings: - -- GitLab From 5a657518b995c1454c6e8a9ac53c9e90fbc46253 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 1 Oct 2018 11:21:58 +0200 Subject: [PATCH 093/303] Revert: Add trailing tilde to min version depend to allow backports --- debian/changelog | 2 -- debian/control | 2 +- 2 files changed, 1 
insertion(+), 3 deletions(-) diff --git a/debian/changelog b/debian/changelog index 310c500..6af7798 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,6 @@ python-sushy (1.3.1-3) unstable; urgency=medium [ Ondřej Nový ] - * d/control: Add trailing tilde to min version depend to allow - backports * d/control: Use team+openstack@tracker.debian.org as maintainer [ Thomas Goirand ] diff --git a/debian/control b/debian/control index 851a2ff..68f0e24 100644 --- a/debian/control +++ b/debian/control @@ -5,7 +5,7 @@ Maintainer: Debian OpenStack Uploaders: Thomas Goirand , Build-Depends: - debhelper (>= 10~), + debhelper (>= 10), dh-python, openstack-pkg-tools, python3-sphinx (>= 1.6.2), -- GitLab From 2bdae43029aa831360dab38dbf5e32ebce5e08e1 Mon Sep 17 00:00:00 2001 From: dnuka Date: Fri, 28 Sep 2018 21:21:06 +0530 Subject: [PATCH 094/303] Add product property to the root Sushy object Chages made to sushy/main.py product property is the product associated with this Redfish service. 
Change-Id: I2ab5bd73ac609a8563807bdc0d1c1825cd5b4bba --- sushy/main.py | 3 +++ sushy/tests/unit/json_samples/root.json | 1 + sushy/tests/unit/test_main.py | 1 + 3 files changed, 5 insertions(+) diff --git a/sushy/main.py b/sushy/main.py index b0a4334..a581aed 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -37,6 +37,9 @@ class Sushy(base.ResourceBase): uuid = base.Field('UUID') """The Redfish root service UUID""" + product = base.Field('Product') + """The product associated with this Redfish service""" + _systems_path = base.Field(['Systems', '@odata.id']) """SystemCollection path""" diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index 29709d5..f45c145 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -4,6 +4,7 @@ "Name": "Root Service", "RedfishVersion": "1.0.2", "UUID": "92384634-2938-2342-8820-489239905423", + "Product": "Product", "Systems": { "@odata.id": "/redfish/v1/Systems" }, diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 1d186bd..ef917a2 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -54,6 +54,7 @@ class MainTestCase(base.TestCase): self.assertEqual('1.0.2', self.root.redfish_version) self.assertEqual('92384634-2938-2342-8820-489239905423', self.root.uuid) + self.assertEqual('Product', self.root.product) self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) self.assertEqual('/redfish/v1/SessionService', -- GitLab From fbdd61100db140c62b6b116760104fe0f6fe81c9 Mon Sep 17 00:00:00 2001 From: dnuka Date: Mon, 1 Oct 2018 20:43:20 +0530 Subject: [PATCH 095/303] Add `ProtocolFeaturesSupported` property of the `Root Service` protocol_features_supported is the information about protocol features supported by the service. 
Story: #2003853 Task: #26649 Change-Id: I5103eabfa56fc049ca0c98bbfc62c82724fcbae9 --- ...l_features_supported-59de3f89b7382434.yaml | 5 +++++ sushy/main.py | 22 +++++++++++++++++++ sushy/tests/unit/json_samples/root.json | 7 ++++++ sushy/tests/unit/test_main.py | 6 +++++ 4 files changed, 40 insertions(+) create mode 100644 releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml diff --git a/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml b/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml new file mode 100644 index 0000000..01f2c22 --- /dev/null +++ b/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds `Product` and `ProtocolFeaturesSupported` properties support to + the Redfish `Root Service` diff --git a/sushy/main.py b/sushy/main.py index a581aed..053650a 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -26,6 +26,24 @@ from sushy.resources.system import system LOG = logging.getLogger(__name__) +class ProtocolFeaturesSupportedField(base.CompositeField): + + excerpt_query = base.Field('ExcerptQuery') + """The excerpt query parameter is supported""" + + expand_query = base.Field('ExpandQuery') + """The expand query parameter is supported""" + + filter_query = base.Field('FilterQuery') + """The filter query parameter is supported""" + + only_member_query = base.Field('OnlyMemberQuery') + """The only member query parameter is supported""" + + select_query = base.Field('SelectQuery') + """The select query parameter is supported""" + + class Sushy(base.ResourceBase): identity = base.Field('Id', required=True) @@ -40,6 +58,10 @@ class Sushy(base.ResourceBase): product = base.Field('Product') """The product associated with this Redfish service""" + protocol_features_supported = ProtocolFeaturesSupportedField( + 'ProtocolFeaturesSupported') + """The information about protocol features supported by the
service""" + _systems_path = base.Field(['Systems', '@odata.id']) """SystemCollection path""" diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index f45c145..1776fdf 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -5,6 +5,13 @@ "RedfishVersion": "1.0.2", "UUID": "92384634-2938-2342-8820-489239905423", "Product": "Product", + "ProtocolFeaturesSupported": { + "ExcerptQuery": true, + "ExpandQuery": false, + "FilterQuery": true, + "OnlyMemberQuery": true, + "SelectQuery": false + }, "Systems": { "@odata.id": "/redfish/v1/Systems" }, diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index ef917a2..ff662f8 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -55,6 +55,12 @@ class MainTestCase(base.TestCase): self.assertEqual('92384634-2938-2342-8820-489239905423', self.root.uuid) self.assertEqual('Product', self.root.product) + self.assertTrue(self.root.protocol_features_supported.excerpt_query) + self.assertFalse(self.root.protocol_features_supported.expand_query) + self.assertTrue(self.root.protocol_features_supported.filter_query) + self.assertTrue( + self.root.protocol_features_supported.only_member_query) + self.assertFalse(self.root.protocol_features_supported.select_query) self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) self.assertEqual('/redfish/v1/SessionService', -- GitLab From 02e401615446432bb66cde1d7cfc9049e92f0694 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 16 Oct 2018 18:44:27 +0200 Subject: [PATCH 096/303] Remove stray unicode character from tox.ini The weird unicode back tick might cause Python unicode deserialization error when reading tox file. 
Change-Id: I753fa4b700dd3634f19c9e5201575f75ab9e9080 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 3e8e90b..b14f1c6 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,7 @@ commands = oslo_debug_helper -t sushy/tests {posargs} # E123, E125 skipped as they are invalid PEP-8. show-source = True ignore = E123,E125 -# [H106] Don’t put vim configuration in source files. +# [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. # [H204] Use assert(Not)Equal to check for equality. # [H205] Use assert(Greater|Less)(Equal) for comparison. -- GitLab From ee317078f4507f754679c7d0b00a9fa106c5663d Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 16 Oct 2018 19:32:06 +0200 Subject: [PATCH 097/303] Fix crashing `BasicAuth` context manager When `BasicAuth` object is used as a context manager, it crashes on `__exit__` because of missing `.close()` method. This patch fixes that and adds a bunch of test cases. Change-Id: I3d8612dfb481d29fb56cec1012c906e37ed911a9 --- sushy/auth.py | 6 ++++++ sushy/tests/unit/test_auth.py | 22 ++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/sushy/auth.py b/sushy/auth.py index db65fc2..c537c84 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -69,6 +69,12 @@ class AuthBase(object): def can_refresh_session(self): """Method to assert if session based refresh can be done.""" + def close(self): + """Shutdown Redfish authentication object + + Undoes whatever should be undone to cancel authenticated session. 
+ """ + def __enter__(self): """Allow object to be called with the 'with' statement.""" return self diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py index eb0405d..df6650b 100644 --- a/sushy/tests/unit/test_auth.py +++ b/sushy/tests/unit/test_auth.py @@ -63,6 +63,13 @@ class BasicAuthTestCase(base.TestCase): def test_can_refresh_session(self): self.assertFalse(self.base_auth.can_refresh_session()) + @mock.patch.object(auth.BasicAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.BasicAuth(self.username, self.password) as base_auth: + self.assertEqual(self.username, base_auth._username) + self.assertEqual(self.password, base_auth._password) + auth_close.assert_called_once_with(base_auth) + class SessionAuthTestCase(base.TestCase): @@ -193,6 +200,13 @@ class SessionAuthTestCase(base.TestCase): self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) + @mock.patch.object(auth.SessionAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.SessionAuth(self.username, self.password) as session_auth: + self.assertEqual(self.username, session_auth._username) + self.assertEqual(self.password, session_auth._password) + auth_close.assert_called_once_with(session_auth) + class SessionOrBasicAuthTestCase(base.TestCase): @@ -331,3 +345,11 @@ class SessionOrBasicAuthTestCase(base.TestCase): self.conn.delete.assert_called_once_with(self.sess_uri) self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) + + @mock.patch.object(auth.SessionOrBasicAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.SessionOrBasicAuth( + self.username, self.password) as session_or_base_auth: + self.assertEqual(self.username, session_or_base_auth._username) + self.assertEqual(self.password, session_or_base_auth._password) + 
auth_close.assert_called_once_with(session_or_base_auth) -- GitLab From 27c725cbb8a2a5d55d9e74519ad5e386b2fe50c4 Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Wed, 11 Jul 2018 09:40:16 +0000 Subject: [PATCH 098/303] Introduce ``cache_it`` and ``cache_clear`` Right now the nested resource property methods contain a lot of repetitive code for caching purpose. This utility decorator ``cache_it`` with its complimentary ``cache_clear`` method attempt to reduce that code bloat. Change-Id: I7404a15beb029cb282ac6b84bb8b8fdb97ebcd4c --- sushy/resources/base.py | 20 +-- sushy/resources/manager/manager.py | 20 +-- .../sessionservice/sessionservice.py | 20 +-- sushy/resources/system/bios.py | 24 +-- sushy/resources/system/ethernet_interface.py | 20 +-- sushy/resources/system/processor.py | 44 +++-- sushy/resources/system/simple_storage.py | 24 +-- sushy/resources/system/storage/storage.py | 73 +++----- sushy/resources/system/storage/volume.py | 16 +- sushy/resources/system/system.py | 90 +++------- .../unit/resources/manager/test_manager.py | 10 +- .../sessionservice/test_sessionservice.py | 6 +- .../resources/system/storage/test_storage.py | 17 +- .../resources/system/storage/test_volume.py | 2 - .../tests/unit/resources/system/test_bios.py | 22 ++- .../system/test_ethernet_interfaces.py | 1 - .../unit/resources/system/test_processor.py | 8 +- .../resources/system/test_simple_storage.py | 2 - .../unit/resources/system/test_system.py | 32 +--- sushy/tests/unit/resources/test_base.py | 27 ++- sushy/tests/unit/test_utils.py | 110 ++++++++++++ sushy/utils.py | 160 ++++++++++++++++++ 22 files changed, 444 insertions(+), 304 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 205ddf5..a5e5388 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -358,8 +358,6 @@ class ResourceCollectionBase(ResourceBase): adapter=utils.get_members_identities) """A tuple with the members identities""" - _members = None # caching variable - def 
__init__(self, connector, path, redfish_version=None): """A class representing the base of any Redfish resource collection @@ -395,24 +393,20 @@ class ResourceCollectionBase(ResourceBase): return self._resource_type(self._conn, identity, redfish_version=self.redfish_version) + @utils.cache_it def get_members(self): """Return a list of ``_resource_type`` objects present in collection :returns: A list of ``_resource_type`` objects """ - if self._members is None: - self._members = [self.get_member(id_) - for id_ in self.members_identities] - - for m in self._members: - m.refresh(force=False) - return self._members + return [self.get_member(id_) for id_ in self.members_identities] - def _do_refresh(self, force=False): + def _do_refresh(self, force): """Do refresh related activities. - Undefine the `_members` attribute here for fresh evaluation in - subsequent calls to `get_members()` method. Other similar activities + Invalidate / Undefine the cache attributes here for fresh evaluation + in subsequent calls to `get_members()` method. Other similar activities can also follow in future, if needed. 
""" - self._members = None + super(ResourceCollectionBase, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 9568408..ef63655 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -77,8 +77,6 @@ class Manager(base.ResourceBase): _actions = ActionsField('Actions', required=True) - _virtual_media = None - def __init__(self, connector, identity, redfish_version=None): """A class representing a Manager @@ -89,9 +87,9 @@ class Manager(base.ResourceBase): """ super(Manager, self).__init__(connector, identity, redfish_version) - def _do_refresh(self, force=False): - if self._virtual_media is not None: - self._virtual_media.invalidate(force) + def _do_refresh(self, force): + super(Manager, self)._do_refresh(force=force) + utils.cache_clear(self, force) def get_supported_graphical_console_types(self): """Get the supported values for Graphical Console connection types. 
@@ -188,15 +186,11 @@ class Manager(base.ResourceBase): LOG.info('The Manager %s is being reset', self.identity) @property + @utils.cache_it def virtual_media(self): - if self._virtual_media is None: - self._virtual_media = virtual_media.VirtualMediaCollection( - self._conn, - utils.get_sub_resource_path_by(self, 'VirtualMedia'), - redfish_version=self.redfish_version) - - self._virtual_media.refresh(force=False) - return self._virtual_media + return virtual_media.VirtualMediaCollection( + self._conn, utils.get_sub_resource_path_by(self, 'VirtualMedia'), + redfish_version=self.redfish_version) class ManagerCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index 6de40d0..325902f 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -18,6 +18,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources.sessionservice import session +from sushy import utils LOG = logging.getLogger(__name__) @@ -36,8 +37,6 @@ class SessionService(base.ResourceBase): service_enabled = base.Field('ServiceEnabled') """Tells us if session service is enabled""" - _sessions = None # ref to SessionCollection instance - session_timeout = base.Field('SessionTimeout') """The session service timeout""" @@ -66,29 +65,26 @@ class SessionService(base.ResourceBase): return sessions_col.get('@odata.id') @property + @utils.cache_it def sessions(self): """Property to provide reference to the `SessionCollection` instance It is calculated once when the first time it is queried. On refresh, this property gets reset. 
""" - if self._sessions is None: - self._sessions = session.SessionCollection( - self._conn, self._get_sessions_collection_path(), - redfish_version=self.redfish_version) - - self._sessions.refresh(force=False) - return self._sessions + return session.SessionCollection( + self._conn, self._get_sessions_collection_path(), + redfish_version=self.redfish_version) - def _do_refresh(self, force=False): + def _do_refresh(self, force): """Do custom resource specific refresh activities On refresh, all sub-resources are marked as stale, i.e. greedy-refresh not done for them unless forced by ``force`` argument. """ - if self._sessions is not None: - self._sessions.invalidate(force) + super(SessionService, self)._do_refresh(force=force) + utils.cache_clear(self, force) def close_session(self, session_uri): """This function is for closing a session based on its id. diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 3888a44..c66896c 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -19,6 +19,7 @@ from sushy import exceptions from sushy.resources import base from sushy.resources import common from sushy.resources import settings +from sushy import utils LOG = logging.getLogger(__name__) @@ -56,7 +57,13 @@ class Bios(base.ResourceBase): _actions = ActionsField('Actions') - _pending_settings_resource = None + @property + @utils.cache_it + def _pending_settings_resource(self): + """Pending BIOS settings resource""" + return Bios( + self._conn, self._settings.resource_uri, + redfish_version=self.redfish_version) @property def pending_attributes(self): @@ -65,13 +72,6 @@ class Bios(base.ResourceBase): BIOS attributes that have been comitted to the system, but for them to take effect system restart is necessary """ - - if not self._pending_settings_resource: - self._pending_settings_resource = Bios( - self._conn, - self._settings.resource_uri, - redfish_version=self.redfish_version) - 
self._pending_settings_resource.refresh(force=False) return self._pending_settings_resource.attributes def set_attribute(self, key, value): @@ -97,8 +97,8 @@ class Bios(base.ResourceBase): """ self._settings.commit(self._conn, {'Attributes': value}) - if self._pending_settings_resource: - self._pending_settings_resource.invalidate() + utils.cache_clear(self, force_refresh=False, + only_these=['_pending_settings_resource']) def _get_reset_bios_action_element(self): actions = self._actions @@ -155,5 +155,5 @@ class Bios(base.ResourceBase): greedy-refresh not done for them unless forced by ``force`` argument. """ - if self._pending_settings_resource is not None: - self._pending_settings_resource.invalidate(force) + super(Bios, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index 8e95ad7..e6d1554 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -18,6 +18,7 @@ import logging from sushy.resources import base from sushy.resources import common from sushy.resources import constants as res_cons +from sushy import utils LOG = logging.getLogger(__name__) @@ -49,13 +50,12 @@ class EthernetInterface(base.ResourceBase): class EthernetInterfaceCollection(base.ResourceCollectionBase): - _summary = None - @property def _resource_type(self): return EthernetInterface @property + @utils.cache_it def summary(self): """Summary of MAC addresses and interfaces state @@ -67,14 +67,12 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): {'aa:bb:cc:dd:ee:ff': sushy.STATE_ENABLED, 'aa:bb:aa:aa:aa:aa': sushy.STATE_DISABLED} """ - if self._summary is None: - mac_dict = {} - for eth in self.get_members(): - if eth.mac_address is not None and eth.status is not None: - if eth.status.health == res_cons.HEALTH_OK: - mac_dict[eth.mac_address] = eth.status.state - self._summary = mac_dict - return 
self._summary + mac_dict = {} + for eth in self.get_members(): + if eth.mac_address is not None and eth.status is not None: + if eth.status.health == res_cons.HEALTH_OK: + mac_dict[eth.mac_address] = eth.status.state + return mac_dict def _do_refresh(self, force=False): """Do custom resource specific refresh activities @@ -84,4 +82,4 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): argument. """ super(EthernetInterfaceCollection, self)._do_refresh(force) - self._summary = None + utils.cache_clear(self, force) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 7d1ba55..9013aae 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -18,6 +18,7 @@ import logging from sushy.resources import base from sushy.resources import common from sushy.resources.system import mappings as sys_maps +from sushy import utils # Representation of Summary of Processor information ProcessorSummary = collections.namedtuple('ProcessorSummary', @@ -104,10 +105,8 @@ class ProcessorCollection(base.ResourceCollectionBase): def _resource_type(self): return Processor - _summary = None - """The summary of processors of the system in general detail""" - @property + @utils.cache_it def summary(self): """Property to provide ProcessorSummary info @@ -117,25 +116,21 @@ class ProcessorCollection(base.ResourceCollectionBase): :returns: A namedtuple containing the ``count`` of processors in regards to logical CPUs, and their ``architecture``. """ - if self._summary is None: - count, architecture = 0, None - for proc in self.get_members(): - # Note(deray): It attempts to detect the number of CPU cores. - # It returns the number of logical CPUs. - if proc.total_threads is not None: - count += proc.total_threads - - # Note(deray): Bail out of checking the architecture info - # if you have already got hold of any one of the processors' - # architecture information. 
- if (architecture is None - and proc.processor_architecture is not None): - architecture = proc.processor_architecture - - self._summary = ProcessorSummary(count=count, - architecture=architecture) - - return self._summary + count, architecture = 0, None + for proc in self.get_members(): + # Note(deray): It attempts to detect the number of CPU cores. + # It returns the number of logical CPUs. + if proc.total_threads is not None: + count += proc.total_threads + + # Note(deray): Bail out of checking the architecture info + # if you have already got hold of any one of the processors' + # architecture information. + if (architecture is None + and proc.processor_architecture is not None): + architecture = proc.processor_architecture + + return ProcessorSummary(count=count, architecture=architecture) def __init__(self, connector, path, redfish_version=None): """A class representing a ProcessorCollection @@ -155,6 +150,5 @@ class ProcessorCollection(base.ResourceCollectionBase): greedy-refresh not done for them unless forced by ``force`` argument. """ - super(ProcessorCollection, self)._do_refresh(force) - # Reset summary attribute - self._summary = None + super(ProcessorCollection, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py index e73caf3..1569598 100644 --- a/sushy/resources/system/simple_storage.py +++ b/sushy/resources/system/simple_storage.py @@ -58,28 +58,22 @@ class SimpleStorage(base.ResourceBase): class SimpleStorageCollection(base.ResourceCollectionBase): """Represents a collection of simple storage associated with system.""" - _disks_sizes_bytes = None - @property def _resource_type(self): return SimpleStorage @property + @utils.cache_it def disks_sizes_bytes(self): """Sizes of each Disk in bytes in SimpleStorage collection resource. Returns the list of cached values until it (or its parent resource) is refreshed. 
""" - if self._disks_sizes_bytes is None: - self._disks_sizes_bytes = sorted( - device.capacity_bytes - for simpl_stor in self.get_members() - for device in simpl_stor.devices - if device.status.state == res_cons.STATE_ENABLED - ) - - return self._disks_sizes_bytes + return sorted(device.capacity_bytes + for simpl_stor in self.get_members() + for device in simpl_stor.devices + if device.status.state == res_cons.STATE_ENABLED) @property def max_size_bytes(self): @@ -90,8 +84,6 @@ class SimpleStorageCollection(base.ResourceCollectionBase): """ return utils.max_safe(self.disks_sizes_bytes) - def _do_refresh(self, force=False): - super(SimpleStorageCollection, self)._do_refresh(force) - # Note(deray): undefine the attribute here for fresh creation in - # subsequent calls to it's exposed property. - self._disks_sizes_bytes = None + def _do_refresh(self, force): + super(SimpleStorageCollection, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 7743469..3b339c8 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -42,10 +42,6 @@ class Storage(base.ResourceBase): adapter=utils.get_members_identities) """A tuple with the drive identities""" - _drives_sizes_bytes = None - _drives = None - _volumes = None # reference to VolumeCollection instance - def get_drive(self, drive_identity): """Given the drive identity return a ``Drive`` object @@ -57,6 +53,7 @@ class Storage(base.ResourceBase): redfish_version=self.redfish_version) @property + @utils.cache_it def drives(self): """Return a list of `Drive` objects present in the storage resource. 
@@ -67,22 +64,17 @@ class Storage(base.ResourceBase): :returns: A list of `Drive` objects :raises: ResourceNotFoundError """ - if self._drives is None: - self._drives = [ - self.get_drive(id_) for id_ in self.drives_identities] - return self._drives + return [self.get_drive(id_) for id_ in self.drives_identities] @property + @utils.cache_it def drives_sizes_bytes(self): """Sizes of all Drives in bytes in Storage resource. Returns the list of cached values until it (or its parent resource) is refreshed. """ - if self._drives_sizes_bytes is None: - self._drives_sizes_bytes = sorted( - drv.capacity_bytes for drv in self.drives) - return self._drives_sizes_bytes + return sorted(drv.capacity_bytes for drv in self.drives) @property def drives_max_size_bytes(self): @@ -90,6 +82,7 @@ class Storage(base.ResourceBase): return utils.max_safe(self.drives_sizes_bytes) @property + @utils.cache_it def volumes(self): """Property to reference `VolumeCollection` instance @@ -98,50 +91,35 @@ class Storage(base.ResourceBase): point). Here only the actual refresh of the sub-resource happens, if resource is stale. """ - if self._volumes is None: - self._volumes = volume.VolumeCollection( - self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), - redfish_version=self.redfish_version) - - self._volumes.refresh(force=False) - return self._volumes + return volume.VolumeCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), + redfish_version=self.redfish_version) - def _do_refresh(self, force=False): + def _do_refresh(self, force): """Do resource specific refresh activities.""" - # Note(deray): undefine the attribute here for fresh evaluation in - # subsequent calls to it's exposed property. 
- self._drives_sizes_bytes = None - self._drives = None - # invalidate the nested resource - if self._volumes is not None: - self._volumes.invalidate(force) + # Note(deray): invalidate / undefine the attributes here for fresh + # evaluation in subsequent calls to it's exposed property. + super(Storage, self)._do_refresh(force=force) + utils.cache_clear(self, force) class StorageCollection(base.ResourceCollectionBase): """This class represents the collection of Storage resources""" - _drives_sizes_bytes = None - _volumes_sizes_bytes = None - @property def _resource_type(self): return Storage @property + @utils.cache_it def drives_sizes_bytes(self): """Sizes of each Drive in bytes in Storage collection resource. Returns the list of cached values until it (or its parent resource) is refreshed. """ - if self._drives_sizes_bytes is None: - self._drives_sizes_bytes = sorted( - drive_size - for storage_ in self.get_members() - for drive_size in storage_.drives_sizes_bytes - ) - - return self._drives_sizes_bytes + return sorted(drive_size for storage_ in self.get_members() + for drive_size in storage_.drives_sizes_bytes) @property def max_drive_size_bytes(self): @@ -153,19 +131,15 @@ class StorageCollection(base.ResourceCollectionBase): return utils.max_safe(self.drives_sizes_bytes) @property + @utils.cache_it def volumes_sizes_bytes(self): """Sizes of each Volume in bytes in Storage collection resource. Returns the list of cached values until it (or its parent resource) is refreshed. 
""" - if self._volumes_sizes_bytes is None: - self._volumes_sizes_bytes = sorted( - volume_size - for storage_ in self.get_members() - for volume_size in storage_.volumes.volumes_sizes_bytes) - - return self._volumes_sizes_bytes + return sorted(volume_size for storage_ in self.get_members() + for volume_size in storage_.volumes.volumes_sizes_bytes) @property def max_volume_size_bytes(self): @@ -176,10 +150,7 @@ class StorageCollection(base.ResourceCollectionBase): """ return utils.max_safe(self.volumes_sizes_bytes) - def _do_refresh(self, force=False): + def _do_refresh(self, force): """Do resource specific refresh activities""" - super(StorageCollection, self)._do_refresh(force) - # Note(deray): undefine the attributes here for fresh evaluation in - # subsequent calls to their exposed properties. - self._drives_sizes_bytes = None - self._volumes_sizes_bytes = None + super(StorageCollection, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 2a40009..203b0a5 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -37,24 +37,19 @@ class Volume(base.ResourceBase): class VolumeCollection(base.ResourceCollectionBase): """This class represents the Storage Volume collection""" - _volumes_sizes_bytes = None - @property def _resource_type(self): return Volume @property + @utils.cache_it def volumes_sizes_bytes(self): """Sizes of all Volumes in bytes in VolumeCollection resource. Returns the list of cached values until it (or its parent resource) is refreshed. 
""" - if self._volumes_sizes_bytes is None: - self._volumes_sizes_bytes = sorted( - vol.capacity_bytes - for vol in self.get_members()) - return self._volumes_sizes_bytes + return sorted(vol.capacity_bytes for vol in self.get_members()) @property def max_volume_size_bytes(self): @@ -68,7 +63,6 @@ class VolumeCollection(base.ResourceCollectionBase): # NOTE(etingof): for backward compatibility max_size_bytes = max_volume_size_bytes - def _do_refresh(self, force=False): - super(VolumeCollection, self)._do_refresh(force) - # invalidate the attribute - self._volumes_sizes_bytes = None + def _do_refresh(self, force): + super(VolumeCollection, self)._do_refresh(force=force) + utils.cache_clear(self, force) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 14064cd..b6c3967 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -123,21 +123,6 @@ class System(base.ResourceBase): _actions = ActionsField('Actions', required=True) - # reference to ProcessorCollection instance - _processors = None - - # reference to EthernetInterfaceCollection instance - _ethernet_interfaces = None - - # reference to BIOS instance - _bios = None - - # reference to SimpleStorageCollection instance - _simple_storage = None - - # reference to StorageCollection instance - _storage = None - def __init__(self, connector, identity, redfish_version=None): """A class representing a ComputerSystem @@ -264,6 +249,7 @@ class System(base.ResourceBase): return utils.get_sub_resource_path_by(self, 'Processors') @property + @utils.cache_it def processors(self): """Property to reference `ProcessorCollection` instance @@ -271,15 +257,12 @@ class System(base.ResourceBase): this property is marked as stale (greedy-refresh not done). Here the actual refresh of the sub-resource happens, if stale. 
""" - if self._processors is None: - self._processors = processor.ProcessorCollection( - self._conn, self._get_processor_collection_path(), - redfish_version=self.redfish_version) - - self._processors.refresh(force=False) - return self._processors + return processor.ProcessorCollection( + self._conn, self._get_processor_collection_path(), + redfish_version=self.redfish_version) @property + @utils.cache_it def ethernet_interfaces(self): """Property to reference `EthernetInterfaceCollection` instance @@ -287,17 +270,13 @@ class System(base.ResourceBase): this property is marked as stale (greedy-refresh not done). Here the actual refresh of the sub-resource happens, if stale. """ - if self._ethernet_interfaces is None: - self._ethernet_interfaces = ( - ethernet_interface.EthernetInterfaceCollection( - self._conn, - utils.get_sub_resource_path_by(self, "EthernetInterfaces"), - redfish_version=self.redfish_version)) - - self._ethernet_interfaces.refresh(force=False) - return self._ethernet_interfaces + return ethernet_interface.EthernetInterfaceCollection( + self._conn, + utils.get_sub_resource_path_by(self, "EthernetInterfaces"), + redfish_version=self.redfish_version) @property + @utils.cache_it def bios(self): """Property to reference `Bios` instance @@ -305,16 +284,13 @@ class System(base.ResourceBase): this property is marked as stale (greedy-refresh not done). Here the actual refresh of the sub-resource happens, if stale. """ - if self._bios is None: - self._bios = bios.Bios( - self._conn, - utils.get_sub_resource_path_by(self, 'Bios'), - redfish_version=self.redfish_version) - - self._bios.refresh(force=False) - return self._bios + return bios.Bios( + self._conn, + utils.get_sub_resource_path_by(self, 'Bios'), + redfish_version=self.redfish_version) @property + @utils.cache_it def simple_storage(self): """A collection of simple storage associated with system. @@ -330,16 +306,12 @@ class System(base.ResourceBase): is missing. 
:returns: `SimpleStorageCollection` instance """ - if self._simple_storage is None: - self._simple_storage = sys_simple_storage.SimpleStorageCollection( - self._conn, - utils.get_sub_resource_path_by(self, "SimpleStorage"), - redfish_version=self.redfish_version) - - self._simple_storage.refresh(force=False) - return self._simple_storage + return sys_simple_storage.SimpleStorageCollection( + self._conn, utils.get_sub_resource_path_by(self, "SimpleStorage"), + redfish_version=self.redfish_version) @property + @utils.cache_it def storage(self): """A collection of storage subsystems associated with system. @@ -356,31 +328,19 @@ class System(base.ResourceBase): is missing. :returns: `StorageCollection` instance """ - if self._storage is None: - self._storage = sys_storage.StorageCollection( - self._conn, utils.get_sub_resource_path_by(self, "Storage"), - redfish_version=self.redfish_version) - - self._storage.refresh(force=False) - return self._storage + return sys_storage.StorageCollection( + self._conn, utils.get_sub_resource_path_by(self, "Storage"), + redfish_version=self.redfish_version) - def _do_refresh(self, force=False): + def _do_refresh(self, force): """Do custom resource specific refresh activities On refresh, all sub-resources are marked as stale, i.e. greedy-refresh not done for them unless forced by ``force`` argument. 
""" - if self._processors is not None: - self._processors.invalidate(force) - if self._ethernet_interfaces is not None: - self._ethernet_interfaces.invalidate(force) - if self._bios is not None: - self._bios.invalidate(force) - if self._simple_storage is not None: - self._simple_storage.invalidate(force) - if self._storage is not None: - self._storage.invalidate(force) + super(System, self)._do_refresh(force=force) + utils.cache_clear(self, force) class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 7276209..5c98ef6 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -54,7 +54,6 @@ class ManagerTestCase(base.TestCase): self.assertEqual(sushy.MANAGER_TYPE_BMC, self.manager.manager_type) self.assertEqual('58893887-8974-2487-2389-841168418919', self.manager.uuid) - self.assertIsNone(self.manager._virtual_media) def test_get_supported_graphical_console_types(self): # | GIVEN | @@ -243,8 +242,8 @@ class ManagerTestCase(base.TestCase): self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | - self.assertIsInstance(self.manager.virtual_media, - virtual_media.VirtualMediaCollection) + vrt_media = self.manager.virtual_media + self.assertIsInstance(vrt_media, virtual_media.VirtualMediaCollection) # On refreshing the manager instance... 
with open('sushy/tests/unit/json_samples/manager.json', 'r') as f: @@ -254,8 +253,7 @@ class ManagerTestCase(base.TestCase): self.manager.refresh(force=False) # | WHEN & THEN | - self.assertIsNotNone(self.manager._virtual_media) - self.assertTrue(self.manager._virtual_media._is_stale) + self.assertTrue(vrt_media._is_stale) # | GIVEN | with open('sushy/tests/unit/json_samples/' @@ -265,7 +263,7 @@ class ManagerTestCase(base.TestCase): # | WHEN & THEN | self.assertIsInstance(self.manager.virtual_media, virtual_media.VirtualMediaCollection) - self.assertFalse(self.manager._virtual_media._is_stale) + self.assertFalse(vrt_media._is_stale) class ManagerCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index eacaf09..c87178f 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -53,7 +53,6 @@ class SessionServiceTestCase(base.TestCase): self.assertEqual('Session Service', self.sess_serv_inst.name) self.assertEqual(30, self.sess_serv_inst.session_timeout) self.assertEqual(exp_path, self.sess_serv_inst.path) - self.assertIsNone(self.sess_serv_inst._sessions) def test__parse_attributes_missing_timeout(self): self.sess_serv_inst.json.pop('SessionTimeout') @@ -134,8 +133,6 @@ class SessionServiceTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = successive_return_values def test_sessions(self): - # check for the underneath variable value - self.assertIsNone(self.sess_serv_inst._sessions) # | GIVEN | self._setUp_sessions() # | WHEN | @@ -166,8 +163,7 @@ class SessionServiceTestCase(base.TestCase): self.sess_serv_inst.refresh(force=True) # | WHEN & THEN | - self.assertIsNotNone(self.sess_serv_inst._sessions) - self.assertFalse(self.sess_serv_inst._sessions._is_stale) + self.assertFalse(self.sess_serv_inst.sessions._is_stale) def 
test_close_session(self): self.sess_serv_inst.close_session('session/identity') diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 4ccdcb7..502aa58 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -103,7 +103,6 @@ class StorageTestCase(base.TestCase): def test_drives_after_refresh(self): self.storage.refresh() - self.assertIsNone(self.storage._drives) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -120,7 +119,6 @@ class StorageTestCase(base.TestCase): self.assertIsInstance(drv, drive.Drive) def test_drives_max_size_bytes(self): - self.assertIsNone(self.storage._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -139,7 +137,6 @@ class StorageTestCase(base.TestCase): def test_drives_max_size_bytes_after_refresh(self): self.storage.refresh() - self.assertIsNone(self.storage._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -152,8 +149,6 @@ class StorageTestCase(base.TestCase): self.assertEqual(899527000000, self.storage.drives_max_size_bytes) def test_volumes(self): - # check for the underneath variable value - self.assertIsNone(self.storage._volumes) # | GIVEN | self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/volume_collection.json') as f: @@ -184,8 +179,8 @@ class StorageTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/volume_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | - self.assertIsInstance(self.storage.volumes, - volume.VolumeCollection) + vols = self.storage.volumes + self.assertIsInstance(vols, volume.VolumeCollection) # On refreshing the system instance... 
with open('sushy/tests/unit/json_samples/storage.json') as f: @@ -195,8 +190,7 @@ class StorageTestCase(base.TestCase): self.storage.refresh(force=False) # | WHEN & THEN | - self.assertIsNotNone(self.storage._volumes) - self.assertTrue(self.storage._volumes._is_stale) + self.assertTrue(vols._is_stale) # | GIVEN | with open('sushy/tests/unit/json_samples/volume_collection.json') as f: @@ -204,7 +198,6 @@ class StorageTestCase(base.TestCase): # | WHEN & THEN | self.assertIsInstance(self.storage.volumes, volume.VolumeCollection) - self.assertFalse(self.storage._volumes._is_stale) class StorageCollectionTestCase(base.TestCase): @@ -260,7 +253,6 @@ class StorageCollectionTestCase(base.TestCase): 899527000000], self.stor_col.drives_sizes_bytes) def test_max_drive_size_bytes(self): - self.assertIsNone(self.stor_col._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -281,7 +273,6 @@ class StorageCollectionTestCase(base.TestCase): def test_max_drive_size_bytes_after_refresh(self): self.stor_col.refresh(force=False) - self.assertIsNone(self.stor_col._drives_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -311,7 +302,6 @@ class StorageCollectionTestCase(base.TestCase): self.stor_col.volumes_sizes_bytes) def test_max_volume_size_bytes(self): - self.assertIsNone(self.stor_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -332,7 +322,6 @@ class StorageCollectionTestCase(base.TestCase): def test_max_volume_size_bytes_after_refresh(self): self.stor_col.refresh(force=False) - self.assertIsNone(self.stor_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index a15117c..d1a4d96 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ 
b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -86,7 +86,6 @@ class VolumeCollectionTestCase(base.TestCase): self.assertEqual(3, len(members)) def test_max_size_bytes(self): - self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] @@ -107,7 +106,6 @@ class VolumeCollectionTestCase(base.TestCase): def test_max_size_bytes_after_refresh(self): self.stor_vol_col.refresh() - self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes) self.conn.get.return_value.json.reset_mock() successive_return_values = [] diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 63c4f53..b433db9 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -66,13 +66,20 @@ class BiosTestCase(base.TestCase): data={'Attributes': {'ProcTurboMode': 'Disabled'}}) def test_set_attribute_on_refresh(self): + self.conn.get.reset_mock() # make it to instantiate pending attributes self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + self.conn.get.reset_mock() + + self.sys_bios.pending_attributes + self.assertFalse(self.conn.get.called) + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') - self.assertTrue(self.sys_bios._pending_settings_resource._is_stale) # make it to refresh pending attributes on next retrieval self.sys_bios.pending_attributes - self.assertFalse(self.sys_bios._pending_settings_resource._is_stale) + self.assertTrue(self.conn.get.called) def test_set_attributes(self): self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', @@ -83,14 +90,21 @@ class BiosTestCase(base.TestCase): 'UsbControl': 'UsbDisabled'}}) def test_set_attributes_on_refresh(self): + self.conn.get.reset_mock() # make it to instantiate pending attributes self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + self.conn.get.reset_mock() + + 
self.sys_bios.pending_attributes + self.assertFalse(self.conn.get.called) + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', 'UsbControl': 'UsbDisabled'}) - self.assertTrue(self.sys_bios._pending_settings_resource._is_stale) # make it to refresh pending attributes on next retrieval self.sys_bios.pending_attributes - self.assertFalse(self.sys_bios._pending_settings_resource._is_stale) + self.assertTrue(self.conn.get.called) def test__get_reset_bios_action_element(self): value = self.sys_bios._get_reset_bios_action_element() diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index ebb5710..89a9065 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -93,7 +93,6 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): self.assertEqual(1, len(members)) def test_summary(self): - self.assertIsNone(self.sys_eth_col._summary) self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' 'ethernet_interfaces.json') as f: diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index f06d4aa..1d933d2 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -127,8 +127,6 @@ class ProcessorCollectionTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = successive_return_values def test_summary(self): - # check for the underneath variable value - self.assertIsNone(self.sys_processor_col._summary) # | GIVEN | self._setUp_processor_summary() # | WHEN | @@ -161,10 +159,8 @@ class ProcessorCollectionTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/' 'processor_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) - self.sys_processor_col.refresh(force=True) - - # | WHEN & 
THEN | - self.assertIsNone(self.sys_processor_col._summary) + self.sys_processor_col.invalidate() + self.sys_processor_col.refresh(force=False) # | GIVEN | self._setUp_processor_summary() diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index 4fe0060..f3d6ffa 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -97,7 +97,6 @@ class SimpleStorageCollectionTestCase(base.TestCase): self.simpl_stor_col.disks_sizes_bytes) def test_max_size_bytes(self): - self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes) self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' @@ -113,7 +112,6 @@ class SimpleStorageCollectionTestCase(base.TestCase): def test_max_size_bytes_after_refresh(self): self.simpl_stor_col.refresh() - self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes) self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 4642d9a..eb96a7d 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -21,7 +21,6 @@ import sushy from sushy import exceptions from sushy.resources import constants as res_cons from sushy.resources.system import bios -from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor from sushy.resources.system import simple_storage @@ -68,9 +67,6 @@ class SystemTestCase(base.TestCase): self.sys_inst.power_state) self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertEqual("OK", self.sys_inst.memory_summary.health) - self.assertIsNone(self.sys_inst._processors) - self.assertIsNone(self.sys_inst._ethernet_interfaces) - 
self.assertIsNone(self.sys_inst._bios) def test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') @@ -270,8 +266,6 @@ class SystemTestCase(base.TestCase): self.assertIsNone(self.sys_inst.memory_summary) def test_processors(self): - # check for the underneath variable value - self.assertIsNone(self.sys_inst._processors) # | GIVEN | self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' @@ -308,10 +302,6 @@ class SystemTestCase(base.TestCase): self.sys_inst.invalidate() self.sys_inst.refresh(force=False) - # | WHEN & THEN | - self.assertIsNotNone(self.sys_inst._processors) - self.assertTrue(self.sys_inst._processors._is_stale) - # | GIVEN | with open('sushy/tests/unit/json_samples/' 'processor_collection.json') as f: @@ -319,7 +309,6 @@ class SystemTestCase(base.TestCase): # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) - self.assertFalse(self.sys_inst._processors._is_stale) def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() @@ -374,13 +363,10 @@ class SystemTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = [eth_coll_return_value, eth_return_value] - self.assertIsNone(self.sys_inst._ethernet_interfaces) actual_macs = self.sys_inst.ethernet_interfaces.summary expected_macs = ( {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED}) self.assertEqual(expected_macs, actual_macs) - self.assertIsInstance(self.sys_inst._ethernet_interfaces, - ethernet_interface.EthernetInterfaceCollection) def test_bios(self): self.conn.get.return_value.json.reset_mock() @@ -389,7 +375,6 @@ class SystemTestCase(base.TestCase): bios_return_value = json.load(f) self.conn.get.return_value.json.side_effect = [bios_return_value] - self.assertIsNone(self.sys_inst._bios) self.assertIsInstance(self.sys_inst.bios, bios.Bios) self.assertEqual('BIOS Configuration Current Settings', self.sys_inst.bios.name) @@ -401,8 +386,6 @@ class 
SystemTestCase(base.TestCase): self.sys_inst.simple_storage def test_simple_storage(self): - # check for the underneath variable value - self.assertIsNone(self.sys_inst._simple_storage) # | GIVEN | self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' @@ -439,10 +422,6 @@ class SystemTestCase(base.TestCase): self.sys_inst.invalidate() self.sys_inst.refresh(force=False) - # | WHEN & THEN | - self.assertIsNotNone(self.sys_inst._simple_storage) - self.assertTrue(self.sys_inst._simple_storage._is_stale) - # | GIVEN | with open('sushy/tests/unit/json_samples/' 'simple_storage_collection.json') as f: @@ -450,7 +429,6 @@ class SystemTestCase(base.TestCase): # | WHEN & THEN | self.assertIsInstance(self.sys_inst.simple_storage, simple_storage.SimpleStorageCollection) - self.assertFalse(self.sys_inst._simple_storage._is_stale) def test_storage_for_missing_attr(self): self.sys_inst.json.pop('Storage') @@ -459,8 +437,6 @@ class SystemTestCase(base.TestCase): self.sys_inst.storage def test_storage(self): - # check for the underneath variable value - self.assertIsNone(self.sys_inst._storage) # | GIVEN | self.conn.get.return_value.json.reset_mock() with open('sushy/tests/unit/json_samples/' @@ -495,18 +471,12 @@ class SystemTestCase(base.TestCase): self.sys_inst.invalidate() self.sys_inst.refresh(force=False) - # | WHEN & THEN | - self.assertIsNotNone(self.sys_inst._storage) - self.assertTrue(self.sys_inst._storage._is_stale) - # | GIVEN | with open('sushy/tests/unit/json_samples/' 'storage_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | - self.assertIsInstance(self.sys_inst.storage, - storage.StorageCollection) - self.assertFalse(self.sys_inst._storage._is_stale) + self.assertIsInstance(self.sys_inst.storage, storage.StorageCollection) class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 
42ed1b1..3b14245 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -141,6 +141,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.assertIsInstance(val, TestResource) self.assertTrue(val.identity in member_ids) self.assertEqual('1.0.x', val.redfish_version) + self.assertFalse(val._is_stale) return result @@ -148,15 +149,33 @@ class ResourceCollectionBaseTestCase(base.TestCase): self._validate_get_members_result(('1', '2')) def test_get_members_on_refresh(self): - self._validate_get_members_result(('1', '2')) + all_members = self._validate_get_members_result(('1', '2')) - # Now emulating the resource invalidate and refresh action! + # Call resource invalidate self.test_resource_collection.invalidate() self.assertTrue(self.test_resource_collection._is_stale) + # Now invoke refresh action on resource. This can be viewed as + # "light refresh" which involves only the resource's fresh retrieval + # and not its nested resources (these are only marked as stale). self.test_resource_collection.refresh(force=False) - - self._validate_get_members_result(('3', '4')) + # resource itself is fresh self.assertFalse(self.test_resource_collection._is_stale) + # members are marked as stale + for m in all_members: + self.assertTrue(m._is_stale) + + self._validate_get_members_result(('1', '2')) + # members are also now freshly retrieved + for m in all_members: + self.assertFalse(m._is_stale) + + # Again invalidate and do a forced refresh on resource + self.test_resource_collection.invalidate(force_refresh=True) + # Now, even the members are also freshly retrieved. This can be viewed + # as "cascading refresh" which involves not only the resource's fresh + # retrieval but also its nested resources. 
+ for m in all_members: + self.assertFalse(m._is_stale) def test_get_members_caching(self): result = self._validate_get_members_result(('1', '2')) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index c7253b8..29b2f94 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -18,6 +18,7 @@ import json import mock from sushy import exceptions +from sushy.resources import base as resource_base from sushy.resources.system import system from sushy.tests.unit import base from sushy import utils @@ -96,3 +97,112 @@ class UtilsTestCase(base.TestCase): self.assertEqual(821, utils.max_safe([15, 300, 270, None, 821, None])) self.assertEqual(0, utils.max_safe([])) self.assertIsNone(utils.max_safe([], default=None)) + + +class NestedResource(resource_base.ResourceBase): + + def _parse_attributes(self): + pass + + +class BaseResource(resource_base.ResourceBase): + + def _parse_attributes(self): + pass + + def _do_some_crunch_work_to_get_a(self): + return 'a' + + @utils.cache_it + def get_a(self): + return self._do_some_crunch_work_to_get_a() + + def _do_some_crunch_work_to_get_b(self): + return 'b' + + @utils.cache_it + def get_b(self): + return self._do_some_crunch_work_to_get_b() + + @property + @utils.cache_it + def nested_resource(self): + return NestedResource( + self._conn, "path/to/nested_resource", + redfish_version=self.redfish_version) + + @property + @utils.cache_it + def few_nested_resources(self): + return [NestedResource(self._conn, "/nested_res1", + redfish_version=self.redfish_version), + NestedResource(self._conn, "/nested_res2", + redfish_version=self.redfish_version)] + + def _do_refresh(self, force): + utils.cache_clear(self, force) + + +class CacheTestCase(base.TestCase): + + def setUp(self): + super(CacheTestCase, self).setUp() + self.conn = mock.Mock() + self.res = BaseResource(connector=self.conn, path='/Foo', + redfish_version='1.0.2') + + def test_cache_nested_resource_retrieval(self): + nested_res 
= self.res.nested_resource + few_nested_res = self.res.few_nested_resources + + self.assertIsInstance(nested_res, NestedResource) + self.assertIs(nested_res, self.res.nested_resource) + self.assertIsInstance(few_nested_res, list) + for n_res in few_nested_res: + self.assertIsInstance(n_res, NestedResource) + self.assertIs(few_nested_res, self.res.few_nested_resources) + + self.res.invalidate() + self.res.refresh(force=False) + + self.assertIsNotNone(self.res._cache_nested_resource) + self.assertTrue(self.res._cache_nested_resource._is_stale) + self.assertIsNotNone(self.res._cache_few_nested_resources) + for n_res in self.res._cache_few_nested_resources: + self.assertTrue(n_res._is_stale) + + self.assertIsInstance(self.res.nested_resource, NestedResource) + self.assertFalse(self.res._cache_nested_resource._is_stale) + self.assertIsInstance(self.res.few_nested_resources, list) + for n_res in self.res._cache_few_nested_resources: + self.assertFalse(n_res._is_stale) + + def test_cache_non_resource_retrieval(self): + with mock.patch.object( + self.res, '_do_some_crunch_work_to_get_a', + wraps=self.res._do_some_crunch_work_to_get_a, + autospec=True) as do_work_to_get_a_spy: + result = self.res.get_a() + self.assertTrue(do_work_to_get_a_spy.called) + + do_work_to_get_a_spy.reset_mock() + # verify subsequent invocation + self.assertEqual(result, self.res.get_a()) + self.assertFalse(do_work_to_get_a_spy.called) + + def test_cache_clear_only_selected_attr(self): + self.res.nested_resource + self.res.get_a() + self.res.get_b() + + utils.cache_clear(self.res, False, only_these=['get_a']) + + # cache cleared (set to None) + self.assertIsNone(self.res._cache_get_a) + # cache retained + self.assertEqual('b', self.res._cache_get_b) + self.assertFalse(self.res._cache_nested_resource._is_stale) + + def test_cache_clear_failure(self): + self.assertRaises( + TypeError, utils.cache_clear, self.res, False, only_these=10) diff --git a/sushy/utils.py b/sushy/utils.py index 
5da076c..c7bb767 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -13,12 +13,17 @@ # License for the specific language governing permissions and limitations # under the License. +import collections import logging +import six + from sushy import exceptions LOG = logging.getLogger(__name__) +CACHE_ATTR_NAMES_VAR_NAME = '_cache_attr_names' + def revert_dictionary(dictionary): """Given a dictionary revert it's mapping @@ -97,6 +102,7 @@ def max_safe(iterable, default=0): This function is just a wrapper over builtin max() w/o ``key`` argument. The ``default`` argument specifies an object to return if the provided ``iterable`` is empty. Also it filters out the None type values. + :param iterable: an iterable :param default: 0 by default """ @@ -106,3 +112,157 @@ def max_safe(iterable, default=0): except ValueError: # TypeError is not caught here as that should be thrown. return default + + +def setdefaultattr(obj, name, default): + """Python's ``dict.setdefault`` applied on Python objects. + + If name is an attribute with obj, return its value. If not, set name + attribute with a value of default and return default. + + :param obj: a python object + :param name: name of attribute + :param default: default value to be set + """ + + try: + return getattr(obj, name) + except AttributeError: + setattr(obj, name, default) + return default + + +def cache_it(res_accessor_method): + """Utility decorator to cache the return value of the decorated method. + + This decorator is to be used with any Sushy resource class method. + This will internally create an attribute on the resource namely + ``_cache_``. This is referred to as the "caching + attribute". This attribute will eventually hold the resultant value from + the method invocation (when method gets first time called) and for every + subsequent calls to that method this cached value will get returned. It + expects the decorated method to contain its own logic of evaluation. 
+ + This also assigns a variable named ``_cache_attr_names`` on the resource. + This variable maintains a collection of all the existing + "caching attribute" names. + + To invalidate or clear the cache use :py:func:`~cache_clear`. + Usage: + + .. code-block:: python + + class SomeResource(base.ResourceBase): + ... + @cache_it + def get_summary(self): + # do some calculation and return the result + # and this result will be cached. + return result + ... + def _do_refresh(self, force): + cache_clear(self, force) + + If the returned value is a Sushy resource instance or an Iterable whose + element is of type Sushy resource it handles the case of calling the + ``refresh()`` method of that resource. This is done to avoid unnecessary + recreation of a new resource instance which got already created at the + first place in contrast to fresh retrieval of the resource json data. + Again, the ``force`` argument is deliberately set to False to do only the + "light refresh" of the resource (only the fresh retrieval of resource) + instead of doing the complete exhaustive "cascading refresh" (resource + with all its nested subresources recursively). + + .. code-block:: python + + class SomeResource(base.ResourceBase): + ... + @property + @cache_it + def nested_resource(self): + return NestedResource( + self._conn, "Path/to/NestedResource", + redfish_version=self.redfish_version) + ... + def _do_refresh(self, force): + # selective attribute clearing + cache_clear(self, force, only_these=['nested_resource']) + + Do note that this is not thread safe. So guard your code to protect it + from any kind of concurrency issues while using this decorator. + + :param res_accessor_method: the resource accessor decorated method. 
+ + """ + cache_attr_name = '_cache_' + res_accessor_method.__name__ + + @six.wraps(res_accessor_method) + def func_wrapper(res_selfie): + + cache_attr_val = getattr(res_selfie, cache_attr_name, None) + if cache_attr_val is None: + + cache_attr_val = res_accessor_method(res_selfie) + setattr(res_selfie, cache_attr_name, cache_attr_val) + + # Note(deray): Each resource instance maintains a collection of + # all the cache attribute names in a private attribute. + cache_attr_names = setdefaultattr( + res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) + cache_attr_names.add(cache_attr_name) + + from sushy.resources import base + + if isinstance(cache_attr_val, base.ResourceBase): + cache_attr_val.refresh(force=False) + elif isinstance(cache_attr_val, collections.Iterable): + for elem in cache_attr_val: + if isinstance(elem, base.ResourceBase): + elem.refresh(force=False) + + return cache_attr_val + + return func_wrapper + + +def cache_clear(res_selfie, force_refresh, only_these=None): + """Clear some or all cached values of the resource. + + If the cache variable refers to a resource instance then the + ``invalidate()`` method is called on that. Otherwise it is set to None. + Should there be a need to force refresh the resource and its sub-resources, + "cascading refresh", ``force_refresh`` is to be set to True. + + This is the complimentary method of ``cache_it`` decorator. + + :param res_selfie: the resource instance. + :param force_refresh: force_refresh argument of ``invalidate()`` method. + :param only_these: expects an Iterable of specific method names + for which the cached value/s need to be cleared only. When None, all + the cached values are cleared. 
+ """ + cache_attr_names = setdefaultattr( + res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) + if only_these is not None: + if not isinstance(only_these, collections.Iterable): + raise TypeError("'only_these' must be Iterable.") + + cache_attr_names = cache_attr_names.intersection( + '_cache_' + attr for attr in only_these) + + for cache_attr_name in cache_attr_names: + cache_attr_val = getattr(res_selfie, cache_attr_name) + + from sushy.resources import base + + if isinstance(cache_attr_val, base.ResourceBase): + cache_attr_val.invalidate(force_refresh) + elif isinstance(cache_attr_val, collections.Iterable): + for elem in cache_attr_val: + if isinstance(elem, base.ResourceBase): + elem.invalidate(force_refresh) + else: + setattr(res_selfie, cache_attr_name, None) + break + else: + setattr(res_selfie, cache_attr_name, None) -- GitLab From e886e7182e2d54e124bd655b441ee3dc3987e461 Mon Sep 17 00:00:00 2001 From: paresh-sao Date: Thu, 4 Oct 2018 06:17:32 +0000 Subject: [PATCH 099/303] Requests session keyword arguments for sushy connector Adds functionality to pass different requests library session arguments to sushy connector. Change-Id: I6bd9a0719acfb839fcf137c58bcf03254b1af5ad Story: 2003974 Task: 26916 --- ...gument_for_connector-cea5dc4e6c01b548.yaml | 5 ++ sushy/connector.py | 78 ++++++++++++------- sushy/tests/unit/test_connector.py | 47 +++++------ 3 files changed, 73 insertions(+), 57 deletions(-) create mode 100644 releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml diff --git a/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml b/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml new file mode 100644 index 0000000..9b29dfe --- /dev/null +++ b/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds functionality to pass different requests library session + arguments to sushy connector. 
diff --git a/sushy/connector.py b/sushy/connector.py index d507f68..cae99c1 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -13,7 +13,6 @@ # License for the specific language governing permissions and limitations # under the License. -import json import logging import requests @@ -55,7 +54,8 @@ class Connector(object): """Close this connector and the associated HTTP session.""" self._session.close() - def _op(self, method, path='', data=None, headers=None): + def _op(self, method, path='', data=None, headers=None, + **extra_session_req_kwargs): """Generic RESTful request handler. :param method: The HTTP method to be used, e.g: GET, POST, @@ -63,29 +63,24 @@ class Connector(object): :param path: The sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. 
:raises: ConnectionError :raises: HTTPError """ - json_data = None - if headers is None: - headers = {} - - if data is not None: - json_data = json.dumps(data) - headers['Content-Type'] = 'application/json' - url = parse.urljoin(self._url, path) # TODO(lucasagomes): We should mask the data to remove sensitive # information - LOG.debug('HTTP request: %(method)s %(url)s; ' - 'headers: %(headers)s; body: %(data)s', + LOG.debug('HTTP request: %(method)s %(url)s; headers: %(headers)s; ' + 'body: %(data)s; session arguments: %(session)s;', {'method': method, 'url': url, 'headers': headers, - 'data': json_data}) + 'data': data, 'session': extra_session_req_kwargs}) try: - response = self._session.request(method, url, - data=json_data, - headers=headers) + response = self._session.request(method, url, json=data, + headers=headers, + **extra_session_req_kwargs) except requests.ConnectionError as e: raise exceptions.ConnectionError(url=url, error=e) # If we received an AccessError, and we @@ -99,9 +94,9 @@ class Connector(object): self._auth.refresh_session() LOG.debug("Authentication refreshed successfully, " "retrying the call.") - response = self._session.request(method, url, - data=json_data, - headers=headers) + response = self._session.request(method, url, json=data, + headers=headers, + **extra_session_req_kwargs) else: raise @@ -112,65 +107,90 @@ class Connector(object): return response - def get(self, path='', data=None, headers=None): + def get(self, path='', data=None, headers=None, + **extra_session_req_kwargs): """HTTP GET method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. 
:raises: ConnectionError :raises: HTTPError """ - return self._op('GET', path, data, headers) + return self._op('GET', path, data=data, headers=headers, + **extra_session_req_kwargs) - def post(self, path='', data=None, headers=None): + def post(self, path='', data=None, headers=None, + **extra_session_req_kwargs): """HTTP POST method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('POST', path, data, headers) + return self._op('POST', path, data=data, headers=headers, + **extra_session_req_kwargs) - def patch(self, path='', data=None, headers=None): + def patch(self, path='', data=None, headers=None, + **extra_session_req_kwargs): """HTTP PATCH method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('PATCH', path, data, headers) + return self._op('PATCH', path, data=data, headers=headers, + **extra_session_req_kwargs) - def put(self, path='', data=None, headers=None): + def put(self, path='', data=None, headers=None, + **extra_session_req_kwargs): """HTTP PUT method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. 
:returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('PUT', path, data, headers) + return self._op('PUT', path, data=data, headers=headers, + **extra_session_req_kwargs) - def delete(self, path='', data=None, headers=None): + def delete(self, path='', data=None, headers=None, + **extra_session_req_kwargs): """HTTP DELETE method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('DELETE', path, data, headers) + return self._op('DELETE', path, data=data, headers=headers, + **extra_session_req_kwargs) def __enter__(self): return self diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index a519c06..0547563 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -48,35 +48,35 @@ class ConnectorMethodsTestCase(base.TestCase): self.conn.get(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_post(self, mock__op): self.conn.post(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_patch(self, mock__op): self.conn.patch(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', - 
self.data, self.headers) + data=self.data, headers=self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_put(self, mock__op): self.conn.put(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PUT', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_delete(self, mock__op): self.conn.delete(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers) def test_set_auth(self): mock_auth = mock.MagicMock() @@ -119,59 +119,52 @@ class ConnectorOpTestCase(base.TestCase): self.request.return_value.status_code = http_client.OK def test_ok_get(self): - expected_headers = self.headers.copy() - self.conn._op('GET', path='fake/path', headers=self.headers) self.request.assert_called_once_with( 'GET', 'http://foo.bar:1234/fake/path', - data=None, headers=expected_headers) + headers=self.headers, json=None) - def test_ok_post(self): - expected_headers = self.headers.copy() - expected_headers['Content-Type'] = 'application/json' + def test_ok_get_url_redirect_false(self): + self.conn._op('GET', path='fake/path', headers=self.headers, + allow_redirects=False) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234/fake/path', + headers=self.headers, json=None, allow_redirects=False) + def test_ok_post(self): self.conn._op('POST', path='fake/path', data=self.data.copy(), headers=self.headers) self.request.assert_called_once_with( 'POST', 'http://foo.bar:1234/fake/path', - data=json.dumps(self.data), headers=expected_headers) + json=self.data, headers=self.headers) def test_ok_put(self): - expected_headers = self.headers.copy() - expected_headers['Content-Type'] = 'application/json' - self.conn._op('PUT', path='fake/path', 
data=self.data.copy(), headers=self.headers) self.request.assert_called_once_with( 'PUT', 'http://foo.bar:1234/fake/path', - data=json.dumps(self.data), headers=expected_headers) + json=self.data, headers=self.headers) def test_ok_delete(self): - expected_headers = self.headers.copy() - self.conn._op('DELETE', path='fake/path', headers=self.headers.copy()) self.request.assert_called_once_with( 'DELETE', 'http://foo.bar:1234/fake/path', - data=None, headers=expected_headers) + headers=self.headers, json=None) def test_ok_post_with_session(self): self.conn._session.headers = {} self.conn._session.headers['X-Auth-Token'] = 'asdf1234' expected_headers = self.headers.copy() - expected_headers['Content-Type'] = 'application/json' - - self.conn._op('POST', path='fake/path', data=self.data, - headers=self.headers) + self.conn._op('POST', path='fake/path', headers=self.headers, + data=self.data) self.request.assert_called_once_with( 'POST', 'http://foo.bar:1234/fake/path', - data=json.dumps(self.data), headers=expected_headers) + json=self.data, headers=expected_headers) self.assertEqual(self.conn._session.headers, {'X-Auth-Token': 'asdf1234'}) def test_timed_out_session_unable_to_create_session(self): self.conn._auth.can_refresh_session.return_value = False - expected_headers = self.headers.copy() - expected_headers['Content-Type'] = 'application/json' self.conn._session = self.session self.request = self.session.request self.request.return_value.status_code = http_client.FORBIDDEN @@ -190,8 +183,6 @@ class ConnectorOpTestCase(base.TestCase): self.session = mock.Mock(spec=requests.Session) self.conn._session = self.session self.request = self.session.request - first_expected_headers = self.headers.copy() - first_expected_headers['Content-Type'] = 'application/json' first_response = mock.Mock() first_response.status_code = http_client.FORBIDDEN second_response = mock.Mock() -- GitLab From 28ee59fd79d91d9d0ab2025aa9b1c4aa4b7eed00 Mon Sep 17 00:00:00 2001 From: Debayan Ray 
Date: Thu, 25 Oct 2018 05:39:12 +0000 Subject: [PATCH 100/303] Follow-up to 27c725c to move up ``cache_clear`` Move up ``cache_clear()`` to ``ResourceBase._do_refresh()`` method. It saves on a lot of ``_do_refresh()`` overrides in subclasses. Also change to ``Sequence`` (from ``Iterable``) in the caching implementation. Follow-up of Change: I7404a15beb029cb282ac6b84bb8b8fdb97ebcd4c Change-Id: Id6c6afcd9142bed0a7f59b9f8893fb86975a65bb --- sushy/resources/base.py | 18 ++++++++---------- sushy/resources/manager/manager.py | 4 ---- .../resources/sessionservice/sessionservice.py | 10 ---------- sushy/resources/system/bios.py | 10 ---------- sushy/resources/system/ethernet_interface.py | 10 ---------- sushy/resources/system/processor.py | 10 ---------- sushy/resources/system/simple_storage.py | 4 ---- sushy/resources/system/storage/storage.py | 12 ------------ sushy/resources/system/storage/volume.py | 4 ---- sushy/resources/system/system.py | 10 ---------- sushy/tests/unit/test_utils.py | 3 --- sushy/utils.py | 12 ++++++------ 12 files changed, 14 insertions(+), 93 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index a5e5388..8bc46d3 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -316,12 +316,20 @@ class ResourceBase(object): resource specific refresh operations to be performed. This is a primitive method in the paradigm of Template design pattern. + As for the base implementation of this method the approach taken is: + On refresh, all sub-resources are marked as stale. That means + invalidate (or undefine) the exposed attributes for nested resources + for fresh evaluation in subsequent calls to those exposed attributes. + In other words greedy-refresh is not done for them, unless forced by + ``force`` argument. + :param force: should force refresh the resource and its sub-resources, if set to True. 
:raises: ResourceNotFoundError :raises: ConnectionError :raises: HTTPError """ + utils.cache_clear(self, force_refresh=force) def invalidate(self, force_refresh=False): """Mark the resource as stale, prompting refresh() before getting used. @@ -400,13 +408,3 @@ class ResourceCollectionBase(ResourceBase): :returns: A list of ``_resource_type`` objects """ return [self.get_member(id_) for id_ in self.members_identities] - - def _do_refresh(self, force): - """Do refresh related activities. - - Invalidate / Undefine the cache attributes here for fresh evaluation - in subsequent calls to `get_members()` method. Other similar activities - can also follow in future, if needed. - """ - super(ResourceCollectionBase, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index ef63655..96b8133 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -87,10 +87,6 @@ class Manager(base.ResourceBase): """ super(Manager, self).__init__(connector, identity, redfish_version) - def _do_refresh(self, force): - super(Manager, self)._do_refresh(force=force) - utils.cache_clear(self, force) - def get_supported_graphical_console_types(self): """Get the supported values for Graphical Console connection types. diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index 325902f..5c01cec 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -76,16 +76,6 @@ class SessionService(base.ResourceBase): self._conn, self._get_sessions_collection_path(), redfish_version=self.redfish_version) - def _do_refresh(self, force): - """Do custom resource specific refresh activities - - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. 
- """ - super(SessionService, self)._do_refresh(force=force) - utils.cache_clear(self, force) - def close_session(self, session_uri): """This function is for closing a session based on its id. diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index c66896c..5aa50a9 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -147,13 +147,3 @@ class Bios(base.ResourceBase): 'OldPassword': old_password, 'PasswordName': password_name}) LOG.info('BIOS password %s is being changed', self.identity) - - def _do_refresh(self, force=False): - """Do custom resource specific refresh activities - - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. - """ - super(Bios, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index e6d1554..f7d2af1 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -73,13 +73,3 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase): if eth.status.health == res_cons.HEALTH_OK: mac_dict[eth.mac_address] = eth.status.state return mac_dict - - def _do_refresh(self, force=False): - """Do custom resource specific refresh activities - - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. 
- """ - super(EthernetInterfaceCollection, self)._do_refresh(force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 9013aae..a3abf61 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -142,13 +142,3 @@ class ProcessorCollection(base.ResourceCollectionBase): """ super(ProcessorCollection, self).__init__(connector, path, redfish_version) - - def _do_refresh(self, force=False): - """Do custom resource specific refresh activities - - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. - """ - super(ProcessorCollection, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py index 1569598..3045fd9 100644 --- a/sushy/resources/system/simple_storage.py +++ b/sushy/resources/system/simple_storage.py @@ -83,7 +83,3 @@ class SimpleStorageCollection(base.ResourceCollectionBase): refreshed. """ return utils.max_safe(self.disks_sizes_bytes) - - def _do_refresh(self, force): - super(SimpleStorageCollection, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 3b339c8..762e8d6 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -95,13 +95,6 @@ class Storage(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), redfish_version=self.redfish_version) - def _do_refresh(self, force): - """Do resource specific refresh activities.""" - # Note(deray): invalidate / undefine the attributes here for fresh - # evaluation in subsequent calls to it's exposed property. 
- super(Storage, self)._do_refresh(force=force) - utils.cache_clear(self, force) - class StorageCollection(base.ResourceCollectionBase): """This class represents the collection of Storage resources""" @@ -149,8 +142,3 @@ class StorageCollection(base.ResourceCollectionBase): refreshed. """ return utils.max_safe(self.volumes_sizes_bytes) - - def _do_refresh(self, force): - """Do resource specific refresh activities""" - super(StorageCollection, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 203b0a5..7d2e20d 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -62,7 +62,3 @@ class VolumeCollection(base.ResourceCollectionBase): # NOTE(etingof): for backward compatibility max_size_bytes = max_volume_size_bytes - - def _do_refresh(self, force): - super(VolumeCollection, self)._do_refresh(force=force) - utils.cache_clear(self, force) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index b6c3967..104cf83 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -332,16 +332,6 @@ class System(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, "Storage"), redfish_version=self.redfish_version) - def _do_refresh(self, force): - """Do custom resource specific refresh activities - - On refresh, all sub-resources are marked as stale, i.e. - greedy-refresh not done for them unless forced by ``force`` - argument. 
- """ - super(System, self)._do_refresh(force=force) - utils.cache_clear(self, force) - class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 29b2f94..8be70ca 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -139,9 +139,6 @@ class BaseResource(resource_base.ResourceBase): NestedResource(self._conn, "/nested_res2", redfish_version=self.redfish_version)] - def _do_refresh(self, force): - utils.cache_clear(self, force) - class CacheTestCase(base.TestCase): diff --git a/sushy/utils.py b/sushy/utils.py index c7bb767..d9735af 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -163,7 +163,7 @@ def cache_it(res_accessor_method): def _do_refresh(self, force): cache_clear(self, force) - If the returned value is a Sushy resource instance or an Iterable whose + If the returned value is a Sushy resource instance or a sequence whose element is of type Sushy resource it handles the case of calling the ``refresh()`` method of that resource. This is done to avoid unnecessary recreation of a new resource instance which got already created at the @@ -215,7 +215,7 @@ def cache_it(res_accessor_method): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.refresh(force=False) - elif isinstance(cache_attr_val, collections.Iterable): + elif isinstance(cache_attr_val, collections.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.refresh(force=False) @@ -237,15 +237,15 @@ def cache_clear(res_selfie, force_refresh, only_these=None): :param res_selfie: the resource instance. :param force_refresh: force_refresh argument of ``invalidate()`` method. - :param only_these: expects an Iterable of specific method names + :param only_these: expects a sequence of specific method names for which the cached value/s need to be cleared only. When None, all the cached values are cleared. 
""" cache_attr_names = setdefaultattr( res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) if only_these is not None: - if not isinstance(only_these, collections.Iterable): - raise TypeError("'only_these' must be Iterable.") + if not isinstance(only_these, collections.Sequence): + raise TypeError("'only_these' must be a sequence.") cache_attr_names = cache_attr_names.intersection( '_cache_' + attr for attr in only_these) @@ -257,7 +257,7 @@ def cache_clear(res_selfie, force_refresh, only_these=None): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.invalidate(force_refresh) - elif isinstance(cache_attr_val, collections.Iterable): + elif isinstance(cache_attr_val, collections.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.invalidate(force_refresh) -- GitLab From 2412070e880e7535e78565f083b663cc79951e77 Mon Sep 17 00:00:00 2001 From: inspurericzhang Date: Thu, 1 Nov 2018 10:45:41 +0800 Subject: [PATCH 101/303] [Trivial Fix] modify spelling error of "resource" Although it is spelling mistakes, it affects reading. 
Change-Id: Ifa719f67edcbc3b37c110f09f24330a2b399c145 --- sushy/tests/unit/resources/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 3b14245..4e2e82e 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -64,7 +64,7 @@ class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" def __init__(self, connector, identity, redfish_version=None): - """Ctor of TestResouce + """Ctor of TestResource :param connector: A Connector instance :param identity: The id of the Resource -- GitLab From 878a32e07f81e4bdd64edff3ef8ce23ce82a1bbe Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Mon, 6 Aug 2018 15:49:05 +0300 Subject: [PATCH 102/303] Add support for loading resources from archive file As Redfish Message registries can reside inside archives, added support to load resource from JSON files in archive. Currently supporting only ZIP archives, support for other archive types can be added as need arises or specification is clarified. The Redfish specification does not detail which types of archives need to be supported, but gives ZIP as an example. 
Change-Id: I3609df39c68f2149c1ff1a6818af7168bbd02df0 Story: 2001791 Task: 23062 --- sushy/exceptions.py | 4 + sushy/resources/base.py | 69 +++++++++++++++++- .../tests/unit/json_samples/TestRegistry.zip | Bin 0 -> 622 bytes sushy/tests/unit/resources/test_base.py | 45 +++++++++++- 4 files changed, 115 insertions(+), 3 deletions(-) create mode 100644 sushy/tests/unit/json_samples/TestRegistry.zip diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 21531c7..e6deb37 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -57,6 +57,10 @@ class InvalidParameterValueError(SushyError): 'Valid values are: %(valid_values)s') +class ArchiveParsingError(SushyError): + message = 'Failed parsing archive "%(path)s": %(error)s' + + class HTTPError(SushyError): """Basic exception for HTTP errors""" diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 8bc46d3..e858ae9 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -16,7 +16,10 @@ import abc import collections import copy +import io +import json import logging +import zipfile import six @@ -244,13 +247,69 @@ class MappedField(Field): adapter=mapping.get) +@six.add_metaclass(abc.ABCMeta) +class AbstractJsonReader(object): + + def set_connection(self, connector, path): + """Sets mandatory connection parameters + + :param connector: A Connector instance + :param path: path of the resource + """ + self._conn = connector + self._path = path + + @abc.abstractmethod + def get_json(self): + """Based on data source get data and parse to JSON""" + + +class JsonFileReader(AbstractJsonReader): + """Gets the data from JSON file given by path""" + + def get_json(self): + """Gets JSON file from URI directly""" + return self._conn.get(path=self._path).json() + + +class JsonArchiveReader(AbstractJsonReader): + """Gets the data from JSON file in archive""" + + def __init__(self, archive_file): + """Initializes the reader + + :param archive_file: file name of JSON file in archive + """ + 
self._archive_file = archive_file + + def get_json(self): + """Gets JSON file from archive. Currently supporting ZIP only""" + + data = self._conn.get(path=self._path) + if data.headers.get('content-type') == 'application/zip': + try: + archive = zipfile.ZipFile(io.BytesIO(data.content)) + return json.loads(archive.read(self._archive_file) + .decode(encoding='utf-8')) + except (zipfile.BadZipfile, ValueError) as e: + raise exceptions.ArchiveParsingError( + path=self._path, error=e) + else: + LOG.error('Support for %(type)s not implemented', + {'type': data.headers['content-type']}) + + @six.add_metaclass(abc.ABCMeta) class ResourceBase(object): redfish_version = None """The Redfish version""" - def __init__(self, connector, path='', redfish_version=None): + def __init__(self, + connector, + path='', + redfish_version=None, + reader=JsonFileReader()): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first @@ -259,6 +318,8 @@ class ResourceBase(object): :param path: sub-URI path to the resource. :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. + :param reader: Reader to use to fetch JSON data. Defaults to + JsonFileReader """ self._conn = connector self._path = path @@ -269,6 +330,9 @@ class ResourceBase(object): # attribute values are fetched. 
self._is_stale = True + reader.set_connection(connector, path) + self._reader = reader + self.refresh() def _parse_attributes(self): @@ -299,7 +363,8 @@ class ResourceBase(object): if not self._is_stale and not force: return - self._json = self._conn.get(path=self._path).json() + self._json = self._reader.get_json() + LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, 'path': self._path, 'json': self._json}) diff --git a/sushy/tests/unit/json_samples/TestRegistry.zip b/sushy/tests/unit/json_samples/TestRegistry.zip new file mode 100644 index 0000000000000000000000000000000000000000..565c7c88648262e4bbc7bd9738514bf72d74accc GIT binary patch literal 622 zcmWIWW@Zs#-~hrowQRl&P_UbYfq|DnfgvQdxJ1uL&pk_tK5GT~Xw zgf>^jm5F+yFI?O;C9J!+!%l`AZaWpUZ*}ae7cEot*cvb2G&lGm6t+g4$2`;RTzW`C zrbl$b-0)AYEI66CWkmcfU#<06s*6L4bT5~q0e%_J7SJx_&H{E@@ z?)a*|DTd0XGwy3md;9$A1Hnv5^{aKY)@N>-<&^C`r~GNgq0MZ+H{H1PZ--lRoy&%h zBF|GxF1$P7bJCyb)~!5~%6$8ZhL4h~=StbQv~u5<`^Hmaf1v1&T-O@`SqAyuLf*g+ z4@%7Uw*ZtoAW?i4)MDG8pq4(p;QcfI=+qCx4-ps!Y0p9E!xxK&EEn;L~07Y?tHzSh> kGa~wt Date: Sat, 3 Nov 2018 11:05:51 +0530 Subject: [PATCH 103/303] [Trivial Fix] modify spelling error of "committed" Although it is a spelling mistake, it affects reading and overall code quality. 
Change-Id: Ifebe9ecba26371c68d0502449bce7cc7fd2737cd --- sushy/resources/system/bios.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 5aa50a9..4190b32 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -69,7 +69,7 @@ class Bios(base.ResourceBase): def pending_attributes(self): """Pending BIOS attributes - BIOS attributes that have been comitted to the system, + BIOS attributes that have been committed to the system, but for them to take effect system restart is necessary """ return self._pending_settings_resource.attributes -- GitLab From 8b641daee71ee6b85f709e23ab701b447dc8e226 Mon Sep 17 00:00:00 2001 From: "ya.wang" Date: Fri, 9 Nov 2018 18:00:15 +0800 Subject: [PATCH 104/303] Change adapter to 'int_or_none' for processor properties Change-Id: I009b2571534f38820eec01b5a0e6d886c7b1f4df --- sushy/resources/system/processor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index a3abf61..6138a0f 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -73,7 +73,7 @@ class Processor(base.ResourceBase): model = base.Field('Model') """The product model number of this device""" - max_speed_mhz = base.Field('MaxSpeedMHz', adapter=int) + max_speed_mhz = base.Field('MaxSpeedMHz', adapter=utils.int_or_none) """The maximum clock speed of the processor in MHz.""" processor_id = ProcessorIdField('ProcessorId') @@ -82,10 +82,10 @@ class Processor(base.ResourceBase): status = common.StatusField('Status') """The processor status""" - total_cores = base.Field('TotalCores', adapter=int) + total_cores = base.Field('TotalCores', adapter=utils.int_or_none) """The total number of cores contained in this processor""" - total_threads = base.Field('TotalThreads', adapter=int) + total_threads = base.Field('TotalThreads', 
adapter=utils.int_or_none) """The total number of execution threads supported by this processor""" def __init__(self, connector, identity, redfish_version=None): -- GitLab From 5a68be079624c1fd6a69b2bda3a783aa0b852289 Mon Sep 17 00:00:00 2001 From: dnuka Date: Mon, 12 Nov 2018 10:04:50 +0530 Subject: [PATCH 105/303] [Trivial Fix] modify spelling errors of fulfill, for, containing Although these are spelling mistakes, they affect reading and overall code quality. Change-Id: I038271bb972c39afe5704416afa596666b4a6e55 --- README.rst | 2 +- sushy/resources/manager/virtual_media.py | 2 +- sushy/resources/system/system.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index f8414a8..c893b1d 100644 --- a/README.rst +++ b/README.rst @@ -10,7 +10,7 @@ by issuing just enough requests to it (BMCs are very flaky). Therefore, the scope of the library has been limited to what is supported by the `OpenStack Ironic `_ project. As the project grows and more features from `Redfish`_ are -needed we can expand Sushy to fullfil those requirements. +needed we can expand Sushy to fulfill those requirements. 
* Free software: Apache license * Documentation: https://docs.openstack.org/sushy/latest/ diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index 6c05d32..7c21a87 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -63,7 +63,7 @@ class VirtualMedia(base.ResourceBase): """ _actions = ActionsField('Actions') - """Insert/eject action fot virtual media""" + """Insert/eject action for virtual media""" def _get_insert_media_element(self): insert_media = self._actions.insert_media diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 104cf83..6c6dfdb 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -74,7 +74,7 @@ class System(base.ResourceBase): """The system BIOS version""" boot = BootField('Boot', required=True) - """A dictionary containg the current boot device, frequency and mode""" + """A dictionary containing the current boot device, frequency and mode""" description = base.Field('Description') """The system description""" -- GitLab From d94866f156c72c3de4062d4dff96609425f954f3 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 26 Oct 2018 16:46:43 +0200 Subject: [PATCH 106/303] Delete HTTP tokens on `Sushy` shutdown Tries to terminate authenticated Redfish session at BMC Session Service on the event of `Sushy` object deallocation. This should reduce the chance of authenticated sessions pool exhaustion at some BMCs. 
Story: #2003813 Change-Id: I31a0955e72422e5ca2519aa2aea7d376f4e31fe9 --- ...-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml | 6 ++++++ sushy/main.py | 11 +++++++++++ 2 files changed, 17 insertions(+) create mode 100644 releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml diff --git a/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml b/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml new file mode 100644 index 0000000..df9fbfd --- /dev/null +++ b/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Tries to terminate authenticated Redfish session at BMC Session Service on + the event of ``Sushy`` object deallocation. This should reduce the chance + of authenticated sessions pool exhaustion at some BMCs. diff --git a/sushy/main.py b/sushy/main.py index 5f9e9fa..364c147 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -110,10 +110,21 @@ class Sushy(base.ResourceBase): super(Sushy, self).__init__( connector or sushy_connector.Connector(base_url, verify=verify), path=self._root_prefix) + self._base_url = base_url self._auth = auth self._auth.set_context(self, self._conn) self._auth.authenticate() + def __del__(self): + if self._auth: + try: + self._auth.close() + + except Exception as ex: + LOG.warning('Ignoring error while closing Redfish session ' + 'with %s: %s', self._base_url, ex) + self._auth = None + def _parse_attributes(self): super(Sushy, self)._parse_attributes() self.redfish_version = self.json.get('RedfishVersion') -- GitLab From 3c7a281422f21027ea956e66791e06fbd369b76c Mon Sep 17 00:00:00 2001 From: dnuka Date: Thu, 8 Nov 2018 17:29:56 +0530 Subject: [PATCH 107/303] Unify sushy models by Redfish schema bundle In the course of sushy library development, we seem to model resources using the most current schemas at the time. 
Sometimes DMTF releases schema bundles that ties all the most recent schemas together thus ensuring their interoperability. These changes introduce and update currently implemented sushy models to comply with the most recent schema bundle[1]. [1]https://www.dmtf.org/documents/redfish-spmf/redfish-schema-bundle-20181 Story: #2004050 Task: #27056 Change-Id: I479bc18f79c6c51644115671ef68a879a94d102e --- .../update_sushy_models-9b8ea0350eb4d4d0.yaml | 7 +++ sushy/resources/manager/manager.py | 7 +++ .../registry/message_registry_file.py | 1 + sushy/resources/sessionservice/session.py | 3 ++ .../sessionservice/sessionservice.py | 3 ++ sushy/resources/settings.py | 43 ++++++++++++++++++- sushy/resources/system/constants.py | 28 ++++++++++-- sushy/resources/system/ethernet_interface.py | 2 +- sushy/resources/system/mappings.py | 28 ++++++++++++ sushy/resources/system/processor.py | 32 ++++++++++++-- sushy/resources/system/storage/drive.py | 2 +- sushy/resources/system/system.py | 3 ++ sushy/tests/unit/json_samples/bios.json | 2 +- .../unit/json_samples/bios_settings.json | 2 +- sushy/tests/unit/json_samples/drive.json | 4 +- sushy/tests/unit/json_samples/drive2.json | 4 +- sushy/tests/unit/json_samples/drive3.json | 4 +- .../json_samples/ethernet_interfaces.json | 2 +- sushy/tests/unit/json_samples/manager.json | 3 +- .../unit/json_samples/message_registry.json | 6 +-- .../json_samples/message_registry_file.json | 2 +- sushy/tests/unit/json_samples/processor.json | 2 +- sushy/tests/unit/json_samples/processor2.json | 2 +- sushy/tests/unit/json_samples/root.json | 2 +- sushy/tests/unit/json_samples/session.json | 2 +- .../unit/json_samples/session_service.json | 2 +- sushy/tests/unit/json_samples/settings.json | 17 +++++++- sushy/tests/unit/json_samples/storage.json | 4 +- sushy/tests/unit/json_samples/system.json | 2 +- .../unit/json_samples/virtual_media.json | 2 +- .../unit/resources/manager/test_manager.py | 1 + .../registry/test_message_registry.py | 4 +- 
.../unit/resources/system/test_processor.py | 8 +++- sushy/tests/unit/resources/test_settings.py | 18 ++++++++ 34 files changed, 216 insertions(+), 38 deletions(-) create mode 100644 releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml diff --git a/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml b/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml new file mode 100644 index 0000000..6ca3eb3 --- /dev/null +++ b/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml @@ -0,0 +1,7 @@ +features: + - | + Unifies sushy models by Redfish schema bundle. + + These changes introduce and update currently implemented + sushy models to comply with the most recent schema bundle[1]. + [1]https://www.dmtf.org/documents/redfish-spmf/redfish-schema-bundle-20181 diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 96b8133..b4f2fd5 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -10,6 +10,9 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Manager.v1_4_0.json + import logging from sushy import exceptions @@ -38,6 +41,10 @@ class RemoteAccessField(base.CompositeField): class Manager(base.ResourceBase): + auto_dst_enabled = base.Field('AutoDSTEnabled') + """Indicates whether the manager is configured for automatic DST + adjustment""" + firmware_version = base.Field('FirmwareVersion') """The manager firmware version""" diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 460e3d2..37e545b 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -10,6 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. 
# https://redfish.dmtf.org/schemas/v1/MessageRegistryFileCollection.json # https://redfish.dmtf.org/schemas/v1/MessageRegistryFile.v1_1_0.json diff --git a/sushy/resources/sessionservice/session.py b/sushy/resources/sessionservice/session.py index 753cbde..fc82e66 100644 --- a/sushy/resources/sessionservice/session.py +++ b/sushy/resources/sessionservice/session.py @@ -13,6 +13,9 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Session.v1_1_0.json + import logging from sushy.resources import base diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index 5c01cec..5d2affd 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -13,6 +13,9 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/SessionService.v1_1_3.json + import logging from sushy import exceptions diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 5f68365..56b9b96 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -11,7 +11,7 @@ # under the License. # This is referred from Redfish standard schema. 
-# http://redfish.dmtf.org/schemas/v1/Settings.v1_0_0.json +# https://redfish.dmtf.org/schemas/Settings.v1_2_0.json from sushy.resources import base @@ -48,6 +48,38 @@ class MessageListField(base.ListField): """ +class MaintenanceWindowField(base.CompositeField): + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', + required=True) + """The expiry time of maintenance window in seconds""" + + maintenance_window_start_time = base.Field( + 'MaintenanceWindowStartTime', + required=True) + """The start time of a maintenance window""" + + +class OperationApplyTimeSupportField(base.CompositeField): + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds') + """The expiry time of maintenance window in seconds""" + + maintenance_window_resource = base.Field( + 'MaintenanceWindowResource') + """The location of the maintenance window settings""" + + maintenance_window_start_time = base.Field( + 'MaintenanceWindowStartTime') + """The start time of a maintenance window""" + + supported_values = base.Field('SupportedValues', required=True) + """The client is allowed request when performing a create, delete, or + action operation""" + + class SettingsField(base.CompositeField): """The settings of a resource @@ -76,10 +108,19 @@ class SettingsField(base.CompositeField): to change this resource """ + _maintenance_window = MaintenanceWindowField('MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" + messages = MessageListField("Messages") """Represents the results of the last time the values of the Settings resource were applied to the server""" + _operation_apply_time_support = OperationApplyTimeSupportField( + 'OperationApplyTimeSupport') + """Indicates if a client is allowed to request for a specific apply + time of a create, delete, or action operation of a given resource""" + def commit(self, connector, value): 
"""Commits new settings values diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 06b07b0..c5743ac 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -13,7 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. -# Values comes from the Redfish System json-schema 1.0.0: +# Values come from the Redfish System json-schema 1.0.0: # http://redfish.dmtf.org/schemas/v1/ComputerSystem.v1_0_0.json#/definitions/ComputerSystem # noqa # Reset action constants @@ -112,8 +112,8 @@ BOOT_SOURCE_ENABLED_CONTINUOUS = 'continuous' BOOT_SOURCE_ENABLED_DISABLED = 'disabled' # Processor related constants -# Values comes from the Redfish Processor json-schema 1.0.0: -# http://redfish.dmtf.org/schemas/v1/Processor.v1_0_0.json +# Values comes from the Redfish Processor json-schema 1.3.0: +# http://redfish.dmtf.org/schemas/v1/Processor.v1_3_0.json # Processor Architecture constants @@ -122,3 +122,25 @@ PROCESSOR_ARCH_IA_64 = 'Intel Itanium' PROCESSOR_ARCH_ARM = 'ARM' PROCESSOR_ARCH_MIPS = 'MIPS' PROCESSOR_ARCH_OEM = 'OEM-defined' + +# Processor type constants + +PROCESSOR_TYPE_ACCELERATOR = 'An Accelerator' +PROCESSOR_TYPE_CPU = 'A Central Processing Unit' +PROCESSOR_TYPE_CORE = 'A Core in a Processor' +PROCESSOR_TYPE_DSP = 'A Digital Signal Processor' +PROCESSOR_TYPE_FPGA = 'A Field Programmable Gate Array' +PROCESSOR_TYPE_GPU = 'A Graphics Processing Unit' +PROCESSOR_TYPE_OEM = 'An OEM-defined Processing Unit' +PROCESSOR_TYPE_THREAD = 'A Thread in a Processor' + +# Processor InstructionSet constants + +PROCESSOR_INSTRUCTIONSET_ARM_A32 = 'ARM 32-bit' +PROCESSOR_INSTRUCTIONSET_ARM_A64 = 'ARM 64-bit' +PROCESSOR_INSTRUCTIONSET_IA_64 = 'Intel IA-64' +PROCESSOR_INSTRUCTIONSET_MIPS32 = 'MIPS 32-bit' +PROCESSOR_INSTRUCTIONSET_MIPS64 = 'MIPS 64-bit' +PROCESSOR_INSTRUCTIONSET_OEM = 'OEM-defined' +PROCESSOR_INSTRUCTIONSET_x86 = 'x86 32-bit' 
+PROCESSOR_INSTRUCTIONSET_x86_64 = 'x86 64-bit' diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py index f7d2af1..b6eccec 100644 --- a/sushy/resources/system/ethernet_interface.py +++ b/sushy/resources/system/ethernet_interface.py @@ -11,7 +11,7 @@ # under the License. # This is referred from Redfish standard schema. -# http://redfish.dmtf.org/schemas/EthernetInterface.v1_3_0.json +# https://redfish.dmtf.org/schemas/EthernetInterface.v1_4_0.json import logging diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index c9a3244..649fea6 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -89,3 +89,31 @@ PROCESSOR_ARCH_VALUE_MAP = { PROCESSOR_ARCH_VALUE_MAP_REV = ( utils.revert_dictionary(PROCESSOR_ARCH_VALUE_MAP)) + +PROCESSOR_TYPE_VALUE_MAP = { + 'Accelerator': sys_cons.PROCESSOR_TYPE_ACCELERATOR, + 'CPU': sys_cons.PROCESSOR_TYPE_CPU, + 'Core': sys_cons.PROCESSOR_TYPE_CORE, + 'DSP': sys_cons.PROCESSOR_TYPE_DSP, + 'FPGA': sys_cons.PROCESSOR_TYPE_FPGA, + 'GPU': sys_cons.PROCESSOR_TYPE_GPU, + 'OEM': sys_cons.PROCESSOR_TYPE_OEM, + 'Thread': sys_cons.PROCESSOR_TYPE_THREAD +} + +PROCESSOR_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(PROCESSOR_TYPE_VALUE_MAP)) + +PROCESSOR_INSTRUCTIONSET_VALUE_MAP = { + 'ARM-A32': sys_cons.PROCESSOR_INSTRUCTIONSET_ARM_A32, + 'ARM-A64': sys_cons.PROCESSOR_INSTRUCTIONSET_ARM_A64, + 'IA-64': sys_cons.PROCESSOR_INSTRUCTIONSET_IA_64, + 'MIPS32': sys_cons.PROCESSOR_INSTRUCTIONSET_MIPS32, + 'MIPS64': sys_cons.PROCESSOR_INSTRUCTIONSET_MIPS64, + 'OEM': sys_cons.PROCESSOR_INSTRUCTIONSET_OEM, + 'x86': sys_cons.PROCESSOR_INSTRUCTIONSET_x86, + 'x86-64': sys_cons.PROCESSOR_INSTRUCTIONSET_x86_64 +} + +PROCESSOR_INSTRUCTIONSET_VALUE_MAP_REV = ( + utils.revert_dictionary(PROCESSOR_INSTRUCTIONSET_VALUE_MAP)) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 6138a0f..a8ea385 100644 --- 
a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -12,9 +12,13 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Processor.v1_3_0.json + import collections import logging +from sushy import exceptions from sushy.resources import base from sushy.resources import common from sushy.resources.system import mappings as sys_maps @@ -55,16 +59,16 @@ class Processor(base.ResourceBase): socket = base.Field('Socket') """The socket or location of the processor""" - # TODO(deray): Create mappings for the processor_type - processor_type = base.Field('ProcessorType') + processor_type = base.MappedField( + 'ProcessorType', sys_maps.PROCESSOR_TYPE_VALUE_MAP) """The type of processor""" processor_architecture = base.MappedField( 'ProcessorArchitecture', sys_maps.PROCESSOR_ARCH_VALUE_MAP) """The architecture of the processor""" - # TODO(deray): Create mappings for the instruction_set - instruction_set = base.Field('InstructionSet') + instruction_set = base.MappedField( + 'InstructionSet', sys_maps.PROCESSOR_INSTRUCTIONSET_VALUE_MAP) """The instruction set of the processor""" manufacturer = base.Field('Manufacturer') @@ -98,6 +102,26 @@ class Processor(base.ResourceBase): """ super(Processor, self).__init__(connector, identity, redfish_version) + def _get_processor_collection_path(self): + """Helper function to find the ProcessorCollection path""" + pro_col = self.json.get('ProcessorCollection') + if not pro_col: + raise exceptions.MissingAttributeError( + attribute='ProcessorCollection', resource=self._path) + return pro_col.get('@odata.id') + + @property + @utils.cache_it + def sub_processors(self): + """A reference to + + the collection of Sub-Processors associated with + this system, such as cores or threads that are part of a processor. 
+ """ + return ProcessorCollection( + self.conn, self._get_processor_collection_path, + redfish_version=self.redfish_version) + class ProcessorCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py index a6d0f92..e45ca9e 100644 --- a/sushy/resources/system/storage/drive.py +++ b/sushy/resources/system/storage/drive.py @@ -11,7 +11,7 @@ # under the License. # This is referred from Redfish standard schema. -# http://redfish.dmtf.org/schemas/v1/Drive.v1_3_0.json +# http://redfish.dmtf.org/schemas/v1/Drive.v1_4_0.json import logging diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 6c6dfdb..693264e 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -13,6 +13,9 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/ComputerSystem.v1_5_0.json + import logging from sushy import exceptions diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json index 7625af9..1d947b8 100644 --- a/sushy/tests/unit/json_samples/bios.json +++ b/sushy/tests/unit/json_samples/bios.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Bios.v1_0_0.Bios", + "@odata.type": "#Bios.v1_0_3.Bios", "Id": "BIOS", "Name": "BIOS Configuration Current Settings", "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", diff --git a/sushy/tests/unit/json_samples/bios_settings.json b/sushy/tests/unit/json_samples/bios_settings.json index b7c7772..9b59836 100644 --- a/sushy/tests/unit/json_samples/bios_settings.json +++ b/sushy/tests/unit/json_samples/bios_settings.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Bios.v1_0_0.Bios", + "@odata.type": "#Bios.v1_0_3.Bios", "Id": "Settings", "Name": "BIOS Configuration Pending Settings", "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", diff --git 
a/sushy/tests/unit/json_samples/drive.json b/sushy/tests/unit/json_samples/drive.json index a3f1184..9488041 100644 --- a/sushy/tests/unit/json_samples/drive.json +++ b/sushy/tests/unit/json_samples/drive.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Drive.v1_2_0.Drive", + "@odata.type": "#Drive.v1_4_0.Drive", "Id": "32ADF365C6C1B7BD", "Name": "Drive Sample", "IndicatorLED": "Lit", @@ -43,4 +43,4 @@ "@odata.context": "/redfish/v1/$metadata#Drive.Drive", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD", "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." -} \ No newline at end of file +} diff --git a/sushy/tests/unit/json_samples/drive2.json b/sushy/tests/unit/json_samples/drive2.json index ab05b6f..4f6fb8a 100644 --- a/sushy/tests/unit/json_samples/drive2.json +++ b/sushy/tests/unit/json_samples/drive2.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Drive.v1_2_0.Drive", + "@odata.type": "#Drive.v1_4_0.Drive", "Id": "35D38F11ACEF7BD3", "Name": "Drive Sample", "IndicatorLED": "Lit", @@ -48,4 +48,4 @@ "@odata.context": "/redfish/v1/$metadata#Drive.Drive", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3", "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
-} \ No newline at end of file +} diff --git a/sushy/tests/unit/json_samples/drive3.json b/sushy/tests/unit/json_samples/drive3.json index 9a4faf9..25c0cb3 100644 --- a/sushy/tests/unit/json_samples/drive3.json +++ b/sushy/tests/unit/json_samples/drive3.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Drive.v1_2_0.Drive", + "@odata.type": "#Drive.v1_4_0.Drive", "Id": "3D58ECBC375FD9F2", "Name": "Drive Sample", "IndicatorLED": "Lit", @@ -51,4 +51,4 @@ "@odata.context": "/redfish/v1/$metadata#Drive.Drive", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2", "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." -} \ No newline at end of file +} diff --git a/sushy/tests/unit/json_samples/ethernet_interfaces.json b/sushy/tests/unit/json_samples/ethernet_interfaces.json index 88f9417..d3015fb 100644 --- a/sushy/tests/unit/json_samples/ethernet_interfaces.json +++ b/sushy/tests/unit/json_samples/ethernet_interfaces.json @@ -1,5 +1,5 @@ { - "@odata.type": "#EthernetInterface.v1_0_2.EthernetInterface", + "@odata.type": "#EthernetInterface.v1_4_0.EthernetInterface", "Id": "1", "Name": "Ethernet Interface", "Description": "System NIC 1", diff --git a/sushy/tests/unit/json_samples/manager.json b/sushy/tests/unit/json_samples/manager.json index e79e386..f22ec0b 100644 --- a/sushy/tests/unit/json_samples/manager.json +++ b/sushy/tests/unit/json_samples/manager.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Manager.v1_1_0.Manager", + "@odata.type": "#Manager.v1_4_0.Manager", "Id": "BMC", "Name": "Manager", "ManagerType": "BMC", @@ -9,6 +9,7 @@ "Model": "Joo Janta 200", "DateTime": "2015-03-13T04:14:33+06:00", "DateTimeLocalOffset": "+06:00", + "AutoDSTEnabled": false, "Status": { "State": "Enabled", "Health": "OK" diff --git a/sushy/tests/unit/json_samples/message_registry.json b/sushy/tests/unit/json_samples/message_registry.json index 
8b612a2..eb61b68 100644 --- a/sushy/tests/unit/json_samples/message_registry.json +++ b/sushy/tests/unit/json_samples/message_registry.json @@ -1,11 +1,11 @@ { - "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", - "Id": "Test.1.0.0", + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + "Id": "Test.1.1.1", "Name": "Test Message Registry", "Language": "en", "Description": "This registry defines messages for sushy testing", "RegistryPrefix": "Test", - "RegistryVersion": "1.0.0", + "RegistryVersion": "1.1.1", "OwningEntity": "sushy", "Messages": { "Success": { diff --git a/sushy/tests/unit/json_samples/message_registry_file.json b/sushy/tests/unit/json_samples/message_registry_file.json index 5242dc6..714105e 100644 --- a/sushy/tests/unit/json_samples/message_registry_file.json +++ b/sushy/tests/unit/json_samples/message_registry_file.json @@ -1,5 +1,5 @@ { - "@odata.type": "#MessageRegistryFile.v1_1_0.MessageRegistryFile", + "@odata.type": "#MessageRegistryFile.v1_1_1.MessageRegistryFile", "Id": "Test", "Name": "Test Message Registry File", "Description": "Message Registry file for testing", diff --git a/sushy/tests/unit/json_samples/processor.json b/sushy/tests/unit/json_samples/processor.json index 7b6d47f..d6e03c7 100644 --- a/sushy/tests/unit/json_samples/processor.json +++ b/sushy/tests/unit/json_samples/processor.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Processor.v1_0_2.Processor", + "@odata.type": "#Processor.v1_3_0.Processor", "Id": "CPU1", "Socket": "CPU 1", "ProcessorType": "CPU", diff --git a/sushy/tests/unit/json_samples/processor2.json b/sushy/tests/unit/json_samples/processor2.json index a050851..e1a2e76 100644 --- a/sushy/tests/unit/json_samples/processor2.json +++ b/sushy/tests/unit/json_samples/processor2.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Processor.v1_0_2.Processor", + "@odata.type": "#Processor.v1_3_0.Processor", "Id": "CPU2", "Socket": "CPU 2", "ProcessorType": "CPU", diff --git a/sushy/tests/unit/json_samples/root.json 
b/sushy/tests/unit/json_samples/root.json index 66f6d55..7bc7387 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -1,5 +1,5 @@ { - "@odata.type": "#ServiceRoot.v1_0_2.ServiceRoot", + "@odata.type": "#ServiceRoot.v1_3_1.ServiceRoot", "Id": "RootService", "Name": "Root Service", "RedfishVersion": "1.0.2", diff --git a/sushy/tests/unit/json_samples/session.json b/sushy/tests/unit/json_samples/session.json index 61508b7..8179602 100644 --- a/sushy/tests/unit/json_samples/session.json +++ b/sushy/tests/unit/json_samples/session.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Session.v1_0_2.Session", + "@odata.type": "#Session.v1_1_0.Session", "Id": "1234567890ABCDEF", "Name": "User Session", "Description": "Manager User Session", diff --git a/sushy/tests/unit/json_samples/session_service.json b/sushy/tests/unit/json_samples/session_service.json index ab28afa..2e73020 100644 --- a/sushy/tests/unit/json_samples/session_service.json +++ b/sushy/tests/unit/json_samples/session_service.json @@ -1,5 +1,5 @@ { - "@odata.type": "#SessionService.v1_0_2.SessionService", + "@odata.type": "#SessionService.v1_1_3.SessionService", "Id": "SessionService", "Name": "Session Service", "Description": "Session Service", diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index afb86f0..671c7f0 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -1,7 +1,22 @@ { "@Redfish.Settings": { - "@odata.type": "#Settings.v1_0_0.Settings", + "@odata.type": "#Settings.v1_2_0.Settings", "ETag": "9234ac83b9700123cc32", + "MaintenanceWindow": { + "MaintenanceWindowDurationInSeconds": 1, + "MaintenanceWindowStartTime": "2016-03-07T14:44.30-05:05" + }, + "OperationApplyTimeSupport": { + "MaintenanceWindowDurationInSeconds": 1, + "MaintenanceWindowResource": "", + "MaintenanceWindowStartTime": "2016-03-07T14:44.30-05:10", + "SupportedValues": [ + 
"Immediate", + "OnReset", + "AtMaintenanceWindowStart", + "InMaintenanceWindowOnReset" + ] + }, "Messages": [{ "MessageId": "Base.1.0.SettingsFailed", "Message": "Settings update failed due to invalid value", diff --git a/sushy/tests/unit/json_samples/storage.json b/sushy/tests/unit/json_samples/storage.json index e272cce..b9cbe91 100644 --- a/sushy/tests/unit/json_samples/storage.json +++ b/sushy/tests/unit/json_samples/storage.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Storage.v1_3_0.Storage", + "@odata.type": "#Storage.v1_4_0.Storage", "Id": "1", "Name": "Local Storage Controller", "Description": "Integrated RAID Controller", @@ -71,4 +71,4 @@ "@odata.context": "/redfish/v1/$metadata#Storage.Storage", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1", "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." -} \ No newline at end of file +} diff --git a/sushy/tests/unit/json_samples/system.json b/sushy/tests/unit/json_samples/system.json index c331d02..a455c03 100644 --- a/sushy/tests/unit/json_samples/system.json +++ b/sushy/tests/unit/json_samples/system.json @@ -1,5 +1,5 @@ { - "@odata.type": "#ComputerSystem.v1_1_0.ComputerSystem", + "@odata.type": "#ComputerSystem.v1_5_0.ComputerSystem", "Id": "437XR1138R2", "Name": "WebFrontEnd483", "SystemType": "Physical", diff --git a/sushy/tests/unit/json_samples/virtual_media.json b/sushy/tests/unit/json_samples/virtual_media.json index d4e7e55..61af2a5 100644 --- a/sushy/tests/unit/json_samples/virtual_media.json +++ b/sushy/tests/unit/json_samples/virtual_media.json @@ -1,5 +1,5 @@ { - "@odata.type": "#VirtualMedia.v1_1_0.VirtualMedia", + "@odata.type": "#VirtualMedia.v1_2_0.VirtualMedia", "Id": "Floppy1", "Name": "Virtual Removable Media", "MediaTypes": [ diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 
5c98ef6..108d749 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -38,6 +38,7 @@ class ManagerTestCase(base.TestCase): # | THEN | self.assertEqual('1.0.2', self.manager.redfish_version) self.assertEqual('1.00', self.manager.firmware_version) + self.assertFalse(self.manager.auto_dst_enabled) self.assertEqual(True, self.manager.graphical_console.service_enabled) self.assertEqual( 2, self.manager.graphical_console.max_concurrent_sessions) diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 8669d44..0b3d37b 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -36,13 +36,13 @@ class MessageRegistryTestCase(base.TestCase): def test__parse_attributes(self): self.registry._parse_attributes() - self.assertEqual('Test.1.0.0', self.registry.identity) + self.assertEqual('Test.1.1.1', self.registry.identity) self.assertEqual('Test Message Registry', self.registry.name) self.assertEqual('en', self.registry.language) self.assertEqual('This registry defines messages for sushy testing', self.registry.description) self.assertEqual('Test', self.registry.registry_prefix) - self.assertEqual('1.0.0', self.registry.registry_version) + self.assertEqual('1.1.1', self.registry.registry_version) self.assertEqual('sushy', self.registry.owning_entity) self.assertEqual(3, len(self.registry.messages)) self.assertEqual('Everything OK', diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index 1d933d2..ca3dc48 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -39,10 +39,14 @@ class ProcessorTestCase(base.TestCase): self.assertEqual('1.0.2', self.sys_processor.redfish_version) self.assertEqual('CPU1', 
self.sys_processor.identity) self.assertEqual('CPU 1', self.sys_processor.socket) - self.assertEqual('CPU', self.sys_processor.processor_type) + self.assertEqual( + sushy.PROCESSOR_TYPE_CPU, + self.sys_processor.processor_type) self.assertEqual(sushy.PROCESSOR_ARCH_x86, self.sys_processor.processor_architecture) - self.assertEqual('x86-64', self.sys_processor.instruction_set) + self.assertEqual( + sushy.PROCESSOR_INSTRUCTIONSET_x86_64, + self.sys_processor.instruction_set) self.assertEqual('Intel(R) Corporation', self.sys_processor.manufacturer) self.assertEqual('Multi-Core Intel(R) Xeon(R) processor 7xxx Series', diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 287cf26..4eaf4fe 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -53,6 +53,24 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0]._related_properties[0]) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', instance._settings_object_idref.resource_uri) + self.assertEqual( + 1, + instance. + _maintenance_window.maintenance_window_duration_in_seconds) + self.assertEqual( + '2016-03-07T14:44.30-05:05', + instance._maintenance_window.maintenance_window_start_time) + self.assertEqual( + 1, + instance._operation_apply_time_support. + maintenance_window_duration_in_seconds) + self.assertEqual( + '2016-03-07T14:44.30-05:10', + instance._operation_apply_time_support. 
+ maintenance_window_start_time) + self.assertIn( + 'Immediate', + instance._operation_apply_time_support.supported_values) def test_commit(self): conn = mock.Mock() -- GitLab From 67406a28a251379f45bb192800036024118a774b Mon Sep 17 00:00:00 2001 From: chengebj5238 Date: Sat, 24 Nov 2018 16:09:02 +0800 Subject: [PATCH 108/303] remove useless whitespces Change-Id: Icbf2e386e38debaaca48f0c3d05f5c4e8306066e --- doc/source/contributor/index.rst | 2 +- doc/source/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst index bb0c76e..211620f 100644 --- a/doc/source/contributor/index.rst +++ b/doc/source/contributor/index.rst @@ -45,7 +45,7 @@ After the download, extract the files somewhere in the file-system:: unzip DSP2043_1.0.0.zip -d -Now run ``sushy-static`` pointing to those files. For example to serve +Now run ``sushy-static`` pointing to those files. For example to serve the ``DSP2043-server`` mockup files, run:: sushy-static --mockup-files /DSP2043-server diff --git a/doc/source/index.rst b/doc/source/index.rst index 86810b0..6c23168 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,5 +1,5 @@ .. sushy documentation master file, created by - sphinx-quickstart on Tue Jul 9 22:26:36 2013. + sphinx-quickstart on Tue Jul 9 22:26:36 2013. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -- GitLab From 9a0b77343877e2a6ce60c1c700480b5bf817d22b Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Mon, 29 Oct 2018 13:42:24 +0200 Subject: [PATCH 109/303] Cleanup JsonDataReader name Changed name of JsonFileReader to JsonDataReader to better indicate that it reads from HTTP responses and not local filesystem files. 
Followup to I3609df39c68f2149c1ff1a6818af7168bbd02df0 Change-Id: Ib591f5c2fb522ca81e8f9d93fcb3f653c9586e2f --- sushy/resources/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index e858ae9..f84b442 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -264,8 +264,8 @@ class AbstractJsonReader(object): """Based on data source get data and parse to JSON""" -class JsonFileReader(AbstractJsonReader): - """Gets the data from JSON file given by path""" +class JsonDataReader(AbstractJsonReader): + """Gets the data from HTTP response given by path""" def get_json(self): """Gets JSON file from URI directly""" @@ -309,7 +309,7 @@ class ResourceBase(object): connector, path='', redfish_version=None, - reader=JsonFileReader()): + reader=JsonDataReader()): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first @@ -319,7 +319,7 @@ class ResourceBase(object): :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. :param reader: Reader to use to fetch JSON data. Defaults to - JsonFileReader + JsonDataReader """ self._conn = connector self._path = path -- GitLab From c5d13e4385abed40c7bb41c84419c8d53e708449 Mon Sep 17 00:00:00 2001 From: Gabriela Soria Date: Fri, 5 Oct 2018 00:51:58 -0700 Subject: [PATCH 110/303] Add `ChassisCollection` and `Chassis` classes Add representation of Chassis and ChassisCollection resources. The Chassis is used to represent a chassis or other physical enclosure as a Redfish resource. Also adds the methods get_chassis_collection and get_chassis in the public API. 
Implements: ChassisCollection and Chassis classes Story: #2003853 Task: #26647 Change-Id: I59083562ff2ab3b18bfeebdabc0f4cfd663d01bb --- .../add-chassis-support-5b97daffe1c61a2b.yaml | 5 + sushy/__init__.py | 1 + sushy/main.py | 27 +++ sushy/resources/chassis/__init__.py | 0 sushy/resources/chassis/chassis.py | 213 ++++++++++++++++++ sushy/resources/chassis/constants.py | 162 +++++++++++++ sushy/resources/chassis/mappings.py | 48 ++++ sushy/resources/constants.py | 61 +++++ sushy/resources/manager/constants.py | 9 +- sushy/resources/mappings.py | 30 +++ sushy/resources/system/constants.py | 34 +-- sushy/resources/system/mappings.py | 16 -- sushy/resources/system/system.py | 5 +- sushy/tests/unit/json_samples/chassis.json | 98 ++++++++ .../unit/json_samples/chassis_collection.json | 25 ++ .../tests/unit/resources/chassis/__init__.py | 0 .../unit/resources/chassis/test_chassis.py | 157 +++++++++++++ sushy/tests/unit/test_main.py | 15 ++ 18 files changed, 870 insertions(+), 36 deletions(-) create mode 100644 releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml create mode 100644 sushy/resources/chassis/__init__.py create mode 100644 sushy/resources/chassis/chassis.py create mode 100644 sushy/resources/chassis/constants.py create mode 100644 sushy/resources/chassis/mappings.py create mode 100644 sushy/tests/unit/json_samples/chassis.json create mode 100644 sushy/tests/unit/json_samples/chassis_collection.json create mode 100644 sushy/tests/unit/resources/chassis/__init__.py create mode 100644 sushy/tests/unit/resources/chassis/test_chassis.py diff --git a/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml b/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml new file mode 100644 index 0000000..7852e7c --- /dev/null +++ b/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for the Chassis resource to the library. 
+ diff --git a/sushy/__init__.py b/sushy/__init__.py index 913dd05..92623cf 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -20,6 +20,7 @@ from sushy.main import Sushy from sushy.resources.constants import * # noqa from sushy.resources.system.constants import * # noqa from sushy.resources.manager.constants import * # noqa +from sushy.resources.chassis.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/main.py b/sushy/main.py index 5f9e9fa..0b88bf0 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -18,6 +18,7 @@ from sushy import auth as sushy_auth from sushy import connector as sushy_connector from sushy import exceptions from sushy.resources import base +from sushy.resources.chassis import chassis from sushy.resources.manager import manager from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session @@ -69,6 +70,9 @@ class Sushy(base.ResourceBase): _managers_path = base.Field(['Managers', '@odata.id']) """ManagerCollection path""" + _chassis_path = base.Field(['Chassis', '@odata.id']) + """ChassisCollection path""" + _session_service_path = base.Field(['SessionService', '@odata.id']) """SessionService path""" @@ -141,6 +145,29 @@ class Sushy(base.ResourceBase): return system.System(self._conn, identity, redfish_version=self.redfish_version) + def get_chassis_collection(self): + """Get the ChassisCollection object + + :raises: MissingAttributeError, if the collection attribute is + not found + :returns: a ChassisCollection object + """ + if not self._chassis_path: + raise exceptions.MissingAttributeError( + attribute='Chassis/@odata.id', resource=self._path) + + return chassis.ChassisCollection(self._conn, self._chassis_path, + redfish_version=self.redfish_version) + + def get_chassis(self, identity): + """Given the identity return a Chassis object + + :param identity: The identity of the Chassis resource + :returns: The Chassis object + """ + return 
chassis.Chassis(self._conn, identity, + redfish_version=self.redfish_version) + def get_manager_collection(self): """Get the ManagerCollection object diff --git a/sushy/resources/chassis/__init__.py b/sushy/resources/chassis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py new file mode 100644 index 0000000..6eeed64 --- /dev/null +++ b/sushy/resources/chassis/chassis.py @@ -0,0 +1,213 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Chassis.v1_8_0.json + +from sushy import exceptions +from sushy.resources import base +from sushy.resources.chassis import mappings as cha_maps +from sushy.resources import common +from sushy.resources import mappings as res_maps + +import logging + +LOG = logging.getLogger(__name__) + + +class ActionsField(base.CompositeField): + reset = common.ResetActionField('#Chassis.Reset') + + +class PhysicalSecurity(base.CompositeField): + intrusion_sensor = base.MappedField('IntrusionSensor', + cha_maps.CHASSIS_INTRUSION_SENSOR_MAP) + """IntrusionSensor + This indicates the known state of the physical security sensor, such as if + it is hardware intrusion detected. 
+ """ + + intrusion_sensor_number = base.Field('IntrusionSensorNumber') + """A numerical identifier to represent the physical security sensor""" + + intrusion_sensor_re_arm = ( + base.MappedField('IntrusionSensorReArm', + cha_maps.CHASSIS_INTRUSION_SENSOR_RE_ARM_MAP)) + """This indicates how the Normal state to be restored""" + + +class Chassis(base.ResourceBase): + """Chassis resource + + The Chassis represents the physical components of a system. This + resource represents the sheet-metal confined spaces and logical zones + such as racks, enclosures, chassis and all other containers. + """ + + chassis_type = base.MappedField('ChassisType', + cha_maps.CHASSIS_TYPE_VALUE_MAP, + required=True) + """The type of physical form factor of the chassis""" + + identity = base.Field('Id', required=True) + """Identifier for the chassis""" + + name = base.Field('Name', required=True) + """The chassis name""" + + asset_tag = base.Field('AssetTag') + """The user assigned asset tag of this chassis""" + + depth_mm = base.Field('DepthMm') + """Depth in millimeters + The depth of the chassis. The value of this property shall represent + the depth (length) of the chassis (in millimeters) as specified by the + manufacturer. + """ + + description = base.Field('Description') + """The chassis description""" + + height_mm = base.Field('HeightMm') + """Height in millimeters + The height of the chassis. The value of this property shall represent + the height of the chassis (in millimeters) as specified by the + manufacturer. 
+ """ + + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """The state of the indicator LED, used to identify the chassis""" + + manufacturer = base.Field('Manufacturer') + """The manufacturer of this chassis""" + + model = base.Field('Model') + """The model number of the chassis""" + + part_number = base.Field('PartNumber') + """The part number of the chassis""" + + physical_security = PhysicalSecurity('PhysicalSecurity') + """PhysicalSecurity + This value of this property shall contain the sensor state of the physical + security. + """ + + power_state = base.MappedField('PowerState', + res_maps.POWER_STATE_VALUE_MAP) + """The current power state of the chassis""" + + serial_number = base.Field('SerialNumber') + """The serial number of the chassis""" + + sku = base.Field('SKU') + """Stock-keeping unit number (SKU) + The value of this property shall be the stock-keeping unit number for + this chassis. + """ + + status = common.StatusField('Status') + """Status and Health + This property describes the status and health of the chassis and its + children. + """ + + uuid = base.Field('UUID') + """The Universal Unique Identifier (UUID) for this Chassis.""" + + weight_kg = base.Field('WeightKg') + """Weight in kilograms + The value of this property shall represent the published mass (commonly + referred to as weight) of the chassis (in kilograms). + """ + + width_mm = base.Field('WidthMm') + """Width in millimeters + The value of this property shall represent the width of the chassis + (in millimeters) as specified by the manufacturer. + """ + + _actions = ActionsField('Actions') + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a Chassis + + :param connector: A Connector instance + :param identity: The identity of the Chassis resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. 
+ """ + super(Chassis, self).__init__(connector, identity, redfish_version) + + def _get_reset_action_element(self): + reset_action = self._actions.reset + + if not reset_action: + raise exceptions.MissingActionError(action='#Chassis.Reset', + resource=self._path) + return reset_action + + def get_allowed_reset_chassis_values(self): + """Get the allowed values for resetting the chassis. + + :returns: A set of allowed values. + :raises: MissingAttributeError, if Actions/#Chassis.Reset attribute + not present. + """ + reset_action = self._get_reset_action_element() + + if not reset_action.allowed_values: + LOG.warning('Could not figure out the allowed values for the ' + 'reset chassis action for Chassis %s', self.identity) + return set(res_maps.RESET_TYPE_VALUE_MAP_REV) + + return set([res_maps.RESET_TYPE_VALUE_MAP[v] for v in + set(res_maps.RESET_TYPE_VALUE_MAP). + intersection(reset_action.allowed_values)]) + + def reset_chassis(self, value): + """Reset the chassis. + + :param value: The target value. + :raises: InvalidParameterValueError, if the target value is not + allowed. + """ + valid_resets = self.get_allowed_reset_chassis_values() + if value not in valid_resets: + raise exceptions.InvalidParameterValueError( + parameter='value', value=value, valid_values=valid_resets) + + value = res_maps.RESET_TYPE_VALUE_MAP_REV[value] + target_uri = self._get_reset_action_element().target_uri + + LOG.debug('Resetting the Chassis %s ...', self.identity) + self._conn.post(target_uri, data={'ResetType': value}) + LOG.info('The Chassis %s is being reset', self.identity) + + +class ChassisCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Chassis + + def __init__(self, connector, path, redfish_version=None): + """A class representing a ChassisCollection + + :param connector: A Connector instance + :param path: The canonical path to the Chassis collection resource + :param redfish_version: The version of RedFish. 
Used to construct + the object according to schema of the given version. + """ + super(ChassisCollection, self).__init__(connector, path, + redfish_version) diff --git a/sushy/resources/chassis/constants.py b/sushy/resources/chassis/constants.py new file mode 100644 index 0000000..3235af4 --- /dev/null +++ b/sushy/resources/chassis/constants.py @@ -0,0 +1,162 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values comes from the Redfish Chassis json-schema 1.8.0: +# http://redfish.dmtf.org/schemas/v1/Chassis.v1_8_0.json#/definitions/Chassis + +# Chassis Types constants + +CHASSIS_TYPE_RACK = 'rack chassis type' +"""An equipment rack, typically a 19-inch wide freestanding unit""" + +CHASSIS_TYPE_BLADE = 'blade chassis type' +"""Blade + +An enclosed or semi-enclosed, typically vertically-oriented, system +chassis which must be plugged into a multi-system chassis to function +normally. +""" + +CHASSIS_TYPE_ENCLOSURE = 'enclosure chassis type' +"""A generic term for a chassis that does not fit any other description""" + +CHASSIS_TYPE_STAND_ALONE = 'stand alone chassis type' +"""StandAlone + +A single, free-standing system, commonly called a tower or desktop +chassis. +""" + +CHASSIS_TYPE_RACK_MOUNT = 'rack mount chassis type' +"""RackMount + +A single system chassis designed specifically for mounting in an +equipment rack. 
+""" + +CHASSIS_TYPE_CARD = 'card chassis type' +"""Card + +A loose device or circuit board intended to be installed in a system or +other enclosure. +""" + +CHASSIS_TYPE_CARTRIDGE = 'cartridge chassis type' +"""Cartridge + +A small self-contained system intended to be plugged into a multi-system +chassis""" + +CHASSIS_TYPE_ROW = 'row chassis type' +"""A collection of equipment rack""" + +CHASSIS_TYPE_POD = 'pod chassis type' +"""Pod + +A collection of equipment racks in a large, likely transportable, +container""" + +CHASSIS_TYPE_EXPANSION = 'expansion chassis type' +"""A chassis which expands the capabilities or capacity of another chassis""" + +CHASSIS_TYPE_SIDECAR = 'sidecar chassis type' +"""Sidecar + +A chassis that mates mechanically with another chassis to expand its +capabilities or capacity. +""" + +CHASSIS_TYPE_ZONE = 'zone chassis type' +"""Zone + +A logical division or portion of a physical chassis that contains multiple +devices or systems that cannot be physically separated. +""" + +CHASSIS_TYPE_SLED = 'sled chassis type' +"""Sled + +An enclosed or semi-enclosed, system chassis which must be plugged into a +multi-system chassis to function normally similar to a blade type chassis. +""" + +CHASSIS_TYPE_SHELF = 'shelf chassis type' +"""Shelf + +An enclosed or semi-enclosed, typically horizontally-oriented, system chassis +which must be plugged into a multi-system chassis to function +normally. +""" + +CHASSIS_TYPE_DRAWER = 'drawer chassis type' +"""Drawer + +An enclosed or semi-enclosed, typically horizontally-oriented, system +chassis which may be slid into a multi-system chassis. +""" + +CHASSIS_TYPE_MODULE = 'module chassis type' +"""Module + +A small, typically removable, chassis or card which contains devices for +a particular subsystem or function. +""" + +CHASSIS_TYPE_COMPONENT = 'component chassis type' +"""Component + +A small chassis, card, or device which contains devices for a particular +subsystem or function. 
+""" + +CHASSIS_TYPE_IP_BASED_DRIVE = 'IP based drive chassis type' +"""A chassis in a drive form factor with IP-based network connections""" + +CHASSIS_TYPE_RACK_GROUP = 'rack group chassis type' +"""A group of racks which form a single entity or share infrastructure""" + +CHASSIS_TYPE_STORAGE_ENCLOSURE = 'storage enclosure chassis type' +"""A chassis which encloses storage""" + +CHASSIS_TYPE_OTHER = 'other chassis type' +"""A chassis that does not fit any of these definitions""" + +# Chassis IntrusionSensor constants + +CHASSIS_INTRUSION_SENSOR_NORMAL = 'normal chassis intrusion sensor' +"""No abnormal physical security conditions are detected at this time""" + +CHASSIS_INTRUSION_SENSOR_HARDWARE_INTRUSION = 'hardware intrusion chassis ' \ + 'intrusion sensor' +"""HardwareIntrusion + +A door, lock, or other mechanism protecting the internal system hardware from +being accessed is detected as being in an insecure state. +""" + +CHASSIS_INTRUSION_SENSOR_TAMPERING_DETECTED = 'tampering detected chassis ' \ + 'intrusion sensor' +"""Physical tampering of the monitored entity is detected""" + +# Chassis IntrusionSensorReArm constants + +CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL = 'manual re arm chassis intrusion ' \ + 'sensor' +"""This sensor would be restored to the Normal state by a manual re-arm""" + +CHASSIS_INTRUSION_SENSOR_RE_ARM_AUTOMATIC = 'automatic re arm chassis ' \ + 'intrusion sensor' +"""Automatic + +This sensor would be restored to the Normal state automatically as no abnormal +physical security conditions are detected. +""" diff --git a/sushy/resources/chassis/mappings.py b/sushy/resources/chassis/mappings.py new file mode 100644 index 0000000..eaa8c16 --- /dev/null +++ b/sushy/resources/chassis/mappings.py @@ -0,0 +1,48 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.chassis import constants as cha_cons + +CHASSIS_TYPE_VALUE_MAP = { + 'Rack': cha_cons.CHASSIS_TYPE_RACK, + 'Blade': cha_cons.CHASSIS_TYPE_BLADE, + 'Enclosure': cha_cons.CHASSIS_TYPE_ENCLOSURE, + 'StandAlone': cha_cons.CHASSIS_TYPE_STAND_ALONE, + 'RackMount': cha_cons.CHASSIS_TYPE_RACK_MOUNT, + 'Card': cha_cons.CHASSIS_TYPE_CARD, + 'Cartridge': cha_cons.CHASSIS_TYPE_CARTRIDGE, + 'Row': cha_cons.CHASSIS_TYPE_ROW, + 'Pod': cha_cons.CHASSIS_TYPE_POD, + 'Expansion': cha_cons.CHASSIS_TYPE_EXPANSION, + 'Sidecar': cha_cons.CHASSIS_TYPE_SIDECAR, + 'Zone': cha_cons.CHASSIS_TYPE_ZONE, + 'Sled': cha_cons.CHASSIS_TYPE_SLED, + 'Shelf': cha_cons.CHASSIS_TYPE_SHELF, + 'Drawer': cha_cons.CHASSIS_TYPE_DRAWER, + 'Module': cha_cons.CHASSIS_TYPE_MODULE, + 'Component': cha_cons.CHASSIS_TYPE_COMPONENT, + 'IPBasedDrive': cha_cons.CHASSIS_TYPE_IP_BASED_DRIVE, + 'RackGroup': cha_cons.CHASSIS_TYPE_RACK_GROUP, + 'StorageEnclosure': cha_cons.CHASSIS_TYPE_STORAGE_ENCLOSURE, + 'Other': cha_cons.CHASSIS_TYPE_OTHER, +} + +CHASSIS_INTRUSION_SENSOR_MAP = { + 'Normal': cha_cons.CHASSIS_INTRUSION_SENSOR_NORMAL, + 'HardwareIntrusion': cha_cons.CHASSIS_INTRUSION_SENSOR_HARDWARE_INTRUSION, + 'TamperingDetected': cha_cons.CHASSIS_INTRUSION_SENSOR_TAMPERING_DETECTED, +} + +CHASSIS_INTRUSION_SENSOR_RE_ARM_MAP = { + 'Manual': cha_cons.CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL, + 'Automatic': cha_cons.CHASSIS_INTRUSION_SENSOR_RE_ARM_AUTOMATIC, +} diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index ea8a279..dc72f81 100644 --- 
a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -40,3 +40,64 @@ PARAMTYPE_NUMBER = 'number' SEVERITY_OK = 'ok' SEVERITY_WARNING = 'warning' SEVERITY_CRITICAL = 'critical' + +# Indicator LED Constants + +INDICATOR_LED_LIT = 'indicator led lit' +"""The Indicator LED is lit""" + +INDICATOR_LED_BLINKING = 'indicator led blinking' +"""The Indicator LED is blinking""" + +INDICATOR_LED_OFF = 'indicator led off' +"""The Indicator LED is off""" + +INDICATOR_LED_UNKNOWN = 'indicator led unknown' +"""The state of the Indicator LED cannot be determined""" + +# System' PowerState constants + +POWER_STATE_ON = 'on' +"""The resource is powered on""" + +POWER_STATE_OFF = 'off' +"""The resource is powered off, although some components may continue to + have AUX power such as management controller""" + +POWER_STATE_POWERING_ON = 'powering on' +"""A temporary state between Off and On. This temporary state can + be very short""" + +POWER_STATE_POWERING_OFF = 'powering off' +"""A temporary state between On and Off. 
The power off action can take + time while the OS is in the shutdown process""" + +# Reset action constants + +RESET_TYPE_ON = 'on' +"""Turn the unit on""" + +RESET_TYPE_FORCE_ON = 'force on' +"""Turn the unit on immediately""" + +RESET_TYPE_FORCE_OFF = 'force off' +"""Turn the unit off immediately (non-graceful shutdown)""" + +RESET_TYPE_GRACEFUL_SHUTDOWN = 'graceful shutdown' +"""Perform a graceful shutdown and power off""" + +RESET_TYPE_GRACEFUL_RESTART = 'graceful restart' +"""Perform a graceful shutdown followed by a restart of the system""" + +RESET_TYPE_FORCE_RESTART = 'force restart' +"""Perform an immediate (non-graceful) shutdown, followed by a restart""" + +RESET_TYPE_NMI = 'nmi' +"""Generate a Diagnostic Interrupt (usually an NMI on x86 systems) to cease +normal operations, perform diagnostic actions and typically halt the system""" + +RESET_TYPE_PUSH_POWER_BUTTON = 'push power button' +"""Simulate the pressing of the physical power button on this unit""" + +RESET_TYPE_POWER_CYCLE = 'power cycle' +"""Perform a power cycle of the unit""" diff --git a/sushy/resources/manager/constants.py b/sushy/resources/manager/constants.py index 9c1cf18..4459c65 100644 --- a/sushy/resources/manager/constants.py +++ b/sushy/resources/manager/constants.py @@ -13,10 +13,15 @@ # Values comes from the Redfish System json-schema 1.0.0: # http://redfish.dmtf.org/schemas/v1/Manager.v1_0_0.json#/definitions/Manager # noqa +from sushy.resources import constants as res_cons + # Manager Reset action constants -RESET_MANAGER_GRACEFUL_RESTART = 'graceful restart' -RESET_MANAGER_FORCE_RESTART = 'force restart' +RESET_MANAGER_GRACEFUL_RESTART = res_cons.RESET_TYPE_GRACEFUL_RESTART +"""Perform a graceful shutdown followed by a restart of the system""" + +RESET_MANAGER_FORCE_RESTART = res_cons.RESET_TYPE_FORCE_RESTART +"""Perform an immediate (non-graceful) shutdown, followed by a restart""" # Manager Type constants diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py 
index 8afad7e..3416769 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -45,3 +45,33 @@ SEVERITY_VALUE_MAP = { 'Warning': res_cons.SEVERITY_WARNING, 'Critical': res_cons.SEVERITY_CRITICAL } + +INDICATOR_LED_VALUE_MAP = { + 'Lit': res_cons.INDICATOR_LED_LIT, + 'Blinking': res_cons.INDICATOR_LED_BLINKING, + 'Off': res_cons.INDICATOR_LED_OFF, + 'Unknown': res_cons.INDICATOR_LED_UNKNOWN, +} + +POWER_STATE_VALUE_MAP = { + 'On': res_cons.POWER_STATE_ON, + 'Off': res_cons.POWER_STATE_OFF, + 'PoweringOn': res_cons.POWER_STATE_POWERING_ON, + 'PoweringOff': res_cons.POWER_STATE_POWERING_OFF, +} + +POWER_STATE_MAP_REV = utils.revert_dictionary(POWER_STATE_VALUE_MAP) + +RESET_TYPE_VALUE_MAP = { + 'On': res_cons.RESET_TYPE_ON, + 'ForceOff': res_cons.RESET_TYPE_FORCE_OFF, + 'GracefulShutdown': res_cons.RESET_TYPE_GRACEFUL_SHUTDOWN, + 'GracefulRestart': res_cons.RESET_TYPE_GRACEFUL_RESTART, + 'ForceRestart': res_cons.RESET_TYPE_FORCE_RESTART, + 'Nmi': res_cons.RESET_TYPE_NMI, + 'ForceOn': res_cons.RESET_TYPE_FORCE_ON, + 'PushPowerButton': res_cons.RESET_TYPE_PUSH_POWER_BUTTON, + 'PowerCycle': res_cons.RESET_TYPE_POWER_CYCLE, +} + +RESET_TYPE_VALUE_MAP_REV = utils.revert_dictionary(RESET_TYPE_VALUE_MAP) diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 06b07b0..37e0652 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -16,46 +16,48 @@ # Values comes from the Redfish System json-schema 1.0.0: # http://redfish.dmtf.org/schemas/v1/ComputerSystem.v1_0_0.json#/definitions/ComputerSystem # noqa +from sushy.resources import constants as res_cons + # Reset action constants -RESET_ON = 'on' -RESET_FORCE_OFF = 'force off' -RESET_GRACEFUL_SHUTDOWN = 'graceful shutdown' -RESET_GRACEFUL_RESTART = 'graceful restart' -RESET_FORCE_RESTART = 'force restart' -RESET_NMI = 'nmi' -RESET_FORCE_ON = 'force on' -RESET_PUSH_POWER_BUTTON = 'push power button' +RESET_ON = 
res_cons.RESET_TYPE_ON +RESET_FORCE_OFF = res_cons.RESET_TYPE_FORCE_OFF +RESET_GRACEFUL_SHUTDOWN = res_cons.RESET_TYPE_GRACEFUL_SHUTDOWN +RESET_GRACEFUL_RESTART = res_cons.RESET_TYPE_GRACEFUL_RESTART +RESET_FORCE_RESTART = res_cons.RESET_TYPE_FORCE_RESTART +RESET_NMI = res_cons.RESET_TYPE_NMI +RESET_FORCE_ON = res_cons.RESET_TYPE_FORCE_ON +RESET_PUSH_POWER_BUTTON = res_cons.RESET_TYPE_PUSH_POWER_BUTTON # System' PowerState constants -SYSTEM_POWER_STATE_ON = 'on' +SYSTEM_POWER_STATE_ON = res_cons.POWER_STATE_ON """The system is powered on""" -SYSTEM_POWER_STATE_OFF = 'off' +SYSTEM_POWER_STATE_OFF = res_cons.POWER_STATE_OFF """The system is powered off, although some components may continue to have AUX power such as management controller""" -SYSTEM_POWER_STATE_POWERING_ON = 'powering on' +SYSTEM_POWER_STATE_POWERING_ON = res_cons.POWER_STATE_POWERING_ON """A temporary state between Off and On. This temporary state can be very short""" -SYSTEM_POWER_STATE_POWERING_OFF = 'powering off' +SYSTEM_POWER_STATE_POWERING_OFF = res_cons.POWER_STATE_POWERING_OFF """A temporary state between On and Off. 
The power off action can take time while the OS is in the shutdown process""" # Indicator LED Constants -SYSTEM_INDICATOR_LED_LIT = 'Lit' +SYSTEM_INDICATOR_LED_LIT = res_cons.INDICATOR_LED_LIT """The Indicator LED is lit""" -SYSTEM_INDICATOR_LED_BLINKING = 'Blinking' +SYSTEM_INDICATOR_LED_BLINKING = res_cons.INDICATOR_LED_BLINKING """The Indicator LED is blinking""" -SYSTEM_INDICATOR_LED_OFF = 'Off' +SYSTEM_INDICATOR_LED_OFF = res_cons.INDICATOR_LED_OFF """The Indicator LED is off""" -SYSTEM_INDICATOR_LED_UNKNOWN = 'Unknown' +SYSTEM_INDICATOR_LED_UNKNOWN = res_cons.INDICATOR_LED_UNKNOWN """The state of the Indicator LED cannot be determine""" # Boot source target constants diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index c9a3244..3ff34f0 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -30,22 +30,6 @@ RESET_SYSTEM_VALUE_MAP = { RESET_SYSTEM_VALUE_MAP_REV = utils.revert_dictionary(RESET_SYSTEM_VALUE_MAP) -SYSTEM_POWER_STATE_MAP = { - 'On': sys_cons.SYSTEM_POWER_STATE_ON, - 'Off': sys_cons.SYSTEM_POWER_STATE_OFF, - 'PoweringOn': sys_cons.SYSTEM_POWER_STATE_POWERING_ON, - 'PoweringOff': sys_cons.SYSTEM_POWER_STATE_POWERING_OFF, -} - -SYSTEM_POWER_STATE_MAP_REV = utils.revert_dictionary(SYSTEM_POWER_STATE_MAP) - -SYSTEM_INDICATOR_LED_MAP = { - 'Lit': sys_cons.SYSTEM_INDICATOR_LED_LIT, - 'Blinking': sys_cons.SYSTEM_INDICATOR_LED_BLINKING, - 'Off': sys_cons.SYSTEM_INDICATOR_LED_OFF, - 'Unknown': sys_cons.SYSTEM_INDICATOR_LED_UNKNOWN, -} - BOOT_SOURCE_TARGET_MAP = { 'None': sys_cons.BOOT_SOURCE_TARGET_NONE, 'Pxe': sys_cons.BOOT_SOURCE_TARGET_PXE, diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 14064cd..6859ee1 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -18,6 +18,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources import 
mappings as res_maps from sushy.resources.system import bios from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface @@ -86,7 +87,7 @@ class System(base.ResourceBase): """The system identity string""" indicator_led = base.MappedField('IndicatorLED', - sys_maps.SYSTEM_INDICATOR_LED_MAP) + res_maps.INDICATOR_LED_VALUE_MAP) """Whether the indicator LED is lit or off""" manufacturer = base.Field('Manufacturer') @@ -99,7 +100,7 @@ class System(base.ResourceBase): """The system part number""" power_state = base.MappedField('PowerState', - sys_maps.SYSTEM_POWER_STATE_MAP) + res_maps.POWER_STATE_VALUE_MAP) """The system power state""" serial_number = base.Field('SerialNumber') diff --git a/sushy/tests/unit/json_samples/chassis.json b/sushy/tests/unit/json_samples/chassis.json new file mode 100644 index 0000000..45cd1e7 --- /dev/null +++ b/sushy/tests/unit/json_samples/chassis.json @@ -0,0 +1,98 @@ +{ + "@odata.type": "#Chassis.v1_8_0.Chassis", + "Id": "Blade1", + "Name": "Blade", + "Description": "Test description", + "ChassisType": "Blade", + "AssetTag": "45Z-2381", + "Manufacturer": "Contoso", + "Model": "SX1000", + "SKU": "6914260", + "SerialNumber": "529QB9450R6", + "PartNumber": "166480-S23", + "UUID": "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF", + "PowerState": "On", + "IndicatorLED": "Off", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "HeightMm": 44.45, + "WidthMm": 431.8, + "DepthMm": 711, + "WeightKg": 15.31, + "Location": { + "PartLocation": { + "ServiceLabel": "Blade 1", + "LocationType": "Slot", + "LocationOrdinalValue": 0, + "Reference": "Front", + "Orientation": "LeftToRight" + } + }, + "PhysicalSecurity": { + "IntrusionSensor": "Normal", + "IntrusionSensorNumber": 123, + "IntrusionSensorReArm": "Manual" + }, + "Thermal": { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal" + }, + "Links": { + "ComputerSystems": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + } + ], + "ManagedBy": [ + 
{ + "@odata.id": "/redfish/v1/Managers/Blade1BMC" + } + ], + "ContainedBy": { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + "CooledBy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/1" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/2" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/3" + } + ], + "PoweredBy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1" + } + ] + }, + "Actions": { + "#Chassis.Reset": { + "target": "/redfish/v1/Chassis/Blade1/Actions/Chassis.Reset", + "ResetType@Redfish.AllowableValues": [ + "ForceRestart", + "GracefulRestart", + "On", + "ForceOff", + "GracefulShutdown", + "Nmi", + "ForceOn", + "PushPowerButton", + "PowerCycle" + ] + }, + "Oem": {} + }, + "@odata.context": "/redfish/v1/$metadata#Chassis.Chassis", + "@odata.id": "/redfish/v1/Chassis/Blade1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/chassis_collection.json b/sushy/tests/unit/json_samples/chassis_collection.json new file mode 100644 index 0000000..0af4cad --- /dev/null +++ b/sushy/tests/unit/json_samples/chassis_collection.json @@ -0,0 +1,25 @@ +{ + "@odata.type": "#ChassisCollection.ChassisCollection", + "Name": "Chassis Collection", + "Members@odata.count": 5, + "Members": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + } + ], + "@odata.context": "/redfish/v1/$metadata#ChassisCollection.ChassisCollection", + "@odata.id": "/redfish/v1/Chassis", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/resources/chassis/__init__.py b/sushy/tests/unit/resources/chassis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py new file mode 100644 index 0000000..8fc87ff --- /dev/null +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +import sushy +from sushy import exceptions +from sushy.resources.chassis import chassis +from sushy.tests.unit import base + + +class ChassisTestCase(base.TestCase): + + def setUp(self): + super(ChassisTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/chassis.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.chassis = chassis.Chassis(self.conn, '/redfish/v1/Chassis/Blade1', + redfish_version='1.8.0') + + def test__parse_attributes(self): + # | WHEN | + self.chassis._parse_attributes() + # | THEN | + self.assertEqual('1.8.0', self.chassis.redfish_version) + self.assertEqual('Blade1', self.chassis.identity) + self.assertEqual('Blade', self.chassis.name) + self.assertEqual('Test description', self.chassis.description) + self.assertEqual('45Z-2381', self.chassis.asset_tag) + self.assertEqual(sushy.CHASSIS_TYPE_BLADE, + self.chassis.chassis_type) + self.assertEqual('Contoso', self.chassis.manufacturer) + self.assertEqual('SX1000', self.chassis.model) + self.assertEqual('529QB9450R6', self.chassis.serial_number) + self.assertEqual('6914260', self.chassis.sku) + self.assertEqual('166480-S23', self.chassis.part_number) + self.assertEqual('FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF', + self.chassis.uuid) + self.assertEqual(sushy.INDICATOR_LED_OFF, + self.chassis.indicator_led) + self.assertEqual(sushy.POWER_STATE_ON, + self.chassis.power_state) + self.assertEqual(sushy.STATE_ENABLED, self.chassis.status.state) + self.assertEqual(44.45, self.chassis.height_mm) + self.assertEqual(431.8, self.chassis.width_mm) + self.assertEqual(711, self.chassis.depth_mm) + self.assertEqual(15.31, self.chassis.weight_kg) + self.assertEqual(sushy.HEALTH_OK, self.chassis.status.health) + self.assertEqual(sushy.CHASSIS_INTRUSION_SENSOR_NORMAL, + 
self.chassis.physical_security.intrusion_sensor) + self.assertEqual(123, + self.chassis.physical_security.intrusion_sensor_number + ) + self.assertEqual(sushy.CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL, + self.chassis.physical_security.intrusion_sensor_re_arm + ) + + def test_get_allowed_reset_chasis_values(self): + # | GIVEN | + expected = {sushy.RESET_TYPE_POWER_CYCLE, + sushy.RESET_TYPE_PUSH_POWER_BUTTON, + sushy.RESET_TYPE_FORCE_ON, sushy.RESET_TYPE_NMI, + sushy.RESET_TYPE_FORCE_RESTART, + sushy.RESET_TYPE_GRACEFUL_RESTART, sushy.RESET_TYPE_ON, + sushy.RESET_TYPE_FORCE_OFF, + sushy.RESET_TYPE_GRACEFUL_SHUTDOWN} + # | WHEN | + values = self.chassis.get_allowed_reset_chassis_values() + # | THEN | + self.assertEqual(expected, values) + self.assertIsInstance(values, set) + + def test_get_allowed_reset_chassis_values_for_no_values_set(self): + # | GIVEN | + self.chassis._actions.reset.allowed_values = [] + expected = {sushy.RESET_TYPE_POWER_CYCLE, + sushy.RESET_TYPE_PUSH_POWER_BUTTON, + sushy.RESET_TYPE_FORCE_ON, sushy.RESET_TYPE_NMI, + sushy.RESET_TYPE_FORCE_RESTART, + sushy.RESET_TYPE_GRACEFUL_RESTART, sushy.RESET_TYPE_ON, + sushy.RESET_TYPE_FORCE_OFF, + sushy.RESET_TYPE_GRACEFUL_SHUTDOWN} + # | WHEN | + values = self.chassis.get_allowed_reset_chassis_values() + # | THEN | + self.assertEqual(expected, values) + self.assertIsInstance(values, set) + + def test_get_allowed_reset_chassis_values_missing_action_reset_attr(self): + # | GIVEN | + self.chassis._actions.reset = None + # | WHEN & THEN | + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #Chassis.Reset') + + def test_reset_chassis(self): + self.chassis.reset_chassis(sushy.RESET_TYPE_GRACEFUL_RESTART) + self.chassis._conn.post.assert_called_once_with( + '/redfish/v1/Chassis/Blade1/Actions/Chassis.Reset', + data={'ResetType': 'GracefulRestart'}) + + def test_reset_chassis_with_invalid_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.chassis.reset_chassis, 
'invalid-value') + + +class ChassisCollectionTestCase(base.TestCase): + + def setUp(self): + super(ChassisCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'chassis_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.chassis = chassis.ChassisCollection( + self.conn, '/redfish/v1/Chassis', redfish_version='1.5.0') + + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_member(self, chassis_mock): + self.chassis.get_member('/redfish/v1/Chassis/MultiBladeEncl') + chassis_mock.assert_called_once_with( + self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', + redfish_version=self.chassis.redfish_version) + + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_members(self, chassis_mock): + members = self.chassis.get_members() + calls = [ + mock.call(self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', + redfish_version=self.chassis.redfish_version), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade1', + redfish_version=self.chassis.redfish_version), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade2', + redfish_version=self.chassis.redfish_version), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade3', + redfish_version=self.chassis.redfish_version), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade4', + redfish_version=self.chassis.redfish_version) + ] + chassis_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(5, len(members)) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 60df9ac..486616f 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -21,6 +21,7 @@ from sushy import auth from sushy import connector from sushy import exceptions from sushy import main +from sushy.resources.chassis import chassis from sushy.resources.manager import manager from sushy.resources.registry import 
message_registry_file from sushy.resources.sessionservice import session @@ -99,6 +100,20 @@ class MainTestCase(base.TestCase): self.root._conn, 'fake-system-id', redfish_version=self.root.redfish_version) + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_chassis(self, mock_chassis): + self.root.get_chassis('fake-chassis-id') + mock_chassis.assert_called_once_with( + self.root._conn, 'fake-chassis-id', + redfish_version=self.root.redfish_version) + + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + def test_get_chassis_collection(self, chassis_collection_mock): + self.root.get_chassis_collection() + chassis_collection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + redfish_version=self.root.redfish_version) + @mock.patch.object(manager, 'ManagerCollection', autospec=True) def test_get_manager_collection(self, ManagerCollection_mock): self.root.get_manager_collection() -- GitLab From 5353214bed9803fc3b70740dd1b1e204ab437709 Mon Sep 17 00:00:00 2001 From: melissaml Date: Tue, 4 Dec 2018 18:27:20 +0800 Subject: [PATCH 111/303] Change openstack-dev to openstack-discuss and update URL Mailinglists have been updated. Openstack-discuss replaces openstack-dev. 
Change-Id: Icae1acef28a49ed51000ea952a7e34e3ad435116 --- setup.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 7963eb8..1d0e36d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,8 +4,8 @@ summary = Sushy is a small Python library to communicate with Redfish based syst description-file = README.rst author = OpenStack -author-email = openstack-dev@lists.openstack.org -home-page = https://docs.openstack.org/sushy +author-email = openstack-discuss@lists.openstack.org +home-page = https://docs.openstack.org/sushy/latest/ classifier = Environment :: OpenStack Intended Audience :: Information Technology -- GitLab From 0bf7cbf14db35a6546bcda38621309b9531af3d0 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 3 Dec 2018 15:41:25 +0100 Subject: [PATCH 112/303] Add System<->Manager linkage Redfish data model rests on three interlinked entities - ComputerSystem(s), Manager(s) and Chassis. As of this moment, sushy does not support traversing between these entities at the Sushy abstraction level, despite the availability of such linkage in the JSON documents sushy feeds on. This change establishes System->Managers and Managers->System links. 
Change-Id: I54b0fdeebdea1e13c2b6912ee4c97776ebccaf03 Story: 2004512 Task: 28240 --- ...stem-manager-linkage-86be69c9df4cb359.yaml | 6 ++++ sushy/resources/manager/manager.py | 19 +++++++++++++ sushy/resources/system/system.py | 19 +++++++++++++ .../unit/resources/manager/test_manager.py | 13 +++++++++ .../unit/resources/system/test_system.py | 13 +++++++++ sushy/tests/unit/test_utils.py | 8 ++++++ sushy/utils.py | 28 +++++++++++++++---- 7 files changed, 100 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml diff --git a/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml b/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml new file mode 100644 index 0000000..69e0c00 --- /dev/null +++ b/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Establishes ComputerSystem->Managers and Manager->ComputerSystems + references at sushy data abstraction level what make it possible to + look up Manager(s) responsible for a ComputerSystem and vice versa. diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 96b8133..ef7b51c 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -188,6 +188,25 @@ class Manager(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, 'VirtualMedia'), redfish_version=self.redfish_version) + @property + @utils.cache_it + def systems(self): + """A list of systems managed by this manager. + + Returns a list of `System` objects representing systems being + managed by this manager. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `System` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagerForServers"], is_collection=True) + + from sushy.resources.system import system + return [system.System(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + class ManagerCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 3781923..a0b5e4d 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -18,6 +18,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources.manager import manager from sushy.resources import mappings as res_maps from sushy.resources.system import bios from sushy.resources.system import constants as sys_cons @@ -333,6 +334,24 @@ class System(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, "Storage"), redfish_version=self.redfish_version) + @property + @utils.cache_it + def managers(self): + """A list of managers for this system. + + Returns a list of `Manager` objects representing the managers + that manage this system. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `Manager` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagedBy"], is_collection=True) + + return [manager.Manager(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 5c98ef6..f860578 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -18,6 +18,7 @@ import sushy from sushy import exceptions from sushy.resources.manager import manager from sushy.resources.manager import virtual_media +from sushy.resources.system import system from sushy.tests.unit import base @@ -265,6 +266,18 @@ class ManagerTestCase(base.TestCase): virtual_media.VirtualMediaCollection) self.assertFalse(vrt_media._is_stale) + def test_systems(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_systems = self.manager.systems + self.assertIsInstance(actual_systems[0], system.System) + self.assertEqual( + '/redfish/v1/Systems/437XR1138R2', actual_systems[0].path) + class ManagerCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index eb96a7d..f6deb74 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -20,6 +20,7 @@ import mock import sushy from sushy import exceptions from sushy.resources import constants as res_cons +from sushy.resources.manager import manager from sushy.resources.system import bios from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor @@ -478,6 +479,18 @@ class SystemTestCase(base.TestCase): # | WHEN & THEN | 
self.assertIsInstance(self.sys_inst.storage, storage.StorageCollection) + def test_managers(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'manager.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_managers = self.sys_inst.managers + self.assertIsInstance(actual_managers[0], manager.Manager) + self.assertEqual( + '/redfish/v1/Managers/BMC', actual_managers[0].path) + class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 8be70ca..899eee6 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -69,6 +69,14 @@ class UtilsTestCase(base.TestCase): subresource_path) self.assertEqual(expected_result, value) + def test_get_sub_resource_path_by_collection(self): + subresource_path = ["Links", "ManagedBy"] + expected_result = ['/redfish/v1/Managers/BMC'] + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path, + is_collection=True) + self.assertEqual(expected_result, value) + def test_get_sub_resource_path_by_fails(self): subresource_path = ['Links', 'Chassis'] expected_result = 'attribute Links/Chassis/@odata.id is missing' diff --git a/sushy/utils.py b/sushy/utils.py index d9735af..0ba6b9b 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -66,13 +66,17 @@ def int_or_none(x): return int(x) -def get_sub_resource_path_by(resource, subresource_name): +def get_sub_resource_path_by(resource, subresource_name, is_collection=False): """Helper function to find the subresource path :param resource: ResourceBase instance on which the name gets queried upon. :param subresource_name: name of the resource field to fetch the '@odata.id' from. + :param is_collection: if `True`, expect a list of resources to + fetch the '@odata.id' from. + :returns: Resource path (if `is_collection` is `False`) or + a list of resource paths (if `is_collection` is `True`). 
""" if not subresource_name: raise ValueError('"subresource_name" cannot be empty') @@ -88,12 +92,24 @@ def get_sub_resource_path_by(resource, subresource_name): raise exceptions.MissingAttributeError( attribute='/'.join(subresource_name), resource=resource.path) - if '@odata.id' not in body: - raise exceptions.MissingAttributeError( - attribute='/'.join(subresource_name) + '/@odata.id', - resource=resource.path) + elements = [] + + try: + if is_collection: + for element in body: + elements.append(element['@odata.id']) + return elements - return body['@odata.id'] + else: + return body['@odata.id'] + + except (TypeError, KeyError): + attribute = '/'.join(subresource_name) + if is_collection: + attribute += '[%s]' % len(elements) + attribute += '/@odata.id' + raise exceptions.MissingAttributeError( + attribute=attribute, resource=resource.path) def max_safe(iterable, default=0): -- GitLab From e022001628eb4792359aa7fab2430bee412073bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Wed, 9 Jan 2019 14:26:54 +0100 Subject: [PATCH 113/303] Running wrap-and-sort -bast --- debian/changelog | 6 ++++++ debian/control | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index 6af7798..0df7cf6 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (1.3.1-4) UNRELEASED; urgency=medium + + * Running wrap-and-sort -bast. 
+ + -- Ondřej Nový Wed, 09 Jan 2019 14:26:54 +0100 + python-sushy (1.3.1-3) unstable; urgency=medium [ Ondřej Nový ] diff --git a/debian/control b/debian/control index 68f0e24..7894374 100644 --- a/debian/control +++ b/debian/control @@ -8,10 +8,10 @@ Build-Depends: debhelper (>= 10), dh-python, openstack-pkg-tools, - python3-sphinx (>= 1.6.2), python3-all, python3-pbr (>= 2.0.0), python3-setuptools, + python3-sphinx (>= 1.6.2), Build-Depends-Indep: python3-coverage, python3-hacking, -- GitLab From dedc0e38c77ce653d7f3bb3673fbccd6525d57d6 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 9 Jan 2019 15:18:35 +0100 Subject: [PATCH 114/303] Add System<->Manager linkage follow up Follow-up for change I54b0fdeebdea1e13c2b6912ee4c97776ebccaf03 Change-Id: Ibcdafd82d16c43fb62bce30cd2772abd0310610b Story: 2004512 Task: 28240 --- sushy/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sushy/utils.py b/sushy/utils.py index 0ba6b9b..f025c2a 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -100,8 +100,7 @@ def get_sub_resource_path_by(resource, subresource_name, is_collection=False): elements.append(element['@odata.id']) return elements - else: - return body['@odata.id'] + return body['@odata.id'] except (TypeError, KeyError): attribute = '/'.join(subresource_name) -- GitLab From bbbadbd0b56708368415b0efeb98cf8aa9168e1b Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 5 Dec 2018 17:55:04 +0100 Subject: [PATCH 115/303] Add Chassis<->ComputerSystem/Manager linkage Redfish data model rests on three interlinked entities - ComputerSystem(s), Manager(s) and Chassis. As of this moment, sushy does not support traversal via Chassis despite the availability of such linkage in the JSON documents sushy feeds on. This change establishes Chassis<->ComputerSystem/Managers links. 
Change-Id: If26f95b3ef6d70419b3c37b3e9eabe41be258c8d Story: 2004512 Task: 28308 --- .../add-chassis-linkage-d8e567f9c791169d.yaml | 7 ++++ sushy/resources/chassis/chassis.py | 39 +++++++++++++++++++ sushy/resources/manager/manager.py | 19 +++++++++ sushy/resources/system/system.py | 23 +++++++++-- .../unit/resources/chassis/test_chassis.py | 26 +++++++++++++ .../unit/resources/manager/test_manager.py | 13 +++++++ .../unit/resources/system/test_system.py | 13 +++++++ 7 files changed, 136 insertions(+), 4 deletions(-) create mode 100644 releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml diff --git a/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml b/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml new file mode 100644 index 0000000..19084e9 --- /dev/null +++ b/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml @@ -0,0 +1,7 @@ +--- +features: + - | + Establishes linkage between Chassis and ComputerSystem/Managers + resources as references at sushy data abstraction level. That + makes it possible to look up Chassis by Manager/ComputerSystem or + any other way around. diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py index 6eeed64..a88fb5f 100644 --- a/sushy/resources/chassis/chassis.py +++ b/sushy/resources/chassis/chassis.py @@ -17,7 +17,9 @@ from sushy import exceptions from sushy.resources import base from sushy.resources.chassis import mappings as cha_maps from sushy.resources import common +from sushy.resources.manager import manager from sushy.resources import mappings as res_maps +from sushy import utils import logging @@ -194,6 +196,43 @@ class Chassis(base.ResourceBase): self._conn.post(target_uri, data={'ResetType': value}) LOG.info('The Chassis %s is being reset', self.identity) + @property + @utils.cache_it + def managers(self): + """A list of managers for this chassis. + + Returns a list of `Manager` objects representing the managers + that manage this chassis. 
+ + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `Manager` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagedBy"], is_collection=True) + + return [manager.Manager(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + + @property + @utils.cache_it + def systems(self): + """A list of systems residing in this chassis. + + Returns a list of `System` objects representing systems being + mounted in this chassis/cabinet. + + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `System` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ComputerSystems"], is_collection=True) + + from sushy.resources.system import system + return [system.System(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + class ChassisCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 9c17c70..4211f20 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -214,6 +214,25 @@ class Manager(base.ResourceBase): redfish_version=self.redfish_version) for path in paths] + @property + @utils.cache_it + def chassis(self): + """A list of chassis managed by this manager. + + Returns a list of `Chassis` objects representing the chassis + or cabinets managed by this manager. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `Chassis` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagerForChassis"], is_collection=True) + + from sushy.resources.chassis import chassis + return [chassis.Chassis(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + class ManagerCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 18851e7..69c44b8 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -20,6 +20,7 @@ import logging from sushy import exceptions from sushy.resources import base +from sushy.resources.chassis import chassis from sushy.resources import common from sushy.resources.manager import manager from sushy.resources import mappings as res_maps @@ -245,10 +246,6 @@ class System(base.ResourceBase): # Probably we should call refresh() as well. self._conn.patch(self.path, data=data) - # TODO(lucasagomes): All system have a Manager and Chassis object, - # include a get_manager() and get_chassis() once we have an abstraction - # for those resources. - def _get_processor_collection_path(self): """Helper function to find the ProcessorCollection path""" return utils.get_sub_resource_path_by(self, 'Processors') @@ -355,6 +352,24 @@ class System(base.ResourceBase): redfish_version=self.redfish_version) for path in paths] + @property + @utils.cache_it + def chassis(self): + """A list of chassis where this system resides. + + Returns a list of `Chassis` objects representing the chassis + or cabinets where this system is mounted. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `Chassis` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "Chassis"], is_collection=True) + + return [chassis.Chassis(self._conn, path, + redfish_version=self.redfish_version) + for path in paths] + class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index 8fc87ff..bed4319 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -19,6 +19,8 @@ import mock import sushy from sushy import exceptions from sushy.resources.chassis import chassis +from sushy.resources.manager import manager +from sushy.resources.system import system from sushy.tests.unit import base @@ -118,6 +120,30 @@ class ChassisTestCase(base.TestCase): self.assertRaises(exceptions.InvalidParameterValueError, self.chassis.reset_chassis, 'invalid-value') + def test_managers(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'manager.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_managers = self.chassis.managers + self.assertIsInstance(actual_managers[0], manager.Manager) + self.assertEqual( + '/redfish/v1/Managers/Blade1BMC', actual_managers[0].path) + + def test_systems(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_systems = self.chassis.systems + self.assertIsInstance(actual_systems[0], system.System) + self.assertEqual( + '/redfish/v1/Systems/529QB9450R6', actual_systems[0].path) + class ChassisCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 565cc7c..60c9c36 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ 
b/sushy/tests/unit/resources/manager/test_manager.py @@ -16,6 +16,7 @@ import mock import sushy from sushy import exceptions +from sushy.resources.chassis import chassis from sushy.resources.manager import manager from sushy.resources.manager import virtual_media from sushy.resources.system import system @@ -279,6 +280,18 @@ class ManagerTestCase(base.TestCase): self.assertEqual( '/redfish/v1/Systems/437XR1138R2', actual_systems[0].path) + def test_chassis(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'chassis.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_chassis = self.manager.chassis + self.assertIsInstance(actual_chassis[0], chassis.Chassis) + self.assertEqual( + '/redfish/v1/Chassis/1U', actual_chassis[0].path) + class ManagerCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index f6deb74..6d6c4cd 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -19,6 +19,7 @@ import mock import sushy from sushy import exceptions +from sushy.resources.chassis import chassis from sushy.resources import constants as res_cons from sushy.resources.manager import manager from sushy.resources.system import bios @@ -491,6 +492,18 @@ class SystemTestCase(base.TestCase): self.assertEqual( '/redfish/v1/Managers/BMC', actual_managers[0].path) + def test_chassis(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'chassis.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_chassis = self.sys_inst.chassis + self.assertIsInstance(actual_chassis[0], chassis.Chassis) + self.assertEqual( + '/redfish/v1/Chassis/1U', actual_chassis[0].path) + class SystemCollectionTestCase(base.TestCase): -- GitLab From 47053d432600b1e99e293f4d477af57a442209f0 Mon Sep 17 00:00:00 2001 From: dnuka 
Date: Thu, 6 Dec 2018 17:18:26 +0530 Subject: [PATCH 116/303] Add missing tests This change adds missing tests to "Add `ChassisCollection` and `Chassis` classes" [1] [1] https://review.openstack.org/#/c/608177/ Change-Id: Ib2b0e587ea897c6bf7280c5cff4764927fc9536a --- sushy/tests/unit/test_main.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 486616f..d785fd9 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -65,6 +65,7 @@ class MainTestCase(base.TestCase): self.assertFalse(self.root.protocol_features_supported.select_query) self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) + self.assertEqual('/redfish/v1/Chassis', self.root._chassis_path) self.assertEqual('/redfish/v1/SessionService', self.root._session_service_path) @@ -174,6 +175,11 @@ class BareMinimumMainTestCase(base.TestCase): exceptions.MissingAttributeError, 'Managers/@odata.id', self.root.get_manager_collection) + def test_get_chassis_collection_when_chassis_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Chassis/@odata.id', self.root.get_chassis_collection) + def test_get_session_service_when_sessionservice_attr_absent(self): self.assertRaisesRegex( exceptions.MissingAttributeError, -- GitLab From 02ddce7d5bd38d4ef5c4a64bb99ad764423fff16 Mon Sep 17 00:00:00 2001 From: dnuka Date: Thu, 17 Jan 2019 18:56:25 +0530 Subject: [PATCH 117/303] Update to public A follow up patch to Change-Id I479bc18f79c6c51644115671ef68a879a94d102e `_maintenance_window` and `_operation_apply_time_support` set to public. 
Change-Id: Id68e1fd4252095d69d31f0a57e66e103d9d4f768 --- sushy/resources/settings.py | 4 ++-- sushy/tests/unit/resources/test_settings.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 56b9b96..636f5cc 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -108,7 +108,7 @@ class SettingsField(base.CompositeField): to change this resource """ - _maintenance_window = MaintenanceWindowField('MaintenanceWindow') + maintenance_window = MaintenanceWindowField('MaintenanceWindow') """Indicates if a given resource has a maintenance window assignment for applying settings or operations""" @@ -116,7 +116,7 @@ class SettingsField(base.CompositeField): """Represents the results of the last time the values of the Settings resource were applied to the server""" - _operation_apply_time_support = OperationApplyTimeSupportField( + operation_apply_time_support = OperationApplyTimeSupportField( 'OperationApplyTimeSupport') """Indicates if a client is allowed to request for a specific apply time of a create, delete, or action operation of a given resource""" diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 4eaf4fe..2b6e883 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -56,21 +56,21 @@ class SettingsFieldTestCase(base.TestCase): self.assertEqual( 1, instance. - _maintenance_window.maintenance_window_duration_in_seconds) + maintenance_window.maintenance_window_duration_in_seconds) self.assertEqual( '2016-03-07T14:44.30-05:05', - instance._maintenance_window.maintenance_window_start_time) + instance.maintenance_window.maintenance_window_start_time) self.assertEqual( 1, - instance._operation_apply_time_support. + instance.operation_apply_time_support. 
maintenance_window_duration_in_seconds) self.assertEqual( '2016-03-07T14:44.30-05:10', - instance._operation_apply_time_support. + instance.operation_apply_time_support. maintenance_window_start_time) self.assertIn( 'Immediate', - instance._operation_apply_time_support.supported_values) + instance.operation_apply_time_support.supported_values) def test_commit(self): conn = mock.Mock() -- GitLab From 70485a977e462062bf9acdc1e8e8808ba18ac741 Mon Sep 17 00:00:00 2001 From: dnuka Date: Thu, 17 Jan 2019 19:10:22 +0530 Subject: [PATCH 118/303] Update the docstring of `sub_processors()` A follow up patch to Change-Id I479bc18f79c6c51644115671ef68a879a94d102e Change-Id: Iccd414e3b92d9b03eaec8ec3ac511a10171bd617 --- sushy/resources/system/processor.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index a8ea385..efd335c 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -113,11 +113,7 @@ class Processor(base.ResourceBase): @property @utils.cache_it def sub_processors(self): - """A reference to - - the collection of Sub-Processors associated with - this system, such as cores or threads that are part of a processor. - """ + """A reference to the collection of Sub-Processors""" return ProcessorCollection( self.conn, self._get_processor_collection_path, redfish_version=self.redfish_version) -- GitLab From 69f5e9850493084e9713c68a37504bc10deb0f7f Mon Sep 17 00:00:00 2001 From: dnuka Date: Tue, 18 Dec 2018 16:27:50 +0530 Subject: [PATCH 119/303] Introduce `dateutil` These changes introduce `dateutil.parser.parse()` to the sushy which converts ASCII text time into the Python's `datetime` object which is way more handy to work with rather than String. 
A follow up patch to Change-Id I479bc18f79c6c51644115671ef68a879a94d102e Change-Id: I1b8efd0387e489e98c7a79e8612a6ad0cbcaf30e --- lower-constraints.txt | 1 + requirements.txt | 1 + sushy/resources/settings.py | 7 +++++-- sushy/tests/unit/json_samples/settings.json | 6 +++--- sushy/tests/unit/resources/test_settings.py | 18 +++++++++++++++--- 5 files changed, 25 insertions(+), 8 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 2d10fe4..946073f 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -24,6 +24,7 @@ pbr==2.0.0 pep8==1.5.7 pyflakes==0.8.1 Pygments==2.2.0 +python-dateutil==2.7.0 python-mimeparse==1.6.0 python-subunit==1.0.0 pytz==2013.6 diff --git a/requirements.txt b/requirements.txt index ad13f8c..80e77b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,4 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0 requests>=2.14.2 # Apache-2.0 six>=1.10.0 # MIT +python-dateutil>=2.7.0 # BSD diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 636f5cc..eb53ebe 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -13,6 +13,7 @@ # This is referred from Redfish standard schema. 
# https://redfish.dmtf.org/schemas/Settings.v1_2_0.json +from dateutil import parser from sushy.resources import base from sushy.resources import common @@ -57,7 +58,8 @@ class MaintenanceWindowField(base.CompositeField): maintenance_window_start_time = base.Field( 'MaintenanceWindowStartTime', - required=True) + required=True, + adapter=parser.parse) """The start time of a maintenance window""" @@ -72,7 +74,8 @@ class OperationApplyTimeSupportField(base.CompositeField): """The location of the maintenance window settings""" maintenance_window_start_time = base.Field( - 'MaintenanceWindowStartTime') + 'MaintenanceWindowStartTime', + adapter=parser.parse) """The start time of a maintenance window""" supported_values = base.Field('SupportedValues', required=True) diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 671c7f0..b1055e0 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -4,12 +4,12 @@ "ETag": "9234ac83b9700123cc32", "MaintenanceWindow": { "MaintenanceWindowDurationInSeconds": 1, - "MaintenanceWindowStartTime": "2016-03-07T14:44.30-05:05" + "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:05" }, "OperationApplyTimeSupport": { "MaintenanceWindowDurationInSeconds": 1, "MaintenanceWindowResource": "", - "MaintenanceWindowStartTime": "2016-03-07T14:44.30-05:10", + "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:10", "SupportedValues": [ "Immediate", "OnReset", @@ -32,6 +32,6 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44.30-05:00" + "Time": "2016-03-07T14:44:30-05:00" } } diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 2b6e883..3535297 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -16,6 +16,9 @@ import json import mock +from dateutil import parser 
+ +from sushy import exceptions from sushy.resources import constants as res_cons from sushy.resources import settings from sushy.tests.unit import base @@ -35,7 +38,7 @@ class SettingsFieldTestCase(base.TestCase): self.assertEqual('9234ac83b9700123cc32', instance._etag) - self.assertEqual('2016-03-07T14:44.30-05:00', + self.assertEqual('2016-03-07T14:44:30-05:00', instance.time) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', instance._settings_object_idref.resource_uri) @@ -58,20 +61,29 @@ class SettingsFieldTestCase(base.TestCase): instance. maintenance_window.maintenance_window_duration_in_seconds) self.assertEqual( - '2016-03-07T14:44.30-05:05', + parser.parse('2016-03-07T14:44:30-05:05'), instance.maintenance_window.maintenance_window_start_time) self.assertEqual( 1, instance.operation_apply_time_support. maintenance_window_duration_in_seconds) self.assertEqual( - '2016-03-07T14:44.30-05:10', + parser.parse('2016-03-07T14:44:30-05:10'), instance.operation_apply_time_support. 
maintenance_window_start_time) self.assertIn( 'Immediate', instance.operation_apply_time_support.supported_values) + def test__load_failure(self): + self.json[ + '@Redfish.Settings']['MaintenanceWindow'][ + 'MaintenanceWindowStartTime'] = 'bad date' + self.assertRaisesRegex( + exceptions.MalformedAttributeError, + '@Redfish.Settings/MaintenanceWindow/MaintenanceWindowStartTime', + self.settings._load, self.json, mock.Mock()) + def test_commit(self): conn = mock.Mock() instance = self.settings._load(self.json, conn) -- GitLab From 185b2503cb6be157fbb88dc44ba89c888fffbe4b Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Wed, 23 Jan 2019 11:39:51 +0100 Subject: [PATCH 120/303] Move to zuulv3 Change sushy job to zuuvl3 and use ironic-base job Depends-On: https://review.openstack.org/#/c/630100/ Change-Id: I5364432a52a597d58df7d1b2f1fc599c016250c1 --- .../post.yaml | 15 -- .../run.yaml | 159 ------------------ zuul.d/legacy-sushy-jobs.yaml | 26 --- zuul.d/project.yaml | 4 +- zuul.d/sushy-jobs.yaml | 29 ++++ 5 files changed, 31 insertions(+), 202 deletions(-) delete mode 100644 playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml delete mode 100644 playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml delete mode 100644 zuul.d/legacy-sushy-jobs.yaml create mode 100644 zuul.d/sushy-jobs.yaml diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml deleted file mode 100644 index e07f551..0000000 --- a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml +++ /dev/null @@ -1,15 +0,0 @@ -- hosts: primary - tasks: - - - name: Copy files from {{ ansible_user_dir }}/workspace/ on node - synchronize: - src: '{{ ansible_user_dir }}/workspace/' - dest: '{{ zuul.executor.log_root }}' - mode: pull - copy_links: true - verify_host: true - rsync_opts: - - --include=/logs/** - - 
--include=*/ - - --exclude=* - - --prune-empty-dirs diff --git a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml b/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml deleted file mode 100644 index cae77b8..0000000 --- a/playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml +++ /dev/null @@ -1,159 +0,0 @@ -- hosts: all - name: Autoconverted job legacy-tempest-dsvm-ironic-ipa-partition-redfish-sushy-src - from old job gate-tempest-dsvm-ironic-ipa-partition-redfish-sushy-src-ubuntu-xenial - tasks: - - - name: Ensure legacy workspace directory - file: - path: '{{ ansible_user_dir }}/workspace' - state: directory - - - shell: - cmd: | - set -e - set -x - cat > clonemap.yaml << EOF - clonemap: - - name: openstack-infra/devstack-gate - dest: devstack-gate - EOF - /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \ - git://git.openstack.org \ - openstack-infra/devstack-gate - executable: /bin/bash - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | zuul_legacy_vars }}' - - - shell: - cmd: | - cat << 'EOF' >> ironic-extra-vars - export DEVSTACK_PROJECT_FROM_GIT="sushy,$DEVSTACK_PROJECT_FROM_GIT" - - EOF - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | zuul_legacy_vars }}' - - - shell: - cmd: | - cat << 'EOF' >> ironic-extra-vars - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_DEPLOY_DRIVER_ISCSI_WITH_IPA=True" - # Standardize VM size for each supported ramdisk - case "tinyipa" in - 'tinyipa') - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=384" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=tinyipa" - ;; - 'tinyipa256') - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=256" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=tinyipa" - ;; - 'coreos') - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_SPECS_RAM=1280" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_RAMDISK_TYPE=coreos" - ;; - # if using a ramdisk 
without a known good value, use the devstack - # default by not exporting any value for IRONIC_VM_SPECS_RAM - esac - - EOF - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | zuul_legacy_vars }}' - - - shell: - cmd: | - cat << 'EOF' >> ironic-extra-vars - export DEVSTACK_GATE_TEMPEST_REGEX="ironic_tempest_plugin.tests.scenario" - - EOF - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | zuul_legacy_vars }}' - - - shell: - cmd: | - cat << 'EOF' >> ironic-vars-early - # use tempest plugin - export DEVSTACK_LOCAL_CONFIG+=$'\n'"TEMPEST_PLUGINS+=' /opt/stack/new/ironic-tempest-plugin'" - export TEMPEST_CONCURRENCY=1 - EOF - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | zuul_legacy_vars }}' - - - shell: - cmd: | - set -e - set -x - export PROJECTS="openstack/ironic $PROJECTS" - export PROJECTS="openstack/ironic-lib $PROJECTS" - export PROJECTS="openstack/ironic-python-agent $PROJECTS" - export PROJECTS="openstack/ironic-tempest-plugin $PROJECTS" - export PROJECTS="openstack/python-ironicclient $PROJECTS" - export PROJECTS="openstack/pyghmi $PROJECTS" - export PROJECTS="openstack/virtualbmc $PROJECTS" - export PYTHONUNBUFFERED=true - export DEVSTACK_GATE_TEMPEST=1 - export DEVSTACK_GATE_IRONIC=1 - export DEVSTACK_GATE_NEUTRON=1 - export DEVSTACK_GATE_VIRT_DRIVER=ironic - export DEVSTACK_GATE_CONFIGDRIVE=1 - export DEVSTACK_GATE_IRONIC_DRIVER=redfish - export BRANCH_OVERRIDE=default - if [ "$BRANCH_OVERRIDE" != "default" ] ; then - export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE - fi - - if [[ ! 
"stable/newton stable/ocata stable/pike" =~ $ZUUL_BRANCH ]] ; then - export DEVSTACK_GATE_TLSPROXY=1 - fi - - if [ "redfish" == "pxe_snmp" ] ; then - # explicitly enable pxe_snmp driver - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_ENABLED_DRIVERS=fake,pxe_snmp" - fi - - if [ "redfish" == "redfish" ] ; then - # When deploying with redfish we need to enable the "redfish" - # hardware type - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_ENABLED_HARDWARE_TYPES=redfish" - fi - - if [ "partition" == "wholedisk" ] ; then - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_TEMPEST_WHOLE_DISK_IMAGE=True" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_EPHEMERAL_DISK=0" - else - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_TEMPEST_WHOLE_DISK_IMAGE=False" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_EPHEMERAL_DISK=1" - fi - - if [ -n "" ] ; then - export DEVSTACK_GATE_IRONIC_BUILD_RAMDISK=1 - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_INSPECTOR_BUILD_RAMDISK=True" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"USE_SUBNETPOOL=False" - else - export DEVSTACK_GATE_IRONIC_BUILD_RAMDISK=0 - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_INSPECTOR_BUILD_RAMDISK=False" - fi - - if [ "bios" == "uefi" ] ; then - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_BOOT_MODE=uefi" - fi - - export DEVSTACK_PROJECT_FROM_GIT="" - export DEVSTACK_LOCAL_CONFIG+=$'\n'"IRONIC_VM_COUNT=1" - - # Ensure the ironic-vars-EARLY file exists - touch ironic-vars-early - # Pull in the EARLY variables injected by the optional builders - source ironic-vars-early - - export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin ironic git://git.openstack.org/openstack/ironic" - - # Ensure the ironic-EXTRA-vars file exists - touch ironic-extra-vars - # Pull in the EXTRA variables injected by the optional builders - source ironic-extra-vars - - cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh - ./safe-devstack-vm-gate-wrap.sh - executable: /bin/bash - chdir: '{{ ansible_user_dir }}/workspace' - environment: '{{ zuul | 
zuul_legacy_vars }}' diff --git a/zuul.d/legacy-sushy-jobs.yaml b/zuul.d/legacy-sushy-jobs.yaml deleted file mode 100644 index e8a8902..0000000 --- a/zuul.d/legacy-sushy-jobs.yaml +++ /dev/null @@ -1,26 +0,0 @@ -- job: - name: sushy-tempest-dsvm-ironic-ipa-partition-redfish-src - parent: legacy-dsvm-base - irrelevant-files: - - ^test-requirements.txt$ - - ^.*\.rst$ - - ^doc/.*$ - - ^releasenotes/.*$ - - ^setup.cfg$ - - ^sushy/tests/.*$ - - ^tools/.*$ - - ^tox.ini$ - required-projects: - - openstack-infra/devstack-gate - - openstack/ironic - - openstack/ironic-lib - - openstack/ironic-python-agent - - openstack/ironic-tempest-plugin - - openstack/pyghmi - - openstack/python-ironicclient - - openstack/sushy - - openstack/tempest - - openstack/virtualbmc - run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/run.yaml - post-run: playbooks/legacy/sushy-tempest-dsvm-ironic-ipa-partition-redfish-src/post.yaml - timeout: 5400 diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index ef8474b..73673d3 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -10,7 +10,7 @@ - release-notes-jobs-python3 check: jobs: - - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src + - sushy-tempest-ironic-ipa-partition-redfish-src gate: jobs: - - sushy-tempest-dsvm-ironic-ipa-partition-redfish-src + - sushy-tempest-ironic-ipa-partition-redfish-src diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml new file mode 100644 index 0000000..108e388 --- /dev/null +++ b/zuul.d/sushy-jobs.yaml @@ -0,0 +1,29 @@ +- job: + name: sushy-tempest-ironic-ipa-partition-redfish-src + parent: ironic-base + irrelevant-files: + - ^test-requirements.txt$ + - ^.*\.rst$ + - ^doc/.*$ + - ^releasenotes/.*$ + - ^setup.cfg$ + - ^sushy/tests/.*$ + - ^tools/.*$ + - ^tox.ini$ + timeout: 5400 + required-projects: + - openstack/ironic-lib + - openstack/sushy + vars: + devstack_localrc: + IRONIC_DEPLOY_DRIVER: redfish + IRONIC_ENABLED_HARDWARE_TYPES: redfish + 
IRONIC_DEFAULT_RESCUE_INTERFACE: "" + EBTABLES_RACE_FIX: True + SWIFT_ENABLE_TEMPURLS: True + SWIFT_TEMPURL_KEY: secretkey + devstack_services: + s-account: True + s-container: True + s-object: True + s-proxy: True -- GitLab From 945edeb86314822478c0f1ba92e0469f5fba0e67 Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Thu, 14 Feb 2019 10:52:33 +0100 Subject: [PATCH 121/303] Follow Up Zuulv3 This is a follow-up for I5364432a52a597d58df7d1b2f1fc599c016250c1 Change-Id: Ie3ee54ff6ea49d454d8c31e6c3807af114af2fe1 --- zuul.d/project.yaml | 4 ++-- zuul.d/sushy-jobs.yaml | 16 +--------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 73673d3..6c936e3 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -10,7 +10,7 @@ - release-notes-jobs-python3 check: jobs: - - sushy-tempest-ironic-ipa-partition-redfish-src + - sushy-tempest-ironic-partition-redfish-src gate: jobs: - - sushy-tempest-ironic-ipa-partition-redfish-src + - sushy-tempest-ironic-partition-redfish-src diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 108e388..69870fb 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -1,18 +1,11 @@ - job: - name: sushy-tempest-ironic-ipa-partition-redfish-src + name: sushy-tempest-ironic-partition-redfish-src parent: ironic-base irrelevant-files: - ^test-requirements.txt$ - - ^.*\.rst$ - - ^doc/.*$ - - ^releasenotes/.*$ - - ^setup.cfg$ - ^sushy/tests/.*$ - - ^tools/.*$ - - ^tox.ini$ timeout: 5400 required-projects: - - openstack/ironic-lib - openstack/sushy vars: devstack_localrc: @@ -20,10 +13,3 @@ IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" EBTABLES_RACE_FIX: True - SWIFT_ENABLE_TEMPURLS: True - SWIFT_TEMPURL_KEY: secretkey - devstack_services: - s-account: True - s-container: True - s-object: True - s-proxy: True -- GitLab From 88aeb030c3f3dc359f0ebe6f340e1933e446fde6 Mon Sep 17 00:00:00 2001 From: dnuka Date: Sat, 27 Oct 
2018 13:58:08 +0530 Subject: [PATCH 122/303] Add support for the `UpdateService` resource Adds the `UpdateService` resource of Redfish standard schema. `UpdateService` is responsible for managing firmware updates. Story: #2003853 Task: #26652 Change-Id: I7477d4463491f99368f1a5981bce2ddacf612a8c --- .../add_update_service-b54c9bb0177e3468.yaml | 5 + sushy/main.py | 17 ++ sushy/resources/updateservice/__init__.py | 0 sushy/resources/updateservice/constants.py | 26 +++ sushy/resources/updateservice/mappings.py | 35 ++++ .../updateservice/softwareinventory.py | 96 +++++++++++ .../resources/updateservice/updateservice.py | 155 ++++++++++++++++++ sushy/tests/unit/json_samples/root.json | 3 + .../unit/json_samples/softwareinventory.json | 29 ++++ .../softwareinventory_collection.json | 14 ++ .../unit/json_samples/updateservice.json | 30 ++++ .../unit/resources/updateservice/__init__.py | 0 .../updateservice/test_softwareinventory.py | 90 ++++++++++ .../updateservice/test_updateservice.py | 106 ++++++++++++ sushy/tests/unit/test_main.py | 13 ++ 15 files changed, 619 insertions(+) create mode 100644 releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml create mode 100644 sushy/resources/updateservice/__init__.py create mode 100644 sushy/resources/updateservice/constants.py create mode 100644 sushy/resources/updateservice/mappings.py create mode 100644 sushy/resources/updateservice/softwareinventory.py create mode 100644 sushy/resources/updateservice/updateservice.py create mode 100644 sushy/tests/unit/json_samples/softwareinventory.json create mode 100644 sushy/tests/unit/json_samples/softwareinventory_collection.json create mode 100644 sushy/tests/unit/json_samples/updateservice.json create mode 100644 sushy/tests/unit/resources/updateservice/__init__.py create mode 100644 sushy/tests/unit/resources/updateservice/test_softwareinventory.py create mode 100644 sushy/tests/unit/resources/updateservice/test_updateservice.py diff --git 
a/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml b/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml new file mode 100644 index 0000000..e9dd662 --- /dev/null +++ b/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for the UpdateService resource to the library. + `UpdateService` is responsible for managing firmware updates. diff --git a/sushy/main.py b/sushy/main.py index eb7a5b8..12e2672 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -24,6 +24,7 @@ from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +from sushy.resources.updateservice import updateservice LOG = logging.getLogger(__name__) @@ -79,6 +80,9 @@ class Sushy(base.ResourceBase): _registries_path = base.Field(['Registries', '@odata.id']) """Registries path""" + _update_service_path = base.Field(['UpdateService', '@odata.id']) + """UpdateService path""" + def __init__(self, base_url, username=None, password=None, root_prefix='/redfish/v1/', verify=True, auth=None, connector=None): @@ -226,6 +230,19 @@ class Sushy(base.ResourceBase): return session.Session(self._conn, identity, redfish_version=self.redfish_version) + def get_update_service(self): + """Get the UpdateService object + + :returns: The UpdateService object + """ + if not self._update_service_path: + raise exceptions.MissingAttributeError( + attribute='UpdateService/@odata.id', resource=self._path) + + return updateservice.UpdateService( + self._conn, self._update_service_path, + redfish_version=self.redfish_version) + def _get_registry_collection(self): """Get MessageRegistryFileCollection object diff --git a/sushy/resources/updateservice/__init__.py b/sushy/resources/updateservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/updateservice/constants.py 
b/sushy/resources/updateservice/constants.py new file mode 100644 index 0000000..bffa493 --- /dev/null +++ b/sushy/resources/updateservice/constants.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish UpdateService json-schema. +# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json + +# Transfer Protocol Type constants + +TRANSFER_PROTOCOL_TYPE_CIFS = 'CIFS' +TRANSFER_PROTOCOL_TYPE_FTP = 'FTP' +TRANSFER_PROTOCOL_TYPE_SFTP = 'SFTP' +TRANSFER_PROTOCOL_TYPE_HTTP = 'HTTP' +TRANSFER_PROTOCOL_TYPE_HTTPS = 'HTTPS' +TRANSFER_PROTOCOL_TYPE_SCP = 'SCP' +TRANSFER_PROTOCOL_TYPE_TFTP = 'TFTP' +TRANSFER_PROTOCOL_TYPE_OEM = 'OEM' +TRANSFER_PROTOCOL_TYPE_NFS = 'NFS' diff --git a/sushy/resources/updateservice/mappings.py b/sushy/resources/updateservice/mappings.py new file mode 100644 index 0000000..4f5d60e --- /dev/null +++ b/sushy/resources/updateservice/mappings.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.updateservice import constants as ups_cons +from sushy import utils + + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP = { + 'Common Internet File System Protocol': + ups_cons.TRANSFER_PROTOCOL_TYPE_CIFS, + 'File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_FTP, + 'Secure File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_SFTP, + 'Hypertext Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_HTTP, + 'HTTP Secure Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_HTTPS, + 'Secure File Copy Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_SCP, + 'Trivial File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_TFTP, + 'A protocol defined by the manufacturer': + ups_cons.TRANSFER_PROTOCOL_TYPE_OEM, + 'Network File System Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_NFS +} + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(TRANSFER_PROTOCOL_TYPE_VALUE_MAP)) + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP[ + 'Network File System Protocol'] = ups_cons.TRANSFER_PROTOCOL_TYPE_NFS diff --git a/sushy/resources/updateservice/softwareinventory.py b/sushy/resources/updateservice/softwareinventory.py new file mode 100644 index 0000000..3615bb8 --- /dev/null +++ b/sushy/resources/updateservice/softwareinventory.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/SoftwareInventory.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common + +LOG = logging.getLogger(__name__) + + +class SoftwareInventory(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The software inventory identity""" + + lowest_supported_version = base.Field('LowestSupportedVersion') + """The lowest supported version of the software""" + + manufacturer = base.Field('Manufacturer') + """The manufacturer of the software""" + + name = base.Field('Name', required=True) + """The software inventory name""" + + release_date = base.Field('ReleaseDate') + """Release date of the software""" + + related_item = base.Field('RelatedItem') + """The ID(s) of the resources associated with the software inventory + item""" + + status = common.StatusField('Status') + """The status of the software inventory""" + + software_id = base.Field('SoftwareId') + """The identity of the software""" + + uefi_device_paths = base.Field('UefiDevicePaths') + """Represents the UEFI Device Path(s)""" + + updateable = base.Field('Updateable') + """Indicates whether this software can be updated by the update + service""" + + version = base.Field('Version') + """The version of the software""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a SoftwareInventory + + :param connector: A Connector instance + :param identity: The identity of the SoftwareInventory resources + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. 
+ """ + super(SoftwareInventory, self).__init__( + connector, + identity, + redfish_version) + + +class SoftwareInventoryCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The software inventory collection name""" + + description = base.Field('Description') + """The software inventory collection description""" + + @property + def _resource_type(self): + return SoftwareInventory + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a SoftwareInventoryCollection + + :param connector: A Connector instance + :param identity: The identity of SoftwareInventory resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + """ + super(SoftwareInventoryCollection, self).__init__( + connector, identity, redfish_version) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py new file mode 100644 index 0000000..cd2e90c --- /dev/null +++ b/sushy/resources/updateservice/updateservice.py @@ -0,0 +1,155 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources.updateservice import mappings as up_maps +from sushy.resources.updateservice import softwareinventory +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class SimpleUpdateActionField(common.ActionField): + + image_uri = base.Field('ImageURI') + """The URI of the software image to be installed""" + + targets = base.Field('Targets') + """The array of URIs indicating where the update image is to be""" + \ + """applied""" + + transfer_protocol = base.MappedField( + 'TransferProtocol', + up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) + """The network protocol used by the Update Service""" + + +class ActionsField(base.CompositeField): + + simple_update = SimpleUpdateActionField( + '#UpdateService.SimpleUpdate') + + +class UpdateService(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The update service identity""" + + http_push_uri = base.Field('HttpPushUri') + """The URI used to perform an HTTP or HTTPS push update to the Update + Service""" + + http_push_uri_targets = base.Field('HttpPushUriTargets') + """The array of URIs indicating the target for applying the""" + \ + """update image""" + + http_push_uri_targets_busy = base.Field('HttpPushUriTargetsBusy') + """This represents if the HttpPushUriTargets property is reserved""" + \ + """by anyclient""" + + name = base.Field('Name', required=True) + """The update service name""" + + service_enabled = base.Field('ServiceEnabled') + """The status of whether this service is enabled""" + + status = common.StatusField('Status') + """The status of the update service""" + + _actions = ActionsField('Actions', required=True) + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a UpdateService + + :param connector: A Connector instance + :param identity: The 
identity of the UpdateService resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version + """ + super(UpdateService, self).__init__( + connector, + identity, + redfish_version) + + def _get_simple_update_element(self): + simple_update_action = self._actions.simple_update + if not simple_update_action: + raise exceptions.MissingAttributeError( + action='#UpdateService.SimpleUpdate', + resource=self._path) + return simple_update_action + + def get_allowed_transfer_protocol_values(self): + """Get the allowed values for transfer protocol. + + :returns: A set of allowed values. + :raises: MissingAttributeError, if Actions/#UpdateService.SimpleUpdate + attribute not present. + """ + simple_update_action = self._get_simple_update_element() + + if not simple_update_action.transfer_protocol: + LOG.warning( + 'Could not figure out the allowed values for the simple ' + 'update action for UpdateService %s', self.identity) + return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV) + + return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP[v] for v in + simple_update_action.transfer_protocol if v in + up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) + + def simple_update(self, image_uri, targets, transfer_protocol): + """Simple Update is used to update software components""" + transfer_protocol = transfer_protocol + + valid_transfer_protocols = self.get_allowed_transfer_protocol_values() + if transfer_protocol not in valid_transfer_protocols: + raise exceptions.InvalidParameterValueError( + parameter='transfer_protocol', value=transfer_protocol, + valid_values=valid_transfer_protocols) + + self._conn.post(data={ + 'ImageURI': image_uri, + 'Targets': targets, + 'TransferProtocol': transfer_protocol}) + + def _get_software_inventory_collection_path(self): + """Helper function to find the SoftwareInventoryCollections path""" + soft_inv_col = self.json.get('SoftwareInventory') + if not soft_inv_col: + raise 
exceptions.MissingAttributeError( + attribute='SoftwareInventory', resource=self._path) + return soft_inv_col.get('@odata.id') + + @property + @utils.cache_it + def software_inventory(self): + """Property to reference SoftwareInventoryCollection instance""" + return softwareinventory.SoftwareInventoryCollection( + self._conn, self._get_software_inventory_collection_path, + redfish_version=self.redfish_version) + + @property + @utils.cache_it + def firmware_inventory(self): + """Property to reference SoftwareInventoryCollection instance""" + return softwareinventory.SoftwareInventoryCollection( + self._conn, self._get_software_inventory_collection_path, + redfish_version=self.redfish_version) diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index 66f6d55..40d950f 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -27,6 +27,9 @@ "SessionService": { "@odata.id": "/redfish/v1/SessionService" }, + "UpdateService": { + "@odata.id": "/redfish/v1/UpdateService" + }, "AccountService": { "@odata.id": "/redfish/v1/AccountService" }, diff --git a/sushy/tests/unit/json_samples/softwareinventory.json b/sushy/tests/unit/json_samples/softwareinventory.json new file mode 100644 index 0000000..2e3264f --- /dev/null +++ b/sushy/tests/unit/json_samples/softwareinventory.json @@ -0,0 +1,29 @@ +{ + "@odata.type": "#SoftwareInventory.v1_2_0.SoftwareInventory", + "Id": "BMC", + "Name": "Contoso BMC Firmware", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Updateable": true, + "Manufacturer": "Contoso", + "ReleaseDate": "2017-08-22T12:00:00", + "Version": "1.45.455b66-rev4", + "SoftwareId": "1624A9DF-5E13-47FC-874A-DF3AFF143089", + "LowestSupportedVersion": "1.30.367a12-rev1", + "UefiDevicePaths": [ + "BMC(0x1,0x0ABCDEF)" + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Managers/1" + } + ], + "Actions": { + "Oem": {} + }, + "Oem": {}, + "@odata.context": 
"/redfish/v1/$metadata#SoftwareInventory.SoftwareInventory", + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/BMC" +} diff --git a/sushy/tests/unit/json_samples/softwareinventory_collection.json b/sushy/tests/unit/json_samples/softwareinventory_collection.json new file mode 100644 index 0000000..71c58e4 --- /dev/null +++ b/sushy/tests/unit/json_samples/softwareinventory_collection.json @@ -0,0 +1,14 @@ +{ + "@odata.type": "#SoftwareInventoryCollection.v1_4_0.SoftwareInventoryCollection", + "@odata.id": "/redfish/v1/UpdateService/SoftwareInventory", + "Name": "Software Inventory Collection", + "Members@odata.count": 2, + "Members": [ + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory" + }, + { + "@odata.id": "/redfish/v1/UpdateService/SoftwareInventory" + } + ] +} diff --git a/sushy/tests/unit/json_samples/updateservice.json b/sushy/tests/unit/json_samples/updateservice.json new file mode 100644 index 0000000..1a67b5e --- /dev/null +++ b/sushy/tests/unit/json_samples/updateservice.json @@ -0,0 +1,30 @@ +{ + "@odata.type": "#UpdateService.v1_2_1.UpdateService", + "Id": "UpdateService", + "Name": "Update service", + "Status": { + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "ServiceEnabled": true, + "HttpPushUri": "/FWUpdate", + "HttpPushUriTargets": ["/FWUpdate"], + "HttpPushUriTargetsBusy": false, + "FirmwareInventory": { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory" + }, + "SoftwareInventory": { + "@odata.id": "/redfish/v1/UpdateService/SoftwareInventory" + }, + "Actions": { + "#UpdateService.SimpleUpdate": { + "target": "/redfish/v1/UpdateService/Actions/SimpleUpdate", + "@Redfish.ActionInfo": "/redfish/v1/UpdateService/SimpleUpdateActionInfo" + }, + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#UpdateService.UpdateService", + "@odata.id": "/redfish/v1/UpdateService" +} diff --git a/sushy/tests/unit/resources/updateservice/__init__.py 
b/sushy/tests/unit/resources/updateservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py new file mode 100644 index 0000000..98536ca --- /dev/null +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -0,0 +1,90 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import mock + +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.updateservice import softwareinventory +from sushy.tests.unit import base + + +class SoftwareInventoryTestCase(base.TestCase): + + def setUp(self): + super(SoftwareInventoryTestCase, self).setUp() + conn = mock.Mock() + with open( + 'sushy/tests/unit/json_samples/softwareinventory.json') as f: + conn.get.return_value.json.return_value = json.load(f) + + self.soft_inv = softwareinventory.SoftwareInventory( + conn, + '/redfish/v1/UpdateService/SoftwareInventory/1', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.soft_inv._parse_attributes() + self.assertEqual('BMC', self.soft_inv.identity) + self.assertEqual( + '1.30.367a12-rev1', + self.soft_inv.lowest_supported_version) + self.assertEqual('Contoso', self.soft_inv.manufacturer) + self.assertEqual('Contoso BMC Firmware', self.soft_inv.name) + self.assertEqual('2017-08-22T12:00:00', self.soft_inv.release_date) + self.assertEqual( + 
res_cons.STATE_ENABLED, + self.soft_inv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.soft_inv.status.health) + self.assertEqual( + '1624A9DF-5E13-47FC-874A-DF3AFF143089', + self.soft_inv.software_id) + self.assertTrue(self.soft_inv.updateable) + self.assertEqual('1.45.455b66-rev4', self.soft_inv.version) + + def test__parse_attributes_missing_identity(self): + self.soft_inv.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.soft_inv._parse_attributes) + + +class SoftwareInventoryCollectionTestCase(base.TestCase): + + def setUp(self): + super(SoftwareInventoryCollectionTestCase, self).setUp() + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'softwareinventory_collection.json') as f: + conn.get.return_value.json.return_value = json.load(f) + + self.soft_inv_col = softwareinventory.SoftwareInventoryCollection( + conn, '/redfish/v1/UpdateService/SoftwareInventory', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.soft_inv_col._parse_attributes() + self.assertEqual('1.3.0', self.soft_inv_col.redfish_version) + self.assertEqual( + 'Software Inventory Collection', + self.soft_inv_col.name) + + @mock.patch.object( + softwareinventory, 'SoftwareInventory', autospec=True) + def test_get_member(self, mock_softwareinventory): + path = '/redfish/v1/UpdateService/SoftwareInventory/1' + self.soft_inv_col.get_member(path) + mock_softwareinventory.assert_called_once_with( + self.soft_inv_col._conn, path, + redfish_version=self.soft_inv_col.redfish_version) diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py new file mode 100644 index 0000000..741d81a --- /dev/null +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -0,0 +1,106 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import mock + +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.updateservice import constants as ups_cons +from sushy.resources.updateservice import softwareinventory +from sushy.resources.updateservice import updateservice +from sushy.tests.unit import base + + +class UpdateServiceTestCase(base.TestCase): + + def setUp(self): + super(UpdateServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/updateservice.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.upd_serv = updateservice.UpdateService( + self.conn, '/redfish/v1/UpdateService/UpdateService', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.upd_serv._parse_attributes() + self.assertEqual('UpdateService', self.upd_serv.identity) + self.assertEqual('/FWUpdate', self.upd_serv.http_push_uri) + self.assertIn('/FWUpdate', self.upd_serv.http_push_uri_targets) + self.assertFalse(self.upd_serv.http_push_uri_targets_busy) + self.assertEqual('Update service', self.upd_serv.name) + self.assertTrue(self.upd_serv.service_enabled) + self.assertEqual(res_cons.STATE_ENABLED, self.upd_serv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.upd_serv.status.health) + self.assertEqual( + res_cons.HEALTH_OK, + self.upd_serv.status.health_rollup) + + def test__parse_attributes_missing_actions(self): + self.upd_serv.json.pop('Actions') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Actions', + 
self.upd_serv._parse_attributes) + + def test_simple_update(self): + self.upd_serv.simple_update( + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol=ups_cons.TRANSFER_PROTOCOL_TYPE_HTTPS) + self.upd_serv._conn.post.assert_called_once_with( + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', + 'TransferProtocol': 'HTTPS'}) + + def test_software_inventory(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'softwareinventory_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_software_inventory = self.upd_serv.software_inventory + # | THEN | + self.assertIsInstance(actual_software_inventory, + softwareinventory.SoftwareInventoryCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + self.assertIs(actual_software_inventory, + self.upd_serv.software_inventory) + self.conn.get.return_value.json.assert_not_called() + + def test_firmware_inventory(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'softwareinventory_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_firmware_inventory = self.upd_serv.firmware_inventory + # | THEN | + self.assertIsInstance(actual_firmware_inventory, + softwareinventory.SoftwareInventoryCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + self.assertIs(actual_firmware_inventory, + self.upd_serv.firmware_inventory) + self.conn.get.return_value.json.assert_not_called() diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 486616f..4bc615b 100644 --- 
a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -27,6 +27,7 @@ from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +from sushy.resources.updateservice import updateservice from sushy.tests.unit import base @@ -142,6 +143,13 @@ class MainTestCase(base.TestCase): self.root._conn, 'asdf', redfish_version=self.root.redfish_version) + @mock.patch.object(updateservice, 'UpdateService', autospec=True) + def test_get_update_service(self, mock_upd_serv): + self.root.get_update_service() + mock_upd_serv.assert_called_once_with( + self.root._conn, '/redfish/v1/UpdateService', + redfish_version=self.root.redfish_version) + @mock.patch.object(message_registry_file, 'MessageRegistryFileCollection', autospec=True) @@ -179,5 +187,10 @@ class BareMinimumMainTestCase(base.TestCase): exceptions.MissingAttributeError, 'SessionService/@odata.id', self.root.get_session_service) + def test_get_update_service_when_updateservice_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'UpdateService/@odata.id', self.root.get_update_service) + def test__get_registry_collection_when_registries_attr_absent(self): self.assertIsNone(self.root._get_registry_collection()) -- GitLab From 396da34b7591dad8a5f8382839fcfe775b02ae2b Mon Sep 17 00:00:00 2001 From: dnuka Date: Fri, 5 Oct 2018 20:06:35 +0530 Subject: [PATCH 123/303] Add support for the `CompositionService` resource Adds the `CompositionService` resource of Redfish standard schema. The `CompositionService` is the top level resource for all things related to Composability. If a Redfish service supports Composability, the Service Root resource will contain the `CompositionService` property. Implemented according to the latest Redfish schema versions[1]. 
[1]https://redfish.dmtf.org/schemas/ Story: #2003853 Task: #26650 Change-Id: I135d9d58e6693647a53cdd405b3c841edad4772a --- ..._composition_service-84750d8d1d96474a.yaml | 8 ++ sushy/main.py | 20 ++++ .../resources/compositionservice/__init__.py | 0 .../compositionservice/compositionservice.py | 96 +++++++++++++++ .../resources/compositionservice/constants.py | 31 +++++ .../resources/compositionservice/mappings.py | 40 +++++++ .../compositionservice/resourceblock.py | 112 ++++++++++++++++++ .../compositionservice/resourcezone.py | 92 ++++++++++++++ .../unit/json_samples/compositionservice.json | 21 ++++ .../unit/json_samples/resourceblock.json | 48 ++++++++ .../resourceblock_collection.json | 11 ++ .../tests/unit/json_samples/resourcezone.json | 50 ++++++++ .../json_samples/resourcezone_collection.json | 13 ++ sushy/tests/unit/json_samples/root.json | 3 + .../resources/compositionservice/__init__.py | 0 .../test_compositionservice.py | 49 ++++++++ .../compositionservice/test_resourceblock.py | 107 +++++++++++++++++ .../compositionservice/test_resourcezone.py | 93 +++++++++++++++ sushy/tests/unit/test_main.py | 17 +++ 19 files changed, 811 insertions(+) create mode 100644 releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml create mode 100644 sushy/resources/compositionservice/__init__.py create mode 100644 sushy/resources/compositionservice/compositionservice.py create mode 100644 sushy/resources/compositionservice/constants.py create mode 100644 sushy/resources/compositionservice/mappings.py create mode 100644 sushy/resources/compositionservice/resourceblock.py create mode 100644 sushy/resources/compositionservice/resourcezone.py create mode 100644 sushy/tests/unit/json_samples/compositionservice.json create mode 100644 sushy/tests/unit/json_samples/resourceblock.json create mode 100644 sushy/tests/unit/json_samples/resourceblock_collection.json create mode 100644 sushy/tests/unit/json_samples/resourcezone.json create mode 100644 
sushy/tests/unit/json_samples/resourcezone_collection.json create mode 100644 sushy/tests/unit/resources/compositionservice/__init__.py create mode 100644 sushy/tests/unit/resources/compositionservice/test_compositionservice.py create mode 100644 sushy/tests/unit/resources/compositionservice/test_resourceblock.py create mode 100644 sushy/tests/unit/resources/compositionservice/test_resourcezone.py diff --git a/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml b/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml new file mode 100644 index 0000000..c6585cb --- /dev/null +++ b/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml @@ -0,0 +1,8 @@ +--- +features: + - | + Adds support for the CompositionService resource to the library. + + The `CompositionService` is the top level resource for all things + related to Composability. If a Redfish service supports Composability, + the Service Root resource will contain the `CompositionService` property. diff --git a/sushy/main.py b/sushy/main.py index 12e2672..b1c5963 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -19,6 +19,7 @@ from sushy import connector as sushy_connector from sushy import exceptions from sushy.resources import base from sushy.resources.chassis import chassis +from sushy.resources.compositionservice import compositionservice from sushy.resources.manager import manager from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session @@ -65,6 +66,10 @@ class Sushy(base.ResourceBase): 'ProtocolFeaturesSupported') """The information about protocol features supported by the service""" + _composition_service_path = base.Field( + ['CompositionService', '@odata.id']) + """CompositionService path""" + _systems_path = base.Field(['Systems', '@odata.id']) """SystemCollection path""" @@ -257,3 +262,18 @@ class Sushy(base.ResourceBase): self._conn, self._registries_path, redfish_version=self.redfish_version) + + def 
get_composition_service(self): + """Get the CompositionService object + + :raises: MissingAttributeError, if the composition service + attribute is not found + :returns: The CompositionService object + """ + if not self._composition_service_path: + raise exceptions.MissingAttributeError( + attribute='CompositionService/@odata.id', + resource=self._path) + return compositionservice.CompositionService( + self._conn, self._composition_service_path, + redfish_version=self.redfish_version) diff --git a/sushy/resources/compositionservice/__init__.py b/sushy/resources/compositionservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/compositionservice/compositionservice.py b/sushy/resources/compositionservice/compositionservice.py new file mode 100644 index 0000000..a70ea32 --- /dev/null +++ b/sushy/resources/compositionservice/compositionservice.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/CompositionService.v1_1_0.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources.compositionservice import resourceblock +from sushy.resources.compositionservice import resourcezone +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class CompositionService(base.ResourceBase): + + allow_overprovisioning = base.Field('AllowOverprovisioning') + """This indicates whether this service is allowed to overprovision""" + + allow_zone_affinity = base.Field('AllowZoneAffinity') + """This indicates whether a client is allowed to request that given + composition request""" + + description = base.Field('Description') + """The composition service description""" + + identity = base.Field('Id', required=True) + """The composition service identity string""" + + name = base.Field('Name', required=True) + """The composition service name""" + + status = common.StatusField('Status') + """The status of composition service""" + + service_enabled = base.Field('ServiceEnabled') + """The status of composition service is enabled""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a CompositionService + + :param connector: A connector instance + :param identity: The identity of the CompositionService resource + :param redfish_version: The version of RedFish. 
Used to construct + the object according to schema of given version + """ + super(CompositionService, self).__init__( + connector, + identity, + redfish_version) + + def _get_resource_blocks_collection_path(self): + """Helper function to find the ResourceBlockCollections path""" + res_block_col = self.json.get('ResourceBlocks') + if not res_block_col: + raise exceptions.MissingAttributeError( + attribute='ResourceBlocks', resource=self._path) + return res_block_col.get('@odata.id') + + def _get_resource_zones_collection_path(self): + """Helper function to find the ResourceZoneCollections path""" + res_zone_col = self.json.get('ResourceZones') + if not res_zone_col: + raise exceptions.MissingAttributeError( + attribute='ResourceZones', resource=self._path) + return res_zone_col.get('@odata.id') + + @property + @utils.cache_it + def resource_blocks(self): + """Property to reference `ResourceBlockCollection` instance""" + return resourceblock.ResourceBlockCollection( + self.conn, self._get_resource_blocks_collection_path, + redfish_version=self.redfish_version) + + @property + @utils.cache_it + def resource_zones(self): + """Property to reference `ResourceZoneCollection` instance""" + return resourcezone.ResourceZoneCollection( + self.conn, self._get_resource_zones_collection_path, + redfish_version=self.redfish_version) diff --git a/sushy/resources/compositionservice/constants.py b/sushy/resources/compositionservice/constants.py new file mode 100644 index 0000000..ad12859 --- /dev/null +++ b/sushy/resources/compositionservice/constants.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish ResourceBlock json-schema. +# https://redfish.dmtf.org/schemas/ResourceBlock.v1_3_0.json + +# Composition state related constants +COMPOSITION_STATE_COMPOSING = 'Composing' +COMPOSITION_STATE_COMPOSED_AND_AVAILABLE = 'ComposedAndAvailable' +COMPOSITION_STATE_COMPOSED = 'Composed' +COMPOSITION_STATE_UNUSED = 'Unused' +COMPOSITION_STATE_FAILED = 'Failed' +COMPOSITION_STATE_UNAVAILABLE = 'Unavailable' + +# Resource Block type related constants +RESOURCE_BLOCK_TYPE_COMPUTE = 'Compute' +RESOURCE_BLOCK_TYPE_PROCESSOR = 'Processor' +RESOURCE_BLOCK_TYPE_MEMORY = 'Memory' +RESOURCE_BLOCK_TYPE_NETWORK = 'Network' +RESOURCE_BLOCK_TYPE_STORAGE = 'Storage' +RESOURCE_BLOCK_TYPE_COMPUTERSYSTEM = 'ComputerSystem' +RESOURCE_BLOCK_TYPE_EXPANSION = 'Expansion' diff --git a/sushy/resources/compositionservice/mappings.py b/sushy/resources/compositionservice/mappings.py new file mode 100644 index 0000000..5e09f91 --- /dev/null +++ b/sushy/resources/compositionservice/mappings.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.compositionservice import constants as comp_cons +from sushy import utils + + +COMPOSITION_STATE_VALUE_MAP = { + 'Composing': comp_cons.COMPOSITION_STATE_COMPOSING, + 'ComposedAndAvailable': comp_cons.COMPOSITION_STATE_COMPOSED_AND_AVAILABLE, + 'Composed': comp_cons.COMPOSITION_STATE_COMPOSED, + 'Unused': comp_cons.COMPOSITION_STATE_UNUSED, + 'Failed': comp_cons.COMPOSITION_STATE_FAILED, + 'Unavailable': comp_cons.COMPOSITION_STATE_UNAVAILABLE +} + +COMPOSITION_STATE_VALUE_MAP_REV = ( + utils.revert_dictionary(COMPOSITION_STATE_VALUE_MAP)) + +RESOURCE_BLOCK_TYPE_VALUE_MAP = { + 'Compute': comp_cons.RESOURCE_BLOCK_TYPE_COMPUTE, + 'Processor': comp_cons.RESOURCE_BLOCK_TYPE_PROCESSOR, + 'Memory': comp_cons.RESOURCE_BLOCK_TYPE_MEMORY, + 'Network': comp_cons.RESOURCE_BLOCK_TYPE_NETWORK, + 'Storage': comp_cons.RESOURCE_BLOCK_TYPE_STORAGE, + 'ComputerSystem': comp_cons.RESOURCE_BLOCK_TYPE_COMPUTERSYSTEM, + 'Expansion': comp_cons.RESOURCE_BLOCK_TYPE_EXPANSION +} + +RESOURCE_BLOCK_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(RESOURCE_BLOCK_TYPE_VALUE_MAP)) diff --git a/sushy/resources/compositionservice/resourceblock.py b/sushy/resources/compositionservice/resourceblock.py new file mode 100644 index 0000000..8fabfbf --- /dev/null +++ b/sushy/resources/compositionservice/resourceblock.py @@ -0,0 +1,112 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/ResourceBlock.v1_1_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.compositionservice import mappings as res_maps + +LOG = logging.getLogger(__name__) + + +class CompositionStatusField(base.CompositeField): + + composition_state = base.MappedField( + 'CompositionState', + res_maps.COMPOSITION_STATE_VALUE_MAP, + required=True) + """Inform the client, state of the resource block""" + + max_compositions = base.Field('MaxCompositions') + """The maximum number of compositions""" + + number_of_compositions = base.Field('NumberOfCompositions') + """The number of compositions""" + + reserved_state = base.Field('Reserved') + """Inform the resource block has been identified by a client""" + + sharing_capable = base.Field('SharingCapable') + """Indicates if this Resource Block is capable of participating in + multiple compositions simultaneously""" + + sharing_enabled = base.Field('SharingEnabled') + """Indicates if this Resource Block is allowed to participate in + multiple compositions simultaneously""" + + +class ResourceBlock(base.ResourceBase): + + composition_status = CompositionStatusField( + 'CompositionStatus', + required=True) + """The composition state of resource block""" + + description = base.Field('Description') + """The resource block description""" + + identity = base.Field('Id', required=True) + """The resource block identity string""" + + name = base.Field('Name', required=True) + """The resource block name""" + + resource_block_type = base.MappedField( + 'ResourceBlockType', + res_maps.RESOURCE_BLOCK_TYPE_VALUE_MAP, + required=True) + """The type of resource block""" + + status = common.StatusField('Status') + """The status of resource block""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a ResourceBlock + + :param connector: A Connector instance + :param identity: The identity of the ResourceBlock 
resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + """ + super(ResourceBlock, self).__init__( + connector, + identity, + redfish_version) + + +class ResourceBlockCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The resource block collection name""" + + description = base.Field('Description') + """The resource block collection description""" + + @property + def _resource_type(self): + return ResourceBlock + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a ResourceBlockCollection + + :param connector: A Connector instance + :param identity: A identity of the ResourceBlock resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + """ + super(ResourceBlockCollection, self).__init__( + connector, identity, redfish_version) diff --git a/sushy/resources/compositionservice/resourcezone.py b/sushy/resources/compositionservice/resourcezone.py new file mode 100644 index 0000000..1a6b5bf --- /dev/null +++ b/sushy/resources/compositionservice/resourcezone.py @@ -0,0 +1,92 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Zone.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common + +LOG = logging.getLogger(__name__) + + +class LinksField(base.CompositeField): + + endpoints = base.Field('Endpoints') + """The references to the endpoints that are contained in this zone""" + + involved_switches = base.Field('InvolvedSwitches') + """The references to the switches in this zone""" + + resource_blocks = base.Field('ResourceBlocks') + """The references to the Resource Blocks that are used in this zone""" + + +class ResourceZone(base.ResourceBase): + + # Note(dnuka): This patch doesn't contain 100% of the ResourceZone + + description = base.Field('Description') + """The resources zone description""" + + identity = base.Field('Id', required=True) + """The resource zone identity string""" + + links = LinksField('Links') + """The references to other resources that are related to this + resource""" + + name = base.Field('Name', required=True) + """The resource zone name""" + + status = common.StatusField('Status') + """The resource zone status""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a ResourceZone + + :param connector: A Connector instance + :param identity: The identity of the ResourceZone resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. 
+ """ + super(ResourceZone, self).__init__( + connector, + identity, + redfish_version) + + +class ResourceZoneCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The resource zone collection name""" + + description = base.Field('Description') + """The resource zone collection description""" + + @property + def _resource_type(self): + return ResourceZone + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a ResourceZoneCollection + + :param connector: A Connector instance + :param identity: The identity of the ResourceZone resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + """ + super(ResourceZoneCollection, self).__init__( + connector, identity, redfish_version) diff --git a/sushy/tests/unit/json_samples/compositionservice.json b/sushy/tests/unit/json_samples/compositionservice.json new file mode 100644 index 0000000..07bd0e2 --- /dev/null +++ b/sushy/tests/unit/json_samples/compositionservice.json @@ -0,0 +1,21 @@ +{ + "@odata.context": "/redfish/v1/$metadata#CompositionService.CompositionService", + "@odata.type": "#CompositionService.v1_1_0.CompositionService", + "@odata.id": "/redfish/v1/CompositionService", + "AllowOverprovisioning": false, + "AllowZoneAffinity": true, + "Description": "CompositionService1", + "Id": "CompositionService", + "Name": "Composition Service", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "ResourceBlocks": { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks" + }, + "ResourceZones": { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones" + } +} diff --git a/sushy/tests/unit/json_samples/resourceblock.json b/sushy/tests/unit/json_samples/resourceblock.json new file mode 100644 index 0000000..871b88f --- /dev/null +++ b/sushy/tests/unit/json_samples/resourceblock.json @@ -0,0 +1,48 @@ +{ + "@odata.context": 
"/redfish/v1/$metadata#ResourceBlock.ResourceBlock", + "@odata.type": "#ResourceBlock.v1_3_0.ResourceBlock", + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3", + "Id": "DriveBlock3", + "Name": "Drive Block 3", + "Description": "ResourceBlock1", + "ResourceBlockType": "Storage", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "CompositionStatus": { + "Reserved": false, + "CompositionState": "Composed", + "MaxCompositions": 1, + "NumberOfCompositions": 0, + "SharingCapable": true, + "SharingEnabled": false + }, + "Processors": [], + "Memory": [], + "Storage": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3/Storage/Block3NVMe" + } + ], + "Links": { + "ComputerSystems": [ + { + "@odata.id": "/redfish/v1/Systems/ComposedSystem" + } + ], + "Chassis": [ + { + "@odata.id": "/redfish/v1/Chassis/ComposableModule3" + } + ], + "Zones": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/1" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/2" + } + ] + } +} diff --git a/sushy/tests/unit/json_samples/resourceblock_collection.json b/sushy/tests/unit/json_samples/resourceblock_collection.json new file mode 100644 index 0000000..de33737 --- /dev/null +++ b/sushy/tests/unit/json_samples/resourceblock_collection.json @@ -0,0 +1,11 @@ +{ + "@odata.type": "#ResourceBlockCollection.ResourceBlockCollection", + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks", + "Name": "Resource Block Collection", + "Members@odata.count": 1, + "Members": [ + { "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1" } + ] +} + + diff --git a/sushy/tests/unit/json_samples/resourcezone.json b/sushy/tests/unit/json_samples/resourcezone.json new file mode 100644 index 0000000..31c4ef3 --- /dev/null +++ b/sushy/tests/unit/json_samples/resourcezone.json @@ -0,0 +1,50 @@ +{ + "@odata.context": "/redfish/v1/$metadata#Zone.Zone", + "@odata.type": "#Zone.v1_2_1.Zone", + "@odata.id": 
"/redfish/v1/CompositionService/ResourceZones/1", + "Id": "1", + "Name": "Resource Zone 1", + "Description": "ResourceZone1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Links": { + "ResourceBlocks": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock4" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock5" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock6" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock7" + } + ] + }, + "@Redfish.CollectionCapabilities": { + "@odata.type": "#CollectionCapabilities.v1_0_0.CollectionCapabilities", + "Capabilities": [ + { + "CapabilitiesObject": { + "@odata.id": "/redfish/v1/Systems/Capabilities" + }, + "UseCase": "ComputerSystemComposition", + "Links": { + "TargetCollection": { + "@odata.id": "/redfish/v1/Systems" + } + } + } + ] + } +} diff --git a/sushy/tests/unit/json_samples/resourcezone_collection.json b/sushy/tests/unit/json_samples/resourcezone_collection.json new file mode 100644 index 0000000..423405a --- /dev/null +++ b/sushy/tests/unit/json_samples/resourcezone_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#ZoneCollection.ZoneCollection", + "@odata.id": "/redfish/v1/CompositionService/ResourceZones", + "Name": "Resource Zone Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/1" + } + ] +} + + diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index ae2de36..6e770ef 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -33,6 +33,9 @@ "AccountService": { "@odata.id": "/redfish/v1/AccountService" }, + "CompositionService": { + "@odata.id": 
"/redfish/v1/CompositionService" + }, "EventService": { "@odata.id": "/redfish/v1/EventService" }, diff --git a/sushy/tests/unit/resources/compositionservice/__init__.py b/sushy/tests/unit/resources/compositionservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py new file mode 100644 index 0000000..110c3b1 --- /dev/null +++ b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py @@ -0,0 +1,49 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import mock + +from sushy.resources.compositionservice import compositionservice +from sushy.resources import constants as res_cons +from sushy.tests.unit import base + + +class CompositionServiceTestCase(base.TestCase): + + def setUp(self): + super(CompositionServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open( + 'sushy/tests/unit/json_samples/compositionservice.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.comp_ser = compositionservice.CompositionService( + self.conn, + '/redfish/v1/CompositionService', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.comp_ser._parse_attributes() + self.assertFalse(self.comp_ser.allow_overprovisioning) + self.assertTrue(self.comp_ser.allow_zone_affinity) + self.assertTrue(self.comp_ser.description, 'CompositionService1') + self.assertEqual( + 'CompositionService', + self.comp_ser.identity) + self.assertEqual( + 'Composition Service', + self.comp_ser.name) + self.assertEqual(res_cons.STATE_ENABLED, self.comp_ser.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.comp_ser.status.health) + self.assertTrue(self.comp_ser.service_enabled) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py new file mode 100644 index 0000000..af315bf --- /dev/null +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -0,0 +1,107 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import mock + +from sushy import exceptions +from sushy.resources.compositionservice import constants as res_block_cons +from sushy.resources.compositionservice import resourceblock +from sushy.resources import constants as res_cons + +from sushy.tests.unit import base + + +class ResourceBlockTestCase(base.TestCase): + + def setUp(self): + super(ResourceBlockTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/resourceblock.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.res_block = resourceblock.ResourceBlock( + self.conn, + '/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.res_block._parse_attributes() + self.assertEqual( + res_block_cons.COMPOSITION_STATE_COMPOSED, + self.res_block.composition_status.composition_state) + self.assertEqual(1, self.res_block.composition_status.max_compositions) + self.assertEqual( + 0, self.res_block.composition_status.number_of_compositions) + self.assertFalse(self.res_block.composition_status.reserved_state) + self.assertTrue(self.res_block.composition_status.sharing_capable) + self.assertFalse(self.res_block.composition_status.sharing_enabled) + self.assertEqual('ResourceBlock1', self.res_block.description) + self.assertEqual('DriveBlock3', self.res_block.identity) + self.assertEqual('Drive Block 3', self.res_block.name) + self.assertEqual( + res_block_cons.RESOURCE_BLOCK_TYPE_STORAGE, + self.res_block.resource_block_type) + self.assertEqual( + res_cons.STATE_ENABLED, + self.res_block.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.res_block.status.health) + exp_path = '/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3' + self.assertEqual(exp_path, self.res_block.path) + + def test__parse_attributes_missing_identity(self): + 
self.res_block.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.res_block._parse_attributes) + + +class ResourceBlockCollectionTestCase(base.TestCase): + + def setUp(self): + super(ResourceBlockCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'resourceblock_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.res_block_col = resourceblock.ResourceBlockCollection( + self.conn, '/redfish/v1/CompositionService/ResourceBlocks', + redfish_version='1.0.2') + + def test__parse_attributes(self): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + self.res_block_col._parse_attributes() + self.assertEqual('1.0.2', self.res_block_col.redfish_version) + self.assertEqual( + 'Resource Block Collection', + self.res_block_col.name) + self.assertEqual((path,), self.res_block_col.members_identities) + + @mock.patch.object(resourceblock, 'ResourceBlock', autospec=True) + def test_get_member(self, mock_resourceblock): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + self.res_block_col.get_member(path) + mock_resourceblock.assert_called_once_with( + self.res_block_col._conn, path, + redfish_version=self.res_block_col.redfish_version) + + @mock.patch.object(resourceblock, 'ResourceBlock', autospec=True) + def test_get_members(self, mock_resourceblock): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + members = self.res_block_col.get_members() + mock_resourceblock.assert_called_once_with( + self.res_block_col._conn, path, + redfish_version=self.res_block_col.redfish_version) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py new file mode 100644 index 0000000..200daad --- /dev/null +++ 
b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import mock + +from sushy import exceptions +from sushy.resources.compositionservice import resourcezone +from sushy.resources import constants as res_cons +from sushy.tests.unit import base + + +class ResourceZoneTestCase(base.TestCase): + + def setUp(self): + super(ResourceZoneTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/resourcezone.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.res_zone = resourcezone.ResourceZone( + self.conn, + '/redfish/v1/CompositionService/ResourceZones/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.res_zone._parse_attributes() + self.assertEqual('ResourceZone1', self.res_zone.description) + self.assertEqual('1', self.res_zone.identity) + self.assertEqual('Resource Zone 1', self.res_zone.name) + self.assertEqual( + res_cons.STATE_ENABLED, + self.res_zone.status.state) + self.assertEqual( + res_cons.HEALTH_OK, + self.res_zone.status.health) + exp_path = '/redfish/v1/CompositionService/ResourceZones/1' + self.assertEqual(exp_path, self.res_zone.path) + + def test__parse_attributes_missing_identity(self): + self.res_zone.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.res_zone._parse_attributes) + + +class 
ResourceZoneCollectionTestCase(base.TestCase): + + def setUp(self): + super(ResourceZoneCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'resourcezone_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.res_zone_col = resourcezone.ResourceZoneCollection( + self.conn, '/redfish/v1/CompositionService/ResourceZones', + redfish_version='1.0.2') + + def test__parse_attributes(self): + path = '/redfish/v1/CompositionService/ResourceZones/1' + self.res_zone_col._parse_attributes() + self.assertEqual('1.0.2', self.res_zone_col.redfish_version) + self.assertEqual('Resource Zone Collection', self.res_zone_col.name) + self.assertEqual((path,), self.res_zone_col.members_identities) + + @mock.patch.object(resourcezone, 'ResourceZone', autospec=True) + def test_get_member(self, mock_resourcezone): + path = '/redfish/v1/CompositionService/ResourceZones/1' + self.res_zone_col.get_member(path) + mock_resourcezone.assert_called_once_with( + self.res_zone_col._conn, path, + redfish_version=self.res_zone_col.redfish_version) + + @mock.patch.object(resourcezone, 'ResourceZone', autospec=True) + def test_get_members(self, mock_resourcezone): + path = '/redfish/v1/CompositionService/ResourceZones/1' + members = self.res_zone_col.get_members() + mock_resourcezone.assert_called_once_with( + self.res_zone_col._conn, path, + redfish_version=self.res_zone_col.redfish_version) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 972c84f..a343f3c 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -22,6 +22,7 @@ from sushy import connector from sushy import exceptions from sushy import main from sushy.resources.chassis import chassis +from sushy.resources.compositionservice import compositionservice from sushy.resources.manager import manager from 
sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session @@ -69,6 +70,8 @@ class MainTestCase(base.TestCase): self.assertEqual('/redfish/v1/Chassis', self.root._chassis_path) self.assertEqual('/redfish/v1/SessionService', self.root._session_service_path) + self.assertEqual('/redfish/v1/CompositionService', + self.root._composition_service_path) @mock.patch.object(connector, 'Connector', autospec=True) def test__init_throws_exception(self, mock_Connector): @@ -161,6 +164,14 @@ class MainTestCase(base.TestCase): self.root._conn, '/redfish/v1/Registries', redfish_version=self.root.redfish_version) + @mock.patch.object( + compositionservice, 'CompositionService', autospec=True) + def test_get_composition_service(self, mock_comp_ser): + self.root.get_composition_service() + mock_comp_ser.assert_called_once_with( + self.root._conn, '/redfish/v1/CompositionService', + redfish_version=self.root.redfish_version) + class BareMinimumMainTestCase(base.TestCase): @@ -198,5 +209,11 @@ class BareMinimumMainTestCase(base.TestCase): exceptions.MissingAttributeError, 'UpdateService/@odata.id', self.root.get_update_service) + def test_get_composition_service_when_compositionservice_attr_absent( + self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'CompositionService/@odata.id', self.root.get_composition_service) + def test__get_registry_collection_when_registries_attr_absent(self): self.assertIsNone(self.root._get_registry_collection()) -- GitLab From 9bd1d6b0ddec4059ceeb0e05b8e79542ef96ca4c Mon Sep 17 00:00:00 2001 From: ankit Date: Thu, 14 Feb 2019 07:27:27 +0000 Subject: [PATCH 124/303] Add support for ilo Virtual Media This commit adds support for eject virtual media for HPE Proliant Servers. 
Story: #2004995 Task: #29467 Change-Id: I1706206ac74211c6abb71712ceb55b29b4824f16 --- ...ix-eject-media-empty-dict-573b4c9e06f52ce7.yaml | 6 ++++++ sushy/resources/manager/virtual_media.py | 13 ++++++++++--- .../unit/resources/manager/test_virtual_media.py | 14 ++++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml diff --git a/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml b/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml new file mode 100644 index 0000000..99c207d --- /dev/null +++ b/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Some vendors like HPE iLO has this kind of implementation that for eject + virtual media need to pass empty dictionary otherwise throws Unsupported + media type error. diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index 7c21a87..5253b16 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -13,6 +13,8 @@ # This is referred from Redfish standard schema. # https://redfish.dmtf.org/schemas/VirtualMedia.v1_2_0.json +from six.moves import http_client + from sushy import exceptions from sushy.resources import base from sushy.resources import common @@ -98,9 +100,14 @@ class VirtualMedia(base.ResourceBase): After ejecting media inserted will be False and image_name will be empty. """ - - target_uri = self._get_eject_media_element().target_uri - self._conn.post(target_uri) + try: + target_uri = self._get_eject_media_element().target_uri + self._conn.post(target_uri) + except exceptions.HTTPError as response: + # Some vendors like HPE iLO has this kind of implementation. + # It needs to pass an empty dict. 
+ if response.status_code == http_client.UNSUPPORTED_MEDIA_TYPE: + self._conn.post(target_uri, data={}) self.invalidate() diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index c9245f6..690bc64 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -15,6 +15,7 @@ import json import mock +from six.moves import http_client import sushy from sushy import exceptions @@ -83,3 +84,16 @@ class VirtualMediaTestCase(base.TestCase): ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" "/VirtualMedia.EjectMedia")) self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_pass_empty_dict(self): + target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia") + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.Mock( + status_code=http_client.UNSUPPORTED_MEDIA_TYPE)), '200'] + self.sys_virtual_media.eject_media() + post_calls = [ + mock.call(target_uri), + mock.call(target_uri, data={})] + self.sys_virtual_media._conn.post.assert_has_calls(post_calls) + self.assertTrue(self.sys_virtual_media._is_stale) -- GitLab From 63fc3595a4a6edd1830521131b83e5da8ffc1061 Mon Sep 17 00:00:00 2001 From: dnuka Date: Mon, 25 Feb 2019 16:11:24 +0530 Subject: [PATCH 125/303] Introduce default value for `transfer_protocol` parameter These changes update `simple_update()`. According to [1], "If transfer_protocol parameter is not provided, the Update Service shall use HTTP to retrieve the image." 
[1] https://redfish.dmtf.org/schemas/UpdateService.v1_4_0.json Change-Id: I7af63eed68ba0d4d038bf94dfdf537b8d1fdc33b --- sushy/resources/updateservice/updateservice.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index cd2e90c..309b9a2 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -115,7 +115,7 @@ class UpdateService(base.ResourceBase): simple_update_action.transfer_protocol if v in up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) - def simple_update(self, image_uri, targets, transfer_protocol): + def simple_update(self, image_uri, targets, transfer_protocol='HTTP'): """Simple Update is used to update software components""" transfer_protocol = transfer_protocol -- GitLab From 668c40dec4cb65d658aeb6b22e2009aac963bc8b Mon Sep 17 00:00:00 2001 From: Debayan Ray Date: Tue, 3 Oct 2017 07:24:40 -0400 Subject: [PATCH 126/303] Add foundation for supporting Redfish OEMs This is to create the foundation for supporting resource extensibility as proposed in Redfish specification [0]. In essence it tries to provide a framework to dynamically load the varied OEM resource extensions using stevedore. The target entry points are lazily invoked on demand. Other features as below: * Provides an attribute 'oem_vendors' in Resource Base class to discover the available OEM extensions. Also provides another API to get the resource extension object based on the OEM vendor string. * Provides the foundation for classes which can parse OEM extension attributes. Also a framework to dynamically load the varied OEM resource extensions using ``stevedore``. The target entry points are lazily invoked on demand. * Entry points added with name 'contoso' in setup.cfg. All Entry point names are as lowercase_underscore_joined. 
* A new folder structure added for OEM extensions framework - 'sushy/resources/oem/' * Provides a fake resource extension as a ready reckoner for OEMs. * Uses the existing json sample which consists of OEM string as 'Contoso', a fictitious company, for unit testing purpose. * Add Resource OEM extension related exceptions [0] http://redfish.dmtf.org/schemas/DSP0266_1.1.html#resource-extensibility Story: 1689605 Task: 12039 Change-Id: I6d64d27ec456e46c2cb5052f93a91d9e06ba82de --- lower-constraints.txt | 2 +- ...em-extension-support-50c9849bb7b6b25c.yaml | 13 ++ requirements.txt | 1 + setup.cfg | 5 + sushy/exceptions.py | 8 + sushy/resources/base.py | 20 ++ sushy/resources/oem/__init__.py | 15 ++ sushy/resources/oem/base.py | 107 ++++++++++ sushy/resources/oem/common.py | 132 +++++++++++++ sushy/resources/oem/fake.py | 41 ++++ sushy/tests/unit/resources/oem/__init__.py | 0 sushy/tests/unit/resources/oem/test_common.py | 186 ++++++++++++++++++ sushy/tests/unit/resources/oem/test_fake.py | 51 +++++ .../unit/resources/system/test_system.py | 12 ++ sushy/tests/unit/resources/test_base.py | 32 +++ sushy/tests/unit/test_utils.py | 20 ++ sushy/utils.py | 48 +++++ 17 files changed, 692 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml create mode 100644 sushy/resources/oem/__init__.py create mode 100644 sushy/resources/oem/base.py create mode 100644 sushy/resources/oem/common.py create mode 100644 sushy/resources/oem/fake.py create mode 100644 sushy/tests/unit/resources/oem/__init__.py create mode 100644 sushy/tests/unit/resources/oem/test_common.py create mode 100644 sushy/tests/unit/resources/oem/test_fake.py diff --git a/lower-constraints.txt b/lower-constraints.txt index 946073f..bcfcd5d 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -36,7 +36,7 @@ six==1.10.0 snowballstemmer==1.2.1 Sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 -stevedore==1.20.0 +stevedore==1.29.0 
stestr==2.0.0 testscenarios==0.4 testtools==2.2.0 diff --git a/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml b/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml new file mode 100644 index 0000000..fbeb591 --- /dev/null +++ b/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml @@ -0,0 +1,13 @@ +--- +features: + - | + Adds foundation for supporting resource extensibility proposed as + OEM extensibility in Redfish specification [1] to the library. + + * Provides an attribute 'oem_vendors' in Resource classes to + discover the available OEM extensions. + * Provides a method 'get_oem_extension()' in Resource classes + to get the vendor defined resource OEM extension object, if + discovered. + + [1] http://redfish.dmtf.org/schemas/DSP0266_1.1.html#resource-extensibility diff --git a/requirements.txt b/requirements.txt index 80e77b8..14f51f5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,3 +6,4 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0 requests>=2.14.2 # Apache-2.0 six>=1.10.0 # MIT python-dateutil>=2.7.0 # BSD +stevedore>=1.29.0 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg index 1d0e36d..a007730 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,6 +22,11 @@ classifier = packages = sushy +[entry_points] +sushy.resources.system.oems = + contoso = sushy.resources.oem.fake:FakeOEMSystemExtension + + [build_sphinx] source-dir = doc/source build-dir = doc/build diff --git a/sushy/exceptions.py b/sushy/exceptions.py index e6deb37..e903858 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -61,6 +61,14 @@ class ArchiveParsingError(SushyError): message = 'Failed parsing archive "%(path)s": %(error)s' +class ExtensionError(SushyError): + message = ('Sushy Extension Error: %(error)s') + + +class OEMExtensionNotFoundError(SushyError): + message = 'No %(resource)s OEM extension found by name "%(name)s".' 
+ + class HTTPError(SushyError): """Basic exception for HTTP errors""" diff --git a/sushy/resources/base.py b/sushy/resources/base.py index f84b442..364749d 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -24,6 +24,7 @@ import zipfile import six from sushy import exceptions +from sushy.resources import oem from sushy import utils @@ -305,6 +306,9 @@ class ResourceBase(object): redfish_version = None """The Redfish version""" + oem_vendors = Field('Oem', adapter=list) + """The list of OEM extension names for this resource.""" + def __init__(self, connector, path='', @@ -420,6 +424,22 @@ class ResourceBase(object): def path(self): return self._path + @property + def resource_name(self): + return utils.camelcase_to_underscore_joined(self.__class__.__name__) + + def get_oem_extension(self, vendor): + """Get the OEM extension instance for this resource by OEM vendor + + :param vendor: the OEM vendor string which is the vendor-specific + extensibility identifier. Examples are 'Contoso', 'Hpe'. + Possible value can be got from ``oem_vendors`` attribute. + :returns: the Redfish resource OEM extension instance. + :raises: OEMExtensionNotFoundError + """ + return oem.get_resource_extension_by_vendor( + self.resource_name, vendor, self) + @six.add_metaclass(abc.ABCMeta) class ResourceCollectionBase(ResourceBase): diff --git a/sushy/resources/oem/__init__.py b/sushy/resources/oem/__init__.py new file mode 100644 index 0000000..b852678 --- /dev/null +++ b/sushy/resources/oem/__init__.py @@ -0,0 +1,15 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.oem.common import get_resource_extension_by_vendor + +__all__ = ('get_resource_extension_by_vendor',) diff --git a/sushy/resources/oem/base.py b/sushy/resources/oem/base.py new file mode 100644 index 0000000..adac1a2 --- /dev/null +++ b/sushy/resources/oem/base.py @@ -0,0 +1,107 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import abc +import logging + +import six + +from sushy.resources import base + + +LOG = logging.getLogger(__name__) + + +class OEMField(base.Field): + """Marker class for OEM specific fields.""" + + +def _collect_oem_fields(resource): + """Collect OEM fields from resource. + + :param resource: OEMExtensionResourceBase instance. + :returns: generator of tuples (key, field) + """ + for attr in dir(resource.__class__): + field = getattr(resource.__class__, attr) + if isinstance(field, OEMField): + yield (attr, field) + + +def _collect_base_fields(resource): + """Collect base fields from resource. + + :param resource: OEMExtensionResourceBase instance. 
+ :returns: generator of tuples (key, field) + """ + for attr in dir(resource.__class__): + field = getattr(resource.__class__, attr) + if not isinstance(field, OEMField) and isinstance(field, base.Field): + yield (attr, field) + + +@six.add_metaclass(abc.ABCMeta) +class OEMCompositeField(base.CompositeField, OEMField): + """CompositeField for OEM fields.""" + + +class OEMListField(base.ListField, OEMField): + """ListField for OEM fields.""" + + +class OEMDictionaryField(base.DictionaryField, OEMField): + """DictionaryField for OEM fields.""" + + +class OEMMappedField(base.MappedField, OEMField): + """MappedField for OEM fields.""" + + +@six.add_metaclass(abc.ABCMeta) +class OEMExtensionResourceBase(object): + + def __init__(self, resource, oem_property_name, *args, **kwargs): + """A class representing the base of any resource OEM extension + + Invokes the ``refresh()`` method for the first time from here + (constructor). + :param resource: The parent Sushy resource instance + :param oem_property_name: the unique OEM identifier string + """ + if not resource: + raise ValueError('"resource" argument cannot be void') + if not isinstance(resource, base.ResourceBase): + raise TypeError('"resource" argument must be a ResourceBase') + + self.core_resource = resource + self.oem_property_name = oem_property_name + self.refresh() + + def _parse_oem_attributes(self): + """Parse the OEM extension attributes of a resource.""" + oem_json_body = (self.core_resource.json.get('Oem'). + get(self.oem_property_name)) + for attr, field in _collect_oem_fields(self): + # Hide the Field object behind the real value + setattr(self, attr, field._load(oem_json_body, self)) + + for attr, field in _collect_base_fields(self): + # Hide the Field object behind the real value + setattr(self, attr, field._load(self.core_resource.json, self)) + + def refresh(self): + """Refresh the attributes of the resource extension. 
+ + Freshly parses the resource OEM attributes via + ``_parse_oem_attributes()`` method. + """ + self._parse_oem_attributes() diff --git a/sushy/resources/oem/common.py b/sushy/resources/oem/common.py new file mode 100644 index 0000000..142dad7 --- /dev/null +++ b/sushy/resources/oem/common.py @@ -0,0 +1,132 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +import stevedore + +from sushy import exceptions +from sushy import utils + + +LOG = logging.getLogger(__name__) + +_global_extn_mgrs_by_resource = {} + + +def _raise(m, ep, e): + raise exceptions.ExtensionError( + error='Failed to load entry point target: %(error)s' % {'error': e}) + + +def _create_extension_manager(namespace): + """Create the resource specific ExtensionManager instance. + + Use stevedore to find all vendor extensions of resource from their + namespace and return the ExtensionManager instance. + :param namespace: The namespace for the entry points. It maps to a + specific Sushy resource type. + :returns: the ExtensionManager instance + :raises ExtensionError: on resource OEM extension load error. 
+ """ + # namespace format is: + # ``sushy.resources..oems`` + resource_name = namespace.split('.')[-2] + + extension_manager = ( + stevedore.ExtensionManager(namespace=namespace, + propagate_map_exceptions=True, + on_load_failure_callback=_raise)) + + LOG.debug('Resource OEM extensions for "%(resource)s" under namespace ' + '"%(namespace)s":', + {'resource': resource_name, 'namespace': namespace}) + for extension in extension_manager: + LOG.debug('Found vendor: %(name)s target: %(target)s', + {'name': extension.name, + 'target': extension.entry_point_target}) + + if not extension_manager.names(): + m = (('No extensions found for "%(resource)s" under namespace ' + '"%(namespace)s"') % + {'resource': resource_name, + 'namespace': namespace}) + LOG.error(m) + raise exceptions.ExtensionError(error=m) + + return extension_manager + + +@utils.synchronized +def _get_extension_manager_of_resource(resource_name): + """Get the resource specific ExtensionManager instance. + + :param resource_name: The name of the resource e.g. + 'system' / 'ethernet_interface' / 'update_service' + :returns: the ExtensionManager instance + :raises ExtensionError: on resource OEM extension load error. + """ + global _global_extn_mgrs_by_resource + + if resource_name not in _global_extn_mgrs_by_resource: + resource_namespace = 'sushy.resources.' + resource_name + '.oems' + _global_extn_mgrs_by_resource[resource_name] = ( + _create_extension_manager(resource_namespace) + ) + return _global_extn_mgrs_by_resource[resource_name] + + +@utils.synchronized +def _get_resource_vendor_extension_obj(extension, resource, *args, **kwds): + """Get the object returned by extension's plugin() method. + + :param extension: stevedore Extension + :param resource: The Sushy resource instance + :param *args, **kwds: constructor arguments to plugin() method. + :returns: The object returned by ``plugin(*args, **kwds)`` of extension. 
+ """ + if extension.obj is None: + extension.obj = extension.plugin(resource, *args, **kwds) + + return extension.obj + + +def get_resource_extension_by_vendor( + resource_name, vendor, resource, *args, **kwds): + """Helper method to get Resource specific OEM extension object for vendor + + :param resource_name: The underscore joined name of the resource e.g. + 'system' / 'ethernet_interface' / 'update_service' + :param vendor: This is the OEM vendor string which is the vendor-specific + extensibility identifier. Examples are: 'Contoso', 'Hpe'. As a matter + of fact the lowercase of this string will be the plugin entry point + name. + :param resource: The Sushy resource instance + :returns: The object returned by ``plugin(*args, **kwds)`` of extension. + :raises OEMExtensionNotFoundError: if no valid resource OEM extension + found. + """ + if resource_name in _global_extn_mgrs_by_resource: + resource_extn_mgr = _global_extn_mgrs_by_resource[resource_name] + else: + resource_extn_mgr = _get_extension_manager_of_resource(resource_name) + + try: + resource_vendor_extn = resource_extn_mgr[vendor.lower()] + except KeyError: + raise exceptions.OEMExtensionNotFoundError( + resource=resource_name, name=vendor.lower()) + + if resource_vendor_extn.obj is None: + return _get_resource_vendor_extension_obj( + resource_vendor_extn, resource, *args, **kwds) + return resource_vendor_extn.obj diff --git a/sushy/resources/oem/fake.py b/sushy/resources/oem/fake.py new file mode 100644 index 0000000..1dcd983 --- /dev/null +++ b/sushy/resources/oem/fake.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from sushy.resources import base +from sushy.resources.oem import base as oem_base + +LOG = logging.getLogger(__name__) + + +class ProductionLocationField(oem_base.OEMCompositeField): + facility_name = base.Field('FacilityName') + country = base.Field('Country') + + +class FakeOEMSystemExtension(oem_base.OEMExtensionResourceBase): + + data_type = oem_base.OEMField('@odata.type') + production_location = ProductionLocationField('ProductionLocation') + reset_action = base.Field(['Actions', 'Oem', '#Contoso.Reset']) + + def __init__(self, resource, *args, **kwargs): + """A class representing ComputerSystem OEM extension for Contoso + + :param resource: The parent System resource instance + """ + super(FakeOEMSystemExtension, self).__init__( + resource, 'Contoso', *args, **kwargs) + + def get_reset_system_path(self): + return self.reset_action.get('target') diff --git a/sushy/tests/unit/resources/oem/__init__.py b/sushy/tests/unit/resources/oem/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py new file mode 100644 index 0000000..d76ae13 --- /dev/null +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -0,0 +1,186 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +import stevedore + +from sushy import exceptions +from sushy.resources import base as res_base +from sushy.resources.oem import base as oem_base +from sushy.resources.oem import common as oem_common +from sushy.tests.unit import base + + +class ContosoResourceOEMExtension(oem_base.OEMExtensionResourceBase): + + def __init__(self, resource, *args, **kwargs): + super(ContosoResourceOEMExtension, self).__init__( + resource, 'Contoso', *args, **kwargs) + + +class FauxResourceOEMExtension(oem_base.OEMExtensionResourceBase): + + def __init__(self, resource, *args, **kwargs): + super(FauxResourceOEMExtension, self).__init__( + resource, 'Faux', *args, **kwargs) + + +class ResourceOEMCommonMethodsTestCase(base.TestCase): + + def setUp(self): + super(ResourceOEMCommonMethodsTestCase, self).setUp() + # We use ExtensionManager.make_test_instance() and instantiate the + # test instance outside of the test cases in setUp. Inside of the + # test cases we set this as the return value of the mocked + # constructor. Also note that this instrumentation has been done + # only for one specific resource namespace which gets passed in the + # constructor of ExtensionManager. Moreover, this setUp also enables + # us to verify that the constructor is called correctly while still + # using a more realistic ExtensionManager. 
+ contoso_ep = mock.Mock() + contoso_ep.module_name = __name__ + contoso_ep.attrs = ['ContosoResourceOEMExtension'] + self.contoso_extn = stevedore.extension.Extension( + 'contoso', contoso_ep, ContosoResourceOEMExtension, None) + self.contoso_extn_dup = stevedore.extension.Extension( + 'contoso_dup', contoso_ep, ContosoResourceOEMExtension, None) + + faux_ep = mock.Mock() + faux_ep.module_name = __name__ + faux_ep.attrs = ['FauxResourceOEMExtension'] + self.faux_extn = stevedore.extension.Extension( + 'faux', faux_ep, FauxResourceOEMExtension, None) + self.faux_extn_dup = stevedore.extension.Extension( + 'faux_dup', faux_ep, FauxResourceOEMExtension, None) + + self.fake_ext_mgr = ( + stevedore.extension.ExtensionManager.make_test_instance( + [self.contoso_extn, self.faux_extn])) + self.fake_ext_mgr2 = ( + stevedore.extension.ExtensionManager.make_test_instance( + [self.contoso_extn_dup, self.faux_extn_dup])) + + def tearDown(self): + super(ResourceOEMCommonMethodsTestCase, self).tearDown() + if oem_common._global_extn_mgrs_by_resource: + oem_common._global_extn_mgrs_by_resource = {} + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test__create_extension_manager(self, ExtensionManager_mock): + system_resource_oem_ns = 'sushy.resources.system.oems' + ExtensionManager_mock.return_value = self.fake_ext_mgr + + result = oem_common._create_extension_manager(system_resource_oem_ns) + + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + system_resource_oem_ns, propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test__create_extension_manager_no_extns(self, ExtensionManager_mock): + system_resource_oem_ns = 'sushy.resources.system.oems' + ExtensionManager_mock.return_value.names.return_value = [] + + self.assertRaisesRegex( + exceptions.ExtensionError, 'No extensions found', + 
oem_common._create_extension_manager, + system_resource_oem_ns) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test__get_extension_manager_of_resource(self, ExtensionManager_mock): + ExtensionManager_mock.return_value = self.fake_ext_mgr + + result = oem_common._get_extension_manager_of_resource('system') + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + namespace='sushy.resources.system.oems', + propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result = oem_common._get_extension_manager_of_resource('manager') + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + namespace='sushy.resources.manager.oems', + propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + for name, extension in result.items(): + self.assertTrue(name in ('contoso', 'faux')) + self.assertTrue(extension in (self.contoso_extn, + self.faux_extn)) + + def test__get_resource_vendor_extension_obj_lazy_plugin_invoke(self): + resource_instance_mock = mock.Mock() + extension_mock = mock.MagicMock() + extension_mock.obj = None + + result = oem_common._get_resource_vendor_extension_obj( + extension_mock, resource_instance_mock) + self.assertEqual(extension_mock.plugin.return_value, result) + extension_mock.plugin.assert_called_once_with(resource_instance_mock) + extension_mock.reset_mock() + + # extension_mock.obj is not None anymore + result = oem_common._get_resource_vendor_extension_obj( + extension_mock, resource_instance_mock) + self.assertEqual(extension_mock.plugin.return_value, result) + self.assertFalse(extension_mock.plugin.called) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor(self, ExtensionManager_mock): + resource_instance_mock = mock.Mock(spec=res_base.ResourceBase) + ExtensionManager_mock.side_effect = 
[self.fake_ext_mgr, + self.fake_ext_mgr2] + + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock) + self.assertIsInstance(result, FauxResourceOEMExtension) + ExtensionManager_mock.assert_called_once_with( + 'sushy.resources.system.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Contoso', resource_instance_mock) + self.assertIsInstance(result, ContosoResourceOEMExtension) + self.assertFalse(ExtensionManager_mock.called) + ExtensionManager_mock.reset_mock() + + result = oem_common.get_resource_extension_by_vendor( + 'manager', 'Faux_dup', resource_instance_mock) + self.assertIsInstance(result, FauxResourceOEMExtension) + ExtensionManager_mock.assert_called_once_with( + 'sushy.resources.manager.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result = oem_common.get_resource_extension_by_vendor( + 'manager', 'Contoso_dup', resource_instance_mock) + self.assertIsInstance(result, ContosoResourceOEMExtension) + self.assertFalse(ExtensionManager_mock.called) + ExtensionManager_mock.reset_mock() + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor_fail( + self, ExtensionManager_mock): + resource_instance_mock = mock.Mock(spec=res_base.ResourceBase) + # ``fake_ext_mgr2`` has extension names as ``faux_dup`` + # and ``contoso_dup``. 
+ ExtensionManager_mock.return_value = self.fake_ext_mgr2 + + self.assertRaisesRegex( + exceptions.OEMExtensionNotFoundError, + 'No sushy.resources.system.oems OEM extension found ' + 'by name "faux"', + oem_common.get_resource_extension_by_vendor, + 'sushy.resources.system.oems', 'Faux', resource_instance_mock) diff --git a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py new file mode 100644 index 0000000..7050e48 --- /dev/null +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -0,0 +1,51 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json + +import mock + +from sushy.resources.oem import fake +from sushy.resources.system import system +from sushy.tests.unit import base + + +class FakeOEMSystemExtensionTestCase(base.TestCase): + + def setUp(self): + super(FakeOEMSystemExtensionTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/system.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.sys_instance = system.System( + self.conn, '/redfish/v1/Systems/437XR1138R2', + redfish_version='1.0.2') + self.fake_sys_oem_extn = fake.FakeOEMSystemExtension(self.sys_instance) + + def test__parse_oem_attributes(self): + self.assertEqual('http://Contoso.com/Schema#Contoso.ComputerSystem', + self.fake_sys_oem_extn.data_type) + self.assertEqual('PacWest Production Facility', ( + self.fake_sys_oem_extn.production_location.facility_name)) + self.assertEqual('USA', ( + self.fake_sys_oem_extn.production_location.country)) + self.assertEqual({ + "target": ("/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/" + "Contoso.Reset")}, self.fake_sys_oem_extn.reset_action) + + def test_get_reset_system_path(self): + value = self.fake_sys_oem_extn.get_reset_system_path() + expected = ( + '/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/Contoso.Reset' + ) + self.assertEqual(expected, value) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 6d6c4cd..5113f69 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -22,6 +22,7 @@ from sushy import exceptions from sushy.resources.chassis import chassis from sushy.resources import constants as res_cons from sushy.resources.manager import manager +from sushy.resources.oem import fake from sushy.resources.system import bios from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor @@ -69,6 +70,8 @@ class 
SystemTestCase(base.TestCase): self.sys_inst.power_state) self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertEqual("OK", self.sys_inst.memory_summary.health) + for oem_vendor in self.sys_inst.oem_vendors: + self.assertIn(oem_vendor, ('Contoso', 'Chipwise')) def test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') @@ -504,6 +507,15 @@ class SystemTestCase(base.TestCase): self.assertEqual( '/redfish/v1/Chassis/1U', actual_chassis[0].path) + def test_get_oem_extension(self): + # | WHEN | + contoso_system_extn_inst = self.sys_inst.get_oem_extension('Contoso') + # | THEN | + self.assertIsInstance(contoso_system_extn_inst, + fake.FakeOEMSystemExtension) + self.assertIs(self.sys_inst, contoso_system_extn_inst.core_resource) + self.assertEqual('Contoso', contoso_system_extn_inst.oem_property_name) + class SystemCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index ad33946..e2faf43 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -25,20 +25,47 @@ from sushy.tests.unit import base import zipfile +BASE_RESOURCE_JSON = { + "@odata.type": "#FauxResource.v1_0_0.FauxResource", + "Id": "1111AAAA", + "Name": "Faux Resource", + "@odata.id": "/redfish/v1/FauxResource/1111AAAA", + "Oem": { + "Contoso": { + "@odata.type": "http://contoso.com/schemas/extensions.v1_2_1#contoso.AnvilTypes1", # noqa + "slogan": "Contoso never fail", + "disclaimer": "* Most of the time" + }, + "EID_412_ASB_123": { + "@odata.type": "http://AnotherStandardsBody/schemas.v1_0_1#styleInfoExt", # noqa + "Style": "Executive" + } + } +} + + class BaseResource(resource_base.ResourceBase): def _parse_attributes(self): pass +class BaseResource2(resource_base.ResourceBase): + pass + + class ResourceBaseTestCase(base.TestCase): def setUp(self): super(ResourceBaseTestCase, self).setUp() self.conn = mock.Mock() + 
self.conn.get.return_value.json.return_value = ( + copy.deepcopy(BASE_RESOURCE_JSON)) self.base_resource = BaseResource(connector=self.conn, path='/Foo', redfish_version='1.0.2') self.assertFalse(self.base_resource._is_stale) + self.base_resource2 = BaseResource2(connector=self.conn, path='/Foo', + redfish_version='1.0.2') # refresh() is called in the constructor self.conn.reset_mock() @@ -102,6 +129,11 @@ class ResourceBaseTestCase(base.TestCase): reader=resource_base. JsonArchiveReader('Test.2.0.json')) + def test__parse_attributes(self): + for oem_vendor in self.base_resource2.oem_vendors: + self.assertTrue(oem_vendor in ('Contoso', 'EID_412_ASB_123')) + self.assertEqual('base_resource2', self.base_resource2.resource_name) + class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 899eee6..324366f 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -106,6 +106,26 @@ class UtilsTestCase(base.TestCase): self.assertEqual(0, utils.max_safe([])) self.assertIsNone(utils.max_safe([], default=None)) + def test_camelcase_to_underscore_joined(self): + input_vs_expected = [ + ('GarbageCollection', 'garbage_collection'), + ('DD', 'dd'), + ('rr', 'rr'), + ('AABbbC', 'aa_bbb_c'), + ('AABbbCCCDd', 'aa_bbb_ccc_dd'), + ('Manager', 'manager'), + ('EthernetInterfaceCollection', 'ethernet_interface_collection'), + (' ', ' '), + ] + for inp, exp in input_vs_expected: + self.assertEqual(exp, utils.camelcase_to_underscore_joined(inp)) + + def test_camelcase_to_underscore_joined_fails_with_empty_string(self): + self.assertRaisesRegex( + ValueError, + '"camelcase_str" cannot be empty', + utils.camelcase_to_underscore_joined, '') + class NestedResource(resource_base.ResourceBase): diff --git a/sushy/utils.py b/sushy/utils.py index f025c2a..32fbf7a 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -15,6 +15,7 @@ import collections 
import logging +import threading import six @@ -281,3 +282,50 @@ def cache_clear(res_selfie, force_refresh, only_these=None): break else: setattr(res_selfie, cache_attr_name, None) + + +def camelcase_to_underscore_joined(camelcase_str): + """Convert camelCase string to underscore_joined string + + :param camelcase_str: The camelCase string + :returns: the equivalent underscore_joined string + """ + if not camelcase_str: + raise ValueError('"camelcase_str" cannot be empty') + + r = camelcase_str[0].lower() + for i, letter in enumerate(camelcase_str[1:], 1): + if letter.isupper(): + try: + if (camelcase_str[i - 1].islower() + or camelcase_str[i + 1].islower()): + r += '_' + except IndexError: + pass + + r += letter.lower() + + return r + + +def synchronized(wrapped): + """Simple synchronization decorator. + + Decorating a method like so: + + .. code-block:: python + + @synchronized + def foo(self, *args): + ... + + ensures that only one thread will execute the foo method at a time. + """ + lock = threading.RLock() + + @six.wraps(wrapped) + def wrapper(*args, **kwargs): + with lock: + return wrapped(*args, **kwargs) + + return wrapper -- GitLab From bc7c5e476280377cb95ff8d6961f8401df50616a Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 28 Feb 2019 18:12:46 +0000 Subject: [PATCH 127/303] Update .gitreview for stable/stein Change-Id: Ide9bf79857f94fe0c44e531528e64f6fb5008af3 --- .gitreview | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitreview b/.gitreview index 6ec9ed8..de005ed 100644 --- a/.gitreview +++ b/.gitreview @@ -2,3 +2,4 @@ host=review.openstack.org port=29418 project=openstack/sushy.git +defaultbranch=stable/stein -- GitLab From 798458d444c332bf202902a370f8bf50017d871a Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 28 Feb 2019 18:12:51 +0000 Subject: [PATCH 128/303] Update master for stable/stein Add file to the reno documentation build to show release notes for stable/stein. 
Use pbr instruction to increment the minor version number automatically so that master versions are higher than the versions on stable/stein. Change-Id: I5a40ef5db945ece5a9f17fb5b7b1a9468f153db6 Sem-Ver: feature --- releasenotes/source/index.rst | 1 + releasenotes/source/stein.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/stein.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index 4c1d79a..5edb814 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,6 +6,7 @@ :maxdepth: 1 unreleased + stein rocky queens pike diff --git a/releasenotes/source/stein.rst b/releasenotes/source/stein.rst new file mode 100644 index 0000000..efaceb6 --- /dev/null +++ b/releasenotes/source/stein.rst @@ -0,0 +1,6 @@ +=================================== + Stein Series Release Notes +=================================== + +.. release-notes:: + :branch: stable/stein -- GitLab From 7993f04490aa8780787c09b868965c761c5f90f1 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Wed, 8 Aug 2018 13:44:48 +0300 Subject: [PATCH 129/303] Add support for loading packaged standard registries Added JsonPackagedFileReader to allow loading JSON from filesystem. Added licensed Redfish standard message registry files. Currently, the method to load the files is not intended to be used by Sushy users, but Sushy will use it internally when needed in following patches. 
Change-Id: Iae206dac90b499614a489fc0e90a0d881f21d310 Story: 2001791 Task: 23062 --- README.rst | 3 + ...ard-registry-license-0ded489afd6cfad1.yaml | 6 + sushy/main.py | 21 + sushy/resources/base.py | 19 + sushy/standard_registries/Base.1.0.0.json | 466 +++++++++++++++ sushy/standard_registries/Base.1.2.0.json | 517 +++++++++++++++++ sushy/standard_registries/Base.1.3.0.json | 535 +++++++++++++++++ sushy/standard_registries/Base.1.3.1.json | 535 +++++++++++++++++ sushy/standard_registries/Base.1.4.0.json | 542 ++++++++++++++++++ sushy/tests/unit/resources/test_base.py | 7 + sushy/tests/unit/test_main.py | 8 +- 11 files changed, 2658 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml create mode 100644 sushy/standard_registries/Base.1.0.0.json create mode 100644 sushy/standard_registries/Base.1.2.0.json create mode 100644 sushy/standard_registries/Base.1.3.0.json create mode 100644 sushy/standard_registries/Base.1.3.1.json create mode 100644 sushy/standard_registries/Base.1.4.0.json diff --git a/README.rst b/README.rst index c893b1d..aa951da 100644 --- a/README.rst +++ b/README.rst @@ -13,6 +13,9 @@ project. As the project grows and more features from `Redfish`_ are needed we can expand Sushy to fulfill those requirements. 
* Free software: Apache license +* Includes Redfish registry files licensed under + Creative Commons Attribution 4.0 License: + https://creativecommons.org/licenses/by/4.0/ * Documentation: https://docs.openstack.org/sushy/latest/ * Usage: https://docs.openstack.org/sushy/latest/reference/usage.html * Source: https://git.openstack.org/cgit/openstack/sushy diff --git a/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml b/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml new file mode 100644 index 0000000..4d3c25f --- /dev/null +++ b/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml @@ -0,0 +1,6 @@ +--- +other: + - | + Includes Redfish standard message registry files that are licensed + under Creative Commons Attribution 4.0 License: + https://creativecommons.org/licenses/by/4.0/ diff --git a/sushy/main.py b/sushy/main.py index b1c5963..24f1a37 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -13,6 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. 
import logging +import pkg_resources from sushy import auth as sushy_auth from sushy import connector as sushy_connector @@ -21,6 +22,7 @@ from sushy.resources import base from sushy.resources.chassis import chassis from sushy.resources.compositionservice import compositionservice from sushy.resources.manager import manager +from sushy.resources.registry import message_registry from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice @@ -29,6 +31,8 @@ from sushy.resources.updateservice import updateservice LOG = logging.getLogger(__name__) +STANDARD_REGISTRY_PATH = 'standard_registries/' + class ProtocolFeaturesSupportedField(base.CompositeField): @@ -277,3 +281,20 @@ class Sushy(base.ResourceBase): return compositionservice.CompositionService( self._conn, self._composition_service_path, redfish_version=self.redfish_version) + + def _get_standard_message_registry_collection(self): + """Load packaged standard message registries + + :returns: list of MessageRegistry + """ + + message_registries = [] + resource_package_name = __name__ + for json_file in pkg_resources.resource_listdir( + resource_package_name, STANDARD_REGISTRY_PATH): + mes_reg = message_registry.MessageRegistry( + None, STANDARD_REGISTRY_PATH + json_file, + reader=base.JsonPackagedFileReader(resource_package_name)) + message_registries.append(mes_reg) + + return message_registries diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 364749d..6065e97 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -19,6 +19,7 @@ import copy import io import json import logging +import pkg_resources import zipfile import six @@ -300,6 +301,24 @@ class JsonArchiveReader(AbstractJsonReader): {'type': data.headers['content-type']}) +class JsonPackagedFileReader(AbstractJsonReader): + """Gets the data from packaged file given by path""" + + def __init__(self, 
resource_package_name): + """Initializes the reader + + :param resource_package_name: Python package/module name + """ + self._resource_package_name = resource_package_name + + def get_json(self): + """Gets JSON file from packaged file denoted by path""" + + with pkg_resources.resource_stream(self._resource_package_name, + self._path) as resource: + return json.loads(resource.read().decode(encoding='utf-8')) + + @six.add_metaclass(abc.ABCMeta) class ResourceBase(object): diff --git a/sushy/standard_registries/Base.1.0.0.json b/sushy/standard_registries/Base.1.0.0.json new file mode 100644 index 0000000..ce3c24f --- /dev/null +++ b/sushy/standard_registries/Base.1.0.0.json @@ -0,0 +1,466 @@ +{ + "@Redfish.Copyright": "Copyright © 2014-2015 Distributed Management Task Force, Inc. (DMTF). All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.1.0.0.MessageRegistry", + "Id": "Base.1.0.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.0.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." 
+ }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." + }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." + }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. 
Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." + }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." 
+ }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." 
+ }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." 
+ }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." 
+ }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." + }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource already exists.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." 
+ }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." + }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." 
+ }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modifed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. 
Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." + }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at thr URI or correct the URI and resubmit the request." 
+ }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." + }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. 
" + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service was denied access.", + "Message": "While attempting to establish a connection to %1, the service was denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." + }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + } + } +} diff --git a/sushy/standard_registries/Base.1.2.0.json b/sushy/standard_registries/Base.1.2.0.json new file mode 100644 index 0000000..f250bc8 --- /dev/null +++ b/sushy/standard_registries/Base.1.2.0.json @@ -0,0 +1,517 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2015, 2017 Distributed Management Task Force, Inc. (DMTF). All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. 
For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.2.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.2.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + } + } +} diff --git a/sushy/standard_registries/Base.1.3.0.json b/sushy/standard_registries/Base.1.3.0.json new file mode 100644 index 0000000..980c9c2 --- /dev/null +++ b/sushy/standard_registries/Base.1.3.0.json @@ -0,0 +1,535 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2015, 2017-2018 DMTF. All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.3.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.3.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." 
+ }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." + }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyValueOutOfRange": { + "Description": "Indicates that a property was given the correct value type but the value of that property is outside the supported range.", + "Message": "The value %1 for the property %2 is not in the supported range of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + } + } +} diff --git a/sushy/standard_registries/Base.1.3.1.json b/sushy/standard_registries/Base.1.3.1.json new file mode 100644 index 0000000..a6bb996 --- /dev/null +++ b/sushy/standard_registries/Base.1.3.1.json @@ -0,0 +1,535 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2018 DMTF. All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.3.1", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.3.1", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred. Use in ExtendedInfo is discouraged. When used in ExtendedInfo, implementations are expected to include a Resolution property with this error to indicate how to resolve the problem.", + "Message": "A general error has occurred. 
See Resolution for information on how to resolve the error.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "None." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." + }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyValueOutOfRange": { + "Description": "Indicates that a property was given the correct value type but the value of that property is outside the supported range.", + "Message": "The value %1 for the property %2 is not in the supported range of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + } + } +} diff --git a/sushy/standard_registries/Base.1.4.0.json b/sushy/standard_registries/Base.1.4.0.json new file mode 100644 index 0000000..343f8bc --- /dev/null +++ b/sushy/standard_registries/Base.1.4.0.json @@ -0,0 +1,542 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2018 DMTF. All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.4.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.4.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred. Use in ExtendedInfo is discouraged. When used in ExtendedInfo, implementations are expected to include a Resolution property with this error to indicate how to resolve the problem.", + "Message": "A general error has occurred. 
See Resolution for information on how to resolve the error.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "None." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "NoOperation": { + "Description": "Indicates that the requested operation will not perform any changes on the service.", + "Message": "The request body submitted contain no data to act upon and no changes to the resource took place.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "ResourceTypeIncompatible": { + "Description": "Indicates that the resource type of the operation does not match that for the operation destination. Examples of when this can happen include during a POST to a collection using the wrong resource type, an update where the @odata.types do not match or on a major version incompatability.", + "Message": "The @odata.type of the request body %1 is incompatible with the @odata.type of the resource which is %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the request with a payload compatible with the resource's schema." + } + } +} diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index e2faf43..e15d664 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -134,6 +134,13 @@ class ResourceBaseTestCase(base.TestCase): self.assertTrue(oem_vendor in ('Contoso', 'EID_412_ASB_123')) self.assertEqual('base_resource2', self.base_resource2.resource_name) + def test_refresh_local(self): + resource = BaseResource(None, 'json_samples/message_registry.json', + reader=resource_base. 
+ JsonPackagedFileReader('sushy.tests.unit')) + self.assertIsNotNone(resource._json) + self.assertEqual('Test.1.1.1', resource._json['Id']) + class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index a343f3c..70bd8e5 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -14,7 +14,6 @@ # under the License. import json - import mock from sushy import auth @@ -172,6 +171,13 @@ class MainTestCase(base.TestCase): self.root._conn, '/redfish/v1/CompositionService', redfish_version=self.root.redfish_version) + def test__get_standard_message_registry_collection(self): + registries = self.root._get_standard_message_registry_collection() + + self.assertEqual(5, len(registries)) + self.assertTrue([r.identity for r in registries + if r.identity == 'Base.1.3.0']) + class BareMinimumMainTestCase(base.TestCase): -- GitLab From fa8ce9806b1d0b4741d7394692cacecdb31cb2ef Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Fri, 10 Aug 2018 13:53:40 +0300 Subject: [PATCH 130/303] Add public resource loading and message parsing Adds: * support for loading resources from the Internet using full URL, * load all provided message registries based on their source, * combine provided message registries with standard message registries, * parse a message using the message registries. 
Change-Id: I5a85344c094e890554c400fc87a960dc4cccb8b6 Story: 2001791 Task: 23062 --- sushy/main.py | 35 +++++++++- sushy/resources/base.py | 8 +++ sushy/resources/registry/message_registry.py | 26 +++++++ .../registry/message_registry_file.py | 38 +++++++++++ .../registry/test_message_registry.py | 42 ++++++++++++ .../registry/test_message_registry_file.py | 68 +++++++++++++++++++ sushy/tests/unit/resources/test_base.py | 13 ++++ sushy/tests/unit/test_main.py | 37 ++++++++++ 8 files changed, 266 insertions(+), 1 deletion(-) diff --git a/sushy/main.py b/sushy/main.py index 24f1a37..4a4c617 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -14,6 +14,7 @@ # under the License. import logging import pkg_resources +import requests from sushy import auth as sushy_auth from sushy import connector as sushy_connector @@ -94,7 +95,9 @@ class Sushy(base.ResourceBase): def __init__(self, base_url, username=None, password=None, root_prefix='/redfish/v1/', verify=True, - auth=None, connector=None): + auth=None, connector=None, + public_connector=None, + language='en'): """A class representing a RootService :param base_url: The base URL to the Redfish controller. It @@ -113,6 +116,10 @@ class Sushy(base.ResourceBase): the certificates in the directory. Defaults to True. :param auth: An authentication mechanism to utilize. :param connector: A user-defined connector object. Defaults to None. + :param public_connector: A user-defined connector to use for requests + on the Internet, e.g., for Message Registries. Defaults to None. + :param language: RFC 5646 language code for Message Registries. + Defaults to 'en'. 
""" self._root_prefix = root_prefix if (auth is not None and (password is not None or @@ -131,6 +138,8 @@ class Sushy(base.ResourceBase): self._auth = auth self._auth.set_context(self, self._conn) self._auth.authenticate() + self._public_connector = public_connector or requests + self._language = language def __del__(self): if self._auth: @@ -298,3 +307,27 @@ class Sushy(base.ResourceBase): message_registries.append(mes_reg) return message_registries + + def _get_message_registries(self): + """Gets and combines all message registries together + + Fetches all registries if any provided by Redfish service + and combines together with packaged standard registries. + + :returns: dict of combined message registries where key is + Registry_name.Major_version.Minor_version and value is registry + itself. + """ + + standard = self._get_standard_message_registry_collection() + registries = {r.registry_prefix + '.' + + r.registry_version.rsplit('.', 1)[0]: r + for r in standard if r.language == self._language} + registry_col = self._get_registry_collection() + if registry_col: + provided = registry_col.get_members() + registries.update({r.registry: r.get_message_registry( + self._language, + self._public_connector) for r in provided}) + + return registries diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 6065e97..3398660 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -274,6 +274,14 @@ class JsonDataReader(AbstractJsonReader): return self._conn.get(path=self._path).json() +class JsonPublicFileReader(AbstractJsonReader): + """Loads the data from the Internet""" + + def get_json(self): + """Get JSON file from full URI""" + return self._conn.get(self._path).json() + + class JsonArchiveReader(AbstractJsonReader): """Gets the data from JSON file in archive""" diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index d09a9ea..e0b9f5b 100644 --- 
a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -79,3 +79,29 @@ class MessageRegistry(base.ResourceBase): messages = MessageDictionaryField('Messages') """List of messages in this registry""" + + +def parse_message(message_registries, message_field): + """Using message registries parse the message and substitute any parms + + :param message_registries: dict of Message Registries + :param message_field: settings.MessageListField to parse + + :returns: parsed settings.MessageListField with missing attributes filled + """ + + registry, msg_key = message_field.message_id.rsplit('.', 1) + + reg_msg = message_registries[registry].messages[msg_key] + + msg = reg_msg.message + for i in range(1, reg_msg.number_of_args + 1): + msg = msg.replace('%%%i' % i, str(message_field.message_args[i - 1])) + + message_field.message = msg + if not message_field.severity: + message_field.severity = reg_msg.severity + if not message_field.resolution: + message_field.resolution = reg_msg.resolution + + return message_field diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 37e545b..587b0c0 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -14,8 +14,12 @@ # https://redfish.dmtf.org/schemas/v1/MessageRegistryFileCollection.json # https://redfish.dmtf.org/schemas/v1/MessageRegistryFile.v1_1_0.json +import logging from sushy.resources import base +from sushy.resources.registry import message_registry + +LOG = logging.getLogger(__name__) class LocationListField(base.ListField): @@ -67,6 +71,40 @@ class MessageRegistryFile(base.ResourceBase): location = LocationListField('Location', required=True) """List of locations of Registry files for each supported language""" + def get_message_registry(self, language, public_connector): + """Load message registry file depending on its source + + Will try to find a 
registry based on provided language, if not found + then will use a registry that has 'default' language. + + :param language: RFC 5646 language code for registry files + :param public_connector: connector to use when downloading registry + from the Internet + """ + + location = next((l for l in self.location if l.language == language), + [d for d in self.location if d.language == 'default'] + [0]) + + if location.uri: + return message_registry.MessageRegistry( + self._conn, path=location.uri, + redfish_version=self.redfish_version) + elif location.archive_uri: + return message_registry.MessageRegistry( + self._conn, path=location.archive_uri, + redfish_version=self.redfish_version, + reader=base.JsonArchiveReader(location.archive_file)) + elif location.publication_uri: + return message_registry.MessageRegistry( + public_connector, + path=location.publication_uri, + redfish_version=self.redfish_version, + reader=base.JsonPublicFileReader()) + else: + LOG.warning('No location defined for language %(language)s', + {'language': language}) + class MessageRegistryFileCollection(base.ResourceCollectionBase): """Collection of Message Registry Files""" diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 0b3d37b..c506faa 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -19,6 +19,7 @@ import mock from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry +from sushy.resources import settings from sushy.tests.unit import base @@ -65,3 +66,44 @@ class MessageRegistryTestCase(base.TestCase): self.assertRaisesRegex(KeyError, 'unknown_type', self.registry._parse_attributes) + + def test_parse_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = 
json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = settings.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.TooBig' + message_field.message_args = ['arg1', 10] + message_field.severity = None + message_field.resolution = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Try again', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_WARNING, parsed_msg.severity) + self.assertEqual('Property\'s arg1 value cannot be greater than 10.', + parsed_msg.message) + + def test_parse_message_with_severity_resolution_no_args(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = settings.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 9c63e5d..21cb905 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -51,6 +51,74 @@ class MessageRegistryFileTestCase(base.TestCase): self.assertEqual('Test.1.0.json', self.reg_file.location[0].archive_file) + 
@mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + def test_get_message_registry_uri(self, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + redfish_version=self.reg_file.redfish_version) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.base.JsonArchiveReader', autospec=True) + def test_get_message_registry_archive(self, mock_reader, mock_msg_reg): + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + + registry = self.reg_file.get_message_registry('fr', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Archive.zip', + redfish_version=self.reg_file.redfish_version, + reader=mock_reader_rv) + mock_reader.assert_called_once_with('Test.1.0.json') + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.base.JsonPublicFileReader', autospec=True) + def test_get_message_registry_public(self, mock_reader, mock_msg_reg): + public_connector = mock.Mock() + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + self.reg_file.location[0].archive_uri = None + + registry = self.reg_file.get_message_registry('en', public_connector) + mock_msg_reg.assert_called_once_with( + public_connector, + path='https://example.com/Registries/Test.1.0.json', + redfish_version=self.reg_file.redfish_version, 
+ reader=mock_reader_rv) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_invalid(self, mock_log, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + self.reg_file.location[0].archive_uri = None + self.reg_file.location[0].publication_uri = None + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_not_called() + self.assertIsNone(registry) + mock_log.warning.assert_called_with( + 'No location defined for language %(language)s', + {'language': 'en'}) + class MessageRegistryFileCollectionTestCase(base.TestCase): diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index e15d664..060f97d 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -15,6 +15,7 @@ import copy import io +import json import mock from six.moves import http_client @@ -141,6 +142,18 @@ class ResourceBaseTestCase(base.TestCase): self.assertIsNotNone(resource._json) self.assertEqual('Test.1.1.1', resource._json['Id']) + def test_refresh_public(self): + mock_connector = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + mock_connector.get.return_value.json.return_value = json.load(f) + resource = BaseResource(mock_connector, 'https://example.com/' + 'message_registry.json', + reader=resource_base.JsonPublicFileReader()) + mock_connector.get.assert_called_once_with('https://example.com/' + 'message_registry.json') + self.assertIsNotNone(resource._json) + self.assertEqual('Test.1.1.1', resource._json['Id']) + class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" diff --git a/sushy/tests/unit/test_main.py 
b/sushy/tests/unit/test_main.py index 70bd8e5..151c948 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -178,6 +178,43 @@ class MainTestCase(base.TestCase): self.assertTrue([r.identity for r in registries if r.identity == 'Base.1.3.0']) + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test__get_message_registries(self, mock_col, mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + + mock_msg_reg2 = mock.Mock() + mock_msg_reg2.registry_prefix = 'RegistryB' + mock_msg_reg2.registry_version = '1.0.0' + mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.registry = 'RegistryB.1.0' + mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 + mock_col.return_value.get_members.return_value = [mock_msg_reg_file] + + registries = self.root._get_message_registries() + self.assertEqual({'RegistryA.2.0': mock_msg_reg1, + 'RegistryB.1.0': mock_msg_reg2}, registries) + + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test__get_message_registries_provided_empty(self, mock_col, + mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + mock_col.return_value = None + + registries = self.root._get_message_registries() + self.assertEqual({'RegistryA.2.0': mock_msg_reg1}, registries) + class BareMinimumMainTestCase(base.TestCase): -- GitLab From fc311f9ddbb5fe8559acc6974e371101ecb9353c Mon Sep 17 00:00:00 2001 From: Lin Yang Date: Tue, 5 Mar 2019 14:21:17 -0800 Subject: [PATCH 131/303] Fix wrong default 
JsonDataReader() argument It expects to use a new JsonDataReader() instance as the default argument in the function call, but the default will only be created once, when the function is defined, and the same object is used in each successive call. It does not work as expected. So use a default argument of None to signal that no Reader was provided. Change-Id: I8b659132e4699c2f9a1b8109869c3a6c1b3ca096 --- sushy/resources/base.py | 7 ++++--- sushy/tests/unit/resources/test_base.py | 9 +++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 364749d..bb3cc17 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -313,7 +313,7 @@ class ResourceBase(object): connector, path='', redfish_version=None, - reader=JsonDataReader()): + reader=None): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first @@ -322,8 +322,7 @@ class ResourceBase(object): :param path: sub-URI path to the resource. :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. - :param reader: Reader to use to fetch JSON data. Defaults to - JsonDataReader + :param reader: Reader to use to fetch JSON data. """ self._conn = connector self._path = path @@ -334,6 +333,8 @@ class ResourceBase(object): # attribute values are fetched. self._is_stale = True + if reader is None: + reader = JsonDataReader() reader.set_connection(connector, path) self._reader = reader diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index e2faf43..57de41d 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -129,6 +129,15 @@ class ResourceBaseTestCase(base.TestCase): reader=resource_base. 
JsonArchiveReader('Test.2.0.json')) + def test_init_default_reader(self): + resource_a = BaseResource(connector=self.conn) + resource_b = BaseResource(connector=self.conn) + + self.assertIsInstance(resource_a._reader, resource_base.JsonDataReader) + self.assertIsInstance(resource_b._reader, resource_base.JsonDataReader) + + self.assertIsNot(resource_a._reader, resource_b._reader) + def test__parse_attributes(self): for oem_vendor in self.base_resource2.oem_vendors: self.assertTrue(oem_vendor in ('Contoso', 'EID_412_ASB_123')) -- GitLab From 5b5794d7aa59fcc4be0717c177f3f413edc10ca9 Mon Sep 17 00:00:00 2001 From: Julia Kreger Date: Tue, 14 Aug 2018 12:06:18 -0700 Subject: [PATCH 132/303] Change sushy devstack job to python3 - Set default job to python3 - Create python2 job to run on check and gate Change-Id: Ia03ba1a4471481a7bc6136f3c2a1aeecb2aa9eb2 --- zuul.d/project.yaml | 2 ++ zuul.d/sushy-jobs.yaml | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 6c936e3..92fb8bf 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -11,6 +11,8 @@ check: jobs: - sushy-tempest-ironic-partition-redfish-src + - sushy-tempest-ironic-partition-redfish-src-python2 gate: jobs: - sushy-tempest-ironic-partition-redfish-src + - sushy-tempest-ironic-partition-redfish-src-python2 diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 69870fb..b7b083e 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -9,7 +9,15 @@ - openstack/sushy vars: devstack_localrc: + USE_PYTHON3: True IRONIC_DEPLOY_DRIVER: redfish IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" EBTABLES_RACE_FIX: True + +- job: + name: sushy-tempest-ironic-partition-redfish-src-python2 + parent: sushy-tempest-ironic-partition-redfish-src + vars: + devstack_localrc: + USE_PYTHON3: False -- GitLab From 680d023ab6d9ff514f79cf286b282c2ae774878c Mon Sep 17 00:00:00 2001 From: Varsha Date: Tue, 12 Mar 2019 18:50:54 
+0530 Subject: [PATCH 133/303] Add `FabricCollection` and `Fabric` classes Add representation of Fabric and FabricCollection resources. The Fabric represents a simple fabric consisting of one or more switches, zero or more endpoints, and zero or more zones. Also adds the methods get_fabric_collection and get_fabric in the public API. Implements: FabricCollection and Fabric classes Story: #2003853 Task: #26648 Change-Id: Ib9ad7f562cfd46efac4038e17d524efcdc9fff92 --- .../add-fabric-support-1520f7fcb0e12539.yaml | 5 ++ sushy/__init__.py | 1 + sushy/main.py | 27 +++++++ sushy/resources/fabric/__init__.py | 0 sushy/resources/fabric/constants.py | 42 ++++++++++ sushy/resources/fabric/fabric.py | 77 ++++++++++++++++++ sushy/resources/fabric/mappings.py | 47 +++++++++++ sushy/tests/unit/json_samples/fabric.json | 29 +++++++ .../unit/json_samples/fabric_collection.json | 16 ++++ sushy/tests/unit/json_samples/root.json | 3 + sushy/tests/unit/resources/fabric/__init__.py | 0 .../unit/resources/fabric/test_fabric.py | 79 +++++++++++++++++++ sushy/tests/unit/test_main.py | 21 +++++ 13 files changed, 347 insertions(+) create mode 100644 releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml create mode 100644 sushy/resources/fabric/__init__.py create mode 100644 sushy/resources/fabric/constants.py create mode 100644 sushy/resources/fabric/fabric.py create mode 100644 sushy/resources/fabric/mappings.py create mode 100644 sushy/tests/unit/json_samples/fabric.json create mode 100644 sushy/tests/unit/json_samples/fabric_collection.json create mode 100644 sushy/tests/unit/resources/fabric/__init__.py create mode 100644 sushy/tests/unit/resources/fabric/test_fabric.py diff --git a/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml b/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml new file mode 100644 index 0000000..f9f1298 --- /dev/null +++ b/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for 
the Fabric resource to the library. + diff --git a/sushy/__init__.py b/sushy/__init__.py index 92623cf..8b49ac2 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -21,6 +21,7 @@ from sushy.resources.constants import * # noqa from sushy.resources.system.constants import * # noqa from sushy.resources.manager.constants import * # noqa from sushy.resources.chassis.constants import * # noqa +from sushy.resources.fabric.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/main.py b/sushy/main.py index b1c5963..437dbdb 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -20,6 +20,7 @@ from sushy import exceptions from sushy.resources import base from sushy.resources.chassis import chassis from sushy.resources.compositionservice import compositionservice +from sushy.resources.fabric import fabric from sushy.resources.manager import manager from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session @@ -79,6 +80,9 @@ class Sushy(base.ResourceBase): _chassis_path = base.Field(['Chassis', '@odata.id']) """ChassisCollection path""" + _fabrics_path = base.Field(['Fabrics', '@odata.id']) + """FabricCollection path""" + _session_service_path = base.Field(['SessionService', '@odata.id']) """SessionService path""" @@ -188,6 +192,29 @@ class Sushy(base.ResourceBase): return chassis.Chassis(self._conn, identity, redfish_version=self.redfish_version) + def get_fabric_collection(self): + """Get the FabricCollection object + + :raises: MissingAttributeError, if the collection attribute is + not found + :returns: a FabricCollection object + """ + if not self._fabrics_path: + raise exceptions.MissingAttributeError( + attribute='Fabrics/@odata.id', resource=self._path) + + return fabric.FabricCollection(self._conn, self._fabrics_path, + redfish_version=self.redfish_version) + + def get_fabric(self, identity): + """Given the identity return a Fabric object + + :param identity: The 
identity of the Fabric resource + :returns: The Fabric object + """ + return fabric.Fabric(self._conn, identity, + redfish_version=self.redfish_version) + def get_manager_collection(self): """Get the ManagerCollection object diff --git a/sushy/resources/fabric/__init__.py b/sushy/resources/fabric/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/fabric/constants.py b/sushy/resources/fabric/constants.py new file mode 100644 index 0000000..27f094f --- /dev/null +++ b/sushy/resources/fabric/constants.py @@ -0,0 +1,42 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Values come from the Redfish Fabric json-schema 1.0.4: +# http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json#/definitions/Fabric + +# Fabric Types constants + +FABRIC_TYPE_AHCI = 'Advanced Host Controller Interface' +FABRIC_TYPE_FC = 'Fibre Channel' +FABRIC_TYPE_FCP = 'Fibre Channel Protocol for SCSI' +FABRIC_TYPE_FCoE = 'Fibre Channel over Ethernet' +FABRIC_TYPE_FICON = 'FIbre CONnection (FICON)' +FABRIC_TYPE_FTP = 'File Transfer Protocol' +FABRIC_TYPE_HTTP = 'Hypertext Transport Protocol' +FABRIC_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' +FABRIC_TYPE_I2C = 'Inter-Integrated Circuit Bus' +FABRIC_TYPE_NFSv3 = 'Network File System version 3' +FABRIC_TYPE_NFSv4 = 'Network File System version 4' +FABRIC_TYPE_NVMe = 'Non-Volatile Memory Express' +FABRIC_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' +FABRIC_TYPE_OEM = 'OEM specific' +FABRIC_TYPE_PCIe = 'PCI Express' +FABRIC_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' +FABRIC_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' +FABRIC_TYPE_SAS = 'Serial Attached SCSI' +FABRIC_TYPE_SATA = 'Serial AT Attachment' +FABRIC_TYPE_SFTP = 'Secure File Transfer Protocol' +FABRIC_TYPE_SMB = 'Server Message Block (aka CIFS Common Internet File System)' +FABRIC_TYPE_UHCI = 'Universal Host Controller Interface' +FABRIC_TYPE_USB = 'Universal Serial Bus' +FABRIC_TYPE_iSCSI = 'Internet SCSI' +FABRIC_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py new file mode 100644 index 0000000..e9c6fc2 --- /dev/null +++ b/sushy/resources/fabric/fabric.py @@ -0,0 +1,77 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.fabric import mappings as fab_maps + +import logging + +LOG = logging.getLogger(__name__) + + +class Fabric(base.ResourceBase): + """Fabric resource + + The Fabric represents a simple fabric consisting of one or more + switches, zero or more endpoints, and zero or more zones. + """ + + identity = base.Field('Id', required=True) + """Identifier for the fabric""" + + name = base.Field('Name', required=True) + """The fabric name""" + + description = base.Field('Description') + """The fabric description""" + + max_zones = base.Field('MaxZones') + """The maximum number of zones the switch can currently configure""" + + status = common.StatusField('Status') + """The fabric status""" + + fabric_type = base.MappedField('FabricType', + fab_maps.FABRIC_TYPE_VALUE_MAP) + """The protocol being sent over this fabric""" + + def __init__(self, connector, identity, redfish_version=None): + """A class representing a Fabric + + :param connector: A Connector instance + :param identity: The identity of the Fabric resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. 
+ """ + super(Fabric, self).__init__(connector, identity, redfish_version) + + +class FabricCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Fabric + + def __init__(self, connector, path, redfish_version=None): + """A class representing a FabricCollection + + :param connector: A Connector instance + :param path: The canonical path to the Fabric collection resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + """ + super(FabricCollection, self).__init__(connector, path, + redfish_version) diff --git a/sushy/resources/fabric/mappings.py b/sushy/resources/fabric/mappings.py new file mode 100644 index 0000000..6ddd890 --- /dev/null +++ b/sushy/resources/fabric/mappings.py @@ -0,0 +1,47 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.fabric import constants as fab_cons +from sushy import utils + +FABRIC_TYPE_VALUE_MAP = { + 'AHCI': fab_cons.FABRIC_TYPE_AHCI, + 'FC': fab_cons.FABRIC_TYPE_FC, + 'FCP': fab_cons.FABRIC_TYPE_FCP, + 'FCoE': fab_cons.FABRIC_TYPE_FCoE, + 'FICON': fab_cons.FABRIC_TYPE_FICON, + 'FTP': fab_cons.FABRIC_TYPE_FTP, + 'HTTP': fab_cons.FABRIC_TYPE_HTTP, + 'HTTPS': fab_cons.FABRIC_TYPE_HTTPS, + 'I2C': fab_cons.FABRIC_TYPE_I2C, + 'NFSv3': fab_cons.FABRIC_TYPE_NFSv3, + 'NFSv4': fab_cons.FABRIC_TYPE_NFSv4, + 'NVMe': fab_cons.FABRIC_TYPE_NVMe, + 'NVMeOverFabrics': fab_cons.FABRIC_TYPE_NVMeOverFabrics, + 'OEM': fab_cons.FABRIC_TYPE_OEM, + 'PCIe': fab_cons.FABRIC_TYPE_PCIe, + 'RoCE': fab_cons.FABRIC_TYPE_RoCE, + 'RoCEv2': fab_cons.FABRIC_TYPE_RoCEv2, + 'SAS': fab_cons.FABRIC_TYPE_SAS, + 'SATA': fab_cons.FABRIC_TYPE_SATA, + 'SFTP': fab_cons.FABRIC_TYPE_SFTP, + 'SMB': fab_cons.FABRIC_TYPE_SMB, + 'UHCI': fab_cons.FABRIC_TYPE_UHCI, + 'USB': fab_cons.FABRIC_TYPE_USB, + 'iSCSI': fab_cons.FABRIC_TYPE_iSCSI, + 'iWARP': fab_cons.FABRIC_TYPE_iWARP, +} + +FABRIC_TYPE_VALUE_MAP_REV = utils.revert_dictionary(FABRIC_TYPE_VALUE_MAP) diff --git a/sushy/tests/unit/json_samples/fabric.json b/sushy/tests/unit/json_samples/fabric.json new file mode 100644 index 0000000..9876f07 --- /dev/null +++ b/sushy/tests/unit/json_samples/fabric.json @@ -0,0 +1,29 @@ +{ + "@odata.type": "#Fabric.v1_0_3.Fabric", + "Id": "SAS", + "Name": "SAS Fabric", + "FabricType": "SAS", + "Description": "A SAS Fabric with redundant switches.", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Zones": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Zones" + }, + "Endpoints": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints" + }, + "Switches": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Switches" + }, + "Links": { + "Oem": {} + }, + "Actions": { + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#Fabric.Fabric", + "@odata.id": "/redfish/v1/Fabrics/SAS" +} \ No newline at end of file 
diff --git a/sushy/tests/unit/json_samples/fabric_collection.json b/sushy/tests/unit/json_samples/fabric_collection.json new file mode 100644 index 0000000..f58a5d6 --- /dev/null +++ b/sushy/tests/unit/json_samples/fabric_collection.json @@ -0,0 +1,16 @@ +{ + "@odata.type": "#FabricCollection.FabricCollection", + "Name": "Fabric Collection", + "Members@odata.count": 2, + "Members": [ + { + "@odata.id": "/redfish/v1/Fabrics/SAS1" + }, + { + "@odata.id": "/redfish/v1/Fabrics/SAS2" + } + ], + "@odata.context": "/redfish/v1/$metadata#FabricCollection.FabricCollection", + "@odata.id": "/redfish/v1/Fabrics", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index 6e770ef..c1705ab 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -21,6 +21,9 @@ "Managers": { "@odata.id": "/redfish/v1/Managers" }, + "Fabrics": { + "@odata.id": "/redfish/v1/Fabrics" + }, "Tasks": { "@odata.id": "/redfish/v1/TaskService" }, diff --git a/sushy/tests/unit/resources/fabric/__init__.py b/sushy/tests/unit/resources/fabric/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py new file mode 100644 index 0000000..4ac0481 --- /dev/null +++ b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +import sushy +from sushy.resources.fabric import fabric +from sushy.tests.unit import base + + +class FabricTestCase(base.TestCase): + + def setUp(self): + super(FabricTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/fabric.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.fabric = fabric.Fabric(self.conn, '/redfish/v1/Fabrics/SAS', + redfish_version='1.0.3') + + def test__parse_attributes(self): + # | WHEN | + self.fabric._parse_attributes() + # | THEN | + self.assertEqual('1.0.3', self.fabric.redfish_version) + self.assertEqual('SAS', self.fabric.identity) + self.assertEqual('SAS Fabric', self.fabric.name) + self.assertEqual('A SAS Fabric with redundant switches.', + self.fabric.description) + self.assertEqual(sushy.FABRIC_TYPE_SAS, + self.fabric.fabric_type) + self.assertEqual(sushy.STATE_ENABLED, self.fabric.status.state) + self.assertEqual(sushy.HEALTH_OK, self.fabric.status.health) + + +class FabricCollectionTestCase(base.TestCase): + + def setUp(self): + super(FabricCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'fabric_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.fabric = fabric.FabricCollection( + self.conn, '/redfish/v1/Fabrics', redfish_version='1.0.3') + + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_member(self, fabric_mock): + self.fabric.get_member('/redfish/v1/Fabrics/SAS1') + 
fabric_mock.assert_called_once_with( + self.fabric._conn, '/redfish/v1/Fabrics/SAS1', + redfish_version=self.fabric.redfish_version) + + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_members(self, fabric_mock): + members = self.fabric.get_members() + calls = [ + mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS1', + redfish_version=self.fabric.redfish_version), + mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS2', + redfish_version=self.fabric.redfish_version) + ] + fabric_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(2, len(members)) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index a343f3c..bab2645 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -23,6 +23,7 @@ from sushy import exceptions from sushy import main from sushy.resources.chassis import chassis from sushy.resources.compositionservice import compositionservice +from sushy.resources.fabric import fabric from sushy.resources.manager import manager from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session @@ -68,6 +69,7 @@ class MainTestCase(base.TestCase): self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) self.assertEqual('/redfish/v1/Chassis', self.root._chassis_path) + self.assertEqual('/redfish/v1/Fabrics', self.root._fabrics_path) self.assertEqual('/redfish/v1/SessionService', self.root._session_service_path) self.assertEqual('/redfish/v1/CompositionService', @@ -119,6 +121,20 @@ class MainTestCase(base.TestCase): self.root._conn, '/redfish/v1/Chassis', redfish_version=self.root.redfish_version) + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_fabric(self, mock_fabric): + self.root.get_fabric('fake-fabric-id') + mock_fabric.assert_called_once_with( + self.root._conn, 'fake-fabric-id', + 
redfish_version=self.root.redfish_version) + + @mock.patch.object(fabric, 'FabricCollection', autospec=True) + def test_get_fabric_collection(self, fabric_collection_mock): + self.root.get_fabric_collection() + fabric_collection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Fabrics', + redfish_version=self.root.redfish_version) + @mock.patch.object(manager, 'ManagerCollection', autospec=True) def test_get_manager_collection(self, ManagerCollection_mock): self.root.get_manager_collection() @@ -199,6 +215,11 @@ class BareMinimumMainTestCase(base.TestCase): exceptions.MissingAttributeError, 'Chassis/@odata.id', self.root.get_chassis_collection) + def test_get_fabric_collection_when_fabrics_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Fabrics/@odata.id', self.root.get_fabric_collection) + def test_get_session_service_when_sessionservice_attr_absent(self): self.assertRaisesRegex( exceptions.MissingAttributeError, -- GitLab From 88ef6fea392574e55dac7e74cfa4ecb66318deb4 Mon Sep 17 00:00:00 2001 From: Lin Yang Date: Tue, 5 Mar 2019 14:21:17 -0800 Subject: [PATCH 134/303] Fix wrong default JsonDataReader() argument It expect to use new JsonDataReader() instance as default argument in function call, but it will be only created once when the function is defined, and the same object is used in each successive call. It does not work as expected. So Use default argument None to signal that Reader was provided. 
Change-Id: I8b659132e4699c2f9a1b8109869c3a6c1b3ca096 --- sushy/resources/base.py | 7 ++++--- sushy/tests/unit/resources/test_base.py | 9 +++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 364749d..bb3cc17 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -313,7 +313,7 @@ class ResourceBase(object): connector, path='', redfish_version=None, - reader=JsonDataReader()): + reader=None): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first @@ -322,8 +322,7 @@ class ResourceBase(object): :param path: sub-URI path to the resource. :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. - :param reader: Reader to use to fetch JSON data. Defaults to - JsonDataReader + :param reader: Reader to use to fetch JSON data. """ self._conn = connector self._path = path @@ -334,6 +333,8 @@ class ResourceBase(object): # attribute values are fetched. self._is_stale = True + if reader is None: + reader = JsonDataReader() reader.set_connection(connector, path) self._reader = reader diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index e2faf43..57de41d 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -129,6 +129,15 @@ class ResourceBaseTestCase(base.TestCase): reader=resource_base. 
JsonArchiveReader('Test.2.0.json')) + def test_init_default_reader(self): + resource_a = BaseResource(connector=self.conn) + resource_b = BaseResource(connector=self.conn) + + self.assertIsInstance(resource_a._reader, resource_base.JsonDataReader) + self.assertIsInstance(resource_b._reader, resource_base.JsonDataReader) + + self.assertIsNot(resource_a._reader, resource_b._reader) + def test__parse_attributes(self): for oem_vendor in self.base_resource2.oem_vendors: self.assertTrue(oem_vendor in ('Contoso', 'EID_412_ASB_123')) -- GitLab From e348ac8dd57630ed908b6dc17af7f241a2b544e8 Mon Sep 17 00:00:00 2001 From: Varsha Date: Mon, 4 Mar 2019 19:26:54 +0530 Subject: [PATCH 135/303] Add mappings for `system_type` Create mappings for the system_type of the System resource. This represents the type of the computer system. Story: #2005160 Task: #29880 Change-Id: Icf688752791c21c8652e7fc64478ef0307682f1e --- .../add-system-type-mapping-bf456c5c15a90877.yaml | 5 +++++ sushy/resources/system/constants.py | 15 +++++++++++++++ sushy/resources/system/mappings.py | 12 ++++++++++++ sushy/resources/system/system.py | 4 ++-- sushy/tests/unit/resources/system/test_system.py | 3 ++- 5 files changed, 36 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml diff --git a/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml b/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml new file mode 100644 index 0000000..e16704b --- /dev/null +++ b/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds mappings and constants for possible values of System Type in System + resource. This represents the type of the computer system. 
diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 7874f40..13eb868 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -146,3 +146,18 @@ PROCESSOR_INSTRUCTIONSET_MIPS64 = 'MIPS 64-bit' PROCESSOR_INSTRUCTIONSET_OEM = 'OEM-defined' PROCESSOR_INSTRUCTIONSET_x86 = 'x86 32-bit' PROCESSOR_INSTRUCTIONSET_x86_64 = 'x86 64-bit' + +# System type constants + +SYSTEM_TYPE_PHYSICAL = "Physical" +"""A physical computer system""" +SYSTEM_TYPE_VIRTUAL = "Virtual" +"""A virtual machine instance""" +SYSTEM_TYPE_OS = "OS" +"""An operating system instance""" +SYSTEM_TYPE_PHYSICALLY_PARTITIONED = "PhysicallyPartitioned" +"""A hardware-based partition of a computer system""" +SYSTEM_TYPE_VIRTUALLY_PARTITIONED = "VirtuallyPartitioned" +"""A virtual or software-based partition of a computer system""" +SYSTEM_TYPE_COMPOSED = "Composed" +"""A computer system created by binding resource blocks together""" diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index dcb6e76..fa59a3f 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -101,3 +101,15 @@ PROCESSOR_INSTRUCTIONSET_VALUE_MAP = { PROCESSOR_INSTRUCTIONSET_VALUE_MAP_REV = ( utils.revert_dictionary(PROCESSOR_INSTRUCTIONSET_VALUE_MAP)) + +SYSTEM_TYPE_VALUE_MAP = { + 'Physical': sys_cons.SYSTEM_TYPE_PHYSICAL, + 'Virtual': sys_cons.SYSTEM_TYPE_VIRTUAL, + 'OS': sys_cons.SYSTEM_TYPE_OS, + 'PhysicallyPartitioned': sys_cons.SYSTEM_TYPE_PHYSICALLY_PARTITIONED, + 'VirtuallyPartitioned': sys_cons.SYSTEM_TYPE_VIRTUALLY_PARTITIONED, + 'Composed': sys_cons.SYSTEM_TYPE_COMPOSED +} + +SYSTEM_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(SYSTEM_TYPE_VALUE_MAP)) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 69c44b8..879a9bb 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -117,8 +117,8 @@ class 
System(base.ResourceBase): status = common.StatusField('Status') """The system status""" - # TODO(lucasagomes): Create mappings for the system_type - system_type = base.Field('SystemType') + system_type = base.MappedField('SystemType', + sys_maps.SYSTEM_TYPE_VALUE_MAP) """The system type""" uuid = base.Field('UUID') diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 5113f69..6aa7760 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -59,7 +59,8 @@ class SystemTestCase(base.TestCase): self.assertEqual('224071-J23', self.sys_inst.part_number) self.assertEqual('437XR1138R2', self.sys_inst.serial_number) self.assertEqual('8675309', self.sys_inst.sku) - self.assertEqual('Physical', self.sys_inst.system_type) + self.assertEqual(sushy.SYSTEM_TYPE_PHYSICAL, + self.sys_inst.system_type) self.assertEqual('38947555-7742-3448-3784-823347823834', self.sys_inst.uuid) self.assertEqual(res_cons.STATE_ENABLED, self.sys_inst.status.state) -- GitLab From d35a30f85645408073e0edf5bf56ab1acc63df02 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 21 Mar 2019 09:48:48 +0100 Subject: [PATCH 136/303] Add versions to release notes series Change-Id: I2d24583e88bf8762323e617d8663a9c4d12b0cc9 --- releasenotes/source/pike.rst | 6 +++--- releasenotes/source/queens.rst | 6 +++--- releasenotes/source/rocky.rst | 6 +++--- releasenotes/source/stein.rst | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/releasenotes/source/pike.rst b/releasenotes/source/pike.rst index e43bfc0..0a4fa10 100644 --- a/releasenotes/source/pike.rst +++ b/releasenotes/source/pike.rst @@ -1,6 +1,6 @@ -=================================== - Pike Series Release Notes -=================================== +========================================= +Pike Series (1.0.0 - 1.1.x) Release Notes +========================================= .. 
release-notes:: :branch: stable/pike diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst index 36ac616..47688af 100644 --- a/releasenotes/source/queens.rst +++ b/releasenotes/source/queens.rst @@ -1,6 +1,6 @@ -=================================== - Queens Series Release Notes -=================================== +=========================================== +Queens Series (1.2.0 - 1.3.x) Release Notes +=========================================== .. release-notes:: :branch: stable/queens diff --git a/releasenotes/source/rocky.rst b/releasenotes/source/rocky.rst index 40dd517..1aac5a7 100644 --- a/releasenotes/source/rocky.rst +++ b/releasenotes/source/rocky.rst @@ -1,6 +1,6 @@ -=================================== - Rocky Series Release Notes -=================================== +========================================== +Rocky Series (1.4.0 - 1.6.x) Release Notes +========================================== .. release-notes:: :branch: stable/rocky diff --git a/releasenotes/source/stein.rst b/releasenotes/source/stein.rst index efaceb6..f7e7823 100644 --- a/releasenotes/source/stein.rst +++ b/releasenotes/source/stein.rst @@ -1,6 +1,6 @@ -=================================== - Stein Series Release Notes -=================================== +========================================== +Stein Series (1.7.0 - 1.8.x) Release Notes +========================================== .. release-notes:: :branch: stable/stein -- GitLab From 366e59ba17baab500d27ce39fee4c8f7173e7757 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 27 Mar 2019 13:36:32 +0100 Subject: [PATCH 137/303] Now packaging 1.8.1. 
--- debian/changelog | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 0df7cf6..3637e6c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,12 @@ -python-sushy (1.3.1-4) UNRELEASED; urgency=medium +python-sushy (1.8.1-1) experimental; urgency=medium + [ Ondřej Nový ] * Running wrap-and-sort -bast. - -- Ondřej Nový Wed, 09 Jan 2019 14:26:54 +0100 + [ Thomas Goirand ] + * New upstream release. + + -- Thomas Goirand Wed, 27 Mar 2019 13:36:13 +0100 python-sushy (1.3.1-3) unstable; urgency=medium -- GitLab From 7182f07c1d1632da682be0fab2f9b3bbc828142a Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 27 Mar 2019 13:38:19 +0100 Subject: [PATCH 138/303] Fixed (build-)depends for this release. --- debian/changelog | 1 + debian/control | 16 +++++++++------- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/debian/changelog b/debian/changelog index 3637e6c..3720ab6 100644 --- a/debian/changelog +++ b/debian/changelog @@ -5,6 +5,7 @@ python-sushy (1.8.1-1) experimental; urgency=medium [ Thomas Goirand ] * New upstream release. + * Fixed (build-)depends for this release. 
-- Thomas Goirand Wed, 27 Mar 2019 13:36:13 +0100 diff --git a/debian/control b/debian/control index 7894374..251163f 100644 --- a/debian/control +++ b/debian/control @@ -9,20 +9,22 @@ Build-Depends: dh-python, openstack-pkg-tools, python3-all, - python3-pbr (>= 2.0.0), + python3-pbr, python3-setuptools, - python3-sphinx (>= 1.6.2), + python3-sphinx, Build-Depends-Indep: python3-coverage, + python3-dateutil, python3-hacking, - python3-openstackdocstheme (>= 1.17.0), - python3-oslotest (>= 1:3.2.0), - python3-requests (>= 2.14.2), + python3-openstackdocstheme, + python3-oslotest, + python3-requests, python3-six, + python3-stestr, + python3-stevedore, python3-testscenarios, - python3-testtools (>= 2.2.0), + python3-testtools, subunit, - testrepository, Standards-Version: 4.1.3 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git -- GitLab From dbf08343bdc619b1c6ac3cfe725dadd1a73c0248 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 27 Mar 2019 13:38:58 +0100 Subject: [PATCH 139/303] Standards-Version: 4.3.0 (no change). --- debian/changelog | 1 + debian/control | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index 3720ab6..d433e89 100644 --- a/debian/changelog +++ b/debian/changelog @@ -6,6 +6,7 @@ python-sushy (1.8.1-1) experimental; urgency=medium [ Thomas Goirand ] * New upstream release. * Fixed (build-)depends for this release. + * Standards-Version: 4.3.0 (no change). 
-- Thomas Goirand Wed, 27 Mar 2019 13:36:13 +0100 diff --git a/debian/control b/debian/control index 251163f..2fe8bca 100644 --- a/debian/control +++ b/debian/control @@ -25,7 +25,7 @@ Build-Depends-Indep: python3-testscenarios, python3-testtools, subunit, -Standards-Version: 4.1.3 +Standards-Version: 4.3.0 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy -- GitLab From 2c7e37fc2a379dbc6fc2764f4115a09117ef2746 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 27 Mar 2019 13:39:39 +0100 Subject: [PATCH 140/303] Fixed diff with upstream tag. --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f9cffac..b14f1c6 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ setenv = PYTHONWARNINGS=default::DeprecationWarning install_command = pip install {opts} {packages} deps = - -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt?h=stable/queens} + -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = stestr run --slowest {posargs} -- GitLab From 29fddda9ae9f1fff991e632f23bdbecad74d547a Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 27 Mar 2019 13:53:25 +0100 Subject: [PATCH 141/303] Running unit tests with installed Python module. --- debian/changelog | 1 + debian/python3-sushy.install | 1 + debian/rules | 11 +++++++---- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 debian/python3-sushy.install diff --git a/debian/changelog b/debian/changelog index d433e89..24c81ac 100644 --- a/debian/changelog +++ b/debian/changelog @@ -7,6 +7,7 @@ python-sushy (1.8.1-1) experimental; urgency=medium * New upstream release. * Fixed (build-)depends for this release. 
* Standards-Version: 4.3.0 (no change). + * Running unit tests with installed Python module. -- Thomas Goirand Wed, 27 Mar 2019 13:36:13 +0100 diff --git a/debian/python3-sushy.install b/debian/python3-sushy.install new file mode 100644 index 0000000..74e4e23 --- /dev/null +++ b/debian/python3-sushy.install @@ -0,0 +1 @@ +/usr diff --git a/debian/rules b/debian/rules index 292b154..71309ed 100755 --- a/debian/rules +++ b/debian/rules @@ -13,13 +13,16 @@ override_dh_auto_build: echo "Do nothing..." override_dh_auto_install: - pkgos-dh_auto_install - -override_dh_auto_test: + for i in $(PYTHON3S) ; do \ + python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp ; \ + done ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS))) - pkgos-dh_auto_test --no-py2 + PYTHONPATH=$(CURDIR)/debian/tmp/usr/lib/python3/dist-packages pkgos-dh_auto_test --no-py2 endif +override_dh_auto_test: + echo "Do nothing..." + override_dh_sphinxdoc: ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS))) PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source debian/python-sushy-doc/usr/share/doc/python-sushy-doc/html -- GitLab From c8a63542cfc21cfe6a19f21ebd36d365c6822d65 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 2 Apr 2019 19:54:16 +0200 Subject: [PATCH 142/303] Add settable `IndicatorLED` of `System` and `Chassis` The ``IndicatorLED`` property of ``System`` and ``Chassis`` resources made settable with the introduction of the ``.set_indicator_led()`` method to the respective sushy classes. 
Change-Id: Ie4e0d3ad20f051fa0fc1d679f020d174de71bca0 Story: 2005342 Task: 30290 --- .../make-leds-settable-c82cb513de0171f5.yaml | 6 ++++++ sushy/resources/chassis/chassis.py | 20 +++++++++++++++++++ sushy/resources/mappings.py | 2 ++ sushy/resources/system/system.py | 20 +++++++++++++++++++ .../unit/resources/chassis/test_chassis.py | 15 ++++++++++++++ .../unit/resources/system/test_system.py | 15 ++++++++++++++ 6 files changed, 78 insertions(+) create mode 100644 releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml diff --git a/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml b/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml new file mode 100644 index 0000000..846f75e --- /dev/null +++ b/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + The ``IndicatorLED`` property of ``System`` and ``Chassis`` resources + made settable with the introduction of the ``.set_indicator_led()`` + method to the respective sushy classes. diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py index a88fb5f..bd99955 100644 --- a/sushy/resources/chassis/chassis.py +++ b/sushy/resources/chassis/chassis.py @@ -196,6 +196,26 @@ class Chassis(base.ResourceBase): self._conn.post(target_uri, data={'ResetType': value}) LOG.info('The Chassis %s is being reset', self.identity) + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. + + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. 
+ """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() + @property @utils.cache_it def managers(self): diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index 3416769..f1687a0 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -53,6 +53,8 @@ INDICATOR_LED_VALUE_MAP = { 'Unknown': res_cons.INDICATOR_LED_UNKNOWN, } +INDICATOR_LED_VALUE_MAP_REV = utils.revert_dictionary(INDICATOR_LED_VALUE_MAP) + POWER_STATE_VALUE_MAP = { 'On': res_cons.POWER_STATE_ON, 'Off': res_cons.POWER_STATE_OFF, diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 879a9bb..428de88 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -246,6 +246,26 @@ class System(base.ResourceBase): # Probably we should call refresh() as well. self._conn.patch(self.path, data=data) + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. + + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. 
+ """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() + def _get_processor_collection_path(self): """Helper function to find the ProcessorCollection path""" return utils.get_sub_resource_path_by(self, 'Processors') diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index bed4319..e4f55a6 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -120,6 +120,21 @@ class ChassisTestCase(base.TestCase): self.assertRaises(exceptions.InvalidParameterValueError, self.chassis.reset_chassis, 'invalid-value') + def test_set_indicator_led(self): + with mock.patch.object( + self.chassis, 'invalidate', autospec=True) as invalidate_mock: + self.chassis.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + self.chassis._conn.patch.assert_called_once_with( + '/redfish/v1/Chassis/Blade1', + data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.chassis.set_indicator_led, + 'spooky-glowing') + def test_managers(self): # | GIVEN | with open('sushy/tests/unit/json_samples/' diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 6aa7760..c1c0ca2 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -233,6 +233,21 @@ class SystemTestCase(base.TestCase): sushy.BOOT_SOURCE_TARGET_HDD, enabled='invalid-enabled') + def test_set_indicator_led(self): + with mock.patch.object( + self.sys_inst, 'invalidate', 
autospec=True) as invalidate_mock: + self.sys_inst.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.sys_inst.set_indicator_led, + 'spooky-glowing') + def test__get_processor_collection_path_missing_processors_attr(self): self.sys_inst._json.pop('Processors') self.assertRaisesRegex( -- GitLab From 4ad580bbc88ff69bfd55f087d029ee770444b1ee Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 3 Apr 2019 11:44:03 +0200 Subject: [PATCH 143/303] Add settable `IndicatorLED` property to the `Drive` resource Adds ``IndicatorLED`` property to the ``Drive`` resource. The state of the LED can be read and can be changed via the ``.set_indicator_led()`` method of the ``Drive`` class. Change-Id: Ief81d5315dee9a98d69223ca01de11ef757a348c Story: 2005342 Task: 30290 --- .../notes/add-drive-led-97b687013fec88c9.yaml | 6 +++++ sushy/resources/system/storage/drive.py | 26 +++++++++++++++++++ .../resources/system/storage/test_drive.py | 18 +++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 releasenotes/notes/add-drive-led-97b687013fec88c9.yaml diff --git a/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml b/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml new file mode 100644 index 0000000..af31939 --- /dev/null +++ b/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds the ``IndicatorLED`` property to the ``Drive`` resource. The state of + the LED can be read and can be changed via the ``.set_indicator_led()`` + method of the ``Drive`` sushy class. 
diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py index e45ca9e..ca418ff 100644 --- a/sushy/resources/system/storage/drive.py +++ b/sushy/resources/system/storage/drive.py @@ -15,7 +15,9 @@ import logging +from sushy import exceptions from sushy.resources import base +from sushy.resources import mappings as res_maps from sushy import utils LOG = logging.getLogger(__name__) @@ -27,8 +29,32 @@ class Drive(base.ResourceBase): identity = base.Field('Id', required=True) """The Drive identity string""" + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """Whether the indicator LED is lit or off""" + name = base.Field('Name') """The name of the resource""" capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) """The size in bytes of this Drive""" + + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. + + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. 
+ """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index 8d3edb0..65d6e1b 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -14,6 +14,8 @@ import json import mock +import sushy +from sushy import exceptions from sushy.resources.system.storage import drive from sushy.tests.unit import base @@ -37,3 +39,19 @@ class DriveTestCase(base.TestCase): self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) self.assertEqual('Drive Sample', self.stor_drive.name) self.assertEqual(899527000000, self.stor_drive.capacity_bytes) + + def test_set_indicator_led(self): + with mock.patch.object( + self.stor_drive, 'invalidate', + autospec=True) as invalidate_mock: + self.stor_drive.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + self.stor_drive._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138/Storage/1/Drives/' + '32ADF365C6C1B7BD', data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.stor_drive.set_indicator_led, + 'spooky-glowing') -- GitLab From 87dcc7a1398b0f398e5241744be6a8c60a0b2316 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 9 Apr 2019 17:54:24 +0200 Subject: [PATCH 144/303] Adding Thermal resource schema Adding Thermal resource schema from Redfish API Change-Id: I40279cfa689632b8e6efa5e8db991224bd02d33e Story: 2005414 Task: 30431 --- ...add-thermal-resource-5c965a3c940f9028.yaml | 4 + 
sushy/resources/chassis/thermal/__init__.py | 0 sushy/resources/chassis/thermal/constants.py | 18 +++ sushy/resources/chassis/thermal/mappings.py | 18 +++ sushy/resources/chassis/thermal/thermal.py | 138 ++++++++++++++++++ sushy/tests/unit/json_samples/thermal.json | 52 +++++++ .../unit/resources/chassis/test_thermal.py | 73 +++++++++ 7 files changed, 303 insertions(+) create mode 100644 releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml create mode 100644 sushy/resources/chassis/thermal/__init__.py create mode 100644 sushy/resources/chassis/thermal/constants.py create mode 100644 sushy/resources/chassis/thermal/mappings.py create mode 100644 sushy/resources/chassis/thermal/thermal.py create mode 100644 sushy/tests/unit/json_samples/thermal.json create mode 100644 sushy/tests/unit/resources/chassis/test_thermal.py diff --git a/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml b/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml new file mode 100644 index 0000000..1f69131 --- /dev/null +++ b/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the Thermal resource to the Library. \ No newline at end of file diff --git a/sushy/resources/chassis/thermal/__init__.py b/sushy/resources/chassis/thermal/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/chassis/thermal/constants.py b/sushy/resources/chassis/thermal/constants.py new file mode 100644 index 0000000..fbf8b46 --- /dev/null +++ b/sushy/resources/chassis/thermal/constants.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +FAN_READING_UNIT_PERCENTAGE = 'Percentage' +"""Indicates that the fan reading and thresholds are measured in percentage""" + +FAN_READING_UNIT_RPM = 'RPM' +"""Indicates that the fan reading and thresholds +are measured in rotations per minute.""" diff --git a/sushy/resources/chassis/thermal/mappings.py b/sushy/resources/chassis/thermal/mappings.py new file mode 100644 index 0000000..e67f63f --- /dev/null +++ b/sushy/resources/chassis/thermal/mappings.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.chassis.thermal import constants as the_cons + +FAN_READING_UNITS_MAP = { + 'Percentage': the_cons.FAN_READING_UNIT_PERCENTAGE, + 'RPM': the_cons.FAN_READING_UNIT_RPM, +} diff --git a/sushy/resources/chassis/thermal/thermal.py b/sushy/resources/chassis/thermal/thermal.py new file mode 100644 index 0000000..b058b81 --- /dev/null +++ b/sushy/resources/chassis/thermal/thermal.py @@ -0,0 +1,138 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/Thermal.v1_3_0.json + +from sushy.resources import base +from sushy.resources.chassis.thermal import mappings as the_maps +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy import utils + + +class Sensor(base.ListField): + """The sensor device/s associated with Thermal.""" + + identity = base.Field('MemberId', required=True) + """Identifier of the Sensor""" + + lower_threshold_critical = base.Field('LowerThresholdCritical', + adapter=utils.int_or_none) + """Below normal range but not yet fatal""" + + lower_threshold_fatal = base.Field('LowerThresholdFatal', + adapter=utils.int_or_none) + """Below normal range and is fatal""" + + lower_threshold_non_critical = base.Field('LowerThresholdNonCritical', + adapter=utils.int_or_none) + """Below normal range""" + + name = base.Field('Name') + """The name of this sensor""" + + physical_context = base.Field('PhysicalContext') + """Area or device associated with this sensor""" + + status = common.StatusField('Status') + """Status of the sensor""" + + upper_threshold_critical = base.Field('UpperThresholdCritical', + adapter=utils.int_or_none) + """Above normal range but not yet fatal""" + + upper_threshold_fatal = base.Field('UpperThresholdFatal', + adapter=utils.int_or_none) + """Above normal range and is fatal""" + + upper_threshold_non_critical = base.Field('UpperThresholdNonCritical', + adapter=utils.int_or_none) + """Above normal range""" + + +class FansListField(Sensor): + """The Fan device/s associated with Thermal.""" + + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """The state of the indicator LED, used to identify the fan""" + + manufacturer = base.Field('Manufacturer') + """This is the manufacturer of this Fan""" + + max_reading_range = base.Field('MaxReadingRange', + adapter=utils.int_or_none) + """Maximum value for Reading""" + + min_reading_range = base.Field('MinReadingRange', + 
adapter=utils.int_or_none) + """Minimum value for Reading""" + + model = base.Field('Model') + """The model of this Fan""" + + part_number = base.Field('PartNumber') + """Part number of this Fan""" + + reading = base.Field('Reading', adapter=utils.int_or_none) + """Current Fan Speed""" + + reading_units = base.MappedField('ReadingUnits', + the_maps.FAN_READING_UNITS_MAP) + """Units in which the reading and thresholds are measured""" + + serial_number = base.Field('SerialNumber') + """Serial number of this Fan""" + + +class TemperaturesListField(Sensor): + """The Temperature device/s associated with Thermal.""" + + max_allowable_operating_value = base.Field('MaxAllowableOperatingValue', + adapter=utils.int_or_none) + """Maximum allowable operating temperature for this equipment""" + + min_allowable_operating_value = base.Field('MinAllowableOperatingValue', + adapter=utils.int_or_none) + """Minimum allowable operating temperature for this equipment""" + + max_reading_range_temp = base.Field('MaxReadingRangeTemp') + """Maximum value for ReadingCelsius""" + + min_reading_range_temp = base.Field('MinReadingRangeTemp') + """Minimum value for ReadingCelsius""" + + reading_celsius = base.Field('ReadingCelsius') + """Temperature""" + + sensor_number = base.Field('SensorNumber', adapter=utils.int_or_none) + """A numerical identifier to represent the temperature sensor""" + + +class Thermal(base.ResourceBase): + """This class represents a Thermal resource.""" + + identity = base.Field('Id') + """Identifier of the resource""" + + name = base.Field('Name') + """The name of the resource""" + + status = common.StatusField('Status') + """Status of the resource""" + + fans = FansListField('Fans', default=[]) + """A tuple of Fan identities""" + + temperatures = TemperaturesListField('Temperatures', default=[]) + """A tuple of Temperature identities""" diff --git a/sushy/tests/unit/json_samples/thermal.json b/sushy/tests/unit/json_samples/thermal.json new file mode 100644 index 
0000000..9e7f698 --- /dev/null +++ b/sushy/tests/unit/json_samples/thermal.json @@ -0,0 +1,52 @@ +{ + "@odata.type": "#Thermal.v1_3_0.Thermal", + "Id": "Thermal", + "Name": "Blade Thermal", + "Temperatures": [ + { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal#/Temperatures/0", + "MemberId": "0", + "Name": "CPU Temp", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ReadingCelsius": 62, + "UpperThresholdNonCritical": 75, + "UpperThresholdCritical": 90, + "UpperThresholdFatal": 95, + "MinReadingRangeTemp": 0, + "MaxReadingRangeTemp": 120, + "PhysicalContext": "CPU", + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6/Processors/CPU" + } + ] + } + ], + "Fans": [ + { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal#/Fans/0", + "MemberId": "0", + "Name": "CPU Fan", + "PhysicalContext": "CPU", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Reading": 6000, + "ReadingUnits": "RPM", + "LowerThresholdFatal": 2000, + "MinReadingRange": 0, + "MaxReadingRange": 10000, + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6/Processors/CPU" + } + ] + } + ], + "@odata.context": "/redfish/v1/$metadata#Thermal.Thermal", + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal" +} diff --git a/sushy/tests/unit/resources/chassis/test_thermal.py b/sushy/tests/unit/resources/chassis/test_thermal.py new file mode 100644 index 0000000..2e5e4b0 --- /dev/null +++ b/sushy/tests/unit/resources/chassis/test_thermal.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy.resources.chassis.thermal import thermal +from sushy.tests.unit import base + + +class ThermalTestCase(base.TestCase): + + def setUp(self): + super(ThermalTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/thermal.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.thermal = thermal.Thermal( + self.conn, '/redfish/v1/Chassis/Blade1/Thermal', + redfish_version='1.5.0') + + def test__parse_attributes(self): + self.thermal._parse_attributes() + self.assertEqual('1.5.0', self.thermal.redfish_version) + self.assertEqual('Thermal', self.thermal.identity) + self.assertEqual('Blade Thermal', self.thermal.name) + + self.assertEqual('0', self.thermal.fans[0].identity) + self.assertEqual('CPU Fan', self.thermal.fans[0].name) + self.assertEqual('CPU', self.thermal.fans[0].physical_context) + self.assertEqual('enabled', self.thermal.fans[0].status.state) + self.assertEqual('ok', self.thermal.fans[0].status.health) + self.assertEqual(6000, self.thermal.fans[0].reading) + self.assertEqual('RPM', self.thermal.fans[0].reading_units) + self.assertEqual(2000, self.thermal.fans[0].lower_threshold_fatal) + self.assertEqual(0, self.thermal.fans[0].min_reading_range) + self.assertEqual(10000, self.thermal.fans[0].max_reading_range) + + self.assertEqual('0', self.thermal.temperatures[0].identity) + self.assertEqual('CPU Temp', self.thermal.temperatures[0].name) + self.assertEqual('enabled', self.thermal.temperatures[0].status.state) + self.assertEqual('ok', self.thermal.temperatures[0].status.health) + self.assertEqual(62, self.thermal.temperatures[0].reading_celsius) + self.assertEqual( + 75, + self.thermal.temperatures[0].upper_threshold_non_critical + ) + self.assertEqual( + 90, + self.thermal.temperatures[0].upper_threshold_critical + ) + self.assertEqual( + 95, + 
self.thermal.temperatures[0].upper_threshold_fatal + ) + self.assertEqual(0, + self.thermal.temperatures[0].min_reading_range_temp) + self.assertEqual(120, + self.thermal.temperatures[0].max_reading_range_temp) + self.assertEqual('CPU', self.thermal.temperatures[0].physical_context) -- GitLab From 324f564c7e81be0e5f1135f4645def9bfabd04e3 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 9 Apr 2019 11:58:42 +0200 Subject: [PATCH 145/303] Expand Drive schema Adding more properties to the Drive schema Change-Id: I4fac9f264fdcbb920306582fdde904d75d6bcdf2 --- .../expand-drive-schema-042901f919be646c.yaml | 5 +++++ sushy/resources/system/storage/drive.py | 20 +++++++++++++++++-- .../resources/system/storage/test_drive.py | 6 ++++++ 3 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 releasenotes/notes/expand-drive-schema-042901f919be646c.yaml diff --git a/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml b/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml new file mode 100644 index 0000000..14ded88 --- /dev/null +++ b/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds ``CapacityBites``, ``Manufacturer``, ``Model``, ``PartNumber``, + ``SerialNumber`` and ``Status`` properties to the ``Drive`` resource. 
\ No newline at end of file diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py index ca418ff..92f21fb 100644 --- a/sushy/resources/system/storage/drive.py +++ b/sushy/resources/system/storage/drive.py @@ -17,6 +17,7 @@ import logging from sushy import exceptions from sushy.resources import base +from sushy.resources import common from sushy.resources import mappings as res_maps from sushy import utils @@ -26,6 +27,9 @@ LOG = logging.getLogger(__name__) class Drive(base.ResourceBase): """This class represents a disk drive or other physical storage medium.""" + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size in bytes of this Drive""" + identity = base.Field('Id', required=True) """The Drive identity string""" @@ -33,11 +37,23 @@ class Drive(base.ResourceBase): res_maps.INDICATOR_LED_VALUE_MAP) """Whether the indicator LED is lit or off""" + manufacturer = base.Field('Manufacturer') + """This is the manufacturer of this drive""" + + model = base.Field('Model') + """This is the model number for the drive""" + name = base.Field('Name') """The name of the resource""" - capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) - """The size in bytes of this Drive""" + part_number = base.Field('PartNumber') + """The part number for this drive""" + + serial_number = base.Field('SerialNumber') + """The serial number for this drive""" + + status = common.StatusField('Status') + """This type describes the status and health of the drive""" def set_indicator_led(self, state): """Set IndicatorLED to the given state. 
diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index 65d6e1b..d62af27 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -39,6 +39,12 @@ class DriveTestCase(base.TestCase): self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) self.assertEqual('Drive Sample', self.stor_drive.name) self.assertEqual(899527000000, self.stor_drive.capacity_bytes) + self.assertEqual('Contoso', self.stor_drive.manufacturer) + self.assertEqual('C123', self.stor_drive.model) + self.assertEqual('C123-1111', self.stor_drive.part_number) + self.assertEqual('1234570', self.stor_drive.serial_number) + self.assertEqual(sushy.STATE_ENABLED, self.stor_drive.status.state) + self.assertEqual(sushy.HEALTH_OK, self.stor_drive.status.health) def test_set_indicator_led(self): with mock.patch.object( -- GitLab From ad15a88908c64bd4acafdec477ba3c1a5aff0420 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Wed, 10 Apr 2019 16:36:22 +0200 Subject: [PATCH 146/303] Adding Power resource schema Adding Power resource schema from Redfish API Change-Id: Iaabdda6c1fe0c0c492ad05c19f34172864703da8 Story: 2005414 Task: 30451 --- .../add-power-resource-e141ddf298673305.yaml | 4 + sushy/resources/chassis/power/__init__.py | 0 sushy/resources/chassis/power/constants.py | 69 ++++++++ sushy/resources/chassis/power/mappings.py | 40 +++++ sushy/resources/chassis/power/power.py | 122 ++++++++++++++ sushy/tests/unit/json_samples/power.json | 159 ++++++++++++++++++ .../unit/resources/chassis/test_power.py | 135 +++++++++++++++ 7 files changed, 529 insertions(+) create mode 100644 releasenotes/notes/add-power-resource-e141ddf298673305.yaml create mode 100644 sushy/resources/chassis/power/__init__.py create mode 100644 sushy/resources/chassis/power/constants.py create mode 100644 sushy/resources/chassis/power/mappings.py create mode 100644 
sushy/resources/chassis/power/power.py create mode 100644 sushy/tests/unit/json_samples/power.json create mode 100644 sushy/tests/unit/resources/chassis/test_power.py diff --git a/releasenotes/notes/add-power-resource-e141ddf298673305.yaml b/releasenotes/notes/add-power-resource-e141ddf298673305.yaml new file mode 100644 index 0000000..ef9070a --- /dev/null +++ b/releasenotes/notes/add-power-resource-e141ddf298673305.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the Power resource to the Library. \ No newline at end of file diff --git a/sushy/resources/chassis/power/__init__.py b/sushy/resources/chassis/power/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/resources/chassis/power/constants.py b/sushy/resources/chassis/power/constants.py new file mode 100644 index 0000000..1425680 --- /dev/null +++ b/sushy/resources/chassis/power/constants.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +# Power Supply Types +POWER_SUPPLY_TYPE_UNKNOWN = 'unknown' +"""The power supply type cannot be determined.""" + +POWER_SUPPLY_TYPE_AC = 'ac' +"""Alternating Current (AC) power supply.""" + +POWER_SUPPLY_TYPE_DC = 'dc' +"""Direct Current (DC) power supply.""" + +POWER_SUPPLY_TYPE_ACDC = 'acdc' +"""Power Supply supports both DC or AC.""" + +# Line Input Voltage Types +LINE_INPUT_VOLTAGE_TYPE_UNKNOWN = 'unknown' +"""The power supply line input voltage tpye cannot be determined.""" + +LINE_INPUT_VOLTAGE_TYPE_ACLOW = 'aclowline' +"""100-127V AC input.""" + +LINE_INPUT_VOLTAGE_TYPE_ACMID = 'acmidline' +"""200-240V AC input.""" + +LINE_INPUT_VOLTAGE_TYPE_ACHIGH = 'achighline' +"""277V AC input.""" + +LINE_INPUT_VOLTAGE_TYPE_DCNEG48 = 'dcneg48v' +"""-48V DC input.""" + +LINE_INPUT_VOLTAGE_TYPE_DC380 = 'dc380v' +"""High Voltage DC input (380V).""" + +LINE_INPUT_VOLTAGE_TYPE_AC120 = 'ac120v' +"""AC 120V nominal input.""" + +LINE_INPUT_VOLTAGE_TYPE_AC240 = 'ac240v' +"""AC 240V nominal input.""" + +LINE_INPUT_VOLTAGE_TYPE_AC277 = 'ac277v' +"""AC 277V nominal input.""" + +LINE_INPUT_VOLTAGE_TYPE_ACDCWIDE = 'acdcwiderange' +"""Wide range AC or DC input.""" + +LINE_INPUT_VOLTAGE_TYPE_ACWIDE = 'acwiderange' +"""Wide range AC input.""" + +LINE_INPUT_VOLTAGE_TYPE_DC240 = 'dc240v' +"""DC 240V nominal input.""" + +# Input Types +INPUT_TYPE_AC = 'ac' +"""Alternating Current (AC) input range.""" + +INPUT_TYPE_DC = 'dc' +"""Direct Current (DC) input range.""" diff --git a/sushy/resources/chassis/power/mappings.py b/sushy/resources/chassis/power/mappings.py new file mode 100644 index 0000000..12d8a1d --- /dev/null +++ b/sushy/resources/chassis/power/mappings.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.chassis.power import constants as pow_cons + +POWER_SUPPLY_TYPE_MAP = { + 'Unknown': pow_cons.POWER_SUPPLY_TYPE_UNKNOWN, + 'AC': pow_cons.POWER_SUPPLY_TYPE_AC, + 'DC': pow_cons.POWER_SUPPLY_TYPE_DC, + 'ACorDC': pow_cons.POWER_SUPPLY_TYPE_ACDC, +} + +POWER_SUPPLY_INPUT_TYPE_MAP = { + 'AC': pow_cons.INPUT_TYPE_AC, + 'DC': pow_cons.INPUT_TYPE_DC, +} + +LINE_INPUT_VOLTAGE_TYPE_MAP = { + 'Unknown': pow_cons.LINE_INPUT_VOLTAGE_TYPE_UNKNOWN, + 'ACLowLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACLOW, + 'ACMidLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACMID, + 'ACHighLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACHIGH, + 'DCNeg48V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DCNEG48, + 'DC380V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DC380, + 'AC120V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC120, + 'AC240V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC240, + 'AC277V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC277, + 'ACandDCWideRange': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACDCWIDE, + 'ACWideRange': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACWIDE, + 'DC240V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DC240, +} diff --git a/sushy/resources/chassis/power/power.py b/sushy/resources/chassis/power/power.py new file mode 100644 index 0000000..c48e1d4 --- /dev/null +++ b/sushy/resources/chassis/power/power.py @@ -0,0 +1,122 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Power.v1_3_0.json + +from sushy.resources import base +from sushy.resources.chassis.power import mappings as pow_maps +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy import utils + + +class InputRangeListField(base.ListField): + """This type describes an input range for a power supply""" + + input_type = base.MappedField('InputType', + pow_maps.POWER_SUPPLY_INPUT_TYPE_MAP) + """The Input type (AC or DC)""" + + maximum_frequency_hz = base.Field('MaximumFrequencyHz', + adapter=utils.int_or_none) + """The maximum line input frequency at which this power supply input range + is effective""" + + maximum_voltage = base.Field('MaximumVoltage', adapter=utils.int_or_none) + """The maximum line input voltage at which this power supply input range + is effective""" + + minimum_frequency_hz = base.Field('MinimumFrequencyHz', + adapter=utils.int_or_none) + """The minimum line input frequency at which this power supply input range + is effective""" + + minimum_voltage = base.Field('MinimumVoltage', adapter=utils.int_or_none) + """The minimum line input voltage at which this power supply input range + is effective""" + + output_wattage = base.Field('OutputWattage', adapter=utils.int_or_none) + """The maximum capacity of this Power Supply when operating in this input + range""" + + +class PowerSupplyListField(base.ListField): + """The power supplies associated with this Power resource""" + + firmware_version = 
base.Field('FirmwareVersion') + """The firmware version for this Power Supply""" + + identity = base.Field('MemberId') + """Identifier of the Power Supply""" + + indicator_led = base.MappedField('IndicatorLed', + res_maps.INDICATOR_LED_VALUE_MAP) + """The state of the indicator LED, used to identify the power supply""" + + input_ranges = InputRangeListField('InputRanges', default=[]) + """This is the input ranges that the power supply can use""" + + last_power_output_watts = base.Field('LastPowerOutputWatts', + adapter=utils.int_or_none) + """The average power output of this Power Supply""" + + line_input_voltage = base.Field('LineInputVoltage', + adapter=utils.int_or_none) + """The line input voltage at which the Power Supply is operating""" + + line_input_voltage_type = base.MappedField( + 'LineInputVoltageType', + pow_maps.LINE_INPUT_VOLTAGE_TYPE_MAP) + """The line voltage type supported as an input to this Power Supply""" + + manufacturer = base.Field('Manufacturer') + """This is the manufacturer of this power supply""" + + model = base.Field('Model') + """The model number for this Power Supply""" + + name = base.Field('Name') + """Name of the Power Supply""" + + part_number = base.Field('PartNumber') + """The part number for this Power Supply""" + + power_capacity_watts = base.Field('PowerCapacityWatts', + adapter=utils.int_or_none) + """The maximum capacity of this Power Supply""" + + power_supply_type = base.MappedField('PowerSupplyType', + pow_maps.POWER_SUPPLY_TYPE_MAP) + """The Power Supply type (AC or DC)""" + + serial_number = base.Field('SerialNumber') + """The serial number for this Power Supply""" + + spare_part_number = base.Field('SparePartNumber') + """The spare part number for this Power Supply""" + + status = common.StatusField('Status') + """Status of the sensor""" + + +class Power(base.ResourceBase): + """This class represents a Power resource.""" + + identity = base.Field('Id', required=True) + """Identifier of the resource""" + + name = 
base.Field('Name', required=True) + """The name of the resource""" + + power_supplies = PowerSupplyListField('PowerSupplies', default=[]) + """Details of a power supplies associated with this system or device""" diff --git a/sushy/tests/unit/json_samples/power.json b/sushy/tests/unit/json_samples/power.json new file mode 100644 index 0000000..59c4ccd --- /dev/null +++ b/sushy/tests/unit/json_samples/power.json @@ -0,0 +1,159 @@ +{ + "@odata.type": "#Power.v1_3_0.Power", + "Id": "Power", + "Name": "Quad Blade Chassis Power", + "PowerSupplies": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0", + "MemberId": "0", + "Name": "Power Supply 0", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PowerSupplyType": "AC", + "LineInputVoltageType": "AC240V", + "LineInputVoltage": 220, + "PowerCapacityWatts": 1450, + "InputRanges": [ + { + "InputType": "AC", + "MinimumVoltage": 185, + "MaximumVoltage": 250, + "MinimumFrequencyHz": 47, + "MaximumFrequencyHz": 63, + "OutputWattage": 1450 + } + ], + "LastPowerOutputWatts": 650, + "Model": "325457-A06", + "Manufacturer": "Cyberdyne", + "FirmwareVersion": "2.20", + "SerialNumber": "1S0000523", + "PartNumber": "425-591-654", + "SparePartNumber": "425-591-654", + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0" + } + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id":"/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9451R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9452R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9453R6" + } + ] + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1", + "MemberId": "1", + "Name": 
"Power Supply 1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PowerSupplyType": "AC", + "LineInputVoltageType": "AC240V", + "LineInputVoltage": 222, + "PowerCapacityWatts": 1450, + "InputRanges": [ + { + "InputType": "AC", + "MinimumVoltage": 185, + "MaximumVoltage": 250, + "MinimumFrequencyHz": 47, + "MaximumFrequencyHz": 63, + "OutputWattage": 1450 + } + ], + "LastPowerOutputWatts": 635, + "Model": "325457-A06", + "Manufacturer": "Cyberdyne", + "FirmwareVersion": "2.20", + "SerialNumber": "1S0000524", + "PartNumber": "425-591-654", + "SparePartNumber": "425-591-654", + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0" + } + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9451R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9452R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9453R6" + } + ] + } + ], + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0", + "MemberId": "0", + "Name": "Power Supply Redundancy", + "RedundancySet": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1" + } + ], + "Mode": "N+m", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "MinNumNeeded": 1, + "MaxNumSupported": 2 + } + ], + "@odata.context": "/redfish/v1/$metadata#Power.Power", + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power" +} diff --git a/sushy/tests/unit/resources/chassis/test_power.py b/sushy/tests/unit/resources/chassis/test_power.py new file mode 100644 index 0000000..80d862c --- /dev/null +++ 
b/sushy/tests/unit/resources/chassis/test_power.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +from sushy.resources.chassis.power import power +from sushy.tests.unit import base + + +class PowerTestCase(base.TestCase): + + def setUp(self): + super(PowerTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/power.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.power = power.Power( + self.conn, '/redfish/v1/Chassis/MultiBladeEnc1/Power', + redfish_version='1.5.0') + + def test__parse_attributes(self): + self.power._parse_attributes() + self.assertEqual('1.5.0', self.power.redfish_version) + self.assertEqual('Power', self.power.identity) + self.assertEqual('Quad Blade Chassis Power', self.power.name) + + self.assertEqual('0', self.power.power_supplies[0].identity) + self.assertEqual('Power Supply 0', self.power.power_supplies[0].name) + self.assertEqual('enabled', self.power.power_supplies[0].status.state) + self.assertEqual('ok', self.power.power_supplies[0].status.health) + self.assertEqual('ac', self.power.power_supplies[0].power_supply_type) + self.assertEqual('ac240v', + self.power.power_supplies[0].line_input_voltage_type) + self.assertEqual(220, self.power.power_supplies[0].line_input_voltage) + self.assertEqual(1450, + self.power.power_supplies[0].power_capacity_watts) + self.assertEqual( + 'ac', + 
self.power.power_supplies[0].input_ranges[0].input_type + ) + self.assertEqual( + 185, + self.power.power_supplies[0].input_ranges[0].minimum_voltage + ) + self.assertEqual( + 250, + self.power.power_supplies[0].input_ranges[0].maximum_voltage + ) + self.assertEqual( + 47, + self.power.power_supplies[0].input_ranges[0].minimum_frequency_hz + ) + self.assertEqual( + 63, + self.power.power_supplies[0].input_ranges[0].maximum_frequency_hz + ) + self.assertEqual( + 1450, + self.power.power_supplies[0].input_ranges[0].output_wattage + ) + self.assertEqual(650, + self.power.power_supplies[0].last_power_output_watts) + self.assertEqual('325457-A06', self.power.power_supplies[0].model) + self.assertEqual('Cyberdyne', + self.power.power_supplies[0].manufacturer) + self.assertEqual('2.20', + self.power.power_supplies[0].firmware_version) + self.assertEqual('1S0000523', + self.power.power_supplies[0].serial_number) + self.assertEqual('425-591-654', + self.power.power_supplies[0].part_number) + self.assertEqual('425-591-654', + self.power.power_supplies[0].spare_part_number) + + self.assertEqual('1', self.power.power_supplies[1].identity) + self.assertEqual('Power Supply 1', self.power.power_supplies[1].name) + self.assertEqual('enabled', self.power.power_supplies[1].status.state) + self.assertEqual('ok', self.power.power_supplies[1].status.health) + self.assertEqual('ac', self.power.power_supplies[1].power_supply_type) + self.assertEqual('ac240v', + self.power.power_supplies[1].line_input_voltage_type) + self.assertEqual(222, self.power.power_supplies[1].line_input_voltage) + self.assertEqual(1450, + self.power.power_supplies[1].power_capacity_watts) + self.assertEqual( + 'ac', + self.power.power_supplies[1].input_ranges[0].input_type + ) + self.assertEqual( + 185, + self.power.power_supplies[1].input_ranges[0].minimum_voltage + ) + self.assertEqual( + 250, + self.power.power_supplies[1].input_ranges[0].maximum_voltage + ) + self.assertEqual( + 47, + 
self.power.power_supplies[1].input_ranges[0].minimum_frequency_hz + ) + self.assertEqual( + 63, + self.power.power_supplies[1].input_ranges[0].maximum_frequency_hz + ) + self.assertEqual( + 1450, + self.power.power_supplies[1].input_ranges[0].output_wattage + ) + self.assertEqual(635, + self.power.power_supplies[1].last_power_output_watts) + self.assertEqual('325457-A06', self.power.power_supplies[1].model) + self.assertEqual('Cyberdyne', + self.power.power_supplies[1].manufacturer) + self.assertEqual('2.20', + self.power.power_supplies[1].firmware_version) + self.assertEqual('1S0000524', + self.power.power_supplies[1].serial_number) + self.assertEqual('425-591-654', + self.power.power_supplies[1].part_number) + self.assertEqual('425-591-654', + self.power.power_supplies[1].spare_part_number) -- GitLab From 2f08e09a09c825176f7511fe9be9e21053b5e786 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Fri, 15 Mar 2019 14:00:21 -0500 Subject: [PATCH 147/303] Update model to support ApplyTime annotations The @Redfish.OperationApplyTimeSupport annotation is currently incorrectly being read from within @Redfish.Settings annotations. Update to instead read it from Actions and VolumeCollection. Add new field class for the @Redfish.SettingsApplyTime annotation and read it from the Bios Settings resource. 
Change-Id: I083d717ce8801eda0f3b0a24dd1eef0ca2c14709 Story: 2005218 Task: 29997 --- ...e-apply-time-support-53c5445b58cd3b42.yaml | 13 ++++ sushy/resources/common.py | 40 +++++++++--- sushy/resources/settings.py | 61 +++++++++++++------ sushy/resources/system/bios.py | 6 ++ sushy/resources/system/storage/volume.py | 5 ++ sushy/resources/system/system.py | 6 ++ .../unit/json_samples/bios_settings.json | 7 +++ sushy/tests/unit/json_samples/settings.json | 15 ----- sushy/tests/unit/json_samples/system.json | 16 ++++- .../unit/json_samples/volume_collection.json | 9 +++ .../resources/system/storage/test_volume.py | 14 ++++- .../tests/unit/resources/system/test_bios.py | 16 ++++- .../unit/resources/system/test_system.py | 28 ++++++++- sushy/tests/unit/resources/test_settings.py | 38 +++--------- 14 files changed, 197 insertions(+), 77 deletions(-) create mode 100644 releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml diff --git a/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml b/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml new file mode 100644 index 0000000..ec74b82 --- /dev/null +++ b/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml @@ -0,0 +1,13 @@ +--- +features: + - | + Update sushy models to support the Redfish SettingsApplyTime and + OperationApplyTimeSupport annotations. +deprecations: + - | + The ``operation_apply_time_support`` and ``maintenance_window`` + properties in the ``SettingsField`` class are deprecated. The + ``SettingsField`` class represents the ``@Redfish.Settings`` + annotation and those properties cannot appear within this + annotation. Instead use the ``apply_time_settings`` property + in the target resource (e.g. ``Bios`` resource). 
diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 9e6e634..9eaa87a 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -10,12 +10,45 @@ # License for the specific language governing permissions and limitations # under the License. +from dateutil import parser + from sushy.resources import base from sushy.resources import mappings as res_maps +class IdRefField(base.CompositeField): + """Reference to the resource odata identity field.""" + + resource_uri = base.Field('@odata.id') + """The unique identifier for a resource""" + + +class OperationApplyTimeSupportField(base.CompositeField): + def __init__(self): + super(OperationApplyTimeSupportField, self).__init__( + path="@Redfish.OperationApplyTimeSupport") + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', adapter=int) + """The expiry time of maintenance window in seconds""" + + _maintenance_window_resource = IdRefField('MaintenanceWindowResource') + """The location of the maintenance window settings""" + + maintenance_window_start_time = base.Field( + 'MaintenanceWindowStartTime', + adapter=parser.parse) + """The start time of a maintenance window""" + + supported_values = base.Field('SupportedValues', required=True, + adapter=list) + """The client is allowed request when performing a create, delete, or + action operation""" + + class ActionField(base.CompositeField): target_uri = base.Field('target', required=True) + operation_apply_time_support = OperationApplyTimeSupportField() class ResetActionField(ActionField): @@ -23,13 +56,6 @@ class ResetActionField(ActionField): adapter=list) -class IdRefField(base.CompositeField): - """Reference to the resource odata identity field.""" - - resource_uri = base.Field('@odata.id') - """The unique identifier for a resource""" - - class StatusField(base.CompositeField): """This Field describes the status of a resource and its children. 
diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index eb53ebe..0d97196 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -14,12 +14,16 @@ # https://redfish.dmtf.org/schemas/Settings.v1_2_0.json from dateutil import parser +import logging from sushy.resources import base from sushy.resources import common from sushy.resources import mappings as res_maps +LOG = logging.getLogger(__name__) + + class MessageListField(base.ListField): """List of messages with details of settings update status""" @@ -63,24 +67,25 @@ class MaintenanceWindowField(base.CompositeField): """The start time of a maintenance window""" -class OperationApplyTimeSupportField(base.CompositeField): +class SettingsApplyTimeField(base.CompositeField): + def __init__(self): + super(SettingsApplyTimeField, self).__init__( + path="@Redfish.SettingsApplyTime") - maintenance_window_duration_in_seconds = base.Field( - 'MaintenanceWindowDurationInSeconds') - """The expiry time of maintenance window in seconds""" + apply_time = base.Field('ApplyTime', adapter=str) + """When the future configuration should be applied""" - maintenance_window_resource = base.Field( - 'MaintenanceWindowResource') - """The location of the maintenance window settings""" + apply_time_allowable_values = base.Field( + 'ApplyTime@Redfish.AllowableValues', adapter=list) + """The list of allowable ApplyTime values""" - maintenance_window_start_time = base.Field( - 'MaintenanceWindowStartTime', - adapter=parser.parse) + maintenance_window_start_time = base.Field('MaintenanceWindowStartTime', + adapter=parser.parse) """The start time of a maintenance window""" - supported_values = base.Field('SupportedValues', required=True) - """The client is allowed request when performing a create, delete, or - action operation""" + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', adapter=int) + """The expiry time of maintenance window in seconds""" class 
SettingsField(base.CompositeField): @@ -111,18 +116,34 @@ class SettingsField(base.CompositeField): to change this resource """ - maintenance_window = MaintenanceWindowField('MaintenanceWindow') - """Indicates if a given resource has a maintenance window assignment - for applying settings or operations""" + @property + def maintenance_window(self): + """MaintenanceWindow field + + Indicates if a given resource has a maintenance window assignment + for applying settings or operations + """ + LOG.warning('The @Redfish.MaintenanceWindow annotation does not ' + 'appear within @Redfish.Settings. Instead use the ' + 'maintenance_window property in the target resource ' + '(e.g. System resource)') + return None messages = MessageListField("Messages") """Represents the results of the last time the values of the Settings resource were applied to the server""" - operation_apply_time_support = OperationApplyTimeSupportField( - 'OperationApplyTimeSupport') - """Indicates if a client is allowed to request for a specific apply - time of a create, delete, or action operation of a given resource""" + @property + def operation_apply_time_support(self): + """OperationApplyTimeSupport field + + Indicates if a client is allowed to request for a specific apply + time of a create, delete, or action operation of a given resource + """ + LOG.warning('Redfish ApplyTime annotations do not appear within ' + '@Redfish.Settings. Instead use the apply_time_settings ' + 'property in the target resource (e.g. 
Bios resource)') + return None def commit(self, connector, value): """Commits new settings values diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 4190b32..84f7fce 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -57,6 +57,8 @@ class Bios(base.ResourceBase): _actions = ActionsField('Actions') + _apply_time_settings = settings.SettingsApplyTimeField() + @property @utils.cache_it def _pending_settings_resource(self): @@ -74,6 +76,10 @@ class Bios(base.ResourceBase): """ return self._pending_settings_resource.attributes + @property + def apply_time_settings(self): + return self._pending_settings_resource._apply_time_settings + def set_attribute(self, key, value): """Update an attribute diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 7d2e20d..c22ccf0 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -16,6 +16,7 @@ import logging from sushy.resources import base +from sushy.resources import common from sushy import utils LOG = logging.getLogger(__name__) @@ -62,3 +63,7 @@ class VolumeCollection(base.ResourceCollectionBase): # NOTE(etingof): for backward compatibility max_size_bytes = max_volume_size_bytes + + operation_apply_time_support = common.OperationApplyTimeSupportField() + """Indicates if a client is allowed to request for a specific apply + time of a create, delete, or action operation of a given resource""" diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 428de88..1e88edf 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -24,6 +24,7 @@ from sushy.resources.chassis import chassis from sushy.resources import common from sushy.resources.manager import manager from sushy.resources import mappings as res_maps +from sushy.resources import settings from sushy.resources.system import bios from sushy.resources.system 
import constants as sys_cons from sushy.resources.system import ethernet_interface @@ -127,6 +128,11 @@ class System(base.ResourceBase): memory_summary = MemorySummaryField('MemorySummary') """The summary info of memory of the system in general detail""" + maintenance_window = settings.MaintenanceWindowField( + '@Redfish.MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" + _actions = ActionsField('Actions', required=True) def __init__(self, connector, identity, redfish_version=None): diff --git a/sushy/tests/unit/json_samples/bios_settings.json b/sushy/tests/unit/json_samples/bios_settings.json index 9b59836..2751a52 100644 --- a/sushy/tests/unit/json_samples/bios_settings.json +++ b/sushy/tests/unit/json_samples/bios_settings.json @@ -15,6 +15,13 @@ "ProcTurboMode": "Disabled", "UsbControl": "UsbEnabled" }, + "@Redfish.SettingsApplyTime": { + "@odata.type": "#Settings.v1_1_0.PreferredApplyTime", + "ApplyTime": "OnReset", + "ApplyTime@Redfish.AllowableValues": [ "OnReset", "Immediate", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600 + }, "@odata.context": "/redfish/v1/$metadata#Bios.Bios", "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings", "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index b1055e0..9e01e14 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -2,21 +2,6 @@ "@Redfish.Settings": { "@odata.type": "#Settings.v1_2_0.Settings", "ETag": "9234ac83b9700123cc32", - "MaintenanceWindow": { - "MaintenanceWindowDurationInSeconds": 1, - "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:05" - }, - "OperationApplyTimeSupport": { - "MaintenanceWindowDurationInSeconds": 1, - "MaintenanceWindowResource": "", - "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:10", - "SupportedValues": [ - "Immediate", - "OnReset", - "AtMaintenanceWindowStart", - "InMaintenanceWindowOnReset" - ] - }, "Messages": [{ "MessageId": "Base.1.0.SettingsFailed", "Message": "Settings update failed due to invalid value", diff --git a/sushy/tests/unit/json_samples/system.json b/sushy/tests/unit/json_samples/system.json index a455c03..55847a9 100644 --- a/sushy/tests/unit/json_samples/system.json +++ b/sushy/tests/unit/json_samples/system.json @@ -122,7 +122,16 @@ "ForceRestart", "Nmi", "ForceOn" - ] + ], + "@Redfish.OperationApplyTimeSupport": { + "@odata.type": "#Settings.v1_2_0.OperationApplyTimeSupport", + "SupportedValues": [ "Immediate", "AtMaintenanceWindowStart" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600, + "MaintenanceWindowResource": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2" + } + } }, "Oem": { "#Contoso.Reset": { @@ -130,6 +139,11 @@ } } }, + "@Redfish.MaintenanceWindow": { + "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", + "MaintenanceWindowDurationInSeconds": 1, + "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:05" + }, "@odata.context": "/redfish/v1/$metadata#ComputerSystem.ComputerSystem", "@odata.id": "/redfish/v1/Systems/437XR1138R2", "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, 
Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." diff --git a/sushy/tests/unit/json_samples/volume_collection.json b/sushy/tests/unit/json_samples/volume_collection.json index 0643e68..71123a8 100644 --- a/sushy/tests/unit/json_samples/volume_collection.json +++ b/sushy/tests/unit/json_samples/volume_collection.json @@ -14,6 +14,15 @@ "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3" } ], + "@Redfish.OperationApplyTimeSupport": { + "@odata.type": "#Settings.v1_2_0.OperationApplyTimeSupport", + "SupportedValues": [ "Immediate", "OnReset", "AtMaintenanceWindowStart" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600, + "MaintenanceWindowResource": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2" + } + }, "Oem": {}, "@odata.context": "/redfish/v1/$metadata#VolumeCollection.VolumeCollection", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes", diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index d1a4d96..cf0b5c4 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -11,9 +11,10 @@ # under the License. 
import json - import mock +from dateutil import parser + from sushy.resources.system.storage import volume from sushy.tests.unit import base @@ -58,6 +59,17 @@ class VolumeCollectionTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3'), self.stor_vol_col.members_identities) + def test_operation_apply_time_support(self): + support = self.stor_vol_col.operation_apply_time_support + self.assertIsNotNone(support) + self.assertEqual(600, support.maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + support.maintenance_window_start_time) + self.assertEqual('/redfish/v1/Systems/437XR1138R2', + support._maintenance_window_resource.resource_uri) + self.assertEqual(['Immediate', 'OnReset', 'AtMaintenanceWindowStart'], + support.supported_values) + @mock.patch.object(volume, 'Volume', autospec=True) def test_get_member(self, Volume_mock): self.stor_vol_col.get_member( diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index b433db9..98b5013 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -13,9 +13,10 @@ # under the License. 
import json - import mock +from dateutil import parser + from sushy import exceptions from sushy.resources.system import bios from sushy.tests.unit import base @@ -106,6 +107,19 @@ class BiosTestCase(base.TestCase): self.sys_bios.pending_attributes self.assertTrue(self.conn.get.called) + def test_apply_time_settings(self): + self.conn.get.reset_mock() + apply_time_settings = self.sys_bios.apply_time_settings + self.assertIsNotNone(apply_time_settings) + self.assertEqual('OnReset', apply_time_settings.apply_time) + self.assertEqual(['OnReset', 'Immediate', 'AtMaintenanceWindowStart', + 'InMaintenanceWindowOnReset'], + apply_time_settings.apply_time_allowable_values) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + apply_time_settings.maintenance_window_start_time) + self.assertEqual(600, apply_time_settings. + maintenance_window_duration_in_seconds) + def test__get_reset_bios_action_element(self): value = self.sys_bios._get_reset_bios_action_element() self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Actions/' diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index c1c0ca2..02f0457 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -14,9 +14,10 @@ # under the License. 
import json - import mock +from dateutil import parser + import sushy from sushy import exceptions from sushy.resources.chassis import chassis @@ -71,6 +72,12 @@ class SystemTestCase(base.TestCase): self.sys_inst.power_state) self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertEqual("OK", self.sys_inst.memory_summary.health) + self.assertIsNotNone(self.sys_inst.maintenance_window) + self.assertEqual(1, self.sys_inst.maintenance_window + .maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2016-03-07T14:44:30-05:05'), + self.sys_inst.maintenance_window + .maintenance_window_start_time) for oem_vendor in self.sys_inst.oem_vendors: self.assertIn(oem_vendor, ('Contoso', 'Chipwise')) @@ -99,6 +106,14 @@ class SystemTestCase(base.TestCase): self.sys_inst._parse_attributes() self.assertIsNone(self.sys_inst.memory_summary.size_gib) + def test__parse_attributes_bad_maintenance_window_time(self): + self.sys_inst.json['@Redfish.MaintenanceWindow'][ + 'MaintenanceWindowStartTime'] = 'bad date' + self.assertRaisesRegex( + exceptions.MalformedAttributeError, + '@Redfish.MaintenanceWindow/MaintenanceWindowStartTime', + self.sys_inst._parse_attributes) + def test_get__reset_action_element(self): value = self.sys_inst._get_reset_action_element() self.assertEqual("/redfish/v1/Systems/437XR1138R2/Actions/" @@ -150,6 +165,17 @@ class SystemTestCase(base.TestCase): self.assertIsInstance(values, set) self.assertEqual(1, mock_log.call_count) + def test_reset_action_operation_apply_time_support(self): + support = self.sys_inst._actions.reset.operation_apply_time_support + self.assertIsNotNone(support) + self.assertEqual(['Immediate', 'AtMaintenanceWindowStart'], + support.supported_values) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + support.maintenance_window_start_time) + self.assertEqual(600, support.maintenance_window_duration_in_seconds) + self.assertEqual('/redfish/v1/Systems/437XR1138R2', + 
support._maintenance_window_resource.resource_uri) + def test_reset_system(self): self.sys_inst.reset_system(sushy.RESET_FORCE_OFF) self.sys_inst._conn.post.assert_called_once_with( diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 3535297..5cc1c57 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -16,9 +16,6 @@ import json import mock -from dateutil import parser - -from sushy import exceptions from sushy.resources import constants as res_cons from sushy.resources import settings from sushy.tests.unit import base @@ -33,7 +30,8 @@ class SettingsFieldTestCase(base.TestCase): self.settings = settings.SettingsField() - def test__load(self): + @mock.patch.object(settings, 'LOG', autospec=True) + def test__load(self, mock_LOG): instance = self.settings._load(self.json, mock.Mock()) self.assertEqual('9234ac83b9700123cc32', @@ -56,33 +54,11 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0]._related_properties[0]) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', instance._settings_object_idref.resource_uri) - self.assertEqual( - 1, - instance. - maintenance_window.maintenance_window_duration_in_seconds) - self.assertEqual( - parser.parse('2016-03-07T14:44:30-05:05'), - instance.maintenance_window.maintenance_window_start_time) - self.assertEqual( - 1, - instance.operation_apply_time_support. - maintenance_window_duration_in_seconds) - self.assertEqual( - parser.parse('2016-03-07T14:44:30-05:10'), - instance.operation_apply_time_support. 
- maintenance_window_start_time) - self.assertIn( - 'Immediate', - instance.operation_apply_time_support.supported_values) - - def test__load_failure(self): - self.json[ - '@Redfish.Settings']['MaintenanceWindow'][ - 'MaintenanceWindowStartTime'] = 'bad date' - self.assertRaisesRegex( - exceptions.MalformedAttributeError, - '@Redfish.Settings/MaintenanceWindow/MaintenanceWindowStartTime', - self.settings._load, self.json, mock.Mock()) + self.assertIsNone(instance.maintenance_window) + mock_LOG.warning.assert_called_once() + mock_LOG.reset_mock() + self.assertIsNone(instance.operation_apply_time_support) + mock_LOG.warning.assert_called_once() def test_commit(self): conn = mock.Mock() -- GitLab From fa09bbcee93dc63b637c36c32e98648426a1dc94 Mon Sep 17 00:00:00 2001 From: Ghanshyam Mann Date: Sun, 14 Apr 2019 22:35:22 +0000 Subject: [PATCH 148/303] Dropping the py35 testing All the integration testing has been moved to Bionic now[1] and py3.5 is not tested runtime for Train or stable/stein[2]. 
As per below ML thread, we are good to drop the py35 testing now: http://lists.openstack.org/pipermail/openstack-discuss/2019-April/005097.html [1] http://lists.openstack.org/pipermail/openstack-discuss/2019-April/004647.html [2] https://governance.openstack.org/tc/reference/runtimes/stein.html https://governance.openstack.org/tc/reference/runtimes/train.html Change-Id: I2ff17e4c61781cf9f8068e1dbcc48365a6ec531f --- setup.cfg | 2 +- zuul.d/project.yaml | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index a007730..88325fb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,7 +16,7 @@ classifier = Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 [files] packages = diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 92fb8bf..5922afc 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -4,7 +4,6 @@ - openstack-cover-jobs - openstack-lower-constraints-jobs - openstack-python-jobs - - openstack-python35-jobs - openstack-python36-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 -- GitLab From f2cf60f3ceeff97d29118dbc31fd3f03aad78591 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 15 Apr 2019 17:54:19 +0200 Subject: [PATCH 149/303] Deprecate System-specific `IndicatorLED` state constants Change-Id: Ib4ccad20dc49e0b33a1d2b1ea24b6c8f989222a3 Story: 2005342 Task: 30290 --- ...eprecate-system-leds-f1a72422c53d281e.yaml | 9 +++++++++ sushy/resources/system/constants.py | 20 +++++++++++++++---- .../unit/resources/system/test_system.py | 2 +- 3 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml diff --git a/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml b/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml new file mode 100644 index 0000000..1cdef5d --- /dev/null 
+++ b/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml @@ -0,0 +1,9 @@ +--- +deprecations: + - | + Deprecates system-specific indicator LEDs as redundant. + The ``SYSTEM_INDICATOR_LED_LIT``, ``SYSTEM_INDICATOR_LED_BLINKING``, + ``SYSTEM_INDICATOR_LED_OFF`` and ``SYSTEM_INDICATOR_LED_UNKNOWN`` constants + should not be used. Generic indicator LED constants should be used instead. + Those are ``INDICATOR_LED_LIT``, ``INDICATOR_LED_BLINKING``, + ``INDICATOR_LED_OFF`` and ``INDICATOR_LED_UNKNOWN`` respectively. diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 13eb868..26c3866 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -49,16 +49,28 @@ SYSTEM_POWER_STATE_POWERING_OFF = res_cons.POWER_STATE_POWERING_OFF # Indicator LED Constants SYSTEM_INDICATOR_LED_LIT = res_cons.INDICATOR_LED_LIT -"""The Indicator LED is lit""" +"""The Indicator LED is lit + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_LIT`. +""" SYSTEM_INDICATOR_LED_BLINKING = res_cons.INDICATOR_LED_BLINKING -"""The Indicator LED is blinking""" +"""The Indicator LED is blinking + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_BLINKING`. +""" SYSTEM_INDICATOR_LED_OFF = res_cons.INDICATOR_LED_OFF -"""The Indicator LED is off""" +"""The Indicator LED is off + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_OFF`. +""" SYSTEM_INDICATOR_LED_UNKNOWN = res_cons.INDICATOR_LED_UNKNOWN -"""The state of the Indicator LED cannot be determine""" +"""The state of the Indicator LED cannot be determine + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_UNKNOWN`. 
+""" # Boot source target constants diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index c1c0ca2..5a6cebb 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -52,7 +52,7 @@ class SystemTestCase(base.TestCase): self.assertEqual('Web Front End node', self.sys_inst.description) self.assertEqual('web483', self.sys_inst.hostname) self.assertEqual('437XR1138R2', self.sys_inst.identity) - self.assertEqual(sushy.SYSTEM_INDICATOR_LED_OFF, + self.assertEqual(sushy.INDICATOR_LED_OFF, self.sys_inst.indicator_led) self.assertEqual('Contoso', self.sys_inst.manufacturer) self.assertEqual('WebFrontEnd483', self.sys_inst.name) -- GitLab From 363008829a56cceb40e938198b11ae9d756a4aa4 Mon Sep 17 00:00:00 2001 From: OpenDev Sysadmins Date: Fri, 19 Apr 2019 19:48:38 +0000 Subject: [PATCH 150/303] OpenDev Migration Patch This commit was bulk generated and pushed by the OpenDev sysadmins as a part of the Git hosting and code review systems migration detailed in these mailing list posts: http://lists.openstack.org/pipermail/openstack-discuss/2019-March/003603.html http://lists.openstack.org/pipermail/openstack-discuss/2019-April/004920.html Attempts have been made to correct repository namespaces and hostnames based on simple pattern matching, but it's possible some were updated incorrectly or missed entirely. Please reach out to us via the contact information listed at https://opendev.org/ with any questions you may have. 
--- .gitreview | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitreview b/.gitreview index 6ec9ed8..9ca57a4 100644 --- a/.gitreview +++ b/.gitreview @@ -1,4 +1,4 @@ [gerrit] -host=review.openstack.org +host=review.opendev.org port=29418 project=openstack/sushy.git -- GitLab From 103737ad3cab03a0f6c148b8aef47699928febe4 Mon Sep 17 00:00:00 2001 From: liuwei Date: Mon, 29 Apr 2019 10:41:28 +0800 Subject: [PATCH 151/303] update git.openstack.org to opendev Change-Id: I0f1e1cb920268f059062c12a6a08d25cb905f680 --- README.rst | 2 +- doc/source/contributor/index.rst | 2 +- tox.ini | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index aa951da..ef3ec98 100644 --- a/README.rst +++ b/README.rst @@ -18,7 +18,7 @@ needed we can expand Sushy to fulfill those requirements. https://creativecommons.org/licenses/by/4.0/ * Documentation: https://docs.openstack.org/sushy/latest/ * Usage: https://docs.openstack.org/sushy/latest/reference/usage.html -* Source: https://git.openstack.org/cgit/openstack/sushy +* Source: https://opendev.org/openstack/sushy * Bugs: https://storyboard.openstack.org/#!/project/960 .. _Redfish: http://www.dmtf.org/standards/redfish diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst index 211620f..06fbeec 100644 --- a/doc/source/contributor/index.rst +++ b/doc/source/contributor/index.rst @@ -97,4 +97,4 @@ pointing to the certificate file when instantiating Sushy, for example: s = sushy.Sushy('https://localhost:8000', verify='cert.pem', username='foo', password='bar') .. _SSL: https://en.wikipedia.org/wiki/Secure_Sockets_Layer -.. _sushy-tools: https://git.openstack.org/cgit/openstack/sushy-tools +.. 
_sushy-tools: https://opendev.org/openstack/sushy-tools diff --git a/tox.ini b/tox.ini index b14f1c6..bf76370 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ setenv = PYTHONWARNINGS=default::DeprecationWarning install_command = pip install {opts} {packages} deps = - -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} + -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = stestr run --slowest {posargs} -- GitLab From 188a72cee434f4f2cdabd5d71f82e97b28112b2e Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Fri, 3 May 2019 17:34:04 +0200 Subject: [PATCH 152/303] Add Power and Thermal resources to Chassis Adding the references to Power and Thermal resources to the Chassis schema. Change-Id: I041a896f01503ddfac7f343e36b10b9be5318e9f --- sushy/resources/chassis/chassis.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py index bd99955..7d4cd16 100644 --- a/sushy/resources/chassis/chassis.py +++ b/sushy/resources/chassis/chassis.py @@ -16,6 +16,8 @@ from sushy import exceptions from sushy.resources import base from sushy.resources.chassis import mappings as cha_maps +from sushy.resources.chassis.power import power +from sushy.resources.chassis.thermal import thermal from sushy.resources import common from sushy.resources.manager import manager from sushy.resources import mappings as res_maps @@ -253,6 +255,34 @@ class Chassis(base.ResourceBase): redfish_version=self.redfish_version) for path in paths] + @property + @utils.cache_it + def power(self): + """Property to reference `Power` instance + + It is set once, the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done).
+ Here the actual refresh of the sub-resource happens, if stale. + """ + return power.Power( + self._conn, + utils.get_sub_resource_path_by(self, 'Power'), + redfish_version=self.redfish_version) + + @property + @utils.cache_it + def thermal(self): + """Property to reference `Thermal` instance + + It is set once, the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ + return thermal.Thermal( + self._conn, + utils.get_sub_resource_path_by(self, 'Thermal'), + redfish_version=self.redfish_version) + class ChassisCollection(base.ResourceCollectionBase): -- GitLab From a8918713fe3cdca506d50204291d0a32f23f2f58 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 13 May 2019 20:09:14 +0200 Subject: [PATCH 153/303] Make Manager->Actions field optional Redfish schema does not require Actions field to be present [1]. 1. https://redfish.dmtf.org/schemas/Manager.v1_5_1.json Change-Id: Ic9402f7639a561b88c4a9be91d1c6c88c6b12c31 --- .../notes/fix-manager-action-d71fd415cea29aa6.yaml | 7 +++++++ sushy/resources/manager/manager.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml diff --git a/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml b/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml new file mode 100644 index 0000000..870bd0e --- /dev/null +++ b/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Makes ``Manager->Actions`` field optional as Redfish Manager schema + defines it. Otherwise sushy fails hard at parsing response from a + Redfish agent that does not include ``Actions`` field in its document + tree.
diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 4211f20..797d4ee 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -82,7 +82,7 @@ class Manager(base.ResourceBase): uuid = base.Field('UUID') """The manager UUID""" - _actions = ActionsField('Actions', required=True) + _actions = ActionsField('Actions') def __init__(self, connector, identity, redfish_version=None): """A class representing a Manager -- GitLab From 6cb68f89f5f8b6268ca779c15a13925d480394b2 Mon Sep 17 00:00:00 2001 From: ajya Date: Wed, 22 May 2019 11:20:32 +0300 Subject: [PATCH 154/303] Cleanup for Standard message registry loading * Clarify why path.join is not used * Update test assert for better failure message Change-Id: I2d91a867c1defc33001492361dc1452bc1e51386 --- sushy/main.py | 1 + sushy/tests/unit/test_main.py | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sushy/main.py b/sushy/main.py index bf8737d..2be5dd3 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -328,6 +328,7 @@ class Sushy(base.ResourceBase): resource_package_name = __name__ for json_file in pkg_resources.resource_listdir( resource_package_name, STANDARD_REGISTRY_PATH): + # Not using path.join according to pkg_resources docs mes_reg = message_registry.MessageRegistry( None, STANDARD_REGISTRY_PATH + json_file, reader=base.JsonPackagedFileReader(resource_package_name)) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index adafdeb..a0f39b7 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -191,8 +191,7 @@ class MainTestCase(base.TestCase): registries = self.root._get_standard_message_registry_collection() self.assertEqual(5, len(registries)) - self.assertTrue([r.identity for r in registries - if r.identity == 'Base.1.3.0']) + self.assertIn('Base.1.3.0', {r.identity for r in registries}) @mock.patch('sushy.Sushy._get_standard_message_registry_collection', autospec=True) -- 
GitLab From 74e37a7420128caf479811000ab0ee0c0931ff2c Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Thu, 23 May 2019 10:21:53 +0200 Subject: [PATCH 155/303] Update sphinx requirements Following recent changes to global requirements [1], we need to update sphinx requirements locally. [1] If558f184c959e4b63b56dec3ca1571d1034cfe5c Change-Id: Ifa2363aa2076f4698f89c6da68739047532537a2 --- test-requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 1d8472a..6f8ebee 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,8 @@ hacking>=1.0.0,<1.1.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD +sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD +sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD openstackdocstheme>=1.18.1 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 -- GitLab From 51032b62caa4104083dd040c12e6626bf2abc1ea Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Tue, 14 Aug 2018 14:39:28 +0300 Subject: [PATCH 156/303] Add @Redfish.Settings update status and expose it for BIOS Adds logic to determine status exposed to user based on message severities. This is initial logic and in future can be improved to determine, e.g., if settings update is pending. 
Change-Id: Ic5da181af128641c6eaa432be566cbe0a5f69de4 Story: 2001791 Task: 19767 --- ...d-bios-update-status-cc59816c374b78e4.yaml | 5 ++ sushy/main.py | 3 +- sushy/resources/settings.py | 63 +++++++++++++++++++ sushy/resources/system/bios.py | 21 +++++++ sushy/resources/system/system.py | 9 ++- sushy/tests/unit/json_samples/bios.json | 5 +- sushy/tests/unit/json_samples/settings.json | 4 +- .../tests/unit/resources/system/test_bios.py | 12 ++++ sushy/tests/unit/resources/test_settings.py | 43 ++++++++++++- sushy/tests/unit/test_main.py | 9 ++- 10 files changed, 164 insertions(+), 10 deletions(-) create mode 100644 releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml diff --git a/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml b/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml new file mode 100644 index 0000000..72f7f79 --- /dev/null +++ b/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + ``Bios`` resource introduces ``update_status`` property that exposes + the status and any errors of last BIOS attribute update. 
diff --git a/sushy/main.py b/sushy/main.py index bf8737d..3492521 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -180,7 +180,8 @@ class Sushy(base.ResourceBase): :returns: The System object """ return system.System(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self._get_message_registries()) def get_chassis_collection(self): """Get the ChassisCollection object diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 0d97196..e048fc2 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -18,7 +18,44 @@ import logging from sushy.resources import base from sushy.resources import common +from sushy.resources import constants as res_cons from sushy.resources import mappings as res_maps +from sushy.resources.registry import message_registry + +# Settings update statuses + +UPDATE_UNKNOWN = 0 +"""Update status unknown""" + +UPDATE_SUCCESS = 1 +"""Update was successful""" + +UPDATE_FAILURE = 2 +"""Update encountered errors""" + +UPDATE_PENDING = 3 +"""Update waiting for being applied""" + +NO_UPDATES = 4 +"""No updates made""" + + +class SettingsUpdate(object): + """Contains Settings update status and details of the update""" + + def __init__(self, status, messages): + self._status = status + self._messages = messages + + @property + def status(self): + """The status of the update""" + return self._status + + @property + def messages(self): + """List of :class:`.MessageListField` with messages from the update""" + return self._messages LOG = logging.getLogger(__name__) @@ -161,3 +198,29 @@ class SettingsField(base.CompositeField): @property def resource_uri(self): return self._settings_object_idref.resource_uri + + def get_status(self, registries): + """Determines the status of last update based + + Uses message id-s and severity to determine the status. 
+ + :param registries: registries to use to parse message + :returns: :class:`.SettingsUpdate` object containing status + and any messages + """ + + if not self.time: + return SettingsUpdate(NO_UPDATES, None) + + parsed_msgs = [] + for m in self.messages: + parsed_msgs.append( + message_registry.parse_message(registries, m)) + any_errors = any(m for m in parsed_msgs + if not m.severity == res_cons.SEVERITY_OK) + + if any_errors: + status = UPDATE_FAILURE + else: + status = UPDATE_SUCCESS + return SettingsUpdate(status, parsed_msgs) diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 84f7fce..749cbe6 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -31,6 +31,17 @@ class ActionsField(base.CompositeField): class Bios(base.ResourceBase): + def __init__(self, connector, path, registries, *args, **kwargs): + super(Bios, self).__init__(connector, path, *args, **kwargs) + self._registries = registries + """A class representing a Bios + + :param connector: A Connector instance + :param path: Sub-URI path to the Bios resource + :param registries: Dict of message registries to be used when + parsing messages of attribute update status + """ + identity = base.Field('Id', required=True) """The Bios resource identity string""" @@ -65,6 +76,7 @@ class Bios(base.ResourceBase): """Pending BIOS settings resource""" return Bios( self._conn, self._settings.resource_uri, + registries=None, redfish_version=self.redfish_version) @property @@ -153,3 +165,12 @@ class Bios(base.ResourceBase): 'OldPassword': old_password, 'PasswordName': password_name}) LOG.info('BIOS password %s is being changed', self.identity) + + @property + def update_status(self): + """Status of the last attribute update + + :returns: :class:`sushy.resources.settings.SettingsUpdate` object + containing status and any messages + """ + return self._settings.get_status(self._registries) diff --git a/sushy/resources/system/system.py 
b/sushy/resources/system/system.py index 1e88edf..929fd14 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -135,15 +135,19 @@ class System(base.ResourceBase): _actions = ActionsField('Actions', required=True) - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ComputerSystem :param connector: A Connector instance :param identity: The identity of the System resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of registries to be used in any resource + that needs registries to parse messages """ super(System, self).__init__(connector, identity, redfish_version) + self._registries = registries def _get_reset_action_element(self): reset_action = self._actions.reset @@ -315,7 +319,8 @@ class System(base.ResourceBase): return bios.Bios( self._conn, utils.get_sub_resource_path_by(self, 'Bios'), - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self._registries) @property @utils.cache_it diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json index 1d947b8..8063b3d 100644 --- a/sushy/tests/unit/json_samples/bios.json +++ b/sushy/tests/unit/json_samples/bios.json @@ -20,9 +20,12 @@ "ETag": "9234ac83b9700123cc32", "Messages": [ { - "MessageId": "Base.1.0.SettingsFailed", + "MessageId": "Test.1.0.Failed", "RelatedProperties": [ "#/Attributes/ProcTurboMode" + ], + "MessageArgs": [ + "arg1" ] } ], diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 9e01e14..75e2d92 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -3,8 +3,8 @@ "@odata.type": "#Settings.v1_2_0.Settings", "ETag": "9234ac83b9700123cc32", "Messages": [{ - 
"MessageId": "Base.1.0.SettingsFailed", - "Message": "Settings update failed due to invalid value", + "MessageId": "Test.1.0.Failed", + "Message": "Settings %1 update failed due to invalid value", "Severity": "Critical", "Resolution": "Fix the value and try again", "MessageArgs": [ diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 98b5013..bd6bb6e 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -18,6 +18,8 @@ import mock from dateutil import parser from sushy import exceptions +from sushy.resources.registry import message_registry +from sushy.resources import settings from sushy.resources.system import bios from sushy.tests.unit import base @@ -37,8 +39,16 @@ class BiosTestCase(base.TestCase): bios_settings_json, bios_settings_json] + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + self.sys_bios = bios.Bios( self.conn, '/redfish/v1/Systems/437XR1138R2/BIOS', + registries={'Test.1.0': registry}, redfish_version='1.0.2') def test__parse_attributes(self): @@ -59,6 +69,8 @@ class BiosTestCase(base.TestCase): self.sys_bios._settings._etag) self.assertEqual('(404) 555-1212', self.sys_bios.pending_attributes['AdminPhone']) + self.assertEqual(settings.UPDATE_FAILURE, + self.sys_bios.update_status.status) def test_set_attribute(self): self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 5cc1c57..d32b037 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -17,6 +17,7 @@ import json import mock from sushy.resources import constants as res_cons +from 
sushy.resources.registry import message_registry from sushy.resources import settings from sushy.tests.unit import base @@ -30,6 +31,14 @@ class SettingsFieldTestCase(base.TestCase): self.settings = settings.SettingsField() + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + self.registries = {'Test.1.0': registry} + @mock.patch.object(settings, 'LOG', autospec=True) def test__load(self, mock_LOG): instance = self.settings._load(self.json, mock.Mock()) @@ -40,9 +49,9 @@ class SettingsFieldTestCase(base.TestCase): instance.time) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', instance._settings_object_idref.resource_uri) - self.assertEqual('Base.1.0.SettingsFailed', + self.assertEqual('Test.1.0.Failed', instance.messages[0].message_id) - self.assertEqual('Settings update failed due to invalid value', + self.assertEqual('Settings %1 update failed due to invalid value', instance.messages[0].message) self.assertEqual(res_cons.SEVERITY_CRITICAL, instance.messages[0].severity) @@ -67,3 +76,33 @@ class SettingsFieldTestCase(base.TestCase): conn.patch.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', data={'Attributes': {'key': 'value'}}) + + def test_get_status_failure(self): + instance = self.settings._load(self.json, mock.Mock()) + + status = instance.get_status(self.registries) + self.assertEqual(status.status, + settings.UPDATE_FAILURE) + self.assertEqual(status.messages[0].severity, + res_cons.SEVERITY_CRITICAL) + self.assertEqual(status.messages[0].message, + 'The property arg1 broke everything.') + + def test_get_status_success(self): + instance = self.settings._load(self.json, mock.Mock()) + instance.messages[0].message_id = 'Test.1.0.Success' + instance.messages[0].severity = res_cons.SEVERITY_OK + status = 
instance.get_status(self.registries) + self.assertEqual(status.status, + settings.UPDATE_SUCCESS) + self.assertEqual(status.messages[0].severity, res_cons.SEVERITY_OK) + self.assertEqual(status.messages[0].message, + 'Everything done successfully.') + + def test_get_status_noupdates(self): + instance = self.settings._load(self.json, mock.Mock()) + instance.time = None + status = instance.get_status(self.registries) + self.assertEqual(status.status, + settings.NO_UPDATES) + self.assertIsNone(status.messages) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index adafdeb..2645bbd 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -100,11 +100,16 @@ class MainTestCase(base.TestCase): redfish_version=self.root.redfish_version) @mock.patch.object(system, 'System', autospec=True) - def test_get_system(self, mock_system): + @mock.patch('sushy.Sushy._get_message_registries', autospec=True) + def test_get_system(self, mock_registries, mock_system): + mock_registry = mock.Mock() + mock_registries.return_value = [mock_registry] + self.root._standard_message_registries_path = None self.root.get_system('fake-system-id') mock_system.assert_called_once_with( self.root._conn, 'fake-system-id', - redfish_version=self.root.redfish_version) + redfish_version=self.root.redfish_version, + registries=[mock_registry]) @mock.patch.object(chassis, 'Chassis', autospec=True) def test_get_chassis(self, mock_chassis): -- GitLab From 343f7a6c6370cade2e05f57458497cc16681d858 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Tue, 7 May 2019 15:50:45 -0500 Subject: [PATCH 157/303] Enhance Storage models to support RAID config The Storage, Volume, and Drive resource models are enhanced to enable the implementation of RAID configuration management. 
Change-Id: Ia1ba7f08c6574c7497ca7464a52f46a5b72bc0cd Story: 2003514 Task: 30593 --- ...volume-drive-support-16314d30f3631fb3.yaml | 5 + sushy/exceptions.py | 13 ++- sushy/resources/common.py | 15 +++ sushy/resources/system/storage/constants.py | 41 +++++++++ sushy/resources/system/storage/drive.py | 12 +++ sushy/resources/system/storage/mappings.py | 33 +++++++ sushy/resources/system/storage/storage.py | 37 +++++++- sushy/resources/system/storage/volume.py | 92 +++++++++++++++++++ sushy/tests/unit/json_samples/error.json | 7 ++ sushy/tests/unit/json_samples/volume4.json | 41 +++++++++ .../resources/manager/test_virtual_media.py | 2 +- .../sessionservice/test_sessionservice.py | 2 +- .../resources/system/storage/test_drive.py | 9 ++ .../resources/system/storage/test_storage.py | 22 +++++ .../resources/system/storage/test_volume.py | 61 ++++++++++++ sushy/tests/unit/resources/test_base.py | 2 +- sushy/tests/unit/test_connector.py | 8 +- 17 files changed, 393 insertions(+), 9 deletions(-) create mode 100644 releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml create mode 100644 sushy/resources/system/storage/constants.py create mode 100644 sushy/resources/system/storage/mappings.py create mode 100644 sushy/tests/unit/json_samples/volume4.json diff --git a/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml b/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml new file mode 100644 index 0000000..d3194cb --- /dev/null +++ b/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Update the Storage, Volume, and Drive models to support RAID + configuration management. 
diff --git a/sushy/exceptions.py b/sushy/exceptions.py index e903858..9f552b8 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -97,10 +97,12 @@ class HTTPError(SushyError): self.status_code}) error = 'unknown error' else: - # TODO(dtantsur): parse @Message.ExtendedInfo self.body = body.get('error', {}) self.code = self.body.get('code', 'Base.1.0.GeneralError') self.detail = self.body.get('message') + ext_info = self.body.get('@Message.ExtendedInfo', [{}]) + index = self._get_most_severe_msg_index(ext_info) + self.detail = ext_info[index].get('Message', self.detail) error = '%s: %s' % (self.code, self.detail or 'unknown error') kwargs = {'method': method, 'url': url, 'code': self.status_code, @@ -109,6 +111,15 @@ class HTTPError(SushyError): 'status code: %(code)s, error: %(error)s', kwargs) super(HTTPError, self).__init__(**kwargs) + @staticmethod + def _get_most_severe_msg_index(extended_info): + if len(extended_info) > 0: + for sev in ['Critical', 'Warning']: + for i, m in enumerate(extended_info): + if m.get('Severity') == sev: + return i + return 0 + class BadRequestError(HTTPError): pass diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 9eaa87a..1b27910 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -56,6 +56,11 @@ class ResetActionField(ActionField): adapter=list) +class InitializeActionField(ActionField): + allowed_values = base.Field('InitializeType@Redfish.AllowableValues', + adapter=list) + + class StatusField(base.CompositeField): """This Field describes the status of a resource and its children. 
@@ -69,3 +74,13 @@ class StatusField(base.CompositeField): state = base.MappedField('State', res_maps.STATE_VALUE_MAP) """Indicates the known state of the resource, such as if it is enabled.""" + + +class IdentifiersListField(base.ListField): + """This type describes any additional identifiers for a resource.""" + + durable_name = base.Field('DurableName') + """This indicates the world wide, persistent name of the resource.""" + + durable_name_format = base.Field('DurableNameFormat') + """This represents the format of the DurableName property.""" diff --git a/sushy/resources/system/storage/constants.py b/sushy/resources/system/storage/constants.py new file mode 100644 index 0000000..92b9c9c --- /dev/null +++ b/sushy/resources/system/storage/constants.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Volume Initialization Types +VOLUME_INIT_TYPE_FAST = 'fast' +"""The volume is prepared for use quickly, typically by erasing just the +beginning and end of the space so that partitioning can be performed.""" + +VOLUME_INIT_TYPE_SLOW = 'slow' +"""The volume is prepared for use slowly, typically by completely erasing +the volume.""" + +# VolumeType Types +VOLUME_TYPE_RAW_DEVICE = 'rawdevice' +"""The volume is a raw physical device without any RAID or other +virtualization applied.""" + +VOLUME_TYPE_NON_REDUNDANT = 'nonredundant' +"""The volume is a non-redundant storage device.""" + +VOLUME_TYPE_MIRRORED = 'mirrored' +"""The volume is a mirrored device.""" + +VOLUME_TYPE_STRIPED_WITH_PARITY = 'stripedwithparity' +"""The volume is a device which uses parity to retain redundant information.""" + +VOLUME_TYPE_SPANNED_MIRRORS = 'spannedmirrors' +"""The volume is a spanned set of mirrored devices.""" + +VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY = 'spannedstripeswithparity' +"""The volume is a spanned set of devices which uses parity to retain +redundant information.""" diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py index 92f21fb..879833b 100644 --- a/sushy/resources/system/storage/drive.py +++ b/sushy/resources/system/storage/drive.py @@ -27,9 +27,15 @@ LOG = logging.getLogger(__name__) class Drive(base.ResourceBase): """This class represents a disk drive or other physical storage medium.""" + block_size_bytes = base.Field('BlockSizeBytes', adapter=utils.int_or_none) + """The size of the smallest addressable unit of this drive in bytes""" + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) """The size in bytes of this Drive""" + identifiers = common.IdentifiersListField('Identifiers', default=[]) + """The Durable names for the drive""" + identity = base.Field('Id', required=True) """The Drive identity string""" @@ -40,6 +46,9 @@ class Drive(base.ResourceBase): manufacturer = 
base.Field('Manufacturer') """This is the manufacturer of this drive""" + media_type = base.Field('MediaType') + """The type of media contained in this drive""" + model = base.Field('Model') """This is the model number for the drive""" @@ -49,6 +58,9 @@ class Drive(base.ResourceBase): part_number = base.Field('PartNumber') """The part number for this drive""" + protocol = base.Field('Protocol') + """Protocol this drive is using to communicate to the storage controller""" + serial_number = base.Field('SerialNumber') """The serial number for this drive""" diff --git a/sushy/resources/system/storage/mappings.py b/sushy/resources/system/storage/mappings.py new file mode 100644 index 0000000..462f9a7 --- /dev/null +++ b/sushy/resources/system/storage/mappings.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.system.storage import constants as store_cons +from sushy import utils + +VOLUME_INIT_TYPE_MAP = { + 'Fast': store_cons.VOLUME_INIT_TYPE_FAST, + 'Slow': store_cons.VOLUME_INIT_TYPE_SLOW +} + +VOLUME_INIT_TYPE_MAP_REV = ( + utils.revert_dictionary(VOLUME_INIT_TYPE_MAP) +) + +VOLUME_TYPE_TYPE_MAP = { + 'RawDevice': store_cons.VOLUME_TYPE_RAW_DEVICE, + 'NonRedundant': store_cons.VOLUME_TYPE_NON_REDUNDANT, + 'Mirrored': store_cons.VOLUME_TYPE_MIRRORED, + 'StripedWithParity': store_cons.VOLUME_TYPE_STRIPED_WITH_PARITY, + 'SpannedMirrors': store_cons.VOLUME_TYPE_SPANNED_MIRRORS, + 'SpannedStripesWithParity': + store_cons.VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY +} diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 762e8d6..3e76a45 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -16,6 +16,7 @@ import logging from sushy.resources import base +from sushy.resources import common from sushy.resources.system.storage import drive from sushy.resources.system.storage import volume from sushy import utils @@ -24,6 +25,33 @@ from sushy import utils LOG = logging.getLogger(__name__) +class StorageControllersListField(base.ListField): + """The set of storage controllers represented by this resource.""" + + member_id = base.Field('MemberId', required=True) + """Uniquely identifies the member within the collection.""" + + name = base.Field('Name', required=True) + """The name of the storage controller""" + + status = common.StatusField('Status') + """Describes the status and health of the resource and its children.""" + + identifiers = common.IdentifiersListField('Identifiers', default=[]) + """The Durable names for the storage controller.""" + + speed_gbps = base.Field('SpeedGbps') + """The maximum speed of the storage controller's device interface.""" + + controller_protocols = base.Field('SupportedControllerProtocols', + adapter=list) + """The 
protocols by which this storage controller can be communicated to""" + + device_protocols = base.Field('SupportedDeviceProtocols', + adapter=list) + """The protocols which the controller can use to communicate with devices""" + + class Storage(base.ResourceBase): """This class represents the storage subsystem resources. @@ -42,10 +70,13 @@ class Storage(base.ResourceBase): adapter=utils.get_members_identities) """A tuple with the drive identities""" + status = common.StatusField('Status') + """Describes the status and health of the resource and its children.""" + def get_drive(self, drive_identity): """Given the drive identity return a ``Drive`` object - :param identity: The identity of the ``Drive`` + :param drive_identity: The identity of the ``Drive`` :returns: The ``Drive`` object :raises: ResourceNotFoundError """ @@ -95,6 +126,10 @@ class Storage(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), redfish_version=self.redfish_version) + storage_controllers = StorageControllersListField('StorageControllers', + default=[]) + """The storage controllers associated with this resource.""" + class StorageCollection(base.ResourceCollectionBase): """This class represents the collection of Storage resources""" diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index c22ccf0..36b0693 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -15,13 +15,19 @@ import logging +from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources.system.storage import mappings as store_maps from sushy import utils LOG = logging.getLogger(__name__) +class ActionsField(base.CompositeField): + initialize = common.InitializeActionField('#Volume.Initialize') + + class Volume(base.ResourceBase): """This class adds the Storage Volume resource""" @@ -34,6 +40,73 @@ class Volume(base.ResourceBase): capacity_bytes =
base.Field('CapacityBytes', adapter=utils.int_or_none) """The size in bytes of this Volume.""" + volume_type = base.MappedField('VolumeType', + store_maps.VOLUME_TYPE_TYPE_MAP) + """The type of this volume.""" + + encrypted = base.Field('Encrypted', adapter=bool) + """Is this Volume encrypted.""" + + identifiers = common.IdentifiersListField('Identifiers', default=[]) + """The Durable names for the volume.""" + + block_size_bytes = base.Field('BlockSizeBytes', adapter=int) + """The size of the smallest addressable unit of this volume in bytes.""" + + operation_apply_time_support = common.OperationApplyTimeSupportField() + """Indicates if a client is allowed to request for a specific apply + time of a create, delete, or action operation of a given resource""" + + _actions = ActionsField('Actions', required=True) + + def _get_initialize_action_element(self): + initialize_action = self._actions.initialize + if not initialize_action: + raise exceptions.MissingActionError(action='#Volume.Initialize', + resource=self._path) + return initialize_action + + def get_allowed_initialize_volume_values(self): + """Get the allowed values for initializing the volume. + + :returns: A set with the allowed values. + """ + action = self._get_initialize_action_element() + + if not action.allowed_values: + LOG.warning('Could not figure out the allowed values for the ' + 'initialize volume action for Volume %s', + self.identity) + return set(store_maps.VOLUME_INIT_TYPE_MAP_REV) + + return set([store_maps.VOLUME_INIT_TYPE_MAP[v] for v in + set(store_maps.VOLUME_INIT_TYPE_MAP). + intersection(action.allowed_values)]) + + def initialize_volume(self, value): + """Initialize the volume. + + :param value: The InitializeType value. + :raises: InvalidParameterValueError, if the target value is not + allowed. 
+ """ + valid_values = self.get_allowed_initialize_volume_values() + if value not in valid_values: + raise exceptions.InvalidParameterValueError( + parameter='value', value=value, valid_values=valid_values) + value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value] + target_uri = self._get_initialize_action_element().target_uri + self._conn.post(target_uri, data={'InitializeType': value}) + + def delete_volume(self, payload=None): + """Delete the volume. + + :param payload: May contain @Redfish.OperationApplyTime property + :raises: ConnectionError + :raises: HTTPError + """ + self._conn.delete(self._path, data=payload) + class VolumeCollection(base.ResourceCollectionBase): """This class represents the Storage Volume collection""" @@ -67,3 +140,22 @@ class VolumeCollection(base.ResourceCollectionBase): operation_apply_time_support = common.OperationApplyTimeSupportField() """Indicates if a client is allowed to request for a specific apply time of a create, delete, or action operation of a given resource""" + + def create_volume(self, payload): + """Create a volume. + + :param payload: The payload representing the new volume to create. 
+ :raises: ConnectionError + :raises: HTTPError + """ + r = self._conn.post(self._path, data=payload) + location = r.headers.get('Location') + new_volume = None + if r.status_code == 201: + if location: + self.refresh() + new_volume = self.get_member(location) + elif r.status_code == 202: + # TODO(billdodd): TaskMonitor support to be added in subsequent PR + pass + return new_volume diff --git a/sushy/tests/unit/json_samples/error.json b/sushy/tests/unit/json_samples/error.json index 89598b1..155af2a 100644 --- a/sushy/tests/unit/json_samples/error.json +++ b/sushy/tests/unit/json_samples/error.json @@ -29,6 +29,13 @@ ], "Severity": "Warning", "Resolution": "Remove the property from the request body and resubmit the request if the operation failed" + }, + { + "@odata.type": "/redfish/v1/$metadata#Message.1.0.0.Message", + "MessageId": "Base.1.0.MalformedJSON", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." 
} ] } diff --git a/sushy/tests/unit/json_samples/volume4.json b/sushy/tests/unit/json_samples/volume4.json new file mode 100644 index 0000000..0d76a62 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume4.json @@ -0,0 +1,41 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "4", + "Name": "My Volume 4", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "Mirrored", + "CapacityBytes": 107374182400, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "eb179a30-6f87-4fdb-8f92-639eb7aaabcb" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 690bc64..0ac6370 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -89,7 +89,7 @@ class VirtualMediaTestCase(base.TestCase): target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" "/VirtualMedia.EjectMedia") self.conn.post.side_effect = [exceptions.HTTPError( - method='POST', url=target_uri, response=mock.Mock( + method='POST', url=target_uri, response=mock.MagicMock( status_code=http_client.UNSUPPORTED_MEDIA_TYPE)), '200'] self.sys_virtual_media.eject_media() post_calls = [ diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index c87178f..f3b75ed 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -27,7 +27,7 @@ class SessionServiceTestCase(base.TestCase): def setUp(self): super(SessionServiceTestCase, self).setUp() - self.conn = mock.Mock() + self.conn = mock.MagicMock() with open('sushy/tests/unit/json_samples/session_service.json') as f: self.conn.get.return_value.json.return_value = json.load(f) diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index d62af27..796c6fc 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -38,10 +38,19 @@ class DriveTestCase(base.TestCase): self.assertEqual('1.0.2', self.stor_drive.redfish_version) self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) self.assertEqual('Drive Sample', self.stor_drive.name) + self.assertEqual(512, self.stor_drive.block_size_bytes) self.assertEqual(899527000000, 
self.stor_drive.capacity_bytes) + identifiers = self.stor_drive.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = identifiers[0] + self.assertEqual('NAA', identifier.durable_name_format) + self.assertEqual('32ADF365C6C1B7BD', identifier.durable_name) self.assertEqual('Contoso', self.stor_drive.manufacturer) + self.assertEqual('HDD', self.stor_drive.media_type) self.assertEqual('C123', self.stor_drive.model) self.assertEqual('C123-1111', self.stor_drive.part_number) + self.assertEqual('SAS', self.stor_drive.protocol) self.assertEqual('1234570', self.stor_drive.serial_number) self.assertEqual(sushy.STATE_ENABLED, self.stor_drive.status.state) self.assertEqual(sushy.HEALTH_OK, self.stor_drive.status.health) diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 502aa58..e9f57dd 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -51,6 +51,9 @@ class StorageTestCase(base.TestCase): self.assertEqual('1.0.2', self.storage.redfish_version) self.assertEqual('1', self.storage.identity) self.assertEqual('Local Storage Controller', self.storage.name) + self.assertEqual('ok', self.storage.status.health) + self.assertEqual('ok', self.storage.status.health_rollup) + self.assertEqual('enabled', self.storage.status.state) self.assertEqual( ('/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa @@ -101,6 +104,25 @@ class StorageTestCase(base.TestCase): self.assertEqual(4, len(all_drives)) self.assertIsInstance(all_drives[0], drive.Drive.__class__) + def test_storage_controllers(self): + controllers = self.storage.storage_controllers + self.assertIsInstance(controllers, list) + self.assertEqual(1, len(controllers)) + controller = controllers[0] + 
self.assertEqual('0', controller.member_id) + self.assertEqual('Contoso Integrated RAID', controller.name) + self.assertEqual('ok', controller.status.health) + self.assertEqual('enabled', controller.status.state) + identifiers = controller.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = identifiers[0] + self.assertEqual('NAA', identifier.durable_name_format) + self.assertEqual('345C59DBD970859C', identifier.durable_name) + self.assertEqual(12, controller.speed_gbps) + self.assertEqual(["PCIe"], controller.controller_protocols) + self.assertEqual(["SAS", "SATA"], controller.device_protocols) + def test_drives_after_refresh(self): self.storage.refresh() self.conn.get.return_value.json.reset_mock() diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index cf0b5c4..a061d70 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -15,6 +15,8 @@ import mock from dateutil import parser +from sushy import exceptions +from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import volume from sushy.tests.unit import base @@ -37,6 +39,41 @@ class VolumeTestCase(base.TestCase): self.assertEqual('1', self.stor_volume.identity) self.assertEqual('Virtual Disk 1', self.stor_volume.name) self.assertEqual(899527000000, self.stor_volume.capacity_bytes) + self.assertEqual(store_cons.VOLUME_TYPE_MIRRORED, + self.stor_volume.volume_type) + self.assertFalse(self.stor_volume.encrypted) + identifiers = self.stor_volume.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = identifiers[0] + self.assertEqual('UUID', identifier.durable_name_format) + self.assertEqual('38f1818b-111e-463a-aa19-fa54f792e468', + identifier.durable_name) + 
self.assertIsNone(self.stor_volume.block_size_bytes) + + def test_initialize_volume(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize_volume('fast') + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast'}) + + def test_initialize_volume_bad_value(self): + self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + 'The parameter.*lazy.*invalid', + self.stor_volume.initialize_volume, 'lazy') + + def test_delete_volume(self): + self.stor_volume.delete_volume() + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=None) + + def test_delete_volume_with_payload(self): + payload = {'@Redfish.OperationApplyTime': 'OnReset'} + self.stor_volume.delete_volume(payload=payload) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload) class VolumeCollectionTestCase(base.TestCase): @@ -50,6 +87,7 @@ class VolumeCollectionTestCase(base.TestCase): self.stor_vol_col = volume.VolumeCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', redfish_version='1.0.2') + self.stor_vol_col.refresh = mock.Mock() def test__parse_attributes(self): self.stor_vol_col._parse_attributes() @@ -130,3 +168,26 @@ class VolumeCollectionTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = successive_return_values self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) + + def test_create_volume(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'CapacityBytes': 107374182400 + } + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 201 + self.conn.post.return_value.headers.return_value = { + 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + } + new_vol = 
self.stor_vol_col.create_volume(payload) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=payload) + self.stor_vol_col.refresh.assert_called_once() + self.assertIsNotNone(new_vol) + self.assertEqual('4', new_vol.identity) + self.assertEqual('My Volume 4', new_vol.name) + self.assertEqual(107374182400, new_vol.capacity_bytes) + self.assertEqual(store_cons.VOLUME_TYPE_MIRRORED, new_vol.volume_type) diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 1a036b2..976493c 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -227,7 +227,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.test_resource_collection.members_identities = ('1',) self.conn.get.side_effect = exceptions.ResourceNotFoundError( method='GET', url='http://foo.bar:8000/redfish/v1/Fakes/2', - response=mock.Mock(status_code=http_client.NOT_FOUND)) + response=mock.MagicMock(status_code=http_client.NOT_FOUND)) # | WHEN & THEN | self.assertRaises(exceptions.ResourceNotFoundError, self.test_resource_collection.get_member, '2') diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 0547563..f13d37a 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -183,9 +183,9 @@ class ConnectorOpTestCase(base.TestCase): self.session = mock.Mock(spec=requests.Session) self.conn._session = self.session self.request = self.session.request - first_response = mock.Mock() + first_response = mock.MagicMock() first_response.status_code = http_client.FORBIDDEN - second_response = mock.Mock() + second_response = mock.MagicMock() second_response.status_code = http_client.OK second_response.json = {'Test': 'Testing'} self.request.side_effect = [first_response, second_response] @@ -216,12 +216,12 @@ class ConnectorOpTestCase(base.TestCase): self.request.return_value.json.return_value = 
json.load(f) with self.assertRaisesRegex(exceptions.BadRequestError, - 'A general error has occurred') as cm: + 'body submitted was malformed JSON') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception self.assertEqual(http_client.BAD_REQUEST, exc.status_code) self.assertIsNotNone(exc.body) - self.assertIn('A general error has occurred', exc.detail) + self.assertIn('body submitted was malformed JSON', exc.detail) def test_not_found_error(self): self.request.return_value.status_code = http_client.NOT_FOUND -- GitLab From 6f4539d1b00d31b6f831fc8e23278261c0fdd55e Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Wed, 5 Jun 2019 15:26:42 +0200 Subject: [PATCH 158/303] Use collections.abc instead of collections when available In Python 3.8, the abstract base classes in collections.abc will no longer be exposed in the regular collections module. Python 2.7 will maintain the old behavior. This patch allows the usage of the abstract classes in both Python versions. Change-Id: I6d6d3ac2bc8057a7fa018a6a44cf98f98c3b45d0 --- sushy/resources/base.py | 13 +++++++++++-- sushy/utils.py | 14 +++++++++++--- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index d79a863..0c53a0e 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -15,6 +15,14 @@ import abc import collections + +# (rpittau) this allows usage of collection ABC abstract classes in both +# Python 2.7 and Python 3.8+ +try: + collectionsAbc = collections.abc +except AttributeError: + collectionsAbc = collections + import copy import io import json @@ -112,7 +120,7 @@ def _collect_fields(resource): @six.add_metaclass(abc.ABCMeta) -class CompositeField(collections.Mapping, Field): +class CompositeField(collectionsAbc.Mapping, Field): """Base class for fields consisting of several sub-fields.""" def __init__(self, *args, **kwargs): @@ -144,6 +152,7 @@ class CompositeField(collections.Mapping, Field): # Satisfy the mapping 
interface, see # https://docs.python.org/2/library/collections.html#collections.Mapping. + # https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping def __getitem__(self, key): if key in self._subfields: @@ -241,7 +250,7 @@ class MappedField(Field): Only has effect when the field is not required. This value is not matched against the mapping. """ - if not isinstance(mapping, collections.Mapping): + if not isinstance(mapping, collectionsAbc.Mapping): raise TypeError("The mapping argument must be a mapping") super(MappedField, self).__init__( diff --git a/sushy/utils.py b/sushy/utils.py index 32fbf7a..b4d2a92 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -14,6 +14,14 @@ # under the License. import collections + +# (rpittau) this allows usage of collection ABC abstract classes in both +# Python 2.7 and Python 3.8+ +try: + collectionsAbc = collections.abc +except AttributeError: + collectionsAbc = collections + import logging import threading @@ -231,7 +239,7 @@ def cache_it(res_accessor_method): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.refresh(force=False) - elif isinstance(cache_attr_val, collections.Sequence): + elif isinstance(cache_attr_val, collectionsAbc.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.refresh(force=False) @@ -260,7 +268,7 @@ def cache_clear(res_selfie, force_refresh, only_these=None): cache_attr_names = setdefaultattr( res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) if only_these is not None: - if not isinstance(only_these, collections.Sequence): + if not isinstance(only_these, collectionsAbc.Sequence): raise TypeError("'only_these' must be a sequence.") cache_attr_names = cache_attr_names.intersection( @@ -273,7 +281,7 @@ def cache_clear(res_selfie, force_refresh, only_these=None): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.invalidate(force_refresh) - elif isinstance(cache_attr_val, collections.Sequence): + elif 
isinstance(cache_attr_val, collectionsAbc.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.invalidate(force_refresh) -- GitLab From f6a167c612f806473c6459c849aed8946f2a177f Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Thu, 6 Jun 2019 18:42:40 -0500 Subject: [PATCH 159/303] Include OData-Version header in Redfish requests It is recommended by the Redfish spec to send the OData-Version header in requests. This improves interoperability. The logic to add the OData-Version header is in the _op() method of the Connector class so that it only needs to be done in one place. Change-Id: Ia74dadf8f383566e00f2ed092f610a8070730aa2 Story: 2003514 Task: 33600 --- ...odata-version-header-96dc8179c0e2e9bd.yaml | 5 +++ sushy/connector.py | 3 ++ sushy/tests/unit/test_connector.py | 33 ++++++++++++++++++- 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml diff --git a/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml b/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml new file mode 100644 index 0000000..12d8138 --- /dev/null +++ b/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Improve interoperability by including the recommended OData-Version + header in outgoing Redfish requests.
diff --git a/sushy/connector.py b/sushy/connector.py index cae99c1..9c4d292 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -71,6 +71,9 @@ class Connector(object): :raises: HTTPError """ url = parse.urljoin(self._url, path) + headers = headers or {} + if not any(k.lower() == 'odata-version' for k in headers): + headers['OData-Version'] = '4.0' # TODO(lucasagomes): We should mask the data to remove sensitive # information LOG.debug('HTTP request: %(method)s %(url)s; headers: %(headers)s; ' diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 0547563..1a90e30 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -146,15 +146,18 @@ class ConnectorOpTestCase(base.TestCase): json=self.data, headers=self.headers) def test_ok_delete(self): + expected_headers = self.headers.copy() + expected_headers['OData-Version'] = '4.0' self.conn._op('DELETE', path='fake/path', headers=self.headers.copy()) self.request.assert_called_once_with( 'DELETE', 'http://foo.bar:1234/fake/path', - headers=self.headers, json=None) + headers=expected_headers, json=None) def test_ok_post_with_session(self): self.conn._session.headers = {} self.conn._session.headers['X-Auth-Token'] = 'asdf1234' expected_headers = self.headers.copy() + expected_headers['OData-Version'] = '4.0' self.conn._op('POST', path='fake/path', headers=self.headers, data=self.data) self.request.assert_called_once_with( @@ -163,6 +166,34 @@ class ConnectorOpTestCase(base.TestCase): self.assertEqual(self.conn._session.headers, {'X-Auth-Token': 'asdf1234'}) + def test_odata_version_header_redfish(self): + path = '/redfish/v1/path' + headers = dict(self.headers) + expected_headers = dict(self.headers) + expected_headers['OData-Version'] = '4.0' + self.request.reset_mock() + self.conn._op('GET', path=path, headers=headers) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + + def 
test_odata_version_header_redfish_no_headers(self): + path = '/redfish/v1/bar' + expected_headers = {'OData-Version': '4.0'} + self.conn._op('GET', path=path) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + + def test_odata_version_header_redfish_existing_header(self): + path = '/redfish/v1/foo' + headers = {'OData-Version': '3.0'} + expected_headers = dict(headers) + self.conn._op('GET', path=path, headers=headers) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + def test_timed_out_session_unable_to_create_session(self): self.conn._auth.can_refresh_session.return_value = False self.conn._session = self.session -- GitLab From 1e74911cf1f4f66c542b6398e8e72e119753b8a8 Mon Sep 17 00:00:00 2001 From: Kaifeng Wang Date: Thu, 13 Jun 2019 14:07:19 +0800 Subject: [PATCH 160/303] Update Python 3 test runtimes for Train One of train goals to have consistent python3 jobs. 
https://governance.openstack.org/tc/goals/train/python3-updates.html Change-Id: Id64bf93d2238dd4276336ae23e4e8a22d9344449 --- setup.cfg | 1 + zuul.d/project.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 88325fb..1981463 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,6 +17,7 @@ classifier = Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 [files] packages = diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 5922afc..861d37f 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -4,7 +4,7 @@ - openstack-cover-jobs - openstack-lower-constraints-jobs - openstack-python-jobs - - openstack-python36-jobs + - openstack-python3-train-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: -- GitLab From ca8471196ea86e8f32e0676c5bfaf979c749479c Mon Sep 17 00:00:00 2001 From: Varsha Date: Wed, 24 Apr 2019 02:06:31 +0530 Subject: [PATCH 161/303] Add ``Endpoint`` sub-resource to ``Fabric`` Add representation of the Endpoint sub-resource to Fabric resource in sushy. A fabric endpoint represents the properties of an entity that sends or receives protocol defined messages over a transport. 
Story: #2003853 Task: #26648 Change-Id: I76c99da2aac8e0629bb468e22abd104cb29bc4ab --- ...ubresource-to-fabric-b03e5fd99ece1bf4.yaml | 6 + sushy/resources/fabric/constants.py | 111 ++++++++--- sushy/resources/fabric/endpoint.py | 178 ++++++++++++++++++ sushy/resources/fabric/fabric.py | 13 +- sushy/resources/fabric/mappings.py | 111 ++++++++--- sushy/tests/unit/json_samples/endpoint.json | 40 ++++ .../json_samples/endpoint_collection.json | 13 ++ .../unit/resources/fabric/test_endpoint.py | 48 +++++ .../unit/resources/fabric/test_fabric.py | 60 +++++- 9 files changed, 523 insertions(+), 57 deletions(-) create mode 100644 releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml create mode 100644 sushy/resources/fabric/endpoint.py create mode 100644 sushy/tests/unit/json_samples/endpoint.json create mode 100644 sushy/tests/unit/json_samples/endpoint_collection.json create mode 100644 sushy/tests/unit/resources/fabric/test_endpoint.py diff --git a/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml b/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml new file mode 100644 index 0000000..fa752fc --- /dev/null +++ b/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Exposes the ``endpoint`` sub-resource from the ``fabric`` resource. + ``endpoint`` represents the properties of an entity that sends or receives + protocol defined messages over a transport. 
diff --git a/sushy/resources/fabric/constants.py b/sushy/resources/fabric/constants.py index 27f094f..7d4fd8c 100644 --- a/sushy/resources/fabric/constants.py +++ b/sushy/resources/fabric/constants.py @@ -13,30 +13,87 @@ # Values come from the Redfish Fabric json-schema 1.0.4: # http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json#/definitions/Fabric -# Fabric Types constants - -FABRIC_TYPE_AHCI = 'Advanced Host Controller Interface' -FABRIC_TYPE_FC = 'Fibre Channel' -FABRIC_TYPE_FCP = 'Fibre Channel Protocol for SCSI' -FABRIC_TYPE_FCoE = 'Fibre Channel over Ethernet' -FABRIC_TYPE_FICON = 'FIbre CONnection (FICON)' -FABRIC_TYPE_FTP = 'File Transfer Protocol' -FABRIC_TYPE_HTTP = 'Hypertext Transport Protocol' -FABRIC_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' -FABRIC_TYPE_I2C = 'Inter-Integrated Circuit Bus' -FABRIC_TYPE_NFSv3 = 'Network File System version 3' -FABRIC_TYPE_NFSv4 = 'Network File System version 4' -FABRIC_TYPE_NVMe = 'Non-Volatile Memory Express' -FABRIC_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' -FABRIC_TYPE_OEM = 'OEM specific' -FABRIC_TYPE_PCIe = 'PCI Express' -FABRIC_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' -FABRIC_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' -FABRIC_TYPE_SAS = 'Serial Attached SCSI' -FABRIC_TYPE_SATA = 'Serial AT Attachment' -FABRIC_TYPE_SFTP = 'Secure File Transfer Protocol' -FABRIC_TYPE_SMB = 'Server Message Block (aka CIFS Common Internet File System)' -FABRIC_TYPE_UHCI = 'Universal Host Controller Interface' -FABRIC_TYPE_USB = 'Universal Serial Bus' -FABRIC_TYPE_iSCSI = 'Internet SCSI' -FABRIC_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' +# Protocol type constants + +PROTOCOL_TYPE_AHCI = 'Advanced Host Controller Interface' +PROTOCOL_TYPE_FC = 'Fibre Channel' +PROTOCOL_TYPE_FCP = 'Fibre Channel Protocol for SCSI' +PROTOCOL_TYPE_FCoE = 'Fibre Channel over Ethernet' +PROTOCOL_TYPE_FICON = 'FIbre CONnection (FICON)' +PROTOCOL_TYPE_FTP = 'File Transfer Protocol' 
+PROTOCOL_TYPE_HTTP = 'Hypertext Transport Protocol' +PROTOCOL_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' +PROTOCOL_TYPE_I2C = 'Inter-Integrated Circuit Bus' +PROTOCOL_TYPE_NFSv3 = 'Network File System version 3' +PROTOCOL_TYPE_NFSv4 = 'Network File System version 4' +PROTOCOL_TYPE_NVMe = 'Non-Volatile Memory Express' +PROTOCOL_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' +PROTOCOL_TYPE_OEM = 'OEM specific' +PROTOCOL_TYPE_PCIe = 'PCI Express' +PROTOCOL_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' +PROTOCOL_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' +PROTOCOL_TYPE_SAS = 'Serial Attached SCSI' +PROTOCOL_TYPE_SATA = 'Serial AT Attachment' +PROTOCOL_TYPE_SFTP = 'Secure File Transfer Protocol' +PROTOCOL_TYPE_SMB = 'Server Message Block (CIFS Common Internet File System)' +PROTOCOL_TYPE_UHCI = 'Universal Host Controller Interface' +PROTOCOL_TYPE_USB = 'Universal Serial Bus' +PROTOCOL_TYPE_iSCSI = 'Internet SCSI' +PROTOCOL_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' + +# Address origin IPv4 constants + +ADDRESS_ORIGIN_IPv4_BOOTP = 'Address is provided by a BOOTP service' +ADDRESS_ORIGIN_IPv4_DHCP = 'Address is provided by a DHCPv4 service' +ADDRESS_ORIGIN_IPv4_IPv4LINKLOCAL = 'Address valid only for this segment' +ADDRESS_ORIGIN_IPv4_STATIC = 'A static address as configured by the user' + +# Address origin IPv6 constants + +ADDRESS_ORIGIN_IPv6_DHCPv6 = 'Address is provided by a DHCPv6 service' +ADDRESS_ORIGIN_IPv6_LINKLOCAL = 'Address valid only for this network segment' +ADDRESS_ORIGIN_IPv6_SLAAC = 'Stateless Address Auto Configuration service' +ADDRESS_ORIGIN_IPv6_STATIC = 'A static address as configured by the user' + +# Address state constants + +ADDRESS_STATE_DEPRECATED = 'Deprecated' +"""This address is currently within its valid lifetime, but is now outside of +its preferred lifetime as defined in RFC 4862.""" +ADDRESS_STATE_FAILED = 'Failed' +"""This address has failed Duplicate Address Detection
testing as defined in +RFC 4862 section 5.4 and is not currently in use.""" +ADDRESS_STATE_PREFERRED = 'Preferred' +"""This address is currently within both it's valid and preferred lifetimes as +defined in RFC 4862.""" +ADDRESS_STATE_TENTATIVE = 'Tentative' +"""This address is currently undergoing Duplicate Address Detection testing as +defined in RFC 4862 section 5.4.""" + +# Durable name format constants + +DURABLE_NAME_FORMAT_EUI = 'IEEE-defined 64-bit Extended Unique Identifier' +DURABLE_NAME_FORMAT_FC_WWN = 'Fibre Channel World Wide Name' +DURABLE_NAME_FORMAT_NAA = 'Name Address Authority Format' +DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' +DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' +DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' +DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' + +# Entity role constants + +ENTITY_ROLE_BOTH = 'The entity is acting as both an initiator and a target' +ENTITY_ROLE_INITIATOR = 'The entity is acting as an initiator' +ENTITY_ROLE_TARGET = 'The entity is acting as a target' + +# Entity type constants + +ENTITY_TYPE_PCI_BRIDGE = 'PCI(e) Bridge' +ENTITY_TYPE_DISPLAY_CONTROLLER = 'Display Controller' +ENTITY_TYPE_DRIVE = 'Disk Drive' +ENTITY_TYPE_NETWORK_CONTROLLER = 'Network Controller' +ENTITY_TYPE_PROCESSOR = 'Processor Device' +ENTITY_TYPE_ROOT_COMPLEX = 'Root Complex' +ENTITY_TYPE_STORAGE_EXPANDER = 'Storage Expander' +ENTITY_TYPE_STORAGE_INITIATOR = 'Storage Initiator' +ENTITY_TYPE_VOLUME = 'Volume' diff --git a/sushy/resources/fabric/endpoint.py b/sushy/resources/fabric/endpoint.py new file mode 100644 index 0000000..ef22b43 --- /dev/null +++ b/sushy/resources/fabric/endpoint.py @@ -0,0 +1,178 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Endpoint.v1_3_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.fabric import mappings as fab_maps +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class IPv4AddressField(base.CompositeField): + + address = base.Field('Address') + """This is the IPv4 Address.""" + + gateway = base.Field('Gateway') + """This is the IPv4 gateway for this address.""" + + subnet_mask = base.Field('SubnetMask') + """This is the IPv4 Subnet mask.""" + + address_origin = base.MappedField('AddressOrigin', + fab_maps.ADDRESS_ORIGIN_IPv4_VALUE_MAP) + """This indicates how the address was determined.""" + + +class IPv6AddressField(base.CompositeField): + + address = base.Field('Address') + """This is the IPv6 Address.""" + + prefix_length = base.Field('PrefixLength', adapter=utils.int_or_none) + """This is the IPv6 Address Prefix Length.""" + + address_origin = base.MappedField('AddressOrigin', + fab_maps.ADDRESS_ORIGIN_IPv6_VALUE_MAP) + """This indicates how the address was determined.""" + + address_state = base.MappedField('AddressState', + fab_maps.ADDRESS_STATE_VALUE_MAP) + """The current state of this address as defined in RFC 4862.""" + + +class IPTransportDetailsListField(base.ListField): + """IP transport details + + This array contains details for each IP transport supported by this + endpoint. The array structure can be used to model multiple IP addresses + for this endpoint. 
+ """ + + port = base.Field('Port', adapter=utils.int_or_none) + """The UDP or TCP port number used by the Endpoint.""" + + transport_protocol = base.MappedField('TransportProtocol', + fab_maps.PROTOCOL_TYPE_VALUE_MAP) + """The protocol used by the connection entity.""" + + ipv4_address = IPv4AddressField('IPv4Address') + """The IPv4 address object.""" + + ipv6_address = IPv6AddressField('IPv6Address') + """The IPv6 address object.""" + + +class PciIdField(base.CompositeField): + + device_id = base.Field('DeviceId') + """The Device ID of this PCIe function.""" + + subsystem_id = base.Field('SubsystemId') + """The Subsystem ID of this PCIefunction.""" + + subsystem_vendor_id = base.Field('SubsystemVendorId') + """The Subsystem Vendor ID of thisPCIe function.""" + + vendor_id = base.Field('VendorId') + """The Vendor ID of this PCIe function.""" + + +class IdentifierListField(base.ListField): + + durable_name = base.Field('DurableName') + """This indicates the world wide, persistent name of the entity.""" + + durable_name_format = base.MappedField('DurableNameFormat', + fab_maps.DUR_NAME_FORMAT_VALUE_MAP) + """This represents the format of the DurableName property.""" + + +class ConnectedEntitiesListField(base.ListField): + """All the entities connected to this endpoint.""" + + pci_class_code = base.Field('PciClassCode') + """The Class Code, Subclass code, and Programming Interface code of + this PCIe function.""" + + pci_function_number = base.Field('PciFunctionNumber', + adapter=utils.int_or_none) + """The PCI ID of the connected entity.""" + + entity_pci_id = PciIdField('EntityPciId') + """The PCI ID of the connected entity.""" + + identifiers = IdentifierListField('Identifiers') + """Identifiers for the remote entity.""" + + entity_role = base.MappedField('EntityRole', + fab_maps.ENTITY_ROLE_VALUE_MAP) + """The role of the connected entity.""" + + entity_type = base.MappedField('EntityType', + fab_maps.ENTITY_TYPE_VALUE_MAP) + """The type of the connected 
entity.""" + + +class Endpoint(base.ResourceBase): + """This class represents a fabric endpoint. + + It represents the properties of an entity that sends or receives protocol + defined messages over a transport. + """ + + identity = base.Field('Id', required=True) + """Identifier for the endpoint""" + + name = base.Field('Name', required=True) + """The endpoint name""" + + description = base.Field('Description') + """The endpoint description""" + + status = common.StatusField('Status') + """The endpoint status""" + + host_reservation_memory_bytes = base.Field('HostReservationMemoryBytes', + adapter=utils.int_or_none) + """The amount of memory in Bytes that the Host should allocate to connect + to this endpoint. + """ + + endpoint_protocol = base.MappedField('EndpointProtocol', + fab_maps.PROTOCOL_TYPE_VALUE_MAP) + """The protocol supported by this endpoint.""" + + pci_id = PciIdField('PciId') + """The PCI ID of the endpoint.""" + + IP_transport_details = IPTransportDetailsListField('IPTransportDetails') + """This array contains details for each IP transport supported by this + endpoint. 
The array structure can be used to model multiple IP addresses + for this endpoint.""" + + connected_entities = ConnectedEntitiesListField('ConnectedEntities') + """All entities connected to this endpoint.""" + + +class EndpointCollection(base.ResourceCollectionBase): + """Represents a collection of endpoints associated with the fabric.""" + + @property + def _resource_type(self): + return Endpoint diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py index e9c6fc2..5753718 100644 --- a/sushy/resources/fabric/fabric.py +++ b/sushy/resources/fabric/fabric.py @@ -15,7 +15,9 @@ from sushy.resources import base from sushy.resources import common +from sushy.resources.fabric import endpoint as fab_endpoint from sushy.resources.fabric import mappings as fab_maps +from sushy import utils import logging @@ -38,14 +40,14 @@ class Fabric(base.ResourceBase): description = base.Field('Description') """The fabric description""" - max_zones = base.Field('MaxZones') + max_zones = base.Field('MaxZones', adapter=utils.int_or_none) """The maximum number of zones the switch can currently configure""" status = common.StatusField('Status') """The fabric status""" fabric_type = base.MappedField('FabricType', - fab_maps.FABRIC_TYPE_VALUE_MAP) + fab_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocol being sent over this fabric""" def __init__(self, connector, identity, redfish_version=None): @@ -58,6 +60,13 @@ class Fabric(base.ResourceBase): """ super(Fabric, self).__init__(connector, identity, redfish_version) + @property + @utils.cache_it + def endpoints(self): + return fab_endpoint.EndpointCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Endpoints'), + redfish_version=self.redfish_version) + class FabricCollection(base.ResourceCollectionBase): diff --git a/sushy/resources/fabric/mappings.py b/sushy/resources/fabric/mappings.py index 6ddd890..0ced928 100644 --- a/sushy/resources/fabric/mappings.py +++ b/sushy/resources/fabric/mappings.py @@ -16,32 
+16,89 @@ from sushy.resources.fabric import constants as fab_cons from sushy import utils -FABRIC_TYPE_VALUE_MAP = { - 'AHCI': fab_cons.FABRIC_TYPE_AHCI, - 'FC': fab_cons.FABRIC_TYPE_FC, - 'FCP': fab_cons.FABRIC_TYPE_FCP, - 'FCoE': fab_cons.FABRIC_TYPE_FCoE, - 'FICON': fab_cons.FABRIC_TYPE_FICON, - 'FTP': fab_cons.FABRIC_TYPE_FTP, - 'HTTP': fab_cons.FABRIC_TYPE_HTTP, - 'HTTPS': fab_cons.FABRIC_TYPE_HTTPS, - 'I2C': fab_cons.FABRIC_TYPE_I2C, - 'NFSv3': fab_cons.FABRIC_TYPE_NFSv3, - 'NFSv4': fab_cons.FABRIC_TYPE_NFSv4, - 'NVMe': fab_cons.FABRIC_TYPE_NVMe, - 'NVMeOverFabrics': fab_cons.FABRIC_TYPE_NVMeOverFabrics, - 'OEM': fab_cons.FABRIC_TYPE_OEM, - 'PCIe': fab_cons.FABRIC_TYPE_PCIe, - 'RoCE': fab_cons.FABRIC_TYPE_RoCE, - 'RoCEv2': fab_cons.FABRIC_TYPE_RoCEv2, - 'SAS': fab_cons.FABRIC_TYPE_SAS, - 'SATA': fab_cons.FABRIC_TYPE_SATA, - 'SFTP': fab_cons.FABRIC_TYPE_SFTP, - 'SMB': fab_cons.FABRIC_TYPE_SMB, - 'UHCI': fab_cons.FABRIC_TYPE_UHCI, - 'USB': fab_cons.FABRIC_TYPE_USB, - 'iSCSI': fab_cons.FABRIC_TYPE_iSCSI, - 'iWARP': fab_cons.FABRIC_TYPE_iWARP, +PROTOCOL_TYPE_VALUE_MAP = { + 'AHCI': fab_cons.PROTOCOL_TYPE_AHCI, + 'FC': fab_cons.PROTOCOL_TYPE_FC, + 'FCP': fab_cons.PROTOCOL_TYPE_FCP, + 'FCoE': fab_cons.PROTOCOL_TYPE_FCoE, + 'FICON': fab_cons.PROTOCOL_TYPE_FICON, + 'FTP': fab_cons.PROTOCOL_TYPE_FTP, + 'HTTP': fab_cons.PROTOCOL_TYPE_HTTP, + 'HTTPS': fab_cons.PROTOCOL_TYPE_HTTPS, + 'I2C': fab_cons.PROTOCOL_TYPE_I2C, + 'NFSv3': fab_cons.PROTOCOL_TYPE_NFSv3, + 'NFSv4': fab_cons.PROTOCOL_TYPE_NFSv4, + 'NVMe': fab_cons.PROTOCOL_TYPE_NVMe, + 'NVMeOverFabrics': fab_cons.PROTOCOL_TYPE_NVMeOverFabrics, + 'OEM': fab_cons.PROTOCOL_TYPE_OEM, + 'PCIe': fab_cons.PROTOCOL_TYPE_PCIe, + 'RoCE': fab_cons.PROTOCOL_TYPE_RoCE, + 'RoCEv2': fab_cons.PROTOCOL_TYPE_RoCEv2, + 'SAS': fab_cons.PROTOCOL_TYPE_SAS, + 'SATA': fab_cons.PROTOCOL_TYPE_SATA, + 'SFTP': fab_cons.PROTOCOL_TYPE_SFTP, + 'SMB': fab_cons.PROTOCOL_TYPE_SMB, + 'UHCI': fab_cons.PROTOCOL_TYPE_UHCI, + 'USB': 
fab_cons.PROTOCOL_TYPE_USB, + 'iSCSI': fab_cons.PROTOCOL_TYPE_iSCSI, + 'iWARP': fab_cons.PROTOCOL_TYPE_iWARP, } -FABRIC_TYPE_VALUE_MAP_REV = utils.revert_dictionary(FABRIC_TYPE_VALUE_MAP) + +ADDRESS_ORIGIN_IPv4_VALUE_MAP = { + 'BOOTP': fab_cons.ADDRESS_ORIGIN_IPv4_BOOTP, + 'DHCP': fab_cons.ADDRESS_ORIGIN_IPv4_DHCP, + 'IPv4LinkLocal': fab_cons.ADDRESS_ORIGIN_IPv4_IPv4LINKLOCAL, + 'Static': fab_cons.ADDRESS_ORIGIN_IPv4_STATIC, +} + + +ADDRESS_ORIGIN_IPv6_VALUE_MAP = { + 'DHCPv6': fab_cons.ADDRESS_ORIGIN_IPv6_DHCPv6, + 'LinkLocal': fab_cons.ADDRESS_ORIGIN_IPv6_LINKLOCAL, + 'SLAAC': fab_cons.ADDRESS_ORIGIN_IPv6_SLAAC, + 'Static': fab_cons.ADDRESS_ORIGIN_IPv6_STATIC, +} + + +ADDRESS_STATE_VALUE_MAP = { + 'Deprecated': fab_cons.ADDRESS_STATE_DEPRECATED, + 'Failed': fab_cons.ADDRESS_STATE_FAILED, + 'Preferred': fab_cons.ADDRESS_STATE_PREFERRED, + 'Tentative': fab_cons.ADDRESS_STATE_TENTATIVE, +} + + +DUR_NAME_FORMAT_VALUE_MAP = { + 'EUI': fab_cons.DURABLE_NAME_FORMAT_EUI, + 'FC_WWN': fab_cons.DURABLE_NAME_FORMAT_FC_WWN, + 'NAA': fab_cons.DURABLE_NAME_FORMAT_NAA, + 'NQN': fab_cons.DURABLE_NAME_FORMAT_NQN, + 'NSID': fab_cons.DURABLE_NAME_FORMAT_NSID, + 'UUID': fab_cons.DURABLE_NAME_FORMAT_UUID, + 'iQN': fab_cons.DURABLE_NAME_FORMAT_iQN, +} + + +ENTITY_ROLE_VALUE_MAP = { + 'Both': fab_cons.ENTITY_ROLE_BOTH, + 'Initiator': fab_cons.ENTITY_ROLE_INITIATOR, + 'Target': fab_cons.ENTITY_ROLE_TARGET, +} + +ENTITY_ROLE_VALUE_MAP_REV = utils.revert_dictionary(ENTITY_ROLE_VALUE_MAP) + + +ENTITY_TYPE_VALUE_MAP = { + 'Bridge': fab_cons.ENTITY_TYPE_PCI_BRIDGE, + 'DisplayController': fab_cons.ENTITY_TYPE_DISPLAY_CONTROLLER, + 'Drive': fab_cons.ENTITY_TYPE_DRIVE, + 'NetworkController': fab_cons.ENTITY_TYPE_NETWORK_CONTROLLER, + 'Processor': fab_cons.ENTITY_TYPE_PROCESSOR, + 'RootComplex': fab_cons.ENTITY_TYPE_ROOT_COMPLEX, + 'StorageExpander': fab_cons.ENTITY_TYPE_STORAGE_EXPANDER, + 'StorageInitiator': fab_cons.ENTITY_TYPE_STORAGE_INITIATOR, + 'Volume': fab_cons.ENTITY_TYPE_VOLUME, +} + 
+ENTITY_TYPE_VALUE_MAP_REV = utils.revert_dictionary(ENTITY_TYPE_VALUE_MAP) diff --git a/sushy/tests/unit/json_samples/endpoint.json b/sushy/tests/unit/json_samples/endpoint.json new file mode 100644 index 0000000..6e43ceb --- /dev/null +++ b/sushy/tests/unit/json_samples/endpoint.json @@ -0,0 +1,40 @@ +{ + "@odata.type":"#Endpoint.v1_1_0.Endpoint", + "Id":"Drive1", + "Name":"SAS Drive", + "Description":"The SAS Drive in Enclosure 2 Bay 0", + "EndpointProtocol":"SAS", + "ConnectedEntities":[ + { + "EntityType":"Drive", + "EntityRole":"Target", + "Identifiers":[ + { + "DurableNameFormat":"NAA", + "DurableName":"32ADF365C6C1B7C3" + } + ], + "Oem":{} + } + ], + "Links": + { + "MutuallyExclusiveEndpoints":[ + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Endpoints/Enclosure2" + } + ], + "Ports":[ + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Switches/Switch1/Ports/8" + }, + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Switches/Switch2/Ports/8" + } + ], + "Oem":{} + }, + "Oem":{}, + "@odata.context":"/redfish/v1/$metadata#Endpoint.Endpoint", + "@odata.id":"/redfish/v1/Fabrics/SAS/Endpoints/Drive1" +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/endpoint_collection.json b/sushy/tests/unit/json_samples/endpoint_collection.json new file mode 100644 index 0000000..6dd664f --- /dev/null +++ b/sushy/tests/unit/json_samples/endpoint_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#EndpointCollection.EndpointCollection", + "Name": "Endpoint Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints/Drive1" + } + ], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#EndpointCollection.EndpointCollection", + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints" +} diff --git a/sushy/tests/unit/resources/fabric/test_endpoint.py b/sushy/tests/unit/resources/fabric/test_endpoint.py new file mode 100644 index 0000000..e7a95d7 --- /dev/null +++ b/sushy/tests/unit/resources/fabric/test_endpoint.py @@ -0,0 +1,48 @@ 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json + +import mock + +import sushy +from sushy.resources.fabric import endpoint +from sushy.tests.unit import base + + +class EndpointTestCase(base.TestCase): + + def setUp(self): + super(EndpointTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'endpoint.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.fab_endpoint = endpoint.Endpoint( + self.conn, '/redfish/v1/Fabrics/SAS/Endpoints/Drive1', + redfish_version='1.0.2') + + def test__parse_atrtributes(self): + self.fab_endpoint._parse_attributes() + self.assertEqual('Drive1', self.fab_endpoint.identity) + self.assertEqual('SAS Drive', self.fab_endpoint.name) + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, + self.fab_endpoint.endpoint_protocol) + self.assertEqual(sushy.ENTITY_TYPE_DRIVE, + self.fab_endpoint.connected_entities[0].entity_type) + self.assertEqual(sushy.ENTITY_ROLE_TARGET, + self.fab_endpoint.connected_entities[0].entity_role) + con_entity = self.fab_endpoint.connected_entities[0] + self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + con_entity.identifiers[0].durable_name_format) + self.assertEqual('32ADF365C6C1B7C3', + con_entity.identifiers[0].durable_name) diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py index 4ac0481..792c3cb 100644 --- a/sushy/tests/unit/resources/fabric/test_fabric.py +++ 
b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -17,6 +17,7 @@ import json import mock import sushy +from sushy.resources.fabric import endpoint from sushy.resources.fabric import fabric from sushy.tests.unit import base @@ -41,11 +42,68 @@ class FabricTestCase(base.TestCase): self.assertEqual('SAS Fabric', self.fabric.name) self.assertEqual('A SAS Fabric with redundant switches.', self.fabric.description) - self.assertEqual(sushy.FABRIC_TYPE_SAS, + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, self.fabric.fabric_type) self.assertEqual(sushy.STATE_ENABLED, self.fabric.status.state) self.assertEqual(sushy.HEALTH_OK, self.fabric.status.health) + def test_endpoints(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + endpoint_collection_return_value = json.load(f) + + with open('sushy/tests/unit/json_samples/' + 'endpoint.json') as f: + endpoint_return_value = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + endpoint_collection_return_value, endpoint_return_value] + + # | WHEN | + actual_endpoints = self.fabric.endpoints + + # | THEN | + self.assertIsInstance(actual_endpoints, + endpoint.EndpointCollection) + self.assertEqual(actual_endpoints.name, 'Endpoint Collection') + + member = actual_endpoints.get_member( + '/redfish/v1/Fabrics/SAS/Endpoints/Drive1') + + self.assertEqual(member.name, "SAS Drive") + self.assertEqual(member.endpoint_protocol, sushy.PROTOCOL_TYPE_SAS) + + def test_endpoints_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + endpts = self.fabric.endpoints + self.assertIsInstance(endpts, endpoint.EndpointCollection) + + # On refreshing the fabric instance... 
+ with open('sushy/tests/unit/json_samples/fabric.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.fabric.invalidate() + self.fabric.refresh(force=False) + + # | WHEN & THEN | + self.assertTrue(endpts._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + self.assertIsInstance(self.fabric.endpoints, + endpoint.EndpointCollection) + self.assertFalse(endpts._is_stale) + class FabricCollectionTestCase(base.TestCase): -- GitLab From dc127c80e514f60e0567f82a1d31c4d66c5e35e9 Mon Sep 17 00:00:00 2001 From: Varsha Date: Tue, 2 Jul 2019 17:31:46 +0530 Subject: [PATCH 162/303] Refactor DurableName identifier and Protocol fields The code for the Durable Name identifiers and the Protocol related fields had been replicated at different places in different forms. This patch aims at cleaning the same. Change-Id: I7f5c16ef867b0b931f9a25674831ade0a78dcd6e --- sushy/__init__.py | 1 + sushy/resources/common.py | 3 +- sushy/resources/constants.py | 38 ++++++++++++++++++ sushy/resources/fabric/constants.py | 38 ------------------ sushy/resources/fabric/endpoint.py | 17 ++------ sushy/resources/fabric/fabric.py | 4 +- sushy/resources/fabric/mappings.py | 40 ------------------- sushy/resources/mappings.py | 38 ++++++++++++++++++ sushy/resources/system/storage/drive.py | 2 +- .../resources/system/storage/test_drive.py | 5 ++- .../resources/system/storage/test_storage.py | 4 +- .../resources/system/storage/test_volume.py | 9 +++-- 12 files changed, 97 insertions(+), 102 deletions(-) diff --git a/sushy/__init__.py b/sushy/__init__.py index 8b49ac2..248aeb1 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -22,6 +22,7 @@ from sushy.resources.system.constants import * # noqa from sushy.resources.manager.constants import * # noqa from sushy.resources.chassis.constants import * # noqa from 
sushy.resources.fabric.constants import * # noqa +from sushy.resources.system.storage.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 1b27910..df2fc93 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -82,5 +82,6 @@ class IdentifiersListField(base.ListField): durable_name = base.Field('DurableName') """This indicates the world wide, persistent name of the resource.""" - durable_name_format = base.Field('DurableNameFormat') + durable_name_format = base.MappedField('DurableNameFormat', + res_maps.DUR_NAME_FORMAT_VALUE_MAP) """This represents the format of the DurableName property.""" diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index dc72f81..242a152 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -101,3 +101,41 @@ RESET_TYPE_PUSH_POWER_BUTTON = 'push power button' RESET_TYPE_POWER_CYCLE = 'power cycle' """Perform a power cycle of the unit""" + +# Protocol type constants + +PROTOCOL_TYPE_AHCI = 'Advanced Host Controller Interface' +PROTOCOL_TYPE_FC = 'Fibre Channel' +PROTOCOL_TYPE_FCP = 'Fibre Channel Protocol for SCSI' +PROTOCOL_TYPE_FCoE = 'Fibre Channel over Ethernet' +PROTOCOL_TYPE_FICON = 'FIbre CONnection (FICON)' +PROTOCOL_TYPE_FTP = 'File Transfer Protocol' +PROTOCOL_TYPE_HTTP = 'Hypertext Transport Protocol' +PROTOCOL_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' +PROTOCOL_TYPE_I2C = 'Inter-Integrated Circuit Bus' +PROTOCOL_TYPE_NFSv3 = 'Network File System version 3' +PROTOCOL_TYPE_NFSv4 = 'Network File System version 4' +PROTOCOL_TYPE_NVMe = 'Non-Volatile Memory Express' +PROTOCOL_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' +PROTOCOL_TYPE_OEM = 'OEM specific' +PROTOCOL_TYPE_PCIe = 'PCI Express' +PROTOCOL_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' +PROTOCOL_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' +PROTOCOL_TYPE_SAS = 'Serial 
Attached SCSI' +PROTOCOL_TYPE_SATA = 'Serial AT Attachment' +PROTOCOL_TYPE_SFTP = 'Secure File Transfer Protocol' +PROTOCOL_TYPE_SMB = 'Server Message Block (CIFS Common Internet File System)' +PROTOCOL_TYPE_UHCI = 'Universal Host Controller Interface' +PROTOCOL_TYPE_USB = 'Universal Serial Bus' +PROTOCOL_TYPE_iSCSI = 'Internet SCSI' +PROTOCOL_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' + +# Durable name format constants + +DURABLE_NAME_FORMAT_EUI = 'IEEE-defined 64-bit Extended Unique Identifier' +DURABLE_NAME_FORMAT_FC_WWN = 'Fibre Channel World Wide Name' +DURABLE_NAME_FORMAT_NAA = 'Name Address Authority Format' +DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' +DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' +DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' +DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' diff --git a/sushy/resources/fabric/constants.py b/sushy/resources/fabric/constants.py index 7d4fd8c..7026a99 100644 --- a/sushy/resources/fabric/constants.py +++ b/sushy/resources/fabric/constants.py @@ -13,34 +13,6 @@ # Values come from the Redfish Fabric json-schema 1.0.4: # http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json#/definitions/Fabric -# Protocol type constants - -PROTOCOL_TYPE_AHCI = 'Advanced Host Controller Interface' -PROTOCOL_TYPE_FC = 'Fibre Channel' -PROTOCOL_TYPE_FCP = 'Fibre Channel Protocol for SCSI' -PROTOCOL_TYPE_FCoE = 'Fibre Channel over Ethernet' -PROTOCOL_TYPE_FICON = 'FIbre CONnection (FICON)' -PROTOCOL_TYPE_FTP = 'File Transfer Protocol' -PROTOCOL_TYPE_HTTP = 'Hypertext Transport Protocol' -PROTOCOL_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' -PROTOCOL_TYPE_I2C = 'Inter-Integrated Circuit Bus' -PROTOCOL_TYPE_NFSv3 = 'Network File System version 3' -PROTOCOL_TYPE_NFSv4 = 'Network File System version 4' -PROTOCOL_TYPE_NVMe = 'Non-Volatile Memory Express' -PROTOCOL_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' -PROTOCOL_TYPE_OEM = 'OEM specific' -PROTOCOL_TYPE_PCIe = 'PCI Express' 
-PROTOCOL_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' -PROTOCOL_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' -PROTOCOL_TYPE_SAS = 'Serial Attached SCSI' -PROTOCOL_TYPE_SATA = 'Serial AT Attachment' -PROTOCOL_TYPE_SFTP = 'Secure File Transfer Protocol' -PROTOCOL_TYPE_SMB = 'Server Message Block (CIFS Common Internet File System)' -PROTOCOL_TYPE_UHCI = 'Universal Host Controller Interface' -PROTOCOL_TYPE_USB = 'Universal Serial Bus' -PROTOCOL_TYPE_iSCSI = 'Internet SCSI' -PROTOCOL_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' - # Address origin IPv4 constants ADDRESS_ORIGIN_IPv4_BOOTP = 'Address is provided by a BOOTP service' @@ -70,16 +42,6 @@ ADDRESS_STATE_TENTATIVE = 'Tentative' """This address is currently undergoing Duplicate Address Detection testing as defined in RFC 4862 section 5.4.""" -# Durable name format constants - -DURABLE_NAME_FORMAT_EUI = 'IEEE-defined 64-bit Extended Unique Identifier' -DURABLE_NAME_FORMAT_FC_WWN = 'Fibre Channel World Wide Name' -DURABLE_NAME_FORMAT_NAA = 'Name Address Authority Format' -DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' -DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' -DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' -DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' - # Entity role constants ENTITY_ROLE_BOTH = 'The entity is acting as both an initiator and a target' diff --git a/sushy/resources/fabric/endpoint.py b/sushy/resources/fabric/endpoint.py index ef22b43..180d3df 100644 --- a/sushy/resources/fabric/endpoint.py +++ b/sushy/resources/fabric/endpoint.py @@ -18,6 +18,7 @@ import logging from sushy.resources import base from sushy.resources import common from sushy.resources.fabric import mappings as fab_maps +from sushy.resources import mappings as res_maps from sushy import utils LOG = logging.getLogger(__name__) @@ -68,7 +69,7 @@ class IPTransportDetailsListField(base.ListField): """The UDP or TCP port number used by the Endpoint.""" 
transport_protocol = base.MappedField('TransportProtocol', - fab_maps.PROTOCOL_TYPE_VALUE_MAP) + res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocol used by the connection entity.""" ipv4_address = IPv4AddressField('IPv4Address') @@ -93,16 +94,6 @@ class PciIdField(base.CompositeField): """The Vendor ID of this PCIe function.""" -class IdentifierListField(base.ListField): - - durable_name = base.Field('DurableName') - """This indicates the world wide, persistent name of the entity.""" - - durable_name_format = base.MappedField('DurableNameFormat', - fab_maps.DUR_NAME_FORMAT_VALUE_MAP) - """This represents the format of the DurableName property.""" - - class ConnectedEntitiesListField(base.ListField): """All the entities connected to this endpoint.""" @@ -117,7 +108,7 @@ class ConnectedEntitiesListField(base.ListField): entity_pci_id = PciIdField('EntityPciId') """The PCI ID of the connected entity.""" - identifiers = IdentifierListField('Identifiers') + identifiers = common.IdentifiersListField('Identifiers', default=[]) """Identifiers for the remote entity.""" entity_role = base.MappedField('EntityRole', @@ -155,7 +146,7 @@ class Endpoint(base.ResourceBase): """ endpoint_protocol = base.MappedField('EndpointProtocol', - fab_maps.PROTOCOL_TYPE_VALUE_MAP) + res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocol supported by this endpoint.""" pci_id = PciIdField('PciId') diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py index 5753718..29edf3e 100644 --- a/sushy/resources/fabric/fabric.py +++ b/sushy/resources/fabric/fabric.py @@ -16,7 +16,7 @@ from sushy.resources import base from sushy.resources import common from sushy.resources.fabric import endpoint as fab_endpoint -from sushy.resources.fabric import mappings as fab_maps +from sushy.resources import mappings as res_maps from sushy import utils import logging @@ -47,7 +47,7 @@ class Fabric(base.ResourceBase): """The fabric status""" fabric_type = base.MappedField('FabricType', - 
fab_maps.PROTOCOL_TYPE_VALUE_MAP) + res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocol being sent over this fabric""" def __init__(self, connector, identity, redfish_version=None): diff --git a/sushy/resources/fabric/mappings.py b/sushy/resources/fabric/mappings.py index 0ced928..c1a34dd 100644 --- a/sushy/resources/fabric/mappings.py +++ b/sushy/resources/fabric/mappings.py @@ -16,35 +16,6 @@ from sushy.resources.fabric import constants as fab_cons from sushy import utils -PROTOCOL_TYPE_VALUE_MAP = { - 'AHCI': fab_cons.PROTOCOL_TYPE_AHCI, - 'FC': fab_cons.PROTOCOL_TYPE_FC, - 'FCP': fab_cons.PROTOCOL_TYPE_FCP, - 'FCoE': fab_cons.PROTOCOL_TYPE_FCoE, - 'FICON': fab_cons.PROTOCOL_TYPE_FICON, - 'FTP': fab_cons.PROTOCOL_TYPE_FTP, - 'HTTP': fab_cons.PROTOCOL_TYPE_HTTP, - 'HTTPS': fab_cons.PROTOCOL_TYPE_HTTPS, - 'I2C': fab_cons.PROTOCOL_TYPE_I2C, - 'NFSv3': fab_cons.PROTOCOL_TYPE_NFSv3, - 'NFSv4': fab_cons.PROTOCOL_TYPE_NFSv4, - 'NVMe': fab_cons.PROTOCOL_TYPE_NVMe, - 'NVMeOverFabrics': fab_cons.PROTOCOL_TYPE_NVMeOverFabrics, - 'OEM': fab_cons.PROTOCOL_TYPE_OEM, - 'PCIe': fab_cons.PROTOCOL_TYPE_PCIe, - 'RoCE': fab_cons.PROTOCOL_TYPE_RoCE, - 'RoCEv2': fab_cons.PROTOCOL_TYPE_RoCEv2, - 'SAS': fab_cons.PROTOCOL_TYPE_SAS, - 'SATA': fab_cons.PROTOCOL_TYPE_SATA, - 'SFTP': fab_cons.PROTOCOL_TYPE_SFTP, - 'SMB': fab_cons.PROTOCOL_TYPE_SMB, - 'UHCI': fab_cons.PROTOCOL_TYPE_UHCI, - 'USB': fab_cons.PROTOCOL_TYPE_USB, - 'iSCSI': fab_cons.PROTOCOL_TYPE_iSCSI, - 'iWARP': fab_cons.PROTOCOL_TYPE_iWARP, -} - - ADDRESS_ORIGIN_IPv4_VALUE_MAP = { 'BOOTP': fab_cons.ADDRESS_ORIGIN_IPv4_BOOTP, 'DHCP': fab_cons.ADDRESS_ORIGIN_IPv4_DHCP, @@ -69,17 +40,6 @@ ADDRESS_STATE_VALUE_MAP = { } -DUR_NAME_FORMAT_VALUE_MAP = { - 'EUI': fab_cons.DURABLE_NAME_FORMAT_EUI, - 'FC_WWN': fab_cons.DURABLE_NAME_FORMAT_FC_WWN, - 'NAA': fab_cons.DURABLE_NAME_FORMAT_NAA, - 'NQN': fab_cons.DURABLE_NAME_FORMAT_NQN, - 'NSID': fab_cons.DURABLE_NAME_FORMAT_NSID, - 'UUID': fab_cons.DURABLE_NAME_FORMAT_UUID, - 'iQN': 
fab_cons.DURABLE_NAME_FORMAT_iQN, -} - - ENTITY_ROLE_VALUE_MAP = { 'Both': fab_cons.ENTITY_ROLE_BOTH, 'Initiator': fab_cons.ENTITY_ROLE_INITIATOR, diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index f1687a0..afe9fe7 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -77,3 +77,41 @@ RESET_TYPE_VALUE_MAP = { } RESET_TYPE_VALUE_MAP_REV = utils.revert_dictionary(RESET_TYPE_VALUE_MAP) + +PROTOCOL_TYPE_VALUE_MAP = { + 'AHCI': res_cons.PROTOCOL_TYPE_AHCI, + 'FC': res_cons.PROTOCOL_TYPE_FC, + 'FCP': res_cons.PROTOCOL_TYPE_FCP, + 'FCoE': res_cons.PROTOCOL_TYPE_FCoE, + 'FICON': res_cons.PROTOCOL_TYPE_FICON, + 'FTP': res_cons.PROTOCOL_TYPE_FTP, + 'HTTP': res_cons.PROTOCOL_TYPE_HTTP, + 'HTTPS': res_cons.PROTOCOL_TYPE_HTTPS, + 'I2C': res_cons.PROTOCOL_TYPE_I2C, + 'NFSv3': res_cons.PROTOCOL_TYPE_NFSv3, + 'NFSv4': res_cons.PROTOCOL_TYPE_NFSv4, + 'NVMe': res_cons.PROTOCOL_TYPE_NVMe, + 'NVMeOverFabrics': res_cons.PROTOCOL_TYPE_NVMeOverFabrics, + 'OEM': res_cons.PROTOCOL_TYPE_OEM, + 'PCIe': res_cons.PROTOCOL_TYPE_PCIe, + 'RoCE': res_cons.PROTOCOL_TYPE_RoCE, + 'RoCEv2': res_cons.PROTOCOL_TYPE_RoCEv2, + 'SAS': res_cons.PROTOCOL_TYPE_SAS, + 'SATA': res_cons.PROTOCOL_TYPE_SATA, + 'SFTP': res_cons.PROTOCOL_TYPE_SFTP, + 'SMB': res_cons.PROTOCOL_TYPE_SMB, + 'UHCI': res_cons.PROTOCOL_TYPE_UHCI, + 'USB': res_cons.PROTOCOL_TYPE_USB, + 'iSCSI': res_cons.PROTOCOL_TYPE_iSCSI, + 'iWARP': res_cons.PROTOCOL_TYPE_iWARP, +} + +DUR_NAME_FORMAT_VALUE_MAP = { + 'EUI': res_cons.DURABLE_NAME_FORMAT_EUI, + 'FC_WWN': res_cons.DURABLE_NAME_FORMAT_FC_WWN, + 'NAA': res_cons.DURABLE_NAME_FORMAT_NAA, + 'NQN': res_cons.DURABLE_NAME_FORMAT_NQN, + 'NSID': res_cons.DURABLE_NAME_FORMAT_NSID, + 'UUID': res_cons.DURABLE_NAME_FORMAT_UUID, + 'iQN': res_cons.DURABLE_NAME_FORMAT_iQN, +} diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py index 879833b..c9df1d2 100644 --- a/sushy/resources/system/storage/drive.py +++ 
b/sushy/resources/system/storage/drive.py @@ -58,7 +58,7 @@ class Drive(base.ResourceBase): part_number = base.Field('PartNumber') """The part number for this drive""" - protocol = base.Field('Protocol') + protocol = base.MappedField('Protocol', res_maps.PROTOCOL_TYPE_VALUE_MAP) """Protocol this drive is using to communicate to the storage controller""" serial_number = base.Field('SerialNumber') diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index 796c6fc..f506af7 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -44,13 +44,14 @@ class DriveTestCase(base.TestCase): self.assertIsInstance(identifiers, list) self.assertEqual(1, len(identifiers)) identifier = identifiers[0] - self.assertEqual('NAA', identifier.durable_name_format) + self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + identifier.durable_name_format) self.assertEqual('32ADF365C6C1B7BD', identifier.durable_name) self.assertEqual('Contoso', self.stor_drive.manufacturer) self.assertEqual('HDD', self.stor_drive.media_type) self.assertEqual('C123', self.stor_drive.model) self.assertEqual('C123-1111', self.stor_drive.part_number) - self.assertEqual('SAS', self.stor_drive.protocol) + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, self.stor_drive.protocol) self.assertEqual('1234570', self.stor_drive.serial_number) self.assertEqual(sushy.STATE_ENABLED, self.stor_drive.status.state) self.assertEqual(sushy.HEALTH_OK, self.stor_drive.status.health) diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index e9f57dd..0561f06 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -14,6 +14,7 @@ import json import mock +import sushy from sushy.resources.system.storage import drive from 
sushy.resources.system.storage import storage from sushy.resources.system.storage import volume @@ -117,7 +118,8 @@ class StorageTestCase(base.TestCase): self.assertIsInstance(identifiers, list) self.assertEqual(1, len(identifiers)) identifier = identifiers[0] - self.assertEqual('NAA', identifier.durable_name_format) + self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + identifier.durable_name_format) self.assertEqual('345C59DBD970859C', identifier.durable_name) self.assertEqual(12, controller.speed_gbps) self.assertEqual(["PCIe"], controller.controller_protocols) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index a061d70..c9963b1 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -15,8 +15,8 @@ import mock from dateutil import parser +import sushy from sushy import exceptions -from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import volume from sushy.tests.unit import base @@ -39,14 +39,15 @@ class VolumeTestCase(base.TestCase): self.assertEqual('1', self.stor_volume.identity) self.assertEqual('Virtual Disk 1', self.stor_volume.name) self.assertEqual(899527000000, self.stor_volume.capacity_bytes) - self.assertEqual(store_cons.VOLUME_TYPE_MIRRORED, + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, self.stor_volume.volume_type) self.assertFalse(self.stor_volume.encrypted) identifiers = self.stor_volume.identifiers self.assertIsInstance(identifiers, list) self.assertEqual(1, len(identifiers)) identifier = identifiers[0] - self.assertEqual('UUID', identifier.durable_name_format) + self.assertEqual(sushy.DURABLE_NAME_FORMAT_UUID, + identifier.durable_name_format) self.assertEqual('38f1818b-111e-463a-aa19-fa54f792e468', identifier.durable_name) self.assertIsNone(self.stor_volume.block_size_bytes) @@ -190,4 +191,4 @@ class 
VolumeCollectionTestCase(base.TestCase): self.assertEqual('4', new_vol.identity) self.assertEqual('My Volume 4', new_vol.name) self.assertEqual(107374182400, new_vol.capacity_bytes) - self.assertEqual(store_cons.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) -- GitLab From a5228962365bbdf3a7ce8e36a7c8a395cffe3946 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 19 Jun 2019 11:31:35 +0200 Subject: [PATCH 163/303] Handle incomplete message registries This fix handles the case when the message registry does not contain a language file neither for the requested language nor for the default. On top of that, this fix tries to use all potentially suitable language files, not just the first one. Change-Id: Ifdc3e444ba358604c248d9f0e1d5e59fd5e7b4cc Co-Authored-By: Richard.Pioso@dell.com --- .../registry/message_registry_file.py | 53 +++++++++++-------- .../registry/test_message_registry_file.py | 44 ++++++++++++++- 2 files changed, 74 insertions(+), 23 deletions(-) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 587b0c0..0cec51a 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -82,28 +82,37 @@ class MessageRegistryFile(base.ResourceBase): from the Internet """ - location = next((l for l in self.location if l.language == language), - [d for d in self.location if d.language == 'default'] - [0]) - - if location.uri: - return message_registry.MessageRegistry( - self._conn, path=location.uri, - redfish_version=self.redfish_version) - elif location.archive_uri: - return message_registry.MessageRegistry( - self._conn, path=location.archive_uri, - redfish_version=self.redfish_version, - reader=base.JsonArchiveReader(location.archive_file)) - elif location.publication_uri: - return message_registry.MessageRegistry( - public_connector, - path=location.publication_uri, - 
redfish_version=self.redfish_version, - reader=base.JsonPublicFileReader()) - else: - LOG.warning('No location defined for language %(language)s', - {'language': language}) + # NOTE (etingof): as per RFC5646, languages are case-insensitive + language = language.lower() + + locations = [ + l for l in self.location if l.language.lower() == language] + + locations += [ + l for l in self.location if l.language.lower() == 'default'] + + for location in locations: + if location.uri: + return message_registry.MessageRegistry( + self._conn, path=location.uri, + redfish_version=self.redfish_version) + elif location.archive_uri: + return message_registry.MessageRegistry( + self._conn, path=location.archive_uri, + redfish_version=self.redfish_version, + reader=base.JsonArchiveReader(location.archive_file)) + elif location.publication_uri: + return message_registry.MessageRegistry( + public_connector, + path=location.publication_uri, + redfish_version=self.redfish_version, + reader=base.JsonPublicFileReader()) + else: + LOG.warning('Incomplete location for language %(language)s', + {'language': language}) + + LOG.warning('No message registry found for %(language)s or ' + 'default', {'language': language}) class MessageRegistryFileCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 21cb905..579fa88 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -116,7 +116,49 @@ class MessageRegistryFileTestCase(base.TestCase): mock_msg_reg.assert_not_called() self.assertIsNone(registry) mock_log.warning.assert_called_with( - 'No location defined for language %(language)s', + 'No message registry found for %(language)s or default', + {'language': 'en'}) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + 
def test_get_message_registry_non_default_lang(self, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].language = 'en' + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + redfish_version=self.reg_file.redfish_version) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + def test_get_message_registry_strangely_cased_lang(self, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].language = 'En' + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + redfish_version=self.reg_file.redfish_version) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_missing_lang(self, mock_log, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].language = 'cz' + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_not_called() + self.assertIsNone(registry) + mock_log.warning.assert_called_with( + 'No message registry found for %(language)s or default', {'language': 'en'}) -- GitLab From 318953634329382a2247e811ddebd08a02502890 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 3 Jul 2019 17:17:21 +0200 Subject: [PATCH 164/303] Disregard registry files of unsupported types This fix disregards `MessageRegistryFile` if it links anything other than `MessageRegistry` resource. 
Apparently, it can also be `AttributeRegistry` or `PrivilegeRegistry` or who knows what else one could fish out of the depths of `MessageRegistryFile` [1]. 1. http://redfish.dmtf.org/schemas/v1/MessageRegistryFile_v1.xml Change-Id: Idfd2f1ed4ef78d048ac8bd5297619805db694bf6 Co-Authored-By: Richard G. Pioso Co-Authored-By: Mike Raineri --- .../registry/message_registry_file.py | 51 +++++++++++---- .../registry/test_message_registry_file.py | 64 +++++++++++++------ 2 files changed, 83 insertions(+), 32 deletions(-) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 0cec51a..9eb638f 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -48,6 +48,10 @@ class LocationListField(base.ListField): """Location URI of publicly available schema""" +class RegistryType(base.ResourceBase): + _odata_type = base.Field('@odata.type', required=True) + + class MessageRegistryFile(base.ResourceBase): identity = base.Field('Id', required=True) @@ -74,8 +78,9 @@ class MessageRegistryFile(base.ResourceBase): def get_message_registry(self, language, public_connector): """Load message registry file depending on its source - Will try to find a registry based on provided language, if not found - then will use a registry that has 'default' language. + Will try to find `MessageRegistry` based on `odata.type` property and + provided language. If desired language is not found, will pick a + registry that has 'default' language. 
:param language: RFC 5646 language code for registry files :param public_connector: connector to use when downloading registry @@ -93,23 +98,41 @@ class MessageRegistryFile(base.ResourceBase): for location in locations: if location.uri: - return message_registry.MessageRegistry( - self._conn, path=location.uri, - redfish_version=self.redfish_version) + args = self._conn, + kwargs = { + 'path': location.uri, + 'reader': None, + 'redfish_version': self.redfish_version + } + elif location.archive_uri: - return message_registry.MessageRegistry( - self._conn, path=location.archive_uri, - redfish_version=self.redfish_version, - reader=base.JsonArchiveReader(location.archive_file)) + args = self._conn, + kwargs = { + 'path': location.archive_uri, + 'reader': base.JsonArchiveReader(location.archive_file), + 'redfish_version': self.redfish_version + } + elif location.publication_uri: - return message_registry.MessageRegistry( - public_connector, - path=location.publication_uri, - redfish_version=self.redfish_version, - reader=base.JsonPublicFileReader()) + args = public_connector, + kwargs = { + 'path': location.publication_uri, + 'reader': base.JsonPublicFileReader(), + 'redfish_version': self.redfish_version + } + else: LOG.warning('Incomplete location for language %(language)s', {'language': language}) + continue + + registry = RegistryType(*args, **kwargs) + + if registry._odata_type.endswith('MessageRegistry'): + return message_registry.MessageRegistry(*args, **kwargs) + + LOG.warning('Ignoring unsupported flavor of registry %(registry)s', + {'registry': registry._odata_type}) LOG.warning('No message registry found for %(language)s or ' 'default', {'language': language}) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 579fa88..37282ef 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ 
b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -53,14 +53,20 @@ class MessageRegistryFileTestCase(base.TestCase): @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', autospec=True) - def test_get_message_registry_uri(self, mock_msg_reg): + @mock.patch('sushy.resources.base.JsonDataReader', autospec=True) + def test_get_message_registry_uri(self, mock_reader, mock_msg_reg): + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_reader_rv.get_json.return_value = { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + } mock_msg_reg_rv = mock.Mock() mock_msg_reg.return_value = mock_msg_reg_rv registry = self.reg_file.get_message_registry('en', None) mock_msg_reg.assert_called_once_with( self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', - redfish_version=self.reg_file.redfish_version) + reader=None, redfish_version=self.reg_file.redfish_version) self.assertEqual(mock_msg_reg_rv, registry) @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', @@ -70,6 +76,9 @@ class MessageRegistryFileTestCase(base.TestCase): mock_reader_rv = mock.Mock() mock_reader.return_value = mock_reader_rv mock_msg_reg_rv = mock.Mock() + mock_reader_rv.get_json.return_value = { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + } mock_msg_reg.return_value = mock_msg_reg_rv self.reg_file.location[0].uri = None @@ -89,6 +98,9 @@ class MessageRegistryFileTestCase(base.TestCase): mock_reader_rv = mock.Mock() mock_reader.return_value = mock_reader_rv mock_msg_reg_rv = mock.Mock() + mock_reader_rv.get_json.return_value = { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + } mock_msg_reg.return_value = mock_msg_reg_rv self.reg_file.location[0].uri = None self.reg_file.location[0].archive_uri = None @@ -101,6 +113,23 @@ class MessageRegistryFileTestCase(base.TestCase): reader=mock_reader_rv) self.assertEqual(mock_msg_reg_rv, registry) + 
@mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.base.JsonDataReader', autospec=True) + def test_get_message_registry_unknown_type( + self, mock_reader, mock_msg_reg): + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_reader_rv.get_json.return_value = { + "@odata.type": "#FishingRegistry.v1_1_1.FishingRegistry", + } + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + + registry = self.reg_file.get_message_registry('en', None) + self.assertFalse(mock_msg_reg.called) + self.assertIsNone(registry) + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', autospec=True) @mock.patch('sushy.resources.registry.message_registry_file.LOG', @@ -119,31 +148,30 @@ class MessageRegistryFileTestCase(base.TestCase): 'No message registry found for %(language)s or default', {'language': 'en'}) - @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', autospec=True) - def test_get_message_registry_non_default_lang(self, mock_msg_reg): - mock_msg_reg_rv = mock.Mock() - mock_msg_reg.return_value = mock_msg_reg_rv + def test_get_message_registry_non_default_lang(self, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' self.reg_file.location[0].language = 'en' - registry = self.reg_file.get_message_registry('en', None) - mock_msg_reg.assert_called_once_with( + mock_registry_type.assert_called_once_with( self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', - redfish_version=self.reg_file.redfish_version) - self.assertEqual(mock_msg_reg_rv, registry) + reader=None, redfish_version=self.reg_file.redfish_version) + self.assertIsNone(registry) - @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + 
@mock.patch('sushy.resources.registry.message_registry_file.RegistryType', autospec=True) - def test_get_message_registry_strangely_cased_lang(self, mock_msg_reg): - mock_msg_reg_rv = mock.Mock() - mock_msg_reg.return_value = mock_msg_reg_rv + def test_get_message_registry_strangely_cased_lang( + self, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' self.reg_file.location[0].language = 'En' - registry = self.reg_file.get_message_registry('en', None) - mock_msg_reg.assert_called_once_with( + mock_registry_type.assert_called_once_with( self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', - redfish_version=self.reg_file.redfish_version) - self.assertEqual(mock_msg_reg_rv, registry) + reader=None, redfish_version=self.reg_file.redfish_version) + self.assertIsNone(registry) @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', autospec=True) -- GitLab From f9a62fa6068e4d72adf2040f74c5cd3eb9c60001 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 9 Jul 2019 17:17:41 +0200 Subject: [PATCH 165/303] Low case `ParamTypes` in received `MessageRegistry` It turns out, that some implementations encountered in the wild waters use different cases for (at least) `ParamTypes` enumerations in `MessageRegistry` resource. In the schema [1] these enumerations are all in low case. To make sushy operational, this patch just ignores the case of these enumerations. It's still an open question as to how strict the case requirement is with Redfish (/cc Davy Jones). 1. 
https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_3_0.json Change-Id: I107b689b8f4e27dd7331473124f7108d151cc55b --- sushy/resources/registry/message_registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index e0b9f5b..01b2e13 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -37,7 +37,7 @@ class MessageDictionaryField(base.DictionaryField): param_types = base.Field('ParamTypes', adapter=lambda x: - [res_maps.PARAMTYPE_VALUE_MAP[v] + [res_maps.PARAMTYPE_VALUE_MAP[v.lower()] for v in x]) """Mapped MessageArg types, in order, for the message""" -- GitLab From 2858bb605bba26aa1910fe6a3a4d985401a8ec70 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 8 Mar 2019 12:58:20 +0100 Subject: [PATCH 166/303] Fix exposed UpdateService constants Sushy library exposed a collection of constants to the user of the library. These constants are frequently used in library calls to achieve desired behaviour. The exposed constants are strings, they are designed to be human-friendly as opposed to tedious machine identifiers. It seems that the way UpdateService resource is implemented turns the general principle upside down. Meaning that the constants are taken from JSON serialization. This patch flips these things over retaining backward compatibility. 
Change-Id: I0b46db187018350062aaa1cd815fa4e029df4d3e Story: 2003853 Task: 26652 --- ...te-service-constants-b8c3f48ccee6ce1f.yaml | 9 ++++ sushy/__init__.py | 7 +-- sushy/resources/constants.py | 4 ++ sushy/resources/updateservice/constants.py | 22 +++++----- sushy/resources/updateservice/mappings.py | 24 +++++------ .../resources/updateservice/updateservice.py | 43 ++++++++++++++----- .../updateservice/test_updateservice.py | 21 ++++++++- 7 files changed, 92 insertions(+), 38 deletions(-) create mode 100644 releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml diff --git a/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml b/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml new file mode 100644 index 0000000..9d1a5ab --- /dev/null +++ b/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml @@ -0,0 +1,9 @@ +--- +fixes: + - | + The ``transfer_protocol`` parameter of the ``UpdateService.simple_update`` + method should be given one of the newly exposed constants rather than a + string literal. This is a breaking change. +features: + - | + Exposes ``UpdateService`` constants to ``sushy`` namespace. 
\ No newline at end of file diff --git a/sushy/__init__.py b/sushy/__init__.py index 248aeb1..8fb461c 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -17,12 +17,13 @@ import logging import pbr.version from sushy.main import Sushy -from sushy.resources.constants import * # noqa -from sushy.resources.system.constants import * # noqa -from sushy.resources.manager.constants import * # noqa from sushy.resources.chassis.constants import * # noqa +from sushy.resources.constants import * # noqa from sushy.resources.fabric.constants import * # noqa +from sushy.resources.manager.constants import * # noqa +from sushy.resources.system.constants import * # noqa from sushy.resources.system.storage.constants import * # noqa +from sushy.resources.updateservice.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index 242a152..f0572b5 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -105,6 +105,7 @@ RESET_TYPE_POWER_CYCLE = 'power cycle' # Protocol type constants PROTOCOL_TYPE_AHCI = 'Advanced Host Controller Interface' +PROTOCOL_TYPE_CIFS = 'Common Internet File System Protocol' PROTOCOL_TYPE_FC = 'Fibre Channel' PROTOCOL_TYPE_FCP = 'Fibre Channel Protocol for SCSI' PROTOCOL_TYPE_FCoE = 'Fibre Channel over Ethernet' @@ -113,6 +114,7 @@ PROTOCOL_TYPE_FTP = 'File Transfer Protocol' PROTOCOL_TYPE_HTTP = 'Hypertext Transport Protocol' PROTOCOL_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' PROTOCOL_TYPE_I2C = 'Inter-Integrated Circuit Bus' +PROTOCOL_TYPE_NFS = 'Network File System Protocol' PROTOCOL_TYPE_NFSv3 = 'Network File System version 3' PROTOCOL_TYPE_NFSv4 = 'Network File System version 4' PROTOCOL_TYPE_NVMe = 'Non-Volatile Memory Express' @@ -123,8 +125,10 @@ PROTOCOL_TYPE_RoCE = 'RDMA over Converged Ethernet Protocol' PROTOCOL_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' PROTOCOL_TYPE_SAS = 'Serial Attached 
SCSI' PROTOCOL_TYPE_SATA = 'Serial AT Attachment' +PROTOCOL_TYPE_SCP = 'Secure File Copy Protocol' PROTOCOL_TYPE_SFTP = 'Secure File Transfer Protocol' PROTOCOL_TYPE_SMB = 'Server Message Block (CIFS Common Internet File System)' +PROTOCOL_TYPE_TFTP = 'Trivial File Transfer Protocol' PROTOCOL_TYPE_UHCI = 'Universal Host Controller Interface' PROTOCOL_TYPE_USB = 'Universal Serial Bus' PROTOCOL_TYPE_iSCSI = 'Internet SCSI' diff --git a/sushy/resources/updateservice/constants.py b/sushy/resources/updateservice/constants.py index bffa493..a0bc2ef 100644 --- a/sushy/resources/updateservice/constants.py +++ b/sushy/resources/updateservice/constants.py @@ -11,16 +11,18 @@ # under the License. # Values come from the Redfish UpdateService json-schema. -# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json +# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json#/definitions/TransferProtocolType + +from sushy.resources import constants as res_cons # Transfer Protocol Type constants -TRANSFER_PROTOCOL_TYPE_CIFS = 'CIFS' -TRANSFER_PROTOCOL_TYPE_FTP = 'FTP' -TRANSFER_PROTOCOL_TYPE_SFTP = 'SFTP' -TRANSFER_PROTOCOL_TYPE_HTTP = 'HTTP' -TRANSFER_PROTOCOL_TYPE_HTTPS = 'HTTPS' -TRANSFER_PROTOCOL_TYPE_SCP = 'SCP' -TRANSFER_PROTOCOL_TYPE_TFTP = 'TFTP' -TRANSFER_PROTOCOL_TYPE_OEM = 'OEM' -TRANSFER_PROTOCOL_TYPE_NFS = 'NFS' +UPDATE_PROTOCOL_CIFS = res_cons.PROTOCOL_TYPE_CIFS +UPDATE_PROTOCOL_FTP = res_cons.PROTOCOL_TYPE_FTP +UPDATE_PROTOCOL_SFTP = res_cons.PROTOCOL_TYPE_SFTP +UPDATE_PROTOCOL_HTTP = res_cons.PROTOCOL_TYPE_HTTP +UPDATE_PROTOCOL_HTTPS = res_cons.PROTOCOL_TYPE_HTTPS +UPDATE_PROTOCOL_SCP = res_cons.PROTOCOL_TYPE_SCP +UPDATE_PROTOCOL_TFTP = res_cons.PROTOCOL_TYPE_TFTP +UPDATE_PROTOCOL_OEM = res_cons.PROTOCOL_TYPE_OEM +UPDATE_PROTOCOL_NFS = res_cons.PROTOCOL_TYPE_NFS diff --git a/sushy/resources/updateservice/mappings.py b/sushy/resources/updateservice/mappings.py index 4f5d60e..eb2be62 100644 --- a/sushy/resources/updateservice/mappings.py +++ 
b/sushy/resources/updateservice/mappings.py @@ -15,21 +15,19 @@ from sushy import utils TRANSFER_PROTOCOL_TYPE_VALUE_MAP = { - 'Common Internet File System Protocol': - ups_cons.TRANSFER_PROTOCOL_TYPE_CIFS, - 'File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_FTP, - 'Secure File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_SFTP, - 'Hypertext Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_HTTP, - 'HTTP Secure Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_HTTPS, - 'Secure File Copy Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_SCP, - 'Trivial File Transfer Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_TFTP, - 'A protocol defined by the manufacturer': - ups_cons.TRANSFER_PROTOCOL_TYPE_OEM, - 'Network File System Protocol': ups_cons.TRANSFER_PROTOCOL_TYPE_NFS + 'CIFS': ups_cons.UPDATE_PROTOCOL_CIFS, + 'FTP': ups_cons.UPDATE_PROTOCOL_FTP, + 'SFTP': ups_cons.UPDATE_PROTOCOL_SFTP, + 'HTTP': ups_cons.UPDATE_PROTOCOL_HTTP, + 'HTTPS': ups_cons.UPDATE_PROTOCOL_HTTPS, + 'SCP': ups_cons.UPDATE_PROTOCOL_SCP, + 'TFTP': ups_cons.UPDATE_PROTOCOL_TFTP, + 'OEM': ups_cons.UPDATE_PROTOCOL_OEM, + 'NFS': ups_cons.UPDATE_PROTOCOL_NFS, + 'NSF': ups_cons.UPDATE_PROTOCOL_NFS } TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV = ( utils.revert_dictionary(TRANSFER_PROTOCOL_TYPE_VALUE_MAP)) -TRANSFER_PROTOCOL_TYPE_VALUE_MAP[ - 'Network File System Protocol'] = ups_cons.TRANSFER_PROTOCOL_TYPE_NFS +TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV[ups_cons.UPDATE_PROTOCOL_NFS] = 'NFS' diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index 309b9a2..d9e69ef 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -19,6 +19,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources.updateservice import constants as up_cons from sushy.resources.updateservice import mappings as up_maps from sushy.resources.updateservice 
import softwareinventory from sushy import utils @@ -96,7 +97,17 @@ class UpdateService(base.ResourceBase): resource=self._path) return simple_update_action - def get_allowed_transfer_protocol_values(self): + def _get_legacy_transfer_protocols(self): + """Get the backward-compatible values for transfer protocol. + + :returns: A set of allowed values. + """ + LOG.warning( + 'Could not figure out the allowed values for the simple ' + 'update action for UpdateService %s', self.identity) + return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) + + def get_allowed_transfer_protocols(self): """Get the allowed values for transfer protocol. :returns: A set of allowed values. @@ -111,19 +122,29 @@ class UpdateService(base.ResourceBase): 'update action for UpdateService %s', self.identity) return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV) - return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP[v] for v in - simple_update_action.transfer_protocol if v in - up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) + return set(simple_update_action.transfer_protocol) - def simple_update(self, image_uri, targets, transfer_protocol='HTTP'): + def simple_update(self, image_uri, targets, + transfer_protocol=up_cons.UPDATE_PROTOCOL_HTTP): """Simple Update is used to update software components""" - transfer_protocol = transfer_protocol + valid_transfer_protocols = self.get_allowed_transfer_protocols() - valid_transfer_protocols = self.get_allowed_transfer_protocol_values() - if transfer_protocol not in valid_transfer_protocols: - raise exceptions.InvalidParameterValueError( - parameter='transfer_protocol', value=transfer_protocol, - valid_values=valid_transfer_protocols) + if transfer_protocol in valid_transfer_protocols: + transfer_protocol = up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV[ + transfer_protocol] + + else: + legacy_transfer_protocols = self._get_legacy_transfer_protocols() + + if transfer_protocol not in legacy_transfer_protocols: + raise exceptions.InvalidParameterValueError( + 
parameter='transfer_protocol', value=transfer_protocol, + valid_values=valid_transfer_protocols) + + LOG.warning( + 'Legacy transfer protocol constant %s is being used. ' + 'Consider migrating to any of: %s', + ', '.join(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV)) self._conn.post(data={ 'ImageURI': image_uri, diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index 741d81a..e166b76 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -58,13 +58,32 @@ class UpdateServiceTestCase(base.TestCase): self.upd_serv.simple_update( image_uri='local.server/update.exe', targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', - transfer_protocol=ups_cons.TRANSFER_PROTOCOL_TYPE_HTTPS) + transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) self.upd_serv._conn.post.assert_called_once_with( data={ 'ImageURI': 'local.server/update.exe', 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', 'TransferProtocol': 'HTTPS'}) + def test_simple_update_backward_compatible_protocol(self): + self.upd_serv.simple_update( + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='HTTPS') + self.upd_serv._conn.post.assert_called_once_with( + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_bad_protocol(self): + self.assertRaises( + exceptions.InvalidParameterValueError, + self.upd_serv.simple_update, + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='ROYAL') + def test_software_inventory(self): # | GIVEN | self.conn.get.return_value.json.reset_mock() -- GitLab From 0bd97a054ac225456d1764db646dbf0d5379e961 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 
8 Mar 2019 13:47:45 +0100 Subject: [PATCH 167/303] Make UpdateService.simple_update() operational This change fixes a bug in the `simple_update` implementation, giving the HTTP POST method a chance to succeed. Also, the `UpdateService` action field was refactored to better align with the rest of sushy. Change-Id: I3fdf58ec6c38282c67bd87729675636d8d90db1e Story: 2003853 Task: 26652 --- .../fix-simple-update-e88838fab4170920.yaml | 4 ++ .../resources/updateservice/updateservice.py | 38 +++++++------------ .../updateservice/test_updateservice.py | 6 ++- 3 files changed, 22 insertions(+), 26 deletions(-) create mode 100644 releasenotes/notes/fix-simple-update-e88838fab4170920.yaml diff --git a/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml b/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml new file mode 100644 index 0000000..03262c2 --- /dev/null +++ b/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Fixes bug in ``UpdateService.simple_update`` method making it operational. 
diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index d9e69ef..dac8651 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -27,25 +27,9 @@ from sushy import utils LOG = logging.getLogger(__name__) -class SimpleUpdateActionField(common.ActionField): - - image_uri = base.Field('ImageURI') - """The URI of the software image to be installed""" - - targets = base.Field('Targets') - """The array of URIs indicating where the update image is to be""" + \ - """applied""" - - transfer_protocol = base.MappedField( - 'TransferProtocol', - up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) - """The network protocol used by the Update Service""" - - class ActionsField(base.CompositeField): - simple_update = SimpleUpdateActionField( - '#UpdateService.SimpleUpdate') + simple_update = common.ActionField('#UpdateService.SimpleUpdate') class UpdateService(base.ResourceBase): @@ -116,13 +100,13 @@ class UpdateService(base.ResourceBase): """ simple_update_action = self._get_simple_update_element() - if not simple_update_action.transfer_protocol: - LOG.warning( - 'Could not figure out the allowed values for the simple ' - 'update action for UpdateService %s', self.identity) + if not getattr(simple_update_action, 'transfer_protocol', None): + LOG.debug( + 'Server does not constrain allowed transfer protocols for ' + 'simple update action of UpdateService %s', self.identity) return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV) - return set(simple_update_action.transfer_protocol) + return {simple_update_action.transfer_protocol} def simple_update(self, image_uri, targets, transfer_protocol=up_cons.UPDATE_PROTOCOL_HTTP): @@ -143,10 +127,16 @@ class UpdateService(base.ResourceBase): LOG.warning( 'Legacy transfer protocol constant %s is being used. 
' - 'Consider migrating to any of: %s', + 'Consider migrating to any of: %s', transfer_protocol, ', '.join(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV)) - self._conn.post(data={ + target_uri = self._get_simple_update_element().target_uri + + LOG.debug( + 'Updating software component %s via ' + '%s ...', image_uri, target_uri) + + self._conn.post(target_uri, data={ 'ImageURI': image_uri, 'Targets': targets, 'TransferProtocol': transfer_protocol}) diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index e166b76..e3e126f 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -57,12 +57,13 @@ class UpdateServiceTestCase(base.TestCase): def test_simple_update(self): self.upd_serv.simple_update( image_uri='local.server/update.exe', - targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + targets=['/redfish/v1/UpdateService/FirmwareInventory/BMC'], transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', data={ 'ImageURI': 'local.server/update.exe', - 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', + 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], 'TransferProtocol': 'HTTPS'}) def test_simple_update_backward_compatible_protocol(self): @@ -71,6 +72,7 @@ class UpdateServiceTestCase(base.TestCase): targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', transfer_protocol='HTTPS') self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', data={ 'ImageURI': 'local.server/update.exe', 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', -- GitLab From d7a4e5cae6ec439eec5fff62a46b9621ad8efadd Mon Sep 17 00:00:00 2001 From: kesper Date: Tue, 9 Jul 2019 16:51:06 +0530 Subject: [PATCH 168/303] Added changes to 
`simple_update` on update service This commit makes the `targets` argument optional since, as per the DMTF standard, it is not a required field. Also, some BMCs can differentiate between different firmware based on the given image, so they do not require a target location, e.g. iLO5. Change-Id: Id5f2a81ac4035106c035b932d40dfc410699a036 --- sushy/resources/updateservice/updateservice.py | 10 +++++----- .../unit/resources/updateservice/test_updateservice.py | 10 ++++++++++ 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index dac8651..cd41a2e 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -108,7 +108,7 @@ class UpdateService(base.ResourceBase): return {simple_update_action.transfer_protocol} - def simple_update(self, image_uri, targets, + def simple_update(self, image_uri, targets=None, transfer_protocol=up_cons.UPDATE_PROTOCOL_HTTP): """Simple Update is used to update software components""" valid_transfer_protocols = self.get_allowed_transfer_protocols() @@ -136,10 +136,10 @@ class UpdateService(base.ResourceBase): 'Updating software component %s via ' '%s ...', image_uri, target_uri) - self._conn.post(target_uri, data={ - 'ImageURI': image_uri, - 'Targets': targets, - 'TransferProtocol': transfer_protocol}) + data = {'ImageURI': image_uri, 'TransferProtocol': transfer_protocol} + if targets: + data['Targets'] = targets + self._conn.post(target_uri, data=data) def _get_software_inventory_collection_path(self): """Helper function to find the SoftwareInventoryCollections path""" diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index e3e126f..5815840 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -78,6 +78,16 @@ class 
UpdateServiceTestCase(base.TestCase): 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', 'TransferProtocol': 'HTTPS'}) + def test_simple_update_without_target(self): + self.upd_serv.simple_update( + image_uri='local.server/update.exe', + transfer_protocol='HTTPS') + self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'TransferProtocol': 'HTTPS'}) + def test_simple_update_bad_protocol(self): self.assertRaises( exceptions.InvalidParameterValueError, -- GitLab From b6d13c00b3a0120ecfa0e43f84609a392a920024 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 17 Jul 2019 00:39:50 +0200 Subject: [PATCH 169/303] Uploading to unstable. --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index 24c81ac..e8de2e9 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (1.8.1-2) unstable; urgency=medium + + * Uploading to unstable. + + -- Thomas Goirand Wed, 17 Jul 2019 00:39:40 +0200 + python-sushy (1.8.1-1) experimental; urgency=medium [ Ondřej Nový ] -- GitLab From 181d55f2ebd0020e1cb0f83ec4f42fd1d6750658 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Thu, 18 Jul 2019 16:38:25 +0200 Subject: [PATCH 170/303] Use debhelper-compat instead of debian/compat --- debian/changelog | 6 ++++++ debian/compat | 1 - debian/control | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) delete mode 100644 debian/compat diff --git a/debian/changelog b/debian/changelog index e8de2e9..f4096b1 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (1.8.1-3) UNRELEASED; urgency=medium + + * Use debhelper-compat instead of debian/compat. + + -- Ondřej Nový Thu, 18 Jul 2019 16:38:25 +0200 + python-sushy (1.8.1-2) unstable; urgency=medium * Uploading to unstable. 
diff --git a/debian/compat b/debian/compat deleted file mode 100644 index f599e28..0000000 --- a/debian/compat +++ /dev/null @@ -1 +0,0 @@ -10 diff --git a/debian/control b/debian/control index 2fe8bca..b9f8734 100644 --- a/debian/control +++ b/debian/control @@ -5,7 +5,7 @@ Maintainer: Debian OpenStack Uploaders: Thomas Goirand , Build-Depends: - debhelper (>= 10), + debhelper-compat (= 10), dh-python, openstack-pkg-tools, python3-all, -- GitLab From 24b93612f906ebf6a64fa06b960dcdc8175aa3aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Fri, 19 Jul 2019 15:53:53 +0200 Subject: [PATCH 171/303] Bump Standards-Version to 4.4.0 --- debian/changelog | 1 + debian/control | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index f4096b1..1ae5a23 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,7 @@ python-sushy (1.8.1-3) UNRELEASED; urgency=medium * Use debhelper-compat instead of debian/compat. + * Bump Standards-Version to 4.4.0. -- Ondřej Nový Thu, 18 Jul 2019 16:38:25 +0200 diff --git a/debian/control b/debian/control index b9f8734..e8b26d6 100644 --- a/debian/control +++ b/debian/control @@ -25,7 +25,7 @@ Build-Depends-Indep: python3-testscenarios, python3-testtools, subunit, -Standards-Version: 4.3.0 +Standards-Version: 4.4.0 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy -- GitLab From 83a6a8cf9173f612f66c34cbf20029d5ef216158 Mon Sep 17 00:00:00 2001 From: Shivanand Tendulker Date: Thu, 18 Jul 2019 04:32:38 -0400 Subject: [PATCH 172/303] Action #Bios.ResetBios fails as POST request has no body Bios action #Bios.ResetBios fails with HTTP code 415 as BMC expects the POST request with empty body instead of no body. 
Change-Id: I8d234b3e562a46d3be27f6eae83b6f2e99813c83 Story: 2006246 Task: 35854 --- ...eturn-http-error-415-08170df7fe6300f8.yaml | 6 +++++ sushy/resources/system/bios.py | 11 +++++++- .../tests/unit/resources/system/test_bios.py | 25 +++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml diff --git a/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml b/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml new file mode 100644 index 0000000..e306bdf --- /dev/null +++ b/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes an issue in performing action ``#Bios.ResetBios`` when BMC expects + the POST request with empty body instead of no body. See `story 2006246 + `__ for details. diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 749cbe6..9258b47 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -14,6 +14,7 @@ # https://redfish.dmtf.org/schemas/Bios.v1_0_3.json import logging +from six.moves import http_client from sushy import exceptions from sushy.resources import base @@ -152,7 +153,15 @@ class Bios(base.ResourceBase): target_uri = self._get_reset_bios_action_element().target_uri LOG.debug('Resetting BIOS attributes %s ...', self.identity) - self._conn.post(target_uri) + try: + self._conn.post(target_uri) + except exceptions.HTTPError as resp: + # Send empty payload, if BMC expects body + if resp.status_code == http_client.UNSUPPORTED_MEDIA_TYPE: + self._conn.post(target_uri, data={}) + else: + raise + LOG.info('BIOS attributes %s is being reset', self.identity) def change_password(self, new_password, old_password, password_name): diff --git a/sushy/tests/unit/resources/system/test_bios.py 
b/sushy/tests/unit/resources/system/test_bios.py index bd6bb6e..77de438 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -16,6 +16,7 @@ import json import mock from dateutil import parser +from six.moves import http_client from sushy import exceptions from sushy.resources.registry import message_registry @@ -157,6 +158,30 @@ class BiosTestCase(base.TestCase): self.sys_bios._conn.post.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + def test_reset_bios_handle_http_error_415(self): + + target_uri = ( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.UNSUPPORTED_MEDIA_TYPE)), '200'] + post_calls = [ + mock.call(target_uri), mock.call(target_uri, data={})] + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_has_calls(post_calls) + + def test_reset_bios_handle_http_error_405(self): + + target_uri = ( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.METHOD_NOT_ALLOWED)) + self.assertRaises( + exceptions.HTTPError, + self.sys_bios.reset_bios) + self.sys_bios._conn.post.assert_called_once_with(target_uri) + def test__get_change_password_element(self): value = self.sys_bios._get_change_password_element() self.assertEqual("/redfish/v1/Systems/437XR1138R2/BIOS/Actions/" -- GitLab From 9351999582e396bf60d286ea25dc67b57687cb5e Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 12 Jul 2019 16:58:45 +0200 Subject: [PATCH 173/303] Make message registries available to all resources This patch makes parsed message registries accessible to all sushy resource objects for consumption. 
Prior to that, only System resource had access to message registries what proved insufficient. Change-Id: Ib1e24ea0b2ab81a4dad207ef8f76f1cef3a799c3 Co-Authored-By: Richard.Pioso@dell.com --- sushy/main.py | 79 ++++++++++++------- sushy/resources/base.py | 20 +++-- sushy/resources/chassis/chassis.py | 24 +++--- .../compositionservice/compositionservice.py | 13 +-- .../compositionservice/resourceblock.py | 16 ++-- .../compositionservice/resourcezone.py | 16 ++-- sushy/resources/fabric/fabric.py | 18 +++-- sushy/resources/manager/manager.py | 22 ++++-- sushy/resources/sessionservice/session.py | 15 +++- .../sessionservice/sessionservice.py | 10 ++- sushy/resources/system/bios.py | 6 +- sushy/resources/system/processor.py | 16 ++-- sushy/resources/system/storage/storage.py | 3 +- sushy/resources/system/system.py | 34 +++++--- .../updateservice/softwareinventory.py | 16 ++-- .../resources/updateservice/updateservice.py | 13 +-- .../unit/resources/chassis/test_chassis.py | 12 +-- .../compositionservice/test_resourceblock.py | 6 +- .../compositionservice/test_resourcezone.py | 6 +- .../unit/resources/fabric/test_fabric.py | 8 +- .../unit/resources/manager/test_manager.py | 4 +- .../resources/sessionservice/test_session.py | 4 +- .../sessionservice/test_sessionservice.py | 2 +- .../resources/system/storage/test_storage.py | 12 +-- .../resources/system/storage/test_volume.py | 8 +- .../system/test_ethernet_interfaces.py | 4 +- .../unit/resources/system/test_processor.py | 6 +- .../resources/system/test_simple_storage.py | 4 +- .../unit/resources/system/test_system.py | 4 +- sushy/tests/unit/resources/test_base.py | 17 ++-- .../updateservice/test_softwareinventory.py | 2 +- sushy/tests/unit/test_main.py | 44 ++++++----- 32 files changed, 279 insertions(+), 185 deletions(-) diff --git a/sushy/main.py b/sushy/main.py index 5216e3c..20677c0 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -138,12 +138,12 @@ class Sushy(base.ResourceBase): super(Sushy, self).__init__( connector 
or sushy_connector.Connector(base_url, verify=verify), path=self._root_prefix) + self._public_connector = public_connector or requests + self._language = language self._base_url = base_url self._auth = auth self._auth.set_context(self, self._conn) self._auth.authenticate() - self._public_connector = public_connector or requests - self._language = language def __del__(self): if self._auth: @@ -170,8 +170,10 @@ class Sushy(base.ResourceBase): raise exceptions.MissingAttributeError( attribute='Systems/@odata.id', resource=self._path) - return system.SystemCollection(self._conn, self._systems_path, - redfish_version=self.redfish_version) + return system.SystemCollection( + self._conn, self._systems_path, + redfish_version=self.redfish_version, + registries=self.registries) def get_system(self, identity): """Given the identity return a System object @@ -181,7 +183,7 @@ class Sushy(base.ResourceBase): """ return system.System(self._conn, identity, redfish_version=self.redfish_version, - registries=self._get_message_registries()) + registries=self.registries) def get_chassis_collection(self): """Get the ChassisCollection object @@ -195,7 +197,8 @@ class Sushy(base.ResourceBase): attribute='Chassis/@odata.id', resource=self._path) return chassis.ChassisCollection(self._conn, self._chassis_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_chassis(self, identity): """Given the identity return a Chassis object @@ -204,7 +207,8 @@ class Sushy(base.ResourceBase): :returns: The Chassis object """ return chassis.Chassis(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_fabric_collection(self): """Get the FabricCollection object @@ -218,7 +222,8 @@ class Sushy(base.ResourceBase): attribute='Fabrics/@odata.id', resource=self._path) return fabric.FabricCollection(self._conn, self._fabrics_path, - 
redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_fabric(self, identity): """Given the identity return a Fabric object @@ -227,7 +232,8 @@ class Sushy(base.ResourceBase): :returns: The Fabric object """ return fabric.Fabric(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_manager_collection(self): """Get the ManagerCollection object @@ -241,7 +247,8 @@ class Sushy(base.ResourceBase): attribute='Managers/@odata.id', resource=self._path) return manager.ManagerCollection(self._conn, self._managers_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_manager(self, identity): """Given the identity return a Manager object @@ -250,7 +257,8 @@ class Sushy(base.ResourceBase): :returns: The Manager object """ return manager.Manager(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def get_session_service(self): """Get the SessionService object @@ -273,8 +281,9 @@ class Sushy(base.ResourceBase): :param identity: The identity of the session resource :returns: The Session object """ - return session.Session(self._conn, identity, - redfish_version=self.redfish_version) + return session.Session( + self._conn, identity, + redfish_version=self.redfish_version, registries=self.registries) def get_update_service(self): """Get the UpdateService object @@ -287,7 +296,8 @@ class Sushy(base.ResourceBase): return updateservice.UpdateService( self._conn, self._update_service_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def _get_registry_collection(self): """Get MessageRegistryFileCollection object @@ -315,9 +325,11 @@ class Sushy(base.ResourceBase): raise exceptions.MissingAttributeError( 
attribute='CompositionService/@odata.id', resource=self._path) + return compositionservice.CompositionService( self._conn, self._composition_service_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) def _get_standard_message_registry_collection(self): """Load packaged standard message registries @@ -337,26 +349,33 @@ class Sushy(base.ResourceBase): return message_registries - def _get_message_registries(self): + @property + def registries(self): """Gets and combines all message registries together Fetches all registries if any provided by Redfish service and combines together with packaged standard registries. :returns: dict of combined message registries where key is - Registry_name.Major_version.Minor_version and value is registry - itself. + Registry_name.Major_version.Minor_version and value is registry + itself. """ + if self._registries is None: + + standard = self._get_standard_message_registry_collection() + + registries = {r.registry_prefix + '.' + + r.registry_version.rsplit('.', 1)[0]: r + for r in standard if r.language == self._language} + + registry_col = self._get_registry_collection() + + if registry_col: + provided = registry_col.get_members() + registries.update({r.registry: r.get_message_registry( + self._language, + self._public_connector) for r in provided}) + + self._registries = registries - standard = self._get_standard_message_registry_collection() - registries = {r.registry_prefix + '.' 
+ - r.registry_version.rsplit('.', 1)[0]: r - for r in standard if r.language == self._language} - registry_col = self._get_registry_collection() - if registry_col: - provided = registry_col.get_members() - registries.update({r.registry: r.get_message_registry( - self._language, - self._public_connector) for r in provided}) - - return registries + return self._registries diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 0c53a0e..5226c76 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -349,6 +349,7 @@ class ResourceBase(object): connector, path='', redfish_version=None, + registries=None, reader=None): """A class representing the base of any Redfish resource @@ -358,12 +359,15 @@ class ResourceBase(object): :param path: sub-URI path to the resource. :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages :param reader: Reader to use to fetch JSON data. """ self._conn = connector self._path = path self._json = None self.redfish_version = redfish_version + self._registries = registries # Note(deray): Indicates if the resource holds stale data or not. # Starting off with True and eventually gets set to False when # attribute values are fetched. 
@@ -477,6 +481,10 @@ class ResourceBase(object): return oem.get_resource_extension_by_vendor( self.resource_name, vendor, self) + @property + def registries(self): + return self._registries + @six.add_metaclass(abc.ABCMeta) class ResourceCollectionBase(ResourceBase): @@ -488,7 +496,7 @@ class ResourceCollectionBase(ResourceBase): adapter=utils.get_members_identities) """A tuple with the members identities""" - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing the base of any Redfish resource collection It gets inherited from ``ResourceBase`` and invokes the base class @@ -497,9 +505,11 @@ class ResourceCollectionBase(ResourceBase): :param path: sub-URI path to the resource collection. :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. 
""" - super(ResourceCollectionBase, self).__init__(connector, path, - redfish_version) + super(ResourceCollectionBase, self).__init__( + connector, path, redfish_version, registries) LOG.debug('Received %(count)d member(s) for %(type)s %(path)s', {'count': len(self.members_identities), 'type': self.__class__.__name__, 'path': self._path}) @@ -520,8 +530,8 @@ class ResourceCollectionBase(ResourceBase): :returns: The ``_resource_type`` object :raises: ResourceNotFoundError """ - return self._resource_type(self._conn, identity, - redfish_version=self.redfish_version) + return self._resource_type( + self._conn, identity, self.redfish_version, self.registries) @utils.cache_it def get_members(self): diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py index 7d4cd16..433b4db 100644 --- a/sushy/resources/chassis/chassis.py +++ b/sushy/resources/chassis/chassis.py @@ -143,15 +143,19 @@ class Chassis(base.ResourceBase): _actions = ActionsField('Actions') - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Chassis :param connector: A Connector instance :param identity: The identity of the Chassis resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Chassis, self).__init__(connector, identity, redfish_version) + super(Chassis, self).__init__( + connector, identity, redfish_version, registries) def _get_reset_action_element(self): reset_action = self._actions.reset @@ -233,7 +237,7 @@ class Chassis(base.ResourceBase): self, ["Links", "ManagedBy"], is_collection=True) return [manager.Manager(self._conn, path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) for path in paths] @property @@ -252,7 +256,7 @@ class Chassis(base.ResourceBase): from sushy.resources.system import system return [system.System(self._conn, path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) for path in paths] @property @@ -267,7 +271,7 @@ class Chassis(base.ResourceBase): return power.Power( self._conn, utils.get_sub_resource_path_by(self, 'Power'), - redfish_version=self.redfish_version) + self.redfish_version, self.registries) @property @utils.cache_it @@ -281,7 +285,7 @@ class Chassis(base.ResourceBase): return thermal.Thermal( self._conn, utils.get_sub_resource_path_by(self, 'Thermal'), - redfish_version=self.redfish_version) + self.redfish_version, self.registries) class ChassisCollection(base.ResourceCollectionBase): @@ -290,13 +294,15 @@ class ChassisCollection(base.ResourceCollectionBase): def _resource_type(self): return Chassis - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ChassisCollection :param connector: A Connector instance :param path: The canonical path to the Chassis collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(ChassisCollection, self).__init__(connector, path, - redfish_version) + super(ChassisCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/compositionservice/compositionservice.py b/sushy/resources/compositionservice/compositionservice.py index a70ea32..73f9dad 100644 --- a/sushy/resources/compositionservice/compositionservice.py +++ b/sushy/resources/compositionservice/compositionservice.py @@ -50,18 +50,19 @@ class CompositionService(base.ResourceBase): service_enabled = base.Field('ServiceEnabled') """The status of composition service is enabled""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a CompositionService :param connector: A connector instance :param identity: The identity of the CompositionService resource :param redfish_version: The version of RedFish. 
Used to construct the object according to schema of given version + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(CompositionService, self).__init__( - connector, - identity, - redfish_version) + connector, identity, redfish_version, registries) def _get_resource_blocks_collection_path(self): """Helper function to find the ResourceBlockCollections path""" @@ -85,7 +86,7 @@ class CompositionService(base.ResourceBase): """Property to reference `ResourceBlockCollection` instance""" return resourceblock.ResourceBlockCollection( self.conn, self._get_resource_blocks_collection_path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) @property @utils.cache_it @@ -93,4 +94,4 @@ class CompositionService(base.ResourceBase): """Property to reference `ResourceZoneCollection` instance""" return resourcezone.ResourceZoneCollection( self.conn, self._get_resource_zones_collection_path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) diff --git a/sushy/resources/compositionservice/resourceblock.py b/sushy/resources/compositionservice/resourceblock.py index 8fabfbf..c9b7740 100644 --- a/sushy/resources/compositionservice/resourceblock.py +++ b/sushy/resources/compositionservice/resourceblock.py @@ -74,18 +74,19 @@ class ResourceBlock(base.ResourceBase): status = common.StatusField('Status') """The status of resource block""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ResourceBlock :param connector: A Connector instance :param identity: The identity of the ResourceBlock resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(ResourceBlock, self).__init__( - connector, - identity, - redfish_version) + connector, identity, redfish_version, registries) class ResourceBlockCollection(base.ResourceCollectionBase): @@ -100,13 +101,16 @@ class ResourceBlockCollection(base.ResourceCollectionBase): def _resource_type(self): return ResourceBlock - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ResourceBlockCollection :param connector: A Connector instance :param identity: A identity of the ResourceBlock resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(ResourceBlockCollection, self).__init__( - connector, identity, redfish_version) + connector, identity, redfish_version, registries) diff --git a/sushy/resources/compositionservice/resourcezone.py b/sushy/resources/compositionservice/resourcezone.py index 1a6b5bf..1ad8560 100644 --- a/sushy/resources/compositionservice/resourcezone.py +++ b/sushy/resources/compositionservice/resourcezone.py @@ -54,18 +54,19 @@ class ResourceZone(base.ResourceBase): status = common.StatusField('Status') """The resource zone status""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ResourceZone :param connector: A Connector instance :param identity: The identity of the ResourceZone resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(ResourceZone, self).__init__( - connector, - identity, - redfish_version) + connector, identity, redfish_version, registries) class ResourceZoneCollection(base.ResourceCollectionBase): @@ -80,13 +81,16 @@ class ResourceZoneCollection(base.ResourceCollectionBase): def _resource_type(self): return ResourceZone - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ResourceZoneCollection :param connector: A Connector instance :param identity: The identity of the ResourceZone resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(ResourceZoneCollection, self).__init__( - connector, identity, redfish_version) + connector, identity, redfish_version, registries) diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py index 29edf3e..1bcc73a 100644 --- a/sushy/resources/fabric/fabric.py +++ b/sushy/resources/fabric/fabric.py @@ -50,22 +50,26 @@ class Fabric(base.ResourceBase): res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocol being sent over this fabric""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Fabric :param connector: A Connector instance :param identity: The identity of the Fabric resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Fabric, self).__init__(connector, identity, redfish_version) + super(Fabric, self).__init__( + connector, identity, redfish_version, registries) @property @utils.cache_it def endpoints(self): return fab_endpoint.EndpointCollection( self._conn, utils.get_sub_resource_path_by(self, 'Endpoints'), - redfish_version=self.redfish_version) + self.redfish_version, self.registries) class FabricCollection(base.ResourceCollectionBase): @@ -74,13 +78,15 @@ class FabricCollection(base.ResourceCollectionBase): def _resource_type(self): return Fabric - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a FabricCollection :param connector: A Connector instance :param path: The canonical path to the Fabric collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(FabricCollection, self).__init__(connector, path, - redfish_version) + super(FabricCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index 797d4ee..e943591 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -84,15 +84,19 @@ class Manager(base.ResourceBase): _actions = ActionsField('Actions') - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Manager :param connector: A Connector instance :param identity: The identity of the Manager resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Manager, self).__init__(connector, identity, redfish_version) + super(Manager, self).__init__( + connector, identity, redfish_version, registries) def get_supported_graphical_console_types(self): """Get the supported values for Graphical Console connection types. 
@@ -193,7 +197,7 @@ class Manager(base.ResourceBase): def virtual_media(self): return virtual_media.VirtualMediaCollection( self._conn, utils.get_sub_resource_path_by(self, 'VirtualMedia'), - redfish_version=self.redfish_version) + self.redfish_version, self.registries) @property @utils.cache_it @@ -211,7 +215,7 @@ class Manager(base.ResourceBase): from sushy.resources.system import system return [system.System(self._conn, path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) for path in paths] @property @@ -230,7 +234,7 @@ class Manager(base.ResourceBase): from sushy.resources.chassis import chassis return [chassis.Chassis(self._conn, path, - redfish_version=self.redfish_version) + self.redfish_version, self.registries) for path in paths] @@ -240,13 +244,15 @@ class ManagerCollection(base.ResourceCollectionBase): def _resource_type(self): return Manager - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ManagerCollection :param connector: A Connector instance :param path: The canonical path to the Manager collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(ManagerCollection, self).__init__(connector, path, - redfish_version) + super(ManagerCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/sessionservice/session.py b/sushy/resources/sessionservice/session.py index fc82e66..99fb75c 100644 --- a/sushy/resources/sessionservice/session.py +++ b/sushy/resources/sessionservice/session.py @@ -37,15 +37,19 @@ class Session(base.ResourceBase): username = base.Field('UserName') """The UserName for the account for this session.""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Session :param connector: A Connector instance :param identity: The identity of the Session resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Session, self).__init__(connector, identity, redfish_version) + super(Session, self).__init__( + connector, identity, redfish_version, registries) def delete(self): """Method for deleting a Session. @@ -67,13 +71,16 @@ class SessionCollection(base.ResourceCollectionBase): def _resource_type(self): return Session - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a SessionCollection :param connector: A Connector instance :param identity: The identity of the Session resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(SessionCollection, self).__init__( - connector, identity, redfish_version) + connector, identity, redfish_version, registries) diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index 5d2affd..aea60e0 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -43,17 +43,21 @@ class SessionService(base.ResourceBase): session_timeout = base.Field('SessionTimeout') """The session service timeout""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a SessionService :param connector: A Connector instance :param identity: The identity of the SessionService resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ try: super(SessionService, self).__init__( - connector, identity, redfish_version) + connector, identity, redfish_version, registries) + except exceptions.AccessError as ae: LOG.warning('Received access error "%(ae)s". ' 'Unable to refresh SessionService.', @@ -77,7 +81,7 @@ class SessionService(base.ResourceBase): """ return session.SessionCollection( self._conn, self._get_sessions_collection_path(), - redfish_version=self.redfish_version) + self.redfish_version, self.registries) def close_session(self, session_uri): """This function is for closing a session based on its id. 
diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 749cbe6..9d91fb5 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -31,9 +31,7 @@ class ActionsField(base.CompositeField): class Bios(base.ResourceBase): - def __init__(self, connector, path, registries, *args, **kwargs): - super(Bios, self).__init__(connector, path, *args, **kwargs) - self._registries = registries + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a Bios :param connector: A Connector instance @@ -41,6 +39,8 @@ class Bios(base.ResourceBase): :param registries: Dict of message registries to be used when parsing messages of attribute update status """ + super(Bios, self).__init__( + connector, path, redfish_version, registries) identity = base.Field('Id', required=True) """The Bios resource identity string""" diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index efd335c..9c21c35 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -92,15 +92,19 @@ class Processor(base.ResourceBase): total_threads = base.Field('TotalThreads', adapter=utils.int_or_none) """The total number of execution threads supported by this processor""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Processor :param connector: A Connector instance :param identity: The identity of the processor :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Processor, self).__init__(connector, identity, redfish_version) + super(Processor, self).__init__( + connector, identity, redfish_version, registries) def _get_processor_collection_path(self): """Helper function to find the ProcessorCollection path""" @@ -152,13 +156,15 @@ class ProcessorCollection(base.ResourceCollectionBase): return ProcessorSummary(count=count, architecture=architecture) - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ProcessorCollection :param connector: A Connector instance :param path: The canonical path to the Processor collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(ProcessorCollection, self).__init__(connector, path, - redfish_version) + super(ProcessorCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 3e76a45..4f9ae39 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -81,7 +81,8 @@ class Storage(base.ResourceBase): :raises: ResourceNotFoundError """ return drive.Drive(self._conn, drive_identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 929fd14..9423fdb 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -144,10 +144,10 @@ class System(base.ResourceBase): 
:param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. :param registries: Dict of registries to be used in any resource - that needs registries to parse messages + that needs registries to parse messages. """ - super(System, self).__init__(connector, identity, redfish_version) - self._registries = registries + super(System, self).__init__( + connector, identity, redfish_version, registries) def _get_reset_action_element(self): reset_action = self._actions.reset @@ -291,7 +291,8 @@ class System(base.ResourceBase): """ return processor.ProcessorCollection( self._conn, self._get_processor_collection_path(), - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it @@ -305,7 +306,8 @@ class System(base.ResourceBase): return ethernet_interface.EthernetInterfaceCollection( self._conn, utils.get_sub_resource_path_by(self, "EthernetInterfaces"), - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it @@ -320,7 +322,7 @@ class System(base.ResourceBase): self._conn, utils.get_sub_resource_path_by(self, 'Bios'), redfish_version=self.redfish_version, - registries=self._registries) + registries=self.registries) @property @utils.cache_it @@ -341,7 +343,8 @@ class System(base.ResourceBase): """ return sys_simple_storage.SimpleStorageCollection( self._conn, utils.get_sub_resource_path_by(self, "SimpleStorage"), - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it @@ -363,7 +366,8 @@ class System(base.ResourceBase): """ return sys_storage.StorageCollection( self._conn, utils.get_sub_resource_path_by(self, "Storage"), - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it @@ -380,7 +384,8 @@ class 
System(base.ResourceBase): self, ["Links", "ManagedBy"], is_collection=True) return [manager.Manager(self._conn, path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) for path in paths] @property @@ -398,7 +403,8 @@ class System(base.ResourceBase): self, ["Links", "Chassis"], is_collection=True) return [chassis.Chassis(self._conn, path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.registries) for path in paths] @@ -408,13 +414,15 @@ class SystemCollection(base.ResourceCollectionBase): def _resource_type(self): return System - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ComputerSystemCollection :param connector: A Connector instance :param path: The canonical path to the System collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(SystemCollection, self).__init__(connector, path, - redfish_version) + super(SystemCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/updateservice/softwareinventory.py b/sushy/resources/updateservice/softwareinventory.py index 3615bb8..726e743 100644 --- a/sushy/resources/updateservice/softwareinventory.py +++ b/sushy/resources/updateservice/softwareinventory.py @@ -58,18 +58,19 @@ class SoftwareInventory(base.ResourceBase): version = base.Field('Version') """The version of the software""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, + redfish_version=None, registries=None): """A class representing a SoftwareInventory :param connector: A Connector instance :param identity: The identity of the SoftwareInventory resources :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(SoftwareInventory, self).__init__( - connector, - identity, - redfish_version) + connector, identity, redfish_version, registries) class SoftwareInventoryCollection(base.ResourceCollectionBase): @@ -84,13 +85,16 @@ class SoftwareInventoryCollection(base.ResourceCollectionBase): def _resource_type(self): return SoftwareInventory - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, + redfish_version=None, registries=None): """A class representing a SoftwareInventoryCollection :param connector: A Connector instance :param identity: The identity of SoftwareInventory resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(SoftwareInventoryCollection, self).__init__( - connector, identity, redfish_version) + connector, identity, redfish_version, registries) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index dac8651..d75037a 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -60,18 +60,19 @@ class UpdateService(base.ResourceBase): _actions = ActionsField('Actions', required=True) - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a UpdateService :param connector: A Connector instance :param identity: The identity of the UpdateService resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of given version + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ super(UpdateService, self).__init__( - connector, - identity, - redfish_version) + connector, identity, redfish_version, registries) def _get_simple_update_element(self): simple_update_action = self._actions.simple_update @@ -155,7 +156,7 @@ class UpdateService(base.ResourceBase): """Property to reference SoftwareInventoryCollection instance""" return softwareinventory.SoftwareInventoryCollection( self._conn, self._get_software_inventory_collection_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, registries=self.registries) @property @utils.cache_it @@ -163,4 +164,4 @@ class UpdateService(base.ResourceBase): """Property to reference SoftwareInventoryCollection instance""" return softwareinventory.SoftwareInventoryCollection( self._conn, self._get_software_inventory_collection_path, 
- redfish_version=self.redfish_version) + redfish_version=self.redfish_version, registries=self.registries) diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index e4f55a6..18e6250 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -176,22 +176,22 @@ class ChassisCollectionTestCase(base.TestCase): self.chassis.get_member('/redfish/v1/Chassis/MultiBladeEncl') chassis_mock.assert_called_once_with( self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', - redfish_version=self.chassis.redfish_version) + self.chassis.redfish_version, None) @mock.patch.object(chassis, 'Chassis', autospec=True) def test_get_members(self, chassis_mock): members = self.chassis.get_members() calls = [ mock.call(self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', - redfish_version=self.chassis.redfish_version), + self.chassis.redfish_version, None), mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade1', - redfish_version=self.chassis.redfish_version), + self.chassis.redfish_version, None), mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade2', - redfish_version=self.chassis.redfish_version), + self.chassis.redfish_version, None), mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade3', - redfish_version=self.chassis.redfish_version), + self.chassis.redfish_version, None), mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade4', - redfish_version=self.chassis.redfish_version) + self.chassis.redfish_version, None) ] chassis_mock.assert_has_calls(calls) self.assertIsInstance(members, list) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py index af315bf..3478aba 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -77,7 +77,7 @@ 
class ResourceBlockCollectionTestCase(base.TestCase): self.res_block_col = resourceblock.ResourceBlockCollection( self.conn, '/redfish/v1/CompositionService/ResourceBlocks', - redfish_version='1.0.2') + '1.0.2', None) def test__parse_attributes(self): path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' @@ -94,7 +94,7 @@ class ResourceBlockCollectionTestCase(base.TestCase): self.res_block_col.get_member(path) mock_resourceblock.assert_called_once_with( self.res_block_col._conn, path, - redfish_version=self.res_block_col.redfish_version) + self.res_block_col.redfish_version, None) @mock.patch.object(resourceblock, 'ResourceBlock', autospec=True) def test_get_members(self, mock_resourceblock): @@ -102,6 +102,6 @@ class ResourceBlockCollectionTestCase(base.TestCase): members = self.res_block_col.get_members() mock_resourceblock.assert_called_once_with( self.res_block_col._conn, path, - redfish_version=self.res_block_col.redfish_version) + self.res_block_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py index 200daad..66475dc 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py @@ -65,7 +65,7 @@ class ResourceZoneCollectionTestCase(base.TestCase): self.res_zone_col = resourcezone.ResourceZoneCollection( self.conn, '/redfish/v1/CompositionService/ResourceZones', - redfish_version='1.0.2') + '1.0.2', None) def test__parse_attributes(self): path = '/redfish/v1/CompositionService/ResourceZones/1' @@ -80,7 +80,7 @@ class ResourceZoneCollectionTestCase(base.TestCase): self.res_zone_col.get_member(path) mock_resourcezone.assert_called_once_with( self.res_zone_col._conn, path, - redfish_version=self.res_zone_col.redfish_version) + self.res_zone_col.redfish_version, 
None) @mock.patch.object(resourcezone, 'ResourceZone', autospec=True) def test_get_members(self, mock_resourcezone): @@ -88,6 +88,6 @@ class ResourceZoneCollectionTestCase(base.TestCase): members = self.res_zone_col.get_members() mock_resourcezone.assert_called_once_with( self.res_zone_col._conn, path, - redfish_version=self.res_zone_col.redfish_version) + self.res_zone_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py index 792c3cb..3f7f9ce 100644 --- a/sushy/tests/unit/resources/fabric/test_fabric.py +++ b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -114,23 +114,23 @@ class FabricCollectionTestCase(base.TestCase): 'fabric_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) self.fabric = fabric.FabricCollection( - self.conn, '/redfish/v1/Fabrics', redfish_version='1.0.3') + self.conn, '/redfish/v1/Fabrics', '1.0.3', None) @mock.patch.object(fabric, 'Fabric', autospec=True) def test_get_member(self, fabric_mock): self.fabric.get_member('/redfish/v1/Fabrics/SAS1') fabric_mock.assert_called_once_with( self.fabric._conn, '/redfish/v1/Fabrics/SAS1', - redfish_version=self.fabric.redfish_version) + self.fabric.redfish_version, None) @mock.patch.object(fabric, 'Fabric', autospec=True) def test_get_members(self, fabric_mock): members = self.fabric.get_members() calls = [ mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS1', - redfish_version=self.fabric.redfish_version), + self.fabric.redfish_version, None), mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS2', - redfish_version=self.fabric.redfish_version) + self.fabric.redfish_version, None) ] fabric_mock.assert_has_calls(calls) self.assertIsInstance(members, list) diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 60c9c36..7da31a4 100644 --- 
a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -309,13 +309,13 @@ class ManagerCollectionTestCase(base.TestCase): self.managers.get_member('/redfish/v1/Managers/BMC') Manager_mock.assert_called_once_with( self.managers._conn, '/redfish/v1/Managers/BMC', - redfish_version=self.managers.redfish_version) + self.managers.redfish_version, None) @mock.patch.object(manager, 'Manager', autospec=True) def test_get_members(self, Manager_mock): members = self.managers.get_members() Manager_mock.assert_called_once_with( self.managers._conn, '/redfish/v1/Managers/BMC', - redfish_version=self.managers.redfish_version) + self.managers.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py index 4cc7419..953111a 100644 --- a/sushy/tests/unit/resources/sessionservice/test_session.py +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -87,7 +87,7 @@ class SessionCollectionTestCase(base.TestCase): self.sess_col.get_member(path) mock_session.assert_called_once_with( self.sess_col._conn, path, - redfish_version=self.sess_col.redfish_version) + self.sess_col.redfish_version, None) @mock.patch.object(session, 'Session', autospec=True) def test_get_members(self, mock_session): @@ -95,6 +95,6 @@ class SessionCollectionTestCase(base.TestCase): members = self.sess_col.get_members() mock_session.assert_called_once_with( self.sess_col._conn, path, - redfish_version=self.sess_col.redfish_version) + self.sess_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index f3b75ed..d59b8af 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py 
+++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -72,7 +72,7 @@ class SessionServiceTestCase(base.TestCase): mock_sess_col.assert_called_once_with( self.sess_serv_inst._conn, '/redfish/v1/SessionService/Sessions', - redfish_version=self.sess_serv_inst.redfish_version) + self.sess_serv_inst.redfish_version, None) def test_create_session(self): with open('sushy/tests/unit/json_samples/' diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 0561f06..b391ee2 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -79,16 +79,16 @@ class StorageTestCase(base.TestCase): calls = [ mock.call(self.storage._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa - redfish_version=self.storage.redfish_version), + self.storage.redfish_version, None), mock.call(self.storage._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa - redfish_version=self.storage.redfish_version), + self.storage.redfish_version, None), mock.call(self.storage._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa - redfish_version=self.storage.redfish_version), + self.storage.redfish_version, None), mock.call(self.storage._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa - redfish_version=self.storage.redfish_version) + self.storage.redfish_version, None) ] Drive_mock.assert_has_calls(calls) self.assertIsInstance(all_drives, list) @@ -249,7 +249,7 @@ class StorageCollectionTestCase(base.TestCase): Storage_mock.assert_called_once_with( self.stor_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1', - redfish_version=self.stor_col.redfish_version) + self.stor_col.redfish_version, None) @mock.patch.object(storage, 'Storage', autospec=True) def test_get_members(self, Storage_mock): @@ -257,7 +257,7 @@ 
class StorageCollectionTestCase(base.TestCase): Storage_mock.assert_called_once_with( self.stor_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1', - redfish_version=self.stor_col.redfish_version) + self.stor_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index c9963b1..61f6a57 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -116,7 +116,7 @@ class VolumeCollectionTestCase(base.TestCase): Volume_mock.assert_called_once_with( self.stor_vol_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', - redfish_version=self.stor_vol_col.redfish_version) + self.stor_vol_col.redfish_version, None) @mock.patch.object(volume, 'Volume', autospec=True) def test_get_members(self, Volume_mock): @@ -124,13 +124,13 @@ class VolumeCollectionTestCase(base.TestCase): calls = [ mock.call(self.stor_vol_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', - redfish_version=self.stor_vol_col.redfish_version), + self.stor_vol_col.redfish_version, None), mock.call(self.stor_vol_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', - redfish_version=self.stor_vol_col.redfish_version), + self.stor_vol_col.redfish_version, None), mock.call(self.stor_vol_col._conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3', - redfish_version=self.stor_vol_col.redfish_version), + self.stor_vol_col.redfish_version, None), ] Volume_mock.assert_has_calls(calls) self.assertIsInstance(members, list) diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index 89a9065..97c9683 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ 
-77,7 +77,7 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): self.sys_eth_col._conn, ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' '12446A3B0411'), - redfish_version=self.sys_eth_col.redfish_version) + self.sys_eth_col.redfish_version, None) @mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True) def test_get_members(self, mock_eth): @@ -86,7 +86,7 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): "12446A3B0411") calls = [ mock.call(self.sys_eth_col._conn, eth_path, - redfish_version=self.sys_eth_col.redfish_version), + self.sys_eth_col.redfish_version, None), ] mock_eth.assert_has_calls(calls) self.assertIsInstance(members, list) diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index ca3dc48..558ef48 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -102,7 +102,7 @@ class ProcessorCollectionTestCase(base.TestCase): mock_processor.assert_called_once_with( self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', - redfish_version=self.sys_processor_col.redfish_version) + self.sys_processor_col.redfish_version, None) @mock.patch.object(processor, 'Processor', autospec=True) def test_get_members(self, mock_processor): @@ -110,10 +110,10 @@ class ProcessorCollectionTestCase(base.TestCase): calls = [ mock.call(self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', - redfish_version=self.sys_processor_col.redfish_version), + self.sys_processor_col.redfish_version, None), mock.call(self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU2', - redfish_version=self.sys_processor_col.redfish_version) + self.sys_processor_col.redfish_version, None) ] mock_processor.assert_has_calls(calls) self.assertIsInstance(members, list) diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py 
b/sushy/tests/unit/resources/system/test_simple_storage.py index f3d6ffa..906ceee 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -74,7 +74,7 @@ class SimpleStorageCollectionTestCase(base.TestCase): SimpleStorage_mock.assert_called_once_with( self.simpl_stor_col._conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', - redfish_version=self.simpl_stor_col.redfish_version) + self.simpl_stor_col.redfish_version, None) @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) def test_get_members(self, SimpleStorage_mock): @@ -82,7 +82,7 @@ class SimpleStorageCollectionTestCase(base.TestCase): SimpleStorage_mock.assert_called_once_with( self.simpl_stor_col._conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', - redfish_version=self.simpl_stor_col.redfish_version) + self.simpl_stor_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 63fffe2..8de7dee 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -582,13 +582,13 @@ class SystemCollectionTestCase(base.TestCase): self.sys_col.get_member('/redfish/v1/Systems/437XR1138R2') mock_system.assert_called_once_with( self.sys_col._conn, '/redfish/v1/Systems/437XR1138R2', - redfish_version=self.sys_col.redfish_version) + self.sys_col.redfish_version, None) @mock.patch.object(system, 'System', autospec=True) def test_get_members(self, mock_system): members = self.sys_col.get_members() mock_system.assert_called_once_with( self.sys_col._conn, '/redfish/v1/Systems/437XR1138R2', - redfish_version=self.sys_col.redfish_version) + self.sys_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/test_base.py 
b/sushy/tests/unit/resources/test_base.py index 976493c..17901ca 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -167,16 +167,19 @@ class ResourceBaseTestCase(base.TestCase): class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """Ctor of TestResource :param connector: A Connector instance :param identity: The id of the Resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. """ super(TestResource, self).__init__(connector, 'Fakes/%s' % identity, - redfish_version) + redfish_version, registries) self.identity = identity def _parse_attributes(self): @@ -190,15 +193,17 @@ class TestResourceCollection(resource_base.ResourceCollectionBase): def _resource_type(self): return TestResource - def __init__(self, connector, redfish_version=None): + def __init__(self, connector, redfish_version=None, registries=None): """Ctor of TestResourceCollection :param connector: A Connector instance :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. 
""" - super(TestResourceCollection, self).__init__(connector, 'Fakes', - redfish_version) + super(TestResourceCollection, self).__init__( + connector, 'Fakes', redfish_version, registries) class ResourceCollectionBaseTestCase(base.TestCase): @@ -207,7 +212,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): super(ResourceCollectionBaseTestCase, self).setUp() self.conn = mock.MagicMock() self.test_resource_collection = TestResourceCollection( - self.conn, redfish_version='1.0.x') + self.conn, redfish_version='1.0.x', registries=None) self.conn.reset_mock() def test_get_member(self): diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py index 98536ca..e3e8523 100644 --- a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -87,4 +87,4 @@ class SoftwareInventoryCollectionTestCase(base.TestCase): self.soft_inv_col.get_member(path) mock_softwareinventory.assert_called_once_with( self.soft_inv_col._conn, path, - redfish_version=self.soft_inv_col.redfish_version) + self.soft_inv_col.redfish_version, None) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 5774781..4e23254 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -93,86 +93,89 @@ class MainTestCase(base.TestCase): self.assertFalse(mock_Sushy_Connector.called) @mock.patch.object(system, 'SystemCollection', autospec=True) - def test_get_system_collection(self, mock_system_collection): + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_system_collection( + self, mock_registries, mock_system_collection): + self.root._standard_message_registries_path = None self.root.get_system_collection() mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Systems', - redfish_version=self.root.redfish_version) + 
redfish_version=self.root.redfish_version, + registries=mock_registries + ) @mock.patch.object(system, 'System', autospec=True) - @mock.patch('sushy.Sushy._get_message_registries', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) def test_get_system(self, mock_registries, mock_system): - mock_registry = mock.Mock() - mock_registries.return_value = [mock_registry] self.root._standard_message_registries_path = None self.root.get_system('fake-system-id') mock_system.assert_called_once_with( self.root._conn, 'fake-system-id', redfish_version=self.root.redfish_version, - registries=[mock_registry]) + registries=mock_registries) @mock.patch.object(chassis, 'Chassis', autospec=True) def test_get_chassis(self, mock_chassis): self.root.get_chassis('fake-chassis-id') mock_chassis.assert_called_once_with( self.root._conn, 'fake-chassis-id', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(chassis, 'ChassisCollection', autospec=True) def test_get_chassis_collection(self, chassis_collection_mock): self.root.get_chassis_collection() chassis_collection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Chassis', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(fabric, 'Fabric', autospec=True) def test_get_fabric(self, mock_fabric): self.root.get_fabric('fake-fabric-id') mock_fabric.assert_called_once_with( self.root._conn, 'fake-fabric-id', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(fabric, 'FabricCollection', autospec=True) def test_get_fabric_collection(self, fabric_collection_mock): self.root.get_fabric_collection() fabric_collection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Fabrics', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(manager, 'ManagerCollection', 
autospec=True) def test_get_manager_collection(self, ManagerCollection_mock): self.root.get_manager_collection() ManagerCollection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Managers', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(manager, 'Manager', autospec=True) def test_get_manager(self, Manager_mock): self.root.get_manager('fake-manager-id') Manager_mock.assert_called_once_with( self.root._conn, 'fake-manager-id', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(sessionservice, 'SessionService', autospec=True) def test_get_sessionservice(self, mock_sess_serv): self.root.get_session_service() mock_sess_serv.assert_called_once_with( self.root._conn, '/redfish/v1/SessionService', - redfish_version=self.root.redfish_version) + self.root.redfish_version) @mock.patch.object(session, 'Session', autospec=True) def test_get_session(self, mock_sess): self.root.get_session('asdf') mock_sess.assert_called_once_with( self.root._conn, 'asdf', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(updateservice, 'UpdateService', autospec=True) def test_get_update_service(self, mock_upd_serv): self.root.get_update_service() mock_upd_serv.assert_called_once_with( self.root._conn, '/redfish/v1/UpdateService', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) @mock.patch.object(message_registry_file, 'MessageRegistryFileCollection', @@ -182,7 +185,7 @@ class MainTestCase(base.TestCase): self.root._get_registry_collection() MessageRegistryFileCollection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Registries', - redfish_version=self.root.redfish_version) + self.root.redfish_version) @mock.patch.object( compositionservice, 'CompositionService', autospec=True) @@ -190,7 +193,7 @@ class MainTestCase(base.TestCase): 
self.root.get_composition_service() mock_comp_ser.assert_called_once_with( self.root._conn, '/redfish/v1/CompositionService', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.registries) def test__get_standard_message_registry_collection(self): registries = self.root._get_standard_message_registry_collection() @@ -216,15 +219,14 @@ class MainTestCase(base.TestCase): mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 mock_col.return_value.get_members.return_value = [mock_msg_reg_file] - registries = self.root._get_message_registries() + registries = self.root.registries self.assertEqual({'RegistryA.2.0': mock_msg_reg1, 'RegistryB.1.0': mock_msg_reg2}, registries) @mock.patch('sushy.Sushy._get_standard_message_registry_collection', autospec=True) @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) - def test__get_message_registries_provided_empty(self, mock_col, - mock_st_col): + def test_registries_provided_empty(self, mock_col, mock_st_col): mock_msg_reg1 = mock.Mock() mock_msg_reg1.registry_prefix = 'RegistryA' mock_msg_reg1.registry_version = '2.0.0' @@ -232,7 +234,7 @@ class MainTestCase(base.TestCase): mock_st_col.return_value = [mock_msg_reg1] mock_col.return_value = None - registries = self.root._get_message_registries() + registries = self.root.registries self.assertEqual({'RegistryA.2.0': mock_msg_reg1}, registries) -- GitLab From d182d6fac1098c046cfa57b07eea8a3c0c5a5038 Mon Sep 17 00:00:00 2001 From: Varsha Date: Wed, 17 Jul 2019 14:57:00 +0530 Subject: [PATCH 174/303] Add MappedListField The MappedList field supports a list of mapped instances. This type of field is being used by the `SupportedControllerProtocols` and the `SupportedDeviceProtocols` fields of a Storage Controller in the Storage resource. 
Change-Id: Ifd83b72a283e44d890c98ae646ef831467ed4af8 --- ...dd-mapped-list-field-04c671f7a73d83f6.yaml | 5 ++ sushy/resources/base.py | 46 +++++++++++++++++++ sushy/resources/system/storage/storage.py | 9 ++-- .../resources/system/storage/test_storage.py | 6 ++- sushy/tests/unit/resources/test_base.py | 9 +++- 5 files changed, 67 insertions(+), 8 deletions(-) create mode 100644 releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml diff --git a/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml b/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml new file mode 100644 index 0000000..2006fad --- /dev/null +++ b/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds a new field called ``MappedListField`` which supports a list of + mapped instances. diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 0c53a0e..b9c0896 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -258,6 +258,52 @@ class MappedField(Field): adapter=mapping.get) +class MappedListField(Field): + """Field taking a list of values with a mapping for the values + + Given JSON {'field':['xxx', 'yyy']}, a sushy resource definition and + mapping {'xxx':'a', 'yyy':'b'}, the sushy object to come out will be like + resource.field = ['a', 'b'] + """ + + def __init__(self, field, mapping, required=False, default=None): + """Create a mapped list field definition. + + :param field: JSON field to fetch the list of values from. + :param mapping: a mapping for the list elements. + :param required: whether this field is required. Missing required + fields result in MissingAttributeError. + :param default: the default value to use when the field is missing. + Only has effect when the field is not required. 
+ """ + if not isinstance(mapping, collectionsAbc.Mapping): + raise TypeError("The mapping argument must be a mapping") + + self._mapping_adapter = mapping.get + super(MappedListField, self).__init__( + field, required=required, default=default, + adapter=lambda x: x) + + def _load(self, body, resource, nested_in=None): + """Load the mapped list. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new list object containing the mapped values. + """ + nested_in = (nested_in or []) + self._path + values = super(MappedListField, self)._load(body, resource) + + if values is None: + return + + instances = [self._mapping_adapter(value) for value in values + if self._mapping_adapter(value) is not None] + + return instances + + @six.add_metaclass(abc.ABCMeta) class AbstractJsonReader(object): diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 3e76a45..c22a4e0 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -17,6 +17,7 @@ import logging from sushy.resources import base from sushy.resources import common +from sushy.resources import mappings as res_maps from sushy.resources.system.storage import drive from sushy.resources.system.storage import volume from sushy import utils @@ -43,12 +44,12 @@ class StorageControllersListField(base.ListField): speed_gbps = base.Field('SpeedGbps') """The maximum speed of the storage controller's device interface.""" - controller_protocols = base.Field('SupportedControllerProtocols', - adapter=list) + controller_protocols = base.MappedListField( + 'SupportedControllerProtocols', res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocols by which this storage controller can be communicated to""" - device_protocols = base.Field('SupportedDeviceProtocols', - adapter=list) + device_protocols = base.MappedListField('SupportedDeviceProtocols', + 
res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocols which the controller can use tocommunicate with devices""" diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 0561f06..0e5807d 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -122,8 +122,10 @@ class StorageTestCase(base.TestCase): identifier.durable_name_format) self.assertEqual('345C59DBD970859C', identifier.durable_name) self.assertEqual(12, controller.speed_gbps) - self.assertEqual(["PCIe"], controller.controller_protocols) - self.assertEqual(["SAS", "SATA"], controller.device_protocols) + self.assertEqual([sushy.PROTOCOL_TYPE_PCIe], + controller.controller_protocols) + self.assertEqual([sushy.PROTOCOL_TYPE_SAS, sushy.PROTOCOL_TYPE_SATA], + controller.device_protocols) def test_drives_after_refresh(self): self.storage.refresh() diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 976493c..6d33dd5 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -289,7 +289,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): TEST_JSON = { 'String': 'a string', 'Integer': '42', - 'List': ['a string', 42], + 'MappedList': ['raw1', 'raw2', 'raw'], 'Nested': { 'String': 'another string', 'Integer': 0, @@ -316,7 +316,9 @@ TEST_JSON = { MAPPING = { - 'raw': 'real' + 'raw': 'real', + 'raw1': 'real1', + 'raw2': 'real2' } @@ -342,6 +344,7 @@ class ComplexResource(resource_base.ResourceBase): string = resource_base.Field('String', required=True) integer = resource_base.Field('Integer', adapter=int) nested = NestedTestField('Nested') + mapped_list = resource_base.MappedListField('MappedList', MAPPING) field_list = TestListField('ListField') dictionary = TestDictionaryField('Dictionary') non_existing_nested = NestedTestField('NonExistingNested') @@ -366,6 +369,8 
@@ class FieldTestCase(base.TestCase): self.assertEqual('field value', self.test_resource.nested.nested_field) self.assertEqual('real', self.test_resource.nested.mapped) self.assertEqual(3.14, self.test_resource.nested.non_existing) + self.assertEqual(['real1', 'real2', 'real'], + self.test_resource.mapped_list) self.assertEqual('a third string', self.test_resource.field_list[0].string) self.assertEqual(2, self.test_resource.field_list[1].integer) -- GitLab From 84e085f9a2460218bf52315a21ee4dcf8365e3e1 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 29 Jul 2019 17:21:58 +0200 Subject: [PATCH 175/303] Disable HTTP connection pooling Disable HTTP connection pooling by asking HTTP server to close our connection right upon use. The rationale is that some BMC observed in the wild seem to close persistent connections abruptly upon eventual re-use failing completely unrelated operation. So in ``sushy`` we just try not to maintain persistent connections with BMC at all. Change-Id: I721c6284b938d5a47dbe0b298467c15ca4bd1517 --- .../notes/disable-conn-pooling-3456782afe56ac94.yaml | 8 ++++++++ sushy/connector.py | 7 +++++++ 2 files changed, 15 insertions(+) create mode 100644 releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml diff --git a/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml b/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml new file mode 100644 index 0000000..9dadcfd --- /dev/null +++ b/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Disable HTTP connection pooling by asking HTTP server to close our + connection right upon use. The rationale is that some BMC observed in + the wild seem to close persistent connections abruptly upon eventual + re-use failing completely unrelated operation. So in ``sushy`` we + just try not to maintain persistent connections with BMC at all. 
diff --git a/sushy/connector.py b/sushy/connector.py index 9c4d292..7c31636 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -30,6 +30,13 @@ class Connector(object): self._verify = verify self._session = requests.Session() self._session.verify = self._verify + + # NOTE(etingof): field studies reveal that some BMCs choke at + # long-running persistent HTTP connections (or TCP connections). + # By default, we ask HTTP server to shut down HTTP connection we've + # just used. + self._session.headers['Connection'] = 'close' + if username or password: LOG.warning('Passing username and password to Connector is ' 'deprecated. Authentication is passed through ' -- GitLab From ba1b8220e7d2de2f689ab014284e938ee8190709 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 16 Jul 2019 17:47:41 +0200 Subject: [PATCH 176/303] Cache message registries Redfish message registries can be quite bulky. Depending on BMC's versatility, fishing message registries can become expensive. This patch caches once downloaded message registries for all resources consuming them. 
Change-Id: I6609e5613e7657cf1f9772cea7cfb48923aac05e --- sushy/main.py | 28 +++++++++++++-------------- sushy/tests/unit/test_main.py | 36 +++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 15 deletions(-) diff --git a/sushy/main.py b/sushy/main.py index 20677c0..845bfd4 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -30,6 +30,7 @@ from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system from sushy.resources.updateservice import updateservice +from sushy import utils LOG = logging.getLogger(__name__) @@ -350,6 +351,7 @@ class Sushy(base.ResourceBase): return message_registries @property + @utils.cache_it def registries(self): """Gets and combines all message registries together @@ -360,22 +362,18 @@ class Sushy(base.ResourceBase): Registry_name.Major_version.Minor_version and value is registry itself. """ - if self._registries is None: + standard = self._get_standard_message_registry_collection() - standard = self._get_standard_message_registry_collection() + registries = {r.registry_prefix + '.' + + r.registry_version.rsplit('.', 1)[0]: r + for r in standard if r.language == self._language} - registries = {r.registry_prefix + '.' 
+ - r.registry_version.rsplit('.', 1)[0]: r - for r in standard if r.language == self._language} + registry_col = self._get_registry_collection() - registry_col = self._get_registry_collection() + if registry_col: + provided = registry_col.get_members() + registries.update({r.registry: r.get_message_registry( + self._language, + self._public_connector) for r in provided}) - if registry_col: - provided = registry_col.get_members() - registries.update({r.registry: r.get_message_registry( - self._language, - self._public_connector) for r in provided}) - - self._registries = registries - - return self._registries + return registries diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 4e23254..1288b8f 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -223,6 +223,42 @@ class MainTestCase(base.TestCase): self.assertEqual({'RegistryA.2.0': mock_msg_reg1, 'RegistryB.1.0': mock_msg_reg2}, registries) + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test__get_message_registries_caching(self, mock_col, mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + + mock_msg_reg2 = mock.Mock() + mock_msg_reg2.registry_prefix = 'RegistryB' + mock_msg_reg2.registry_version = '1.0.0' + mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.registry = 'RegistryB.1.0' + mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 + mock_col.return_value.get_members.return_value = [mock_msg_reg_file] + + registries = self.root.registries + + self.assertEqual(1, mock_col.call_count) + self.assertEqual(1, mock_st_col.call_count) + + cached_registries = self.root.registries + + self.assertEqual(1, mock_col.call_count) + self.assertEqual(1, mock_st_col.call_count) 
+ + expected = { + 'RegistryA.2.0': mock_msg_reg1, + 'RegistryB.1.0': mock_msg_reg2 + } + + self.assertEqual(expected, registries) + self.assertEqual(cached_registries, registries) + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', autospec=True) @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) -- GitLab From 219d93aecd534b7a75dba49507523127dbde45b6 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 17 Jul 2019 16:17:07 +0200 Subject: [PATCH 177/303] Retry Virtual Media eject action on HTTP 400 response When attempting to eject virtual media and HTTP server responds with HTTP error 400, retry the same HTTP query with empty JSON document. This change simply extends HTTP error code list when this retry should happen to account for Dell server(s) - unlike HPE ones (which return error 415), Dell returns 400. See also: https://review.opendev.org/#/c/636845/ Change-Id: Ie9c2f37062b8a8c9a1a082217dc22c79c5327fba Story: 2004995 Task: 35845 --- sushy/resources/manager/virtual_media.py | 4 +++- .../unit/resources/manager/test_virtual_media.py | 15 ++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index 5253b16..7f2cdac 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -106,7 +106,9 @@ class VirtualMedia(base.ResourceBase): except exceptions.HTTPError as response: # Some vendors like HPE iLO has this kind of implementation. # It needs to pass an empty dict. 
- if response.status_code == http_client.UNSUPPORTED_MEDIA_TYPE: + if response.status_code in ( + http_client.UNSUPPORTED_MEDIA_TYPE, + http_client.BAD_REQUEST): self._conn.post(target_uri, data={}) self.invalidate() diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 0ac6370..d16116c 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -85,7 +85,7 @@ class VirtualMediaTestCase(base.TestCase): "/VirtualMedia.EjectMedia")) self.assertTrue(self.sys_virtual_media._is_stale) - def test_eject_media_pass_empty_dict(self): + def test_eject_media_pass_empty_dict_415(self): target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" "/VirtualMedia.EjectMedia") self.conn.post.side_effect = [exceptions.HTTPError( @@ -97,3 +97,16 @@ class VirtualMediaTestCase(base.TestCase): mock.call(target_uri, data={})] self.sys_virtual_media._conn.post.assert_has_calls(post_calls) self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_pass_empty_dict_400(self): + target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia") + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.BAD_REQUEST)), '200'] + self.sys_virtual_media.eject_media() + post_calls = [ + mock.call(target_uri), + mock.call(target_uri, data={})] + self.sys_virtual_media._conn.post.assert_has_calls(post_calls) + self.assertTrue(self.sys_virtual_media._is_stale) -- GitLab From 72fd054da0b007e84b81728e7357c2d76c45e666 Mon Sep 17 00:00:00 2001 From: Yusef Shaban Date: Tue, 9 Jul 2019 18:36:42 -0700 Subject: [PATCH 178/303] Implements adapter checking Adds support for allowing a sushy object to be initialized when a vendor does not implement all base resources. 
Story: 2006195 Task: 35777 Change-Id: I59b6553d8d5aa60ad4e79df9507c9fe9fc1dc86b --- sushy/resources/base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 0c53a0e..e0a9d40 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -96,7 +96,12 @@ class Field(object): return self._default try: - value = self._adapter(body[name]) + # Get the value based on the name, defaulting to an empty dict + value = body.get(name, {}) + # Check to ensure that value is implemented by OEM + if (value is not None and value != {} and + str(value).lower() != 'none'): + value = self._adapter(value) except (UnicodeError, ValueError, TypeError) as exc: path = (nested_in or []) + self._path raise exceptions.MalformedAttributeError( -- GitLab From bb9859da99ffd3f5b1f74d9ce43e606ea0e96153 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 6 Aug 2019 19:12:48 +0200 Subject: [PATCH 179/303] Unify OEM Actions with non-OEM Actions This change introduces a few OEM base types to align OEM Actions abstraction with non-OEM one. One of the immediate benefits is that Action fields are actually parsed and assigned to sushy fields rather than represented as a dict. 
Change-Id: Idee0d17558e4332f44f623add69c00d4a4e2bdd4 --- sushy/resources/oem/base.py | 24 ++++++++++++++++++++- sushy/resources/oem/fake.py | 8 +++++-- sushy/tests/unit/resources/oem/test_fake.py | 7 +++--- 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/sushy/resources/oem/base.py b/sushy/resources/oem/base.py index adac1a2..ea883cd 100644 --- a/sushy/resources/oem/base.py +++ b/sushy/resources/oem/base.py @@ -16,6 +16,7 @@ import logging import six from sushy.resources import base +from sushy.resources import common LOG = logging.getLogger(__name__) @@ -66,6 +67,15 @@ class OEMMappedField(base.MappedField, OEMField): """MappedField for OEM fields.""" +class OEMActionsField(OEMCompositeField): + """OEM Actions fields""" + + +@six.add_metaclass(abc.ABCMeta) +class OEMActionField(common.ActionField, OEMField): + """OEM Actions fields.""" + + @six.add_metaclass(abc.ABCMeta) class OEMExtensionResourceBase(object): @@ -90,9 +100,21 @@ class OEMExtensionResourceBase(object): """Parse the OEM extension attributes of a resource.""" oem_json_body = (self.core_resource.json.get('Oem'). 
get(self.oem_property_name)) + + oem_actions = { + 'Actions': self.core_resource.json.get( + 'Actions', {}).get('Oem', {}) + } + for attr, field in _collect_oem_fields(self): + json_body = (oem_actions + if isinstance(field, OEMActionsField) + else oem_json_body) + + value = field._load(json_body, self) + # Hide the Field object behind the real value - setattr(self, attr, field._load(oem_json_body, self)) + setattr(self, attr, value) for attr, field in _collect_base_fields(self): # Hide the Field object behind the real value diff --git a/sushy/resources/oem/fake.py b/sushy/resources/oem/fake.py index 1dcd983..2cae20f 100644 --- a/sushy/resources/oem/fake.py +++ b/sushy/resources/oem/fake.py @@ -23,11 +23,15 @@ class ProductionLocationField(oem_base.OEMCompositeField): country = base.Field('Country') +class ContosoActionsField(oem_base.OEMActionsField): + reset = oem_base.OEMActionField('#Contoso.Reset') + + class FakeOEMSystemExtension(oem_base.OEMExtensionResourceBase): data_type = oem_base.OEMField('@odata.type') production_location = ProductionLocationField('ProductionLocation') - reset_action = base.Field(['Actions', 'Oem', '#Contoso.Reset']) + _actions = ContosoActionsField('Actions') def __init__(self, resource, *args, **kwargs): """A class representing ComputerSystem OEM extension for Contoso @@ -38,4 +42,4 @@ class FakeOEMSystemExtension(oem_base.OEMExtensionResourceBase): resource, 'Contoso', *args, **kwargs) def get_reset_system_path(self): - return self.reset_action.get('target') + return self._actions.reset.target_uri diff --git a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py index 7050e48..0178633 100644 --- a/sushy/tests/unit/resources/oem/test_fake.py +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -39,9 +39,10 @@ class FakeOEMSystemExtensionTestCase(base.TestCase): self.fake_sys_oem_extn.production_location.facility_name)) self.assertEqual('USA', ( self.fake_sys_oem_extn.production_location.country)) 
- self.assertEqual({ - "target": ("/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/" - "Contoso.Reset")}, self.fake_sys_oem_extn.reset_action) + self.assertEqual( + "/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/" + "Contoso.Reset", + self.fake_sys_oem_extn._actions.reset.target_uri) def test_get_reset_system_path(self): value = self.fake_sys_oem_extn.get_reset_system_path() -- GitLab From e03caca38f72b778308d51e69856fc022dc32fd8 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Thu, 15 Aug 2019 15:01:28 +0200 Subject: [PATCH 180/303] Change OEM extensions architecture This patch radically changes OEM extensions architecture in a backward-incompatible manner. /o\ The rationale is that, prior to this change, it was impossible to model non-leaf subresources in OEM subtree such as `Links` or `Actions`. The reason is that OEM objects were lacking BMC connection context to doing anything beyond just sitting on the parent resource JSON they were once given. Technically, this change rebases originally base `OEMExtensionResourceBase` class on top of universal `ResourceBase` overriding its JSON parsing logic. That gives all the benefits and features of `ResourceBase` implementation, including BMC connection context, message registries etc. The other conceptual (and breaking!) change is that the OEM extension should now expose a callable that returns `OEMResourceBase` class, as opposed to class instance as it used to be. Besides technical reasons (preserving base class constructor signature), it seems that decoupling implementation loader from initialization would give us more flexibility going forward (e.g. sushy could sense what's being exposed and initialize it differently). 
Change-Id: I6c981bc1b2524d0a39a09ed8f0479b06045a1c4e Story: 2006471 Task: 36402 --- ...oem-extension-design-3143717e710b3eaf.yaml | 11 ++ setup.cfg | 2 +- sushy/main.py | 12 +- sushy/resources/base.py | 20 ++- sushy/resources/oem/base.py | 134 +++++------------- sushy/resources/oem/common.py | 15 +- sushy/resources/oem/fake.py | 23 ++- .../unit/resources/chassis/test_chassis.py | 6 +- .../unit/resources/chassis/test_power.py | 6 +- .../unit/resources/chassis/test_thermal.py | 6 +- .../test_compositionservice.py | 6 +- .../compositionservice/test_resourceblock.py | 14 +- .../compositionservice/test_resourcezone.py | 14 +- .../unit/resources/fabric/test_endpoint.py | 7 +- .../unit/resources/fabric/test_fabric.py | 6 +- .../unit/resources/manager/test_manager.py | 6 +- .../resources/manager/test_virtual_media.py | 7 +- sushy/tests/unit/resources/oem/test_common.py | 58 ++++---- sushy/tests/unit/resources/oem/test_fake.py | 6 +- .../registry/test_message_registry.py | 8 +- .../registry/test_message_registry_file.py | 13 +- .../resources/sessionservice/test_session.py | 16 ++- .../sessionservice/test_sessionservice.py | 8 +- .../resources/system/storage/test_drive.py | 6 +- .../resources/system/storage/test_storage.py | 13 +- .../resources/system/storage/test_volume.py | 13 +- .../tests/unit/resources/system/test_bios.py | 16 +-- .../system/test_ethernet_interfaces.py | 13 +- .../unit/resources/system/test_processor.py | 13 +- .../resources/system/test_simple_storage.py | 14 +- .../unit/resources/system/test_system.py | 35 +++-- sushy/tests/unit/resources/test_base.py | 4 +- .../updateservice/test_softwareinventory.py | 14 +- .../updateservice/test_updateservice.py | 8 +- sushy/tests/unit/test_main.py | 6 +- sushy/tests/unit/test_utils.py | 4 +- 36 files changed, 314 insertions(+), 249 deletions(-) create mode 100644 releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml diff --git a/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml 
b/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml new file mode 100644 index 0000000..39982de --- /dev/null +++ b/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml @@ -0,0 +1,11 @@ +--- +upgrade: + - | + OEM resource class hierarchy has been redesigned to allow for non-terminal + sub-resources (e.g. Links) to be handled within OEM resource model. As a + consequence, backward compatibility with previously existing OEM extension + framework (anything based on ``OEMExtensionResourceBase`` class) is not + preserved. User OEM code migration would involve switching from + ``OEMExtensionResourceBase`` to ``OEMResourceBase`` (note ``__init__`` + call signature change) and replacing ``OEMField``-based classes with their + generic sushy ``Field`` counterparts. diff --git a/setup.cfg b/setup.cfg index 1981463..73a1a9d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,7 +25,7 @@ packages = [entry_points] sushy.resources.system.oems = - contoso = sushy.resources.oem.fake:FakeOEMSystemExtension + contoso = sushy.resources.oem.fake:get_extension [build_sphinx] diff --git a/sushy/main.py b/sushy/main.py index 845bfd4..d397c32 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -156,9 +156,15 @@ class Sushy(base.ResourceBase): 'with %s: %s', self._base_url, ex) self._auth = None - def _parse_attributes(self): - super(Sushy, self)._parse_attributes() - self.redfish_version = self.json.get('RedfishVersion') + def _parse_attributes(self, json_doc): + """Parse the attributes of a resource. + + Parsed JSON fields are set to `self` as declared in the class. 
+ + :param json_doc: parsed JSON document in form of Python types + """ + super(Sushy, self)._parse_attributes(json_doc) + self.redfish_version = json_doc.get('RedfishVersion') def get_system_collection(self): """Get the SystemCollection object diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 29720cf..14bd1b8 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -431,11 +431,16 @@ class ResourceBase(object): self.refresh() - def _parse_attributes(self): - """Parse the attributes of a resource.""" + def _parse_attributes(self, json_doc): + """Parse the attributes of a resource. + + Parsed JSON fields are set to `self` as declared in the class. + + :param json_doc: parsed JSON document in form of Python types + """ for attr, field in _collect_fields(self): # Hide the Field object behind the real value - setattr(self, attr, field._load(self.json, self)) + setattr(self, attr, field._load(json_doc, self)) def refresh(self, force=True): """Refresh the resource @@ -464,7 +469,7 @@ class ResourceBase(object): LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, 'path': self._path, 'json': self._json}) - self._parse_attributes() + self._parse_attributes(self._json) self._do_refresh(force) # Mark it fresh @@ -516,6 +521,13 @@ class ResourceBase(object): def path(self): return self._path + def clone_resource(self, new_resource, path=''): + """Instantiate given resource using existing BMC connection context""" + return new_resource( + self._conn, path or self.path, + redfish_version=self.redfish_version, + reader=self._reader) + @property def resource_name(self): return utils.camelcase_to_underscore_joined(self.__class__.__name__) diff --git a/sushy/resources/oem/base.py b/sushy/resources/oem/base.py index ea883cd..93032fe 100644 --- a/sushy/resources/oem/base.py +++ b/sushy/resources/oem/base.py @@ -10,120 +10,64 @@ # License for the specific language governing permissions and limitations # 
under the License. -import abc import logging -import six - from sushy.resources import base -from sushy.resources import common LOG = logging.getLogger(__name__) -class OEMField(base.Field): - """Marker class for OEM specific fields.""" - - -def _collect_oem_fields(resource): - """Collect OEM fields from resource. - - :param resource: OEMExtensionResourceBase instance. - :returns: generator of tuples (key, field) - """ - for attr in dir(resource.__class__): - field = getattr(resource.__class__, attr) - if isinstance(field, OEMField): - yield (attr, field) - - -def _collect_base_fields(resource): - """Collect base fields from resource. - - :param resource: OEMExtensionResourceBase instance. - :returns: generator of tuples (key, field) - """ - for attr in dir(resource.__class__): - field = getattr(resource.__class__, attr) - if not isinstance(field, OEMField) and isinstance(field, base.Field): - yield (attr, field) - - -@six.add_metaclass(abc.ABCMeta) -class OEMCompositeField(base.CompositeField, OEMField): - """CompositeField for OEM fields.""" - - -class OEMListField(base.ListField, OEMField): - """ListField for OEM fields.""" +class OEMResourceBase(base.ResourceBase): + def __init__(self, + connector, + path='', + redfish_version=None, + registries=None, + reader=None): + """Class representing an OEM vendor extension -class OEMDictionaryField(base.DictionaryField, OEMField): - """DictionaryField for OEM fields.""" - - -class OEMMappedField(base.MappedField, OEMField): - """MappedField for OEM fields.""" + :param connector: A Connector instance + :param path: sub-URI path to the resource. + :param redfish_version: The version of Redfish. Used to construct + the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + self._parent_resource = None + self._vendor_id = None + super(OEMResourceBase, self).__init__( + connector, path, redfish_version, registries, reader) -class OEMActionsField(OEMCompositeField): - """OEM Actions fields""" + def set_parent_resource(self, parent_resource, vendor_id): + self._parent_resource = parent_resource + self._vendor_id = vendor_id + # NOTE(etingof): this is required to pull OEM subtree + self.invalidate(force_refresh=True) + return self + def _parse_attributes(self, json_doc): + """Parse the attributes of a resource. -@six.add_metaclass(abc.ABCMeta) -class OEMActionField(common.ActionField, OEMField): - """OEM Actions fields.""" + Parsed JSON fields are set to `self` as declared in the class. + :param json_doc: parsed JSON document in form of Python types + """ + oem_json = json_doc.get( + 'Oem', {}).get(self._vendor_id, {}) -@six.add_metaclass(abc.ABCMeta) -class OEMExtensionResourceBase(object): + # NOTE(etingof): temporary copy Actions into Oem subtree for parsing + # all fields at once - def __init__(self, resource, oem_property_name, *args, **kwargs): - """A class representing the base of any resource OEM extension + oem_json = oem_json.copy() - Invokes the ``refresh()`` method for the first time from here - (constructor). - :param resource: The parent Sushy resource instance - :param oem_property_name: the unique OEM identifier string - """ - if not resource: - raise ValueError('"resource" argument cannot be void') - if not isinstance(resource, base.ResourceBase): - raise TypeError('"resource" argument must be a ResourceBase') - - self.core_resource = resource - self.oem_property_name = oem_property_name - self.refresh() - - def _parse_oem_attributes(self): - """Parse the OEM extension attributes of a resource.""" - oem_json_body = (self.core_resource.json.get('Oem'). 
- get(self.oem_property_name)) - - oem_actions = { - 'Actions': self.core_resource.json.get( + oem_actions_json = { + 'Actions': json_doc.get( 'Actions', {}).get('Oem', {}) } - for attr, field in _collect_oem_fields(self): - json_body = (oem_actions - if isinstance(field, OEMActionsField) - else oem_json_body) + oem_json.update(oem_actions_json) - value = field._load(json_body, self) - - # Hide the Field object behind the real value - setattr(self, attr, value) - - for attr, field in _collect_base_fields(self): - # Hide the Field object behind the real value - setattr(self, attr, field._load(self.core_resource.json, self)) - - def refresh(self): - """Refresh the attributes of the resource extension. - - Freshly parses the resource OEM attributes via - ``_parse_oem_attributes()`` method. - """ - self._parse_oem_attributes() + super(OEMResourceBase, self)._parse_attributes(oem_json) diff --git a/sushy/resources/oem/common.py b/sushy/resources/oem/common.py index 142dad7..10fb4db 100644 --- a/sushy/resources/oem/common.py +++ b/sushy/resources/oem/common.py @@ -86,22 +86,27 @@ def _get_extension_manager_of_resource(resource_name): @utils.synchronized -def _get_resource_vendor_extension_obj(extension, resource, *args, **kwds): +def _get_resource_vendor_extension_obj(extension, resource, vendor): """Get the object returned by extension's plugin() method. :param extension: stevedore Extension :param resource: The Sushy resource instance - :param *args, **kwds: constructor arguments to plugin() method. + :param vendor: This is the OEM vendor string which is the vendor-specific + extensibility identifier. Examples are: 'Contoso', 'Hpe'. As a matter + of fact the lowercase of this string will be the plugin entry point + name. :returns: The object returned by ``plugin(*args, **kwds)`` of extension. 
""" if extension.obj is None: - extension.obj = extension.plugin(resource, *args, **kwds) + oem_resource = extension.plugin() + extension.obj = resource.clone_resource( + oem_resource).set_parent_resource(resource, vendor) return extension.obj def get_resource_extension_by_vendor( - resource_name, vendor, resource, *args, **kwds): + resource_name, vendor, resource): """Helper method to get Resource specific OEM extension object for vendor :param resource_name: The underscore joined name of the resource e.g. @@ -128,5 +133,5 @@ def get_resource_extension_by_vendor( if resource_vendor_extn.obj is None: return _get_resource_vendor_extension_obj( - resource_vendor_extn, resource, *args, **kwds) + resource_vendor_extn, resource, vendor) return resource_vendor_extn.obj diff --git a/sushy/resources/oem/fake.py b/sushy/resources/oem/fake.py index 2cae20f..60fd610 100644 --- a/sushy/resources/oem/fake.py +++ b/sushy/resources/oem/fake.py @@ -13,33 +13,30 @@ import logging from sushy.resources import base +from sushy.resources import common from sushy.resources.oem import base as oem_base LOG = logging.getLogger(__name__) -class ProductionLocationField(oem_base.OEMCompositeField): +class ProductionLocationField(base.CompositeField): facility_name = base.Field('FacilityName') country = base.Field('Country') -class ContosoActionsField(oem_base.OEMActionsField): - reset = oem_base.OEMActionField('#Contoso.Reset') +class ContosoActionsField(base.CompositeField): + reset = common.ResetActionField('#Contoso.Reset') -class FakeOEMSystemExtension(oem_base.OEMExtensionResourceBase): +class FakeOEMSystemExtension(oem_base.OEMResourceBase): - data_type = oem_base.OEMField('@odata.type') + data_type = base.Field('@odata.type') production_location = ProductionLocationField('ProductionLocation') _actions = ContosoActionsField('Actions') - def __init__(self, resource, *args, **kwargs): - """A class representing ComputerSystem OEM extension for Contoso - - :param resource: The parent System 
resource instance - """ - super(FakeOEMSystemExtension, self).__init__( - resource, 'Contoso', *args, **kwargs) - def get_reset_system_path(self): return self._actions.reset.target_uri + + +def get_extension(*args, **kwargs): + return FakeOEMSystemExtension diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index 18e6250..718de53 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -30,14 +30,16 @@ class ChassisTestCase(base.TestCase): super(ChassisTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/chassis.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.chassis = chassis.Chassis(self.conn, '/redfish/v1/Chassis/Blade1', redfish_version='1.8.0') def test__parse_attributes(self): # | WHEN | - self.chassis._parse_attributes() + self.chassis._parse_attributes(self.json_doc) # | THEN | self.assertEqual('1.8.0', self.chassis.redfish_version) self.assertEqual('Blade1', self.chassis.identity) diff --git a/sushy/tests/unit/resources/chassis/test_power.py b/sushy/tests/unit/resources/chassis/test_power.py index 80d862c..27278ba 100644 --- a/sushy/tests/unit/resources/chassis/test_power.py +++ b/sushy/tests/unit/resources/chassis/test_power.py @@ -26,14 +26,16 @@ class PowerTestCase(base.TestCase): super(PowerTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/power.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.power = power.Power( self.conn, '/redfish/v1/Chassis/MultiBladeEnc1/Power', redfish_version='1.5.0') def test__parse_attributes(self): - self.power._parse_attributes() + 
self.power._parse_attributes(self.json_doc) self.assertEqual('1.5.0', self.power.redfish_version) self.assertEqual('Power', self.power.identity) self.assertEqual('Quad Blade Chassis Power', self.power.name) diff --git a/sushy/tests/unit/resources/chassis/test_thermal.py b/sushy/tests/unit/resources/chassis/test_thermal.py index 2e5e4b0..5500506 100644 --- a/sushy/tests/unit/resources/chassis/test_thermal.py +++ b/sushy/tests/unit/resources/chassis/test_thermal.py @@ -26,14 +26,16 @@ class ThermalTestCase(base.TestCase): super(ThermalTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/thermal.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.thermal = thermal.Thermal( self.conn, '/redfish/v1/Chassis/Blade1/Thermal', redfish_version='1.5.0') def test__parse_attributes(self): - self.thermal._parse_attributes() + self.thermal._parse_attributes(self.json_doc) self.assertEqual('1.5.0', self.thermal.redfish_version) self.assertEqual('Thermal', self.thermal.identity) self.assertEqual('Blade Thermal', self.thermal.name) diff --git a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py index 110c3b1..878b36c 100644 --- a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py +++ b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py @@ -26,7 +26,9 @@ class CompositionServiceTestCase(base.TestCase): self.conn = mock.Mock() with open( 'sushy/tests/unit/json_samples/compositionservice.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.comp_ser = compositionservice.CompositionService( self.conn, @@ -34,7 +36,7 @@ class CompositionServiceTestCase(base.TestCase): 
redfish_version='1.0.2') def test__parse_attributes(self): - self.comp_ser._parse_attributes() + self.comp_ser._parse_attributes(self.json_doc) self.assertFalse(self.comp_ser.allow_overprovisioning) self.assertTrue(self.comp_ser.allow_zone_affinity) self.assertTrue(self.comp_ser.description, 'CompositionService1') diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py index 3478aba..f1fd0de 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -28,7 +28,9 @@ class ResourceBlockTestCase(base.TestCase): super(ResourceBlockTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/resourceblock.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.res_block = resourceblock.ResourceBlock( self.conn, @@ -36,7 +38,7 @@ class ResourceBlockTestCase(base.TestCase): redfish_version='1.0.2') def test__parse_attributes(self): - self.res_block._parse_attributes() + self.res_block._parse_attributes(self.json_doc) self.assertEqual( res_block_cons.COMPOSITION_STATE_COMPOSED, self.res_block.composition_status.composition_state) @@ -63,7 +65,7 @@ class ResourceBlockTestCase(base.TestCase): self.res_block.json.pop('Id') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Id', - self.res_block._parse_attributes) + self.res_block._parse_attributes, self.json_doc) class ResourceBlockCollectionTestCase(base.TestCase): @@ -73,7 +75,9 @@ class ResourceBlockCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'resourceblock_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + 
self.conn.get.return_value.json.return_value = self.json_doc self.res_block_col = resourceblock.ResourceBlockCollection( self.conn, '/redfish/v1/CompositionService/ResourceBlocks', @@ -81,7 +85,7 @@ class ResourceBlockCollectionTestCase(base.TestCase): def test__parse_attributes(self): path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' - self.res_block_col._parse_attributes() + self.res_block_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.res_block_col.redfish_version) self.assertEqual( 'Resource Block Collection', diff --git a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py index 66475dc..b19a2f8 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py @@ -26,7 +26,9 @@ class ResourceZoneTestCase(base.TestCase): super(ResourceZoneTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/resourcezone.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.res_zone = resourcezone.ResourceZone( self.conn, @@ -34,7 +36,7 @@ class ResourceZoneTestCase(base.TestCase): redfish_version='1.0.2') def test__parse_attributes(self): - self.res_zone._parse_attributes() + self.res_zone._parse_attributes(self.json_doc) self.assertEqual('ResourceZone1', self.res_zone.description) self.assertEqual('1', self.res_zone.identity) self.assertEqual('Resource Zone 1', self.res_zone.name) @@ -51,7 +53,7 @@ class ResourceZoneTestCase(base.TestCase): self.res_zone.json.pop('Id') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Id', - self.res_zone._parse_attributes) + self.res_zone._parse_attributes, self.json_doc) class ResourceZoneCollectionTestCase(base.TestCase): @@ -61,7 +63,9 @@ class 
ResourceZoneCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'resourcezone_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.res_zone_col = resourcezone.ResourceZoneCollection( self.conn, '/redfish/v1/CompositionService/ResourceZones', @@ -69,7 +73,7 @@ class ResourceZoneCollectionTestCase(base.TestCase): def test__parse_attributes(self): path = '/redfish/v1/CompositionService/ResourceZones/1' - self.res_zone_col._parse_attributes() + self.res_zone_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.res_zone_col.redfish_version) self.assertEqual('Resource Zone Collection', self.res_zone_col.name) self.assertEqual((path,), self.res_zone_col.members_identities) diff --git a/sushy/tests/unit/resources/fabric/test_endpoint.py b/sushy/tests/unit/resources/fabric/test_endpoint.py index e7a95d7..3733727 100644 --- a/sushy/tests/unit/resources/fabric/test_endpoint.py +++ b/sushy/tests/unit/resources/fabric/test_endpoint.py @@ -26,13 +26,16 @@ class EndpointTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'endpoint.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.fab_endpoint = endpoint.Endpoint( self.conn, '/redfish/v1/Fabrics/SAS/Endpoints/Drive1', redfish_version='1.0.2') def test__parse_atrtributes(self): - self.fab_endpoint._parse_attributes() + self.fab_endpoint._parse_attributes(self.json_doc) self.assertEqual('Drive1', self.fab_endpoint.identity) self.assertEqual('SAS Drive', self.fab_endpoint.name) self.assertEqual(sushy.PROTOCOL_TYPE_SAS, diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py index 3f7f9ce..a94fa23 100644 --- 
a/sushy/tests/unit/resources/fabric/test_fabric.py +++ b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -28,14 +28,16 @@ class FabricTestCase(base.TestCase): super(FabricTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/fabric.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.fabric = fabric.Fabric(self.conn, '/redfish/v1/Fabrics/SAS', redfish_version='1.0.3') def test__parse_attributes(self): # | WHEN | - self.fabric._parse_attributes() + self.fabric._parse_attributes(self.json_doc) # | THEN | self.assertEqual('1.0.3', self.fabric.redfish_version) self.assertEqual('SAS', self.fabric.identity) diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 7da31a4..e337c35 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -29,14 +29,16 @@ class ManagerTestCase(base.TestCase): super(ManagerTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/manager.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.manager = manager.Manager(self.conn, '/redfish/v1/Managers/BMC', redfish_version='1.0.2') def test__parse_attributes(self): # | WHEN | - self.manager._parse_attributes() + self.manager._parse_attributes(self.json_doc) # | THEN | self.assertEqual('1.0.2', self.manager.redfish_version) self.assertEqual('1.00', self.manager.firmware_version) diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index d16116c..b85925b 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ 
b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -30,13 +30,16 @@ class VirtualMediaTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'virtual_media.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_virtual_media = virtual_media.VirtualMedia( self.conn, '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1', redfish_version='1.0.2') def test__parse_atrtributes(self): - self.sys_virtual_media._parse_attributes() + self.sys_virtual_media._parse_attributes(self.json_doc) self.assertEqual('Virtual Removable Media', self.sys_virtual_media.name) self.assertEqual('Floppy1', self.sys_virtual_media.identity) diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py index d76ae13..4debd85 100644 --- a/sushy/tests/unit/resources/oem/test_common.py +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -20,18 +20,12 @@ from sushy.resources.oem import common as oem_common from sushy.tests.unit import base -class ContosoResourceOEMExtension(oem_base.OEMExtensionResourceBase): +class ContosoResourceOEMExtension(oem_base.OEMResourceBase): + pass - def __init__(self, resource, *args, **kwargs): - super(ContosoResourceOEMExtension, self).__init__( - resource, 'Contoso', *args, **kwargs) - -class FauxResourceOEMExtension(oem_base.OEMExtensionResourceBase): - - def __init__(self, resource, *args, **kwargs): - super(FauxResourceOEMExtension, self).__init__( - resource, 'Faux', *args, **kwargs) +class FauxResourceOEMExtension(oem_base.OEMResourceBase): + pass class ResourceOEMCommonMethodsTestCase(base.TestCase): @@ -50,17 +44,18 @@ class ResourceOEMCommonMethodsTestCase(base.TestCase): contoso_ep.module_name = __name__ contoso_ep.attrs = ['ContosoResourceOEMExtension'] self.contoso_extn = stevedore.extension.Extension( - 'contoso', contoso_ep, 
ContosoResourceOEMExtension, None) + 'contoso', contoso_ep, lambda: ContosoResourceOEMExtension, None) self.contoso_extn_dup = stevedore.extension.Extension( - 'contoso_dup', contoso_ep, ContosoResourceOEMExtension, None) + 'contoso_dup', contoso_ep, + lambda: ContosoResourceOEMExtension, None) faux_ep = mock.Mock() faux_ep.module_name = __name__ faux_ep.attrs = ['FauxResourceOEMExtension'] self.faux_extn = stevedore.extension.Extension( - 'faux', faux_ep, FauxResourceOEMExtension, None) + 'faux', faux_ep, lambda: FauxResourceOEMExtension, None) self.faux_extn_dup = stevedore.extension.Extension( - 'faux_dup', faux_ep, FauxResourceOEMExtension, None) + 'faux_dup', faux_ep, lambda: FauxResourceOEMExtension, None) self.fake_ext_mgr = ( stevedore.extension.ExtensionManager.make_test_instance( @@ -124,41 +119,54 @@ class ResourceOEMCommonMethodsTestCase(base.TestCase): extension_mock = mock.MagicMock() extension_mock.obj = None + mock_oem_resource = extension_mock.plugin.return_value + result = oem_common._get_resource_vendor_extension_obj( - extension_mock, resource_instance_mock) - self.assertEqual(extension_mock.plugin.return_value, result) - extension_mock.plugin.assert_called_once_with(resource_instance_mock) + extension_mock, resource_instance_mock, 'fish-n-chips') + + mock_clone_resource = resource_instance_mock.clone_resource + mock_clone_resource.assert_called_once_with(mock_oem_resource) + mock_ext = mock_clone_resource.return_value + mock_ext.set_parent_resource.assert_called_once_with( + resource_instance_mock, 'fish-n-chips') + mock_ext = mock_ext.set_parent_resource.return_value + self.assertEqual(result, mock_ext) + extension_mock.reset_mock() # extension_mock.obj is not None anymore - result = oem_common._get_resource_vendor_extension_obj( - extension_mock, resource_instance_mock) - self.assertEqual(extension_mock.plugin.return_value, result) + oem_common._get_resource_vendor_extension_obj( + extension_mock, resource_instance_mock, 'fish-n-chips') + 
self.assertFalse(extension_mock.plugin.called) @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) def test_get_resource_extension_by_vendor(self, ExtensionManager_mock): - resource_instance_mock = mock.Mock(spec=res_base.ResourceBase) + oem_resource_mock = mock.Mock() + oem_resource_mock.set_parent_resource = lambda *x: oem_resource_mock + resource_instance_mock = mock.Mock() + resource_instance_mock.clone_resource = lambda *x: oem_resource_mock ExtensionManager_mock.side_effect = [self.fake_ext_mgr, self.fake_ext_mgr2] result = oem_common.get_resource_extension_by_vendor( 'system', 'Faux', resource_instance_mock) - self.assertIsInstance(result, FauxResourceOEMExtension) + self.assertEqual(result, oem_resource_mock) ExtensionManager_mock.assert_called_once_with( 'sushy.resources.system.oems', propagate_map_exceptions=True, on_load_failure_callback=oem_common._raise) ExtensionManager_mock.reset_mock() + oem_resource_mock.obj = None result = oem_common.get_resource_extension_by_vendor( 'system', 'Contoso', resource_instance_mock) - self.assertIsInstance(result, ContosoResourceOEMExtension) + self.assertEqual(result, oem_resource_mock) self.assertFalse(ExtensionManager_mock.called) ExtensionManager_mock.reset_mock() result = oem_common.get_resource_extension_by_vendor( 'manager', 'Faux_dup', resource_instance_mock) - self.assertIsInstance(result, FauxResourceOEMExtension) + self.assertEqual(result, oem_resource_mock) ExtensionManager_mock.assert_called_once_with( 'sushy.resources.manager.oems', propagate_map_exceptions=True, on_load_failure_callback=oem_common._raise) @@ -166,7 +174,7 @@ class ResourceOEMCommonMethodsTestCase(base.TestCase): result = oem_common.get_resource_extension_by_vendor( 'manager', 'Contoso_dup', resource_instance_mock) - self.assertIsInstance(result, ContosoResourceOEMExtension) + self.assertEqual(result, oem_resource_mock) self.assertFalse(ExtensionManager_mock.called) ExtensionManager_mock.reset_mock() diff --git 
a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py index 0178633..ef451f5 100644 --- a/sushy/tests/unit/resources/oem/test_fake.py +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -30,7 +30,11 @@ class FakeOEMSystemExtensionTestCase(base.TestCase): self.sys_instance = system.System( self.conn, '/redfish/v1/Systems/437XR1138R2', redfish_version='1.0.2') - self.fake_sys_oem_extn = fake.FakeOEMSystemExtension(self.sys_instance) + self.fake_sys_oem_extn = fake.FakeOEMSystemExtension( + self.conn, '', + redfish_version='1.0.2') + self.fake_sys_oem_extn = self.fake_sys_oem_extn.set_parent_resource( + self.sys_instance, 'Contoso') def test__parse_oem_attributes(self): self.assertEqual('http://Contoso.com/Schema#Contoso.ComputerSystem', diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index c506faa..af34760 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -29,14 +29,16 @@ class MessageRegistryTestCase(base.TestCase): super(MessageRegistryTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/message_registry.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.registry = message_registry.MessageRegistry( self.conn, '/redfish/v1/Registries/Test', redfish_version='1.0.2') def test__parse_attributes(self): - self.registry._parse_attributes() + self.registry._parse_attributes(self.json_doc) self.assertEqual('Test.1.1.1', self.registry.identity) self.assertEqual('Test Message Registry', self.registry.name) self.assertEqual('en', self.registry.language) @@ -65,7 +67,7 @@ class MessageRegistryTestCase(base.TestCase): ['unknown_type'] self.assertRaisesRegex(KeyError, 'unknown_type', - 
self.registry._parse_attributes) + self.registry._parse_attributes, self.json_doc) def test_parse_message(self): conn = mock.Mock() diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 37282ef..fd8f649 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -27,14 +27,16 @@ class MessageRegistryFileTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'message_registry_file.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.reg_file = message_registry_file.MessageRegistryFile( self.conn, '/redfish/v1/Registries/Test', redfish_version='1.0.2') def test__parse_attributes(self): - self.reg_file._parse_attributes() + self.reg_file._parse_attributes(self.json_doc) self.assertEqual('Test', self.reg_file.identity) self.assertEqual('Test Message Registry File', self.reg_file.name) self.assertEqual('Message Registry file for testing', @@ -197,14 +199,17 @@ class MessageRegistryFileCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'message_registry_file_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.reg_file_col =\ message_registry_file.MessageRegistryFileCollection( self.conn, '/redfish/v1/Registries', redfish_version='1.0.2') def test__parse_attributes(self): - self.reg_file_col._parse_attributes() + self.reg_file_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.reg_file_col.redfish_version) self.assertEqual('Message Registry Test Collection', self.reg_file_col.name) diff --git 
a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py index 953111a..7ead963 100644 --- a/sushy/tests/unit/resources/sessionservice/test_session.py +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -28,10 +28,10 @@ class SessionTestCase(base.TestCase): self.conn = mock.Mock() self.auth = mock.Mock() with open('sushy/tests/unit/json_samples/session.json') as f: - sample_json = json.load(f) - self.conn.get.return_value.json.return_value = sample_json + self.json_doc = json.load(f) + self.conn.get.return_value.json.return_value = self.json_doc self.auth._session_key = 'fake_x_auth_token' - self.auth._session_uri = sample_json['@odata.id'] + self.auth._session_uri = self.json_doc['@odata.id'] self.conn._auth = self.auth self.sess_inst = session.Session( @@ -39,7 +39,7 @@ class SessionTestCase(base.TestCase): redfish_version='1.0.2') def test__parse_attributes(self): - self.sess_inst._parse_attributes() + self.sess_inst._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sess_inst.redfish_version) self.assertEqual('1234567890ABCDEF', self.sess_inst.identity) self.assertEqual('User Session', self.sess_inst.name) @@ -50,7 +50,7 @@ class SessionTestCase(base.TestCase): self.sess_inst.json.pop('Id') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Id', - self.sess_inst._parse_attributes) + self.sess_inst._parse_attributes, self.json_doc) def test_session_close(self): session_key = self.sess_inst._conn._auth._session_key @@ -68,7 +68,9 @@ class SessionCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'session_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sess_col = session.SessionCollection( self.conn, '/redfish/v1/SessionService/Sessions', @@ -76,7 +78,7 @@ class 
SessionCollectionTestCase(base.TestCase): def test__parse_attributes(self): path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' - self.sess_col._parse_attributes() + self.sess_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sess_col.redfish_version) self.assertEqual('Session Collection', self.sess_col.name) self.assertEqual((path,), self.sess_col.members_identities) diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index d59b8af..84f07a0 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -29,7 +29,9 @@ class SessionServiceTestCase(base.TestCase): super(SessionServiceTestCase, self).setUp() self.conn = mock.MagicMock() with open('sushy/tests/unit/json_samples/session_service.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sess_serv_inst = sessionservice.SessionService( self.conn, '/redfish/v1/SessionService', @@ -46,7 +48,7 @@ class SessionServiceTestCase(base.TestCase): self.assertTrue(mock_LOG.warning.called) def test__parse_attributes(self): - self.sess_serv_inst._parse_attributes() + self.sess_serv_inst._parse_attributes(self.json_doc) exp_path = '/redfish/v1/SessionService' self.assertEqual('1.0.2', self.sess_serv_inst.redfish_version) self.assertEqual('SessionService', self.sess_serv_inst.identity) @@ -58,7 +60,7 @@ class SessionServiceTestCase(base.TestCase): self.sess_serv_inst.json.pop('SessionTimeout') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute SessionTimeout', - self.sess_serv_inst._parse_attributes()) + self.sess_serv_inst._parse_attributes(self.json_doc)) def test__get_sessions_collection_path(self): self.sess_serv_inst.json.pop('Sessions') diff --git 
a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index f506af7..eae9cc4 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -26,7 +26,9 @@ class DriveTestCase(base.TestCase): super(DriveTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/drive.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.stor_drive = drive.Drive( self.conn, @@ -34,7 +36,7 @@ class DriveTestCase(base.TestCase): redfish_version='1.0.2') def test__parse_attributes(self): - self.stor_drive._parse_attributes() + self.stor_drive._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.stor_drive.redfish_version) self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) self.assertEqual('Drive Sample', self.stor_drive.name) diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 6da311b..2112865 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -41,14 +41,16 @@ class StorageTestCase(base.TestCase): super(StorageTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/storage.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.storage = storage.Storage( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1', redfish_version='1.0.2') def test__parse_attributes(self): - self.storage._parse_attributes() + self.storage._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.storage.redfish_version) self.assertEqual('1', self.storage.identity) 
self.assertEqual('Local Storage Controller', self.storage.name) @@ -233,13 +235,16 @@ class StorageCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'storage_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.stor_col = storage.StorageCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage', redfish_version='1.0.2') def test__parse_attributes(self): - self.stor_col._parse_attributes() + self.stor_col._parse_attributes(self.json_doc) self.assertEqual(( '/redfish/v1/Systems/437XR1138R2/Storage/1',), self.stor_col.members_identities) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 61f6a57..3c59ae9 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -27,14 +27,16 @@ class VolumeTestCase(base.TestCase): super(VolumeTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/volume.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.stor_volume = volume.Volume( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', redfish_version='1.0.2') def test__parse_attributes(self): - self.stor_volume._parse_attributes() + self.stor_volume._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.stor_volume.redfish_version) self.assertEqual('1', self.stor_volume.identity) self.assertEqual('Virtual Disk 1', self.stor_volume.name) @@ -84,14 +86,17 @@ class VolumeCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'volume_collection.json') as f: - self.conn.get.return_value.json.return_value = 
json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.stor_vol_col = volume.VolumeCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', redfish_version='1.0.2') self.stor_vol_col.refresh = mock.Mock() def test__parse_attributes(self): - self.stor_vol_col._parse_attributes() + self.stor_vol_col._parse_attributes(self.json_doc) self.assertEqual(( '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 77de438..97c8bc6 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -31,14 +31,14 @@ class BiosTestCase(base.TestCase): super(BiosTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/bios.json') as f: - bios_json = json.load(f) + self.bios_json = json.load(f) with open('sushy/tests/unit/json_samples/bios_settings.json') as f: - bios_settings_json = json.load(f) + self.bios_settings_json = json.load(f) self.conn.get.return_value.json.side_effect = [ - bios_json, - bios_settings_json, - bios_settings_json] + self.bios_json, + self.bios_settings_json, + self.bios_settings_json] conn = mock.Mock() with open('sushy/tests/unit/json_samples/message_registry.json') as f: @@ -53,7 +53,7 @@ class BiosTestCase(base.TestCase): redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_bios._parse_attributes() + self.sys_bios._parse_attributes(self.bios_json) self.assertEqual('1.0.2', self.sys_bios.redfish_version) self.assertEqual('BIOS', self.sys_bios.identity) self.assertEqual('BIOS Configuration Current Settings', @@ -151,7 +151,7 @@ class BiosTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions/#Bios.ResetBios/target', - self.sys_bios._parse_attributes) + 
self.sys_bios._parse_attributes, self.bios_json) def test_reset_bios(self): self.sys_bios.reset_bios() @@ -202,7 +202,7 @@ class BiosTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions/#Bios.ChangePassword/target', - self.sys_bios._parse_attributes) + self.sys_bios._parse_attributes, self.bios_json) def test_change_password(self): self.sys_bios.change_password('newpassword', diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index 97c9683..037af20 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -26,7 +26,9 @@ class EthernetInterfaceTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'ethernet_interfaces.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" "12446A3B0411") @@ -34,7 +36,7 @@ class EthernetInterfaceTestCase(base.TestCase): self.conn, eth_path, redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_eth._parse_attributes() + self.sys_eth._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_eth.redfish_version) self.assertEqual('1', self.sys_eth.identity) self.assertEqual('Ethernet Interface', self.sys_eth.name) @@ -54,13 +56,16 @@ class EthernetInterfaceCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'ethernet_interfaces_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_eth_col = ethernet_interface.EthernetInterfaceCollection( self.conn, 
'/redfish/v1/Systems/437XR1138R2/EthernetInterfaces', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_eth_col._parse_attributes() + self.sys_eth_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_eth_col.redfish_version) self.assertEqual('Ethernet Interface Collection', self.sys_eth_col.name) diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index 558ef48..c5764c0 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -28,14 +28,16 @@ class ProcessorTestCase(base.TestCase): super(ProcessorTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/processor.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sys_processor = processor.Processor( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_processor._parse_attributes() + self.sys_processor._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_processor.redfish_version) self.assertEqual('CPU1', self.sys_processor.identity) self.assertEqual('CPU 1', self.sys_processor.socket) @@ -82,13 +84,16 @@ class ProcessorCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'processor_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_processor_col = processor.ProcessorCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_processor_col._parse_attributes() + self.sys_processor_col._parse_attributes(self.json_doc) 
self.assertEqual('1.0.2', self.sys_processor_col.redfish_version) self.assertEqual('Processors Collection', self.sys_processor_col.name) self.assertEqual(('/redfish/v1/Systems/437XR1138R2/Processors/CPU1', diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index 906ceee..171df81 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -26,14 +26,16 @@ class SimpleStorageTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'simple_storage.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.simpl_stor = simple_storage.SimpleStorage( self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', redfish_version='1.0.2') def test__parse_attributes(self): - self.simpl_stor._parse_attributes() + self.simpl_stor._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.simpl_stor.redfish_version) self.assertEqual('1', self.simpl_stor.identity) self.assertEqual('Simple Storage Controller', self.simpl_stor.name) @@ -56,13 +58,17 @@ class SimpleStorageCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'simple_storage_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.simpl_stor_col = simple_storage.SimpleStorageCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage', redfish_version='1.0.2') def test__parse_attributes(self): - self.simpl_stor_col._parse_attributes() + self.simpl_stor_col._parse_attributes(self.json_doc) self.assertEqual(( '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1',), self.simpl_stor_col.members_identities) diff --git 
a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 8de7dee..6d8dffc 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -39,14 +39,16 @@ class SystemTestCase(base.TestCase): super(SystemTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/system.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sys_inst = system.System( self.conn, '/redfish/v1/Systems/437XR1138R2', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_inst.redfish_version) self.assertEqual('Chicago-45Z-2381', self.sys_inst.asset_tag) self.assertEqual('P79 v1.33 (02/28/2015)', self.sys_inst.bios_version) @@ -85,13 +87,13 @@ class SystemTestCase(base.TestCase): self.sys_inst.json.pop('Actions') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test__parse_attributes_missing_boot(self): self.sys_inst.json.pop('Boot') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Boot', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test__parse_attributes_missing_reset_target(self): self.sys_inst.json['Actions']['#ComputerSystem.Reset'].pop( @@ -99,11 +101,11 @@ class SystemTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions/#ComputerSystem.Reset/target', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test__parse_attributes_null_memory_capacity(self): self.sys_inst.json['MemorySummary']['TotalSystemMemoryGiB'] = None - 
self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) self.assertIsNone(self.sys_inst.memory_summary.size_gib) def test__parse_attributes_bad_maintenance_window_time(self): @@ -112,7 +114,7 @@ class SystemTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MalformedAttributeError, '@Redfish.MaintenanceWindow/MaintenanceWindowStartTime', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test_get__reset_action_element(self): value = self.sys_inst._get_reset_action_element() @@ -284,7 +286,7 @@ class SystemTestCase(base.TestCase): # | GIVEN | self.sys_inst._json['MemorySummary']['Status'].pop('HealthRollup') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertIsNone(self.sys_inst.memory_summary.health) @@ -292,7 +294,7 @@ class SystemTestCase(base.TestCase): # | GIVEN | self.sys_inst._json['MemorySummary'].pop('Status') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertIsNone(self.sys_inst.memory_summary.health) @@ -300,7 +302,7 @@ class SystemTestCase(base.TestCase): # | GIVEN | self.sys_inst._json['MemorySummary'].pop('TotalSystemMemoryGiB') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertIsNone(self.sys_inst.memory_summary.size_gib) self.assertIsNone(self.sys_inst.memory_summary.health) @@ -308,7 +310,7 @@ class SystemTestCase(base.TestCase): # | GIVEN | self.sys_inst._json.pop('MemorySummary') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertIsNone(self.sys_inst.memory_summary) @@ -555,8 +557,8 @@ class SystemTestCase(base.TestCase): # | THEN | self.assertIsInstance(contoso_system_extn_inst, 
fake.FakeOEMSystemExtension) - self.assertIs(self.sys_inst, contoso_system_extn_inst.core_resource) - self.assertEqual('Contoso', contoso_system_extn_inst.oem_property_name) + self.assertIs(self.sys_inst, contoso_system_extn_inst._parent_resource) + self.assertEqual('Contoso', contoso_system_extn_inst._vendor_id) class SystemCollectionTestCase(base.TestCase): @@ -566,12 +568,15 @@ class SystemCollectionTestCase(base.TestCase): self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'system_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_col = system.SystemCollection( self.conn, '/redfish/v1/Systems', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_col._parse_attributes() + self.sys_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_col.redfish_version) self.assertEqual('Computer System Collection', self.sys_col.name) self.assertEqual(('/redfish/v1/Systems/437XR1138R2',), diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 34adea6..facd8f6 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -47,7 +47,7 @@ BASE_RESOURCE_JSON = { class BaseResource(resource_base.ResourceBase): - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass @@ -182,7 +182,7 @@ class TestResource(resource_base.ResourceBase): redfish_version, registries) self.identity = identity - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py index e3e8523..53238e4 100644 --- a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -27,7 
+27,9 @@ class SoftwareInventoryTestCase(base.TestCase): conn = mock.Mock() with open( 'sushy/tests/unit/json_samples/softwareinventory.json') as f: - conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + conn.get.return_value.json.return_value = self.json_doc self.soft_inv = softwareinventory.SoftwareInventory( conn, @@ -35,7 +37,7 @@ class SoftwareInventoryTestCase(base.TestCase): redfish_version='1.3.0') def test__parse_attributes(self): - self.soft_inv._parse_attributes() + self.soft_inv._parse_attributes(self.json_doc) self.assertEqual('BMC', self.soft_inv.identity) self.assertEqual( '1.30.367a12-rev1', @@ -57,7 +59,7 @@ class SoftwareInventoryTestCase(base.TestCase): self.soft_inv.json.pop('Id') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Id', - self.soft_inv._parse_attributes) + self.soft_inv._parse_attributes, self.json_doc) class SoftwareInventoryCollectionTestCase(base.TestCase): @@ -67,14 +69,16 @@ class SoftwareInventoryCollectionTestCase(base.TestCase): conn = mock.Mock() with open('sushy/tests/unit/json_samples/' 'softwareinventory_collection.json') as f: - conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + conn.get.return_value.json.return_value = self.json_doc self.soft_inv_col = softwareinventory.SoftwareInventoryCollection( conn, '/redfish/v1/UpdateService/SoftwareInventory', redfish_version='1.3.0') def test__parse_attributes(self): - self.soft_inv_col._parse_attributes() + self.soft_inv_col._parse_attributes(self.json_doc) self.assertEqual('1.3.0', self.soft_inv_col.redfish_version) self.assertEqual( 'Software Inventory Collection', diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index 5815840..65b9e38 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -28,14 
+28,16 @@ class UpdateServiceTestCase(base.TestCase): super(UpdateServiceTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/updateservice.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.upd_serv = updateservice.UpdateService( self.conn, '/redfish/v1/UpdateService/UpdateService', redfish_version='1.3.0') def test__parse_attributes(self): - self.upd_serv._parse_attributes() + self.upd_serv._parse_attributes(self.json_doc) self.assertEqual('UpdateService', self.upd_serv.identity) self.assertEqual('/FWUpdate', self.upd_serv.http_push_uri) self.assertIn('/FWUpdate', self.upd_serv.http_push_uri_targets) @@ -52,7 +54,7 @@ class UpdateServiceTestCase(base.TestCase): self.upd_serv.json.pop('Actions') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions', - self.upd_serv._parse_attributes) + self.upd_serv._parse_attributes, self.json_doc) def test_simple_update(self): self.upd_serv.simple_update( diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 1288b8f..92b9e3d 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -45,14 +45,16 @@ class MainTestCase(base.TestCase): mock_session_service.return_value = self.sess_serv mock_connector.return_value = self.conn with open('sushy/tests/unit/json_samples/root.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.root = main.Sushy('http://foo.bar:1234', verify=True, auth=mock_auth) mock_connector.assert_called_once_with( 'http://foo.bar:1234', verify=True) def test__parse_attributes(self): - self.root._parse_attributes() + self.root._parse_attributes(self.json_doc) self.assertEqual('RootService', self.root.identity) self.assertEqual('Root Service', self.root.name) 
self.assertEqual('1.0.2', self.root.redfish_version) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index 324366f..cbe341d 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -129,13 +129,13 @@ class UtilsTestCase(base.TestCase): class NestedResource(resource_base.ResourceBase): - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass class BaseResource(resource_base.ResourceBase): - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass def _do_some_crunch_work_to_get_a(self): -- GitLab From 3d6de035713a003d9bb74d7281b2026c8f5224be Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 7 Aug 2019 11:51:59 +0200 Subject: [PATCH 181/303] Add conditional field matching Adds the ability to conditionally match sushy fields against received JSON object. The conditional matching is performed by a user-supplied callable which gets the key to consider (along with the value and potentially other details) and should indicate to the caller whether the match occurred. The motivation behind this change is to accommodate malformed Redfish resource properties as observed in the OEM wilderness. Change-Id: I9da9a83b72b670e15b53ef906f186473340c0935 --- ...dd-partial-key-match-27bed73d577b1187.yaml | 9 ++++++ sushy/resources/base.py | 31 +++++++++++++++---- sushy/tests/unit/resources/test_base.py | 21 +++++++++++++ 3 files changed, 55 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml diff --git a/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml b/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml new file mode 100644 index 0000000..b7e8227 --- /dev/null +++ b/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml @@ -0,0 +1,9 @@ +--- +features: + - | + Adds the ability to conditionally match sushy fields against received JSON + object.
The conditional matching is performed by a user-supplied callable + which gets the key to consider (along with the value and potentially other + details) and should indicate to the caller whether the match occurred. + The motivation behind this change is to accommodate malformed Redfish + resource properties as observed in the OEM wilderness. diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 14bd1b8..e53e4eb 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -60,8 +60,9 @@ class Field(object): if not callable(adapter): raise TypeError("Adapter must be callable") - if isinstance(path, six.string_types): + if not isinstance(path, list): path = [path] + elif not path: raise ValueError('Path cannot be empty') @@ -70,6 +71,17 @@ class Field(object): self._default = default self._adapter = adapter + def _get_item(self, dct, key_or_callable, **context): + if not callable(key_or_callable): + return dct[key_or_callable] + + for candidate_key in dct: + if key_or_callable( + candidate_key, value=dct[candidate_key], **context): + return dct[candidate_key] + + raise KeyError(key_or_callable) + def _load(self, body, resource, nested_in=None): """Load this field from a JSON object. 
@@ -85,7 +97,10 @@ class Field(object): for path_item in self._path[:-1]: body = body.get(path_item, {}) - if name not in body: + try: + item = self._get_item(body, name) + + except KeyError: if self._required: path = (nested_in or []) + self._path raise exceptions.MissingAttributeError( @@ -97,11 +112,15 @@ class Field(object): try: # Get the value based on the name, defaulting to an empty dict - value = body.get(name, {}) # Check to ensure that value is implemented by OEM - if (value is not None and value != {} and - str(value).lower() != 'none'): - value = self._adapter(value) + # TODO(etingof): we should revisit this logic/code + if (item is not None and item != {} and + str(item).lower() != 'none'): + value = self._adapter(item) + + else: + value = item + except (UnicodeError, ValueError, TypeError) as exc: path = (nested_in or []) + self._path raise exceptions.MalformedAttributeError( diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index facd8f6..9432094 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -441,3 +441,24 @@ class FieldTestCase(base.TestCase): # Regular attributes cannot be accessed via mapping self.assertRaisesRegex(KeyError, '_load', lambda: field['_load']) self.assertRaisesRegex(KeyError, '__init__', lambda: field['__init__']) + + +class PartialKeyResource(resource_base.ResourceBase): + string = resource_base.Field( + lambda key, **context: key.startswith('Str')) + integer = resource_base.Field( + lambda key, value, **context: key == 'Integer' and int(value) < 42) + + +class FieldPartialKeyTestCase(base.TestCase): + def setUp(self): + super(FieldPartialKeyTestCase, self).setUp() + self.conn = mock.Mock() + self.json = copy.deepcopy(TEST_JSON) + self.conn.get.return_value.json.return_value = self.json + self.test_resource = PartialKeyResource( + self.conn, redfish_version='1.0.x') + + def test_ok(self): + self.assertEqual('a string', 
self.test_resource.string) + self.assertIsNone(self.test_resource.integer) -- GitLab From ae2bafb37fe5bf2762c2182928f02b02f000eb9f Mon Sep 17 00:00:00 2001 From: Kaifeng Wang Date: Fri, 14 Jun 2019 17:34:30 +0800 Subject: [PATCH 182/303] Build pdf doc The is one of community goals that each project could produce a single PDF file. The pdf should be in the output of openstack-tox-docs job. TeX packages are required to build PDF locally, following is recommended: * inkscape * texlive-latex-base * texlive-latex-extra * texlive-fonts-recommended More about the goal: https://governance.openstack.org/tc/goals/train/pdf-doc-generation.html https://etherpad.openstack.org/p/train-pdf-support-goal https://etherpad.openstack.org/p/pdf-goal-train-common-problems Change-Id: Iadbfcb0ab746ac0a64abc85ae7ea2f628405f6fb --- doc/source/conf.py | 9 +++++---- lower-constraints.txt | 2 +- test-requirements.txt | 2 +- tox.ini | 6 ++++++ 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index f0f25aa..5cbe95c 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -36,7 +36,6 @@ source_suffix = '.rst' master_doc = 'index' # General information about the project. -project = u'sushy' copyright = u'2016, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. @@ -58,15 +57,17 @@ html_theme = 'openstackdocs' # html_static_path = ['static'] # Output file base name for HTML help builder. -htmlhelp_basename = '%sdoc' % project +htmlhelp_basename = 'sushydoc' + +latex_use_xindy = False # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). 
latex_documents = [ ('index', - '%s.tex' % project, - u'%s Documentation' % project, + 'doc-sushy.tex', + u'Sushy Documentation', u'OpenStack Foundation', 'manual'), ] diff --git a/lower-constraints.txt b/lower-constraints.txt index bcfcd5d..910fa57 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -17,7 +17,7 @@ MarkupSafe==1.0 mccabe==0.2.1 mock==2.0.0 mox3==0.20.0 -openstackdocstheme==1.18.1 +openstackdocstheme==1.20.0 os-client-config==1.28.0 oslotest==3.2.0 pbr==2.0.0 diff --git a/test-requirements.txt b/test-requirements.txt index 6f8ebee..ed514e0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,7 +8,7 @@ coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD -openstackdocstheme>=1.18.1 # Apache-2.0 +openstackdocstheme>=1.20.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 testscenarios>=0.4 # Apache-2.0/BSD diff --git a/tox.ini b/tox.ini index bf76370..5237093 100644 --- a/tox.ini +++ b/tox.ini @@ -42,6 +42,12 @@ commands = coverage erase basepython = python3 commands = python setup.py build_sphinx +[testenv:pdf-docs] +basepython = python3 +whitelist_externals = make +commands = sphinx-build -b latex doc/source doc/build/pdf + make -C doc/build/pdf + [testenv:releasenotes] basepython = python3 commands = -- GitLab From b0803141bbff40c9aeb2677cc84c919a2b897b41 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 5 Sep 2019 23:47:55 +0000 Subject: [PATCH 183/303] Update master for stable/train Add file to the reno documentation build to show release notes for stable/train. Use pbr instruction to increment the minor version number automatically so that master versions are higher than the versions on stable/train. 
Change-Id: If9126950cdd8c386df8f7c3b8b88ce22b47fe133 Sem-Ver: feature --- releasenotes/source/index.rst | 1 + releasenotes/source/train.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/train.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index 5edb814..c66b1fa 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,6 +6,7 @@ :maxdepth: 1 unreleased + train stein rocky queens diff --git a/releasenotes/source/train.rst b/releasenotes/source/train.rst new file mode 100644 index 0000000..7fa1088 --- /dev/null +++ b/releasenotes/source/train.rst @@ -0,0 +1,6 @@ +=================================== + Train Series Release Notes +=================================== + +.. release-notes:: + :branch: stable/train -- GitLab From c559be670252eb0418b442827eb99820a7007df6 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Thu, 26 Sep 2019 15:00:31 +0200 Subject: [PATCH 184/303] Now packaging 2.0.0 (train) --- debian/changelog | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 1ae5a23..8ee680e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,9 +1,13 @@ -python-sushy (1.8.1-3) UNRELEASED; urgency=medium +python-sushy (2.0.0-1) experimental; urgency=medium + [ Ondřej Nový ] * Use debhelper-compat instead of debian/compat. * Bump Standards-Version to 4.4.0. - -- Ondřej Nový Thu, 18 Jul 2019 16:38:25 +0200 + [ Thomas Goirand ] + * New upstream release. 
+ + -- Thomas Goirand Thu, 26 Sep 2019 15:00:16 +0200 python-sushy (1.8.1-2) unstable; urgency=medium -- GitLab From 6118000b749acff6690562bed20c5ee006b96650 Mon Sep 17 00:00:00 2001 From: Mark Goddard Date: Thu, 26 Sep 2019 14:57:15 +0100 Subject: [PATCH 185/303] Add versions to release notes series Change-Id: I7ac1c9742b6b101bb3f58d63a2d99abc5d827e87 --- releasenotes/source/train.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/releasenotes/source/train.rst b/releasenotes/source/train.rst index 7fa1088..e5b02dc 100644 --- a/releasenotes/source/train.rst +++ b/releasenotes/source/train.rst @@ -1,6 +1,6 @@ -=================================== - Train Series Release Notes -=================================== +=========================================== + Train Series (1.9.0 - 2.0.x) Release Notes +=========================================== .. release-notes:: :branch: stable/train -- GitLab From 33879254e3fb04c531e811b2f4dd37b06e6c828a Mon Sep 17 00:00:00 2001 From: Sayali Kutwal Date: Tue, 15 Oct 2019 11:22:41 +0530 Subject: [PATCH 186/303] Fix typo in the section Enabling SSL The description of Enabling SSL section has 'to it' written twice. Remove one of them to increase the readability.
Change-Id: I2be039c6d5003a4d8da678efb123792a368cb154 --- doc/source/contributor/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst index 06fbeec..94fb010 100644 --- a/doc/source/contributor/index.rst +++ b/doc/source/contributor/index.rst @@ -82,7 +82,7 @@ use following command:: openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 Start the mockup server passing the ``--ssl-certificate`` and -``--ssl-key`` parameters to it to it, for example:: +``--ssl-key`` parameters to it, for example:: sushy-emulator --ssl-key key.pem --ssl-certificate cert.pem -- GitLab From 54e37422c1d36fe73b12fa9b119334fd243d8d2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= Date: Fri, 18 Oct 2019 16:28:33 +0200 Subject: [PATCH 187/303] Bump Standards-Version to 4.4.1 --- debian/changelog | 6 ++++++ debian/control | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index 8ee680e..2afe473 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (2.0.0-2) UNRELEASED; urgency=medium + + * Bump Standards-Version to 4.4.1. + + -- Ondřej Nový Fri, 18 Oct 2019 16:28:33 +0200 + python-sushy (2.0.0-1) experimental; urgency=medium [ Ondřej Nový ] diff --git a/debian/control b/debian/control index e8b26d6..1cf6de5 100644 --- a/debian/control +++ b/debian/control @@ -25,7 +25,7 @@ Build-Depends-Indep: python3-testscenarios, python3-testtools, subunit, -Standards-Version: 4.4.0 +Standards-Version: 4.4.1 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git Homepage: https://docs.openstack.org/sushy -- GitLab From 9f07b1f87f1719e50ca008255caf5d02d96cf61a Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 21 Oct 2019 10:25:56 +0200 Subject: [PATCH 188/303] Uploading to unstable. 
--- debian/changelog | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 2afe473..6666dea 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,12 @@ -python-sushy (2.0.0-2) UNRELEASED; urgency=medium +python-sushy (2.0.0-2) unstable; urgency=medium + [ Ondřej Nový ] * Bump Standards-Version to 4.4.1. - -- Ondřej Nový Fri, 18 Oct 2019 16:28:33 +0200 + [ Thomas Goirand ] + * Uploading to unstable. + + -- Thomas Goirand Mon, 21 Oct 2019 10:25:40 +0200 python-sushy (2.0.0-1) experimental; urgency=medium -- GitLab From 5990b5df78808bb53bc5b64c9c4160f8748b055d Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Thu, 24 Oct 2019 11:32:17 +0200 Subject: [PATCH 189/303] Switch jobs to python3 - ironic-base is python3 by default, we don't need `USE_PYTHON3` Depends-On: https://review.opendev.org/#/c/687521/ Change-Id: I70fb1d05f3fa4b93a9bf2603269271f50c90e24f --- zuul.d/sushy-jobs.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index b7b083e..c7ff202 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -9,7 +9,6 @@ - openstack/sushy vars: devstack_localrc: - USE_PYTHON3: True IRONIC_DEPLOY_DRIVER: redfish IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" -- GitLab From 4e97c51458b8c0f890295ccf68503bede30b6149 Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Fri, 22 Nov 2019 11:43:26 +0100 Subject: [PATCH 190/303] Drop python 2.7 support and testing OpenStack is dropping the py2.7 support in ussuri cycle. Sushy is ready with python 3 and ok to drop the python 2.7 support. 
Complete discussion & schedule can be found in - http://lists.openstack.org/pipermail/openstack-discuss/2019-October/010142.html - https://etherpad.openstack.org/p/drop-python2-support Ussuri Community-wide goal - https://review.opendev.org/#/c/691178/ Change-Id: I6879128ec84db7a64fc52411fe654044aaee4671 --- releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml | 5 +++++ setup.cfg | 2 -- test-requirements.txt | 1 - tox.ini | 11 ++--------- zuul.d/project.yaml | 5 +---- zuul.d/sushy-jobs.yaml | 7 ------- 6 files changed, 8 insertions(+), 23 deletions(-) create mode 100644 releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml diff --git a/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml b/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml new file mode 100644 index 0000000..95bb1ce --- /dev/null +++ b/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml @@ -0,0 +1,5 @@ +upgrade: + - | + Python 2.7 support has been dropped. Last release of sushy + to support Python 2.7 is OpenStack Train. The minimum version of Python now + supported by sushy is Python 3.6.
diff --git a/setup.cfg b/setup.cfg index 73a1a9d..2416517 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,8 +13,6 @@ classifier = License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 diff --git a/test-requirements.txt b/test-requirements.txt index ed514e0..2745da5 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,6 @@ hacking>=1.0.0,<1.1.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD openstackdocstheme>=1.20.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 diff --git a/tox.ini b/tox.ini index 5237093..7c53bdc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,9 +1,10 @@ [tox] minversion = 2.0 -envlist = py3,py27,pep8 +envlist = py3,pep8 skipsdist = True [testenv] +basepython = python3 usedevelop = True setenv = VIRTUAL_ENV={envdir} @@ -16,15 +17,12 @@ deps = commands = stestr run --slowest {posargs} [testenv:pep8] -basepython = python3 commands = flake8 {posargs} [testenv:venv] -basepython = python3 commands = {posargs} [testenv:cover] -basepython = python3 setenv = {[testenv]setenv} PYTHON=coverage run --parallel-mode @@ -39,22 +37,18 @@ commands = coverage erase coverage xml -o cover/coverage.xml [testenv:docs] -basepython = python3 commands = python setup.py build_sphinx [testenv:pdf-docs] -basepython = python3 whitelist_externals = make commands = sphinx-build -b latex doc/source doc/build/pdf make -C doc/build/pdf [testenv:releasenotes] -basepython = python3 commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] -basepython = python3 commands = oslo_debug_helper -t 
sushy/tests {posargs} [flake8] @@ -72,7 +66,6 @@ builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build [testenv:lower-constraints] -basepython = python3 deps = -c{toxinidir}/lower-constraints.txt -r{toxinidir}/test-requirements.txt diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 861d37f..a2b8a1d 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -3,15 +3,12 @@ - check-requirements - openstack-cover-jobs - openstack-lower-constraints-jobs - - openstack-python-jobs - - openstack-python3-train-jobs + - openstack-python3-ussuri-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: jobs: - sushy-tempest-ironic-partition-redfish-src - - sushy-tempest-ironic-partition-redfish-src-python2 gate: jobs: - sushy-tempest-ironic-partition-redfish-src - - sushy-tempest-ironic-partition-redfish-src-python2 diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index c7ff202..69870fb 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -13,10 +13,3 @@ IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" EBTABLES_RACE_FIX: True - -- job: - name: sushy-tempest-ironic-partition-redfish-src-python2 - parent: sushy-tempest-ironic-partition-redfish-src - vars: - devstack_localrc: - USE_PYTHON3: False -- GitLab From 015fe9b3b4f28caf26d6051ee2d522f7aa44f826 Mon Sep 17 00:00:00 2001 From: khansa Date: Mon, 21 Oct 2019 17:18:32 +0100 Subject: [PATCH 191/303] Add OEM extension example script This change adds a simple example script illustrating OEM extension call based on Acme sushy extension package. 
Story: 2006610 Task: 36779 Change-Id: Icf22b9d38e307a07f59112a6c462d48a8ec6c3d6 --- doc/source/reference/usage.rst | 58 ++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index 7854843..64e50be 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -294,6 +294,64 @@ Creating and using a sushy session service object sess_serv.close_session(sess_col.members_identities[0]) +-------------------- +Using OEM extensions +-------------------- + +Before running this example, please make sure you have a Redfish BMC that +includes the OEM piece for a specific vendor, as well as the Sushy OEM +extension package installed in the system for the same vendor. + +You can check the presence of the OEM extension within each Redfish +resource by specifying the vendor ID and search for them. + +In the following example, we are looking up "Acme" vendor extension to Redfish +Manager resource. + +.. 
code-block:: python + + import sushy + + root = sushy.Sushy('http://localhost:8000/redfish/v1') + + # Instantiate a system object + system = root.get_system('/redfish/v1/Systems/437XR1138R2') + + print('Working on system resource %s' % system.identity) + + for manager in system.managers: + + print('Using System manager %s' % manager.identity) + + # Get a list of OEM extension names for the system manager + oem_vendors = manager.oem_vendors + + print('Listing OEM extension name(s) for the System ' + 'manager %s' % manager.identity ) + + print(*oem_vendors, sep="\n") + + try: + manager_oem = manager.get_oem_extension('Acme') + + except sushy.exceptions.OEMExtensionNotFoundError: + print('ERROR: Acme OEM extension not found in ' + 'Manager %s' % manager.identity) + continue + + print('%s is an OEM extension of Manager %s' + % (manager_oem.get_extension(), manager.identity)) + + # set boot device to a virtual media device image + manager_oem.set_virtual_boot_device(sushy.VIRTUAL_MEDIA_CD, + manager=manager) + + If you do not have any real baremetal machine that supports the Redfish protocol you can look at the :ref:`contributing` page to learn how to run a Redfish emulator. + +For the OEM extension example, presently, both of the emulators +(static/dynamic) do not expose any OEM; as a result, users may need to add +manually some OEM resources to emulators' templates. It may be easier to +start with a static emulator. -- GitLab From a54dfea55e245be91910f20bfd78f87b6d419468 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 2 Dec 2019 15:23:39 +0100 Subject: [PATCH 192/303] Stop using six library Since we've dropped support for Python 2.7, it's time to look at the bright future that Python 3.x will bring and stop forcing compatibility with older versions. This patch removes the six library from requirements, not looking back. 
Change-Id: I8e2b271d0e1c4ef61b73d2e5072dc47c3276cda2 --- lower-constraints.txt | 1 - requirements.txt | 1 - sushy/auth.py | 5 +--- sushy/connector.py | 4 +-- sushy/exceptions.py | 3 +-- sushy/resources/base.py | 26 +++++-------------- sushy/resources/manager/virtual_media.py | 2 +- sushy/resources/system/bios.py | 2 +- .../resources/manager/test_virtual_media.py | 2 +- .../tests/unit/resources/system/test_bios.py | 2 +- sushy/tests/unit/resources/test_base.py | 3 +-- sushy/tests/unit/test_connector.py | 2 +- sushy/utils.py | 20 +++++--------- 13 files changed, 22 insertions(+), 51 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 910fa57..c42d8c1 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -32,7 +32,6 @@ PyYAML==3.12 reno==2.5.0 requests==2.14.2 requestsexceptions==1.2.0 -six==1.10.0 snowballstemmer==1.2.1 Sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 diff --git a/requirements.txt b/requirements.txt index 14f51f5..1829f68 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,5 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0 requests>=2.14.2 # Apache-2.0 -six>=1.10.0 # MIT python-dateutil>=2.7.0 # BSD stevedore>=1.29.0 # Apache-2.0 diff --git a/sushy/auth.py b/sushy/auth.py index c537c84..beebdad 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -15,15 +15,12 @@ import abc import logging -import six - from sushy import exceptions LOG = logging.getLogger(__name__) -@six.add_metaclass(abc.ABCMeta) -class AuthBase(object): +class AuthBase(object, metaclass=abc.ABCMeta): def __init__(self, username=None, password=None): """A class representing a base Sushy authentication mechanism diff --git a/sushy/connector.py b/sushy/connector.py index 7c31636..80e038f 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -14,9 +14,9 @@ # under the License. 
import logging +from urllib import parse as urlparse import requests -from six.moves.urllib import parse from sushy import exceptions @@ -77,7 +77,7 @@ class Connector(object): :raises: ConnectionError :raises: HTTPError """ - url = parse.urljoin(self._url, path) + url = urlparse.urljoin(self._url, path) headers = headers or {} if not any(k.lower() == 'odata-version' for k in headers): headers['OData-Version'] = '4.0' diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 9f552b8..9f1e01a 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -13,10 +13,9 @@ # License for the specific language governing permissions and limitations # under the License. +from http import client as http_client import logging -from six.moves import http_client - LOG = logging.getLogger(__name__) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index e53e4eb..d33355b 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -16,13 +16,6 @@ import abc import collections -# (rpittau) this allows usage of collection ABC abstract classes in both -# Python 2.7 and Python 3.8+ -try: - collectionsAbc = collections.abc -except AttributeError: - collectionsAbc = collections - import copy import io import json @@ -30,8 +23,6 @@ import logging import pkg_resources import zipfile -import six - from sushy import exceptions from sushy.resources import oem from sushy import utils @@ -143,8 +134,7 @@ def _collect_fields(resource): yield (attr, field) -@six.add_metaclass(abc.ABCMeta) -class CompositeField(collectionsAbc.Mapping, Field): +class CompositeField(collections.abc.Mapping, Field, metaclass=abc.ABCMeta): """Base class for fields consisting of several sub-fields.""" def __init__(self, *args, **kwargs): @@ -175,7 +165,6 @@ class CompositeField(collectionsAbc.Mapping, Field): return instance # Satisfy the mapping interface, see - # https://docs.python.org/2/library/collections.html#collections.Mapping. 
# https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping def __getitem__(self, key): @@ -274,7 +263,7 @@ class MappedField(Field): Only has effect when the field is not required. This value is not matched against the mapping. """ - if not isinstance(mapping, collectionsAbc.Mapping): + if not isinstance(mapping, collections.abc.Mapping): raise TypeError("The mapping argument must be a mapping") super(MappedField, self).__init__( @@ -300,7 +289,7 @@ class MappedListField(Field): :param default: the default value to use when the field is missing. Only has effect when the field is not required. """ - if not isinstance(mapping, collectionsAbc.Mapping): + if not isinstance(mapping, collections.abc.Mapping): raise TypeError("The mapping argument must be a mapping") self._mapping_adapter = mapping.get @@ -328,8 +317,7 @@ class MappedListField(Field): return instances -@six.add_metaclass(abc.ABCMeta) -class AbstractJsonReader(object): +class AbstractJsonReader(object, metaclass=abc.ABCMeta): def set_connection(self, connector, path): """Sets mandatory connection parameters @@ -406,8 +394,7 @@ class JsonPackagedFileReader(AbstractJsonReader): return json.loads(resource.read().decode(encoding='utf-8')) -@six.add_metaclass(abc.ABCMeta) -class ResourceBase(object): +class ResourceBase(object, metaclass=abc.ABCMeta): redfish_version = None """The Redfish version""" @@ -568,8 +555,7 @@ class ResourceBase(object): return self._registries -@six.add_metaclass(abc.ABCMeta) -class ResourceCollectionBase(ResourceBase): +class ResourceCollectionBase(ResourceBase, metaclass=abc.ABCMeta): name = Field('Name') """The name of the collection""" diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index 7f2cdac..150e7d0 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -13,7 +13,7 @@ # This is referred from Redfish standard schema. 
# https://redfish.dmtf.org/schemas/VirtualMedia.v1_2_0.json -from six.moves import http_client +from http import client as http_client from sushy import exceptions from sushy.resources import base diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 4ec0f2f..b1ae6fa 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -13,8 +13,8 @@ # This is referred from Redfish standard schema. # https://redfish.dmtf.org/schemas/Bios.v1_0_3.json +from http import client as http_client import logging -from six.moves import http_client from sushy import exceptions from sushy.resources import base diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index b85925b..92fd564 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -12,10 +12,10 @@ # License for the specific language governing permissions and limitations # under the License. +from http import client as http_client import json import mock -from six.moves import http_client import sushy from sushy import exceptions diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index 97c8bc6..ea52695 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -12,11 +12,11 @@ # License for the specific language governing permissions and limitations # under the License. 
+from http import client as http_client import json import mock from dateutil import parser -from six.moves import http_client from sushy import exceptions from sushy.resources.registry import message_registry diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 9432094..8e169a7 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -14,12 +14,11 @@ # under the License. import copy +from http import client as http_client import io import json import mock -from six.moves import http_client - from sushy import exceptions from sushy.resources import base as resource_base from sushy.tests.unit import base diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index cd5e99f..4d84c40 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -13,11 +13,11 @@ # License for the specific language governing permissions and limitations # under the License. +from http import client as http_client import json import mock import requests -from six.moves import http_client from sushy import auth as sushy_auth from sushy import connector diff --git a/sushy/utils.py b/sushy/utils.py index b4d2a92..d0300d1 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -14,19 +14,11 @@ # under the License. 
import collections - -# (rpittau) this allows usage of collection ABC abstract classes in both -# Python 2.7 and Python 3.8+ -try: - collectionsAbc = collections.abc -except AttributeError: - collectionsAbc = collections +import functools import logging import threading -import six - from sushy import exceptions LOG = logging.getLogger(__name__) @@ -220,7 +212,7 @@ def cache_it(res_accessor_method): """ cache_attr_name = '_cache_' + res_accessor_method.__name__ - @six.wraps(res_accessor_method) + @functools.wraps(res_accessor_method) def func_wrapper(res_selfie): cache_attr_val = getattr(res_selfie, cache_attr_name, None) @@ -239,7 +231,7 @@ def cache_it(res_accessor_method): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.refresh(force=False) - elif isinstance(cache_attr_val, collectionsAbc.Sequence): + elif isinstance(cache_attr_val, collections.abc.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.refresh(force=False) @@ -268,7 +260,7 @@ def cache_clear(res_selfie, force_refresh, only_these=None): cache_attr_names = setdefaultattr( res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) if only_these is not None: - if not isinstance(only_these, collectionsAbc.Sequence): + if not isinstance(only_these, collections.abc.Sequence): raise TypeError("'only_these' must be a sequence.") cache_attr_names = cache_attr_names.intersection( @@ -281,7 +273,7 @@ def cache_clear(res_selfie, force_refresh, only_these=None): if isinstance(cache_attr_val, base.ResourceBase): cache_attr_val.invalidate(force_refresh) - elif isinstance(cache_attr_val, collectionsAbc.Sequence): + elif isinstance(cache_attr_val, collections.abc.Sequence): for elem in cache_attr_val: if isinstance(elem, base.ResourceBase): elem.invalidate(force_refresh) @@ -331,7 +323,7 @@ def synchronized(wrapped): """ lock = threading.RLock() - @six.wraps(wrapped) + @functools.wraps(wrapped) def wrapper(*args, **kwargs): with lock: return wrapped(*args, **kwargs) -- GitLab From 
9d01756833ec97a05f301a4fa1e8b2511d7c5c74 Mon Sep 17 00:00:00 2001 From: Richard Pioso Date: Thu, 19 Dec 2019 18:52:16 -0500 Subject: [PATCH 193/303] SSC.disks_sizes_bytes handle CapacityBytes is None In the open ocean, a Redfish service can return a SimpleStorage resource containing a Device property with its CapacityBytes property set to null. According to the Distributed Management Task Force (DMTF) schema for SimpleStorage, the type of CapacityBytes can be either integer or null [1]. The Dell integrated Dell Remote Access Controller (iDRAC) 9 running Lifecycle Controller firmware version 3.30.30.30 returns that for a Dell EMC PowerEdge R740xd containing a Dell HBA 330 12Gbps SAS Host Bus Adapter Controller (non-RAID), MiniCard (HBA 330 Mini) [2]. The HBA 330 Mini firmware version is 16.17.00.03. The same was observed against an R740xd running Lifecycle Controller firmware version 3.34.34.34. This changes the SimpleStorageCollection.disks_sizes_bytes property to process only Disks with CapacityBytes not set to None.
[1] https://redfish.dmtf.org/schemas/SimpleStorage.v1_2_3.json [2] http://eavesdrop.openstack.org/irclogs/%23openstack-ironic/%23openstack-ironic.2019-12-11.log.html#t2019-12-11T07:24:17 Change-Id: I57a0074a7f6c4495e7c73a00c87675ea87fdb6e8 Story: 2006918 Task: 37567 --- ...device-capacity-bytes-null-0672eed36d9da70a.yaml | 11 +++++++++++ sushy/resources/system/simple_storage.py | 3 ++- .../unit/resources/system/test_simple_storage.py | 13 +++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml diff --git a/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml b/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml new file mode 100644 index 0000000..5917609 --- /dev/null +++ b/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml @@ -0,0 +1,11 @@ +--- +fixes: + - | + Fixes bug in ``SimpleStorageCollection.disks_sizes_bytes`` which assumed + the type of a disk's ``CapacityBytes`` property is ``integer``. According + to the Distributed Management Task Force (DMTF) Redfish standard schema + [1], it can be ``null``, which is converted to ``None`` in Python. For + more information, see `story 2006918 + `_. 
+ + [1] https://redfish.dmtf.org/schemas/SimpleStorage.v1_2_3.json \ No newline at end of file diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py index 3045fd9..2abbc28 100644 --- a/sushy/resources/system/simple_storage.py +++ b/sushy/resources/system/simple_storage.py @@ -73,7 +73,8 @@ class SimpleStorageCollection(base.ResourceCollectionBase): return sorted(device.capacity_bytes for simpl_stor in self.get_members() for device in simpl_stor.devices - if device.status.state == res_cons.STATE_ENABLED) + if (device.status.state == res_cons.STATE_ENABLED + and device.capacity_bytes is not None)) @property def max_size_bytes(self): diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index 171df81..a4a25f9 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -102,6 +102,19 @@ class SimpleStorageCollectionTestCase(base.TestCase): self.assertEqual([4000000000000, 8000000000000], self.simpl_stor_col.disks_sizes_bytes) + def test_disks_sizes_bytes_capacity_bytes_none(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + json_doc = json.load(f) + + json_doc['Devices'][0]['CapacityBytes'] = None + self.conn.get.return_value.json.return_value = json_doc + + self.assertEqual([4000000000000], + self.simpl_stor_col.disks_sizes_bytes) + def test_max_size_bytes(self): self.conn.get.return_value.json.reset_mock() -- GitLab From c944ab44d18fafa8dbd74bdf45ee4d2c93b9da69 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 23 Dec 2019 10:46:14 +0100 Subject: [PATCH 194/303] Enforce running tox with correct python version based on env Since removing support for Python 2, we changed the basepython value to 3. This means that all the tox tests run with the default python version available in the system. 
This is not quite correct when running on environment such as py36, py37 or py38, since they imply running with different Python versions based on the environment. To enforce the correct version we need to add the option ignore_basepython_conflict available since tox 3.1.0 [0]. [0] https://tox.readthedocs.io/en/latest/config.html#conf-ignore_basepython_conflict Change-Id: I73f57c423dfacdb67d9e06de2885943d800f47a9 --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7c53bdc..ab282dc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,8 @@ [tox] -minversion = 2.0 +minversion = 3.1.0 envlist = py3,pep8 skipsdist = True +ignore_basepython_conflict=true [testenv] basepython = python3 -- GitLab From 1f60044bd860ff7efe47851b92eea695a755e28f Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Thu, 11 Jul 2019 13:17:08 -0500 Subject: [PATCH 195/303] Add Task Monitor support for async operations Redfish services that support asynchronous operations implement the Task service, Task resource, and Task Monitor. When performing long running update operations, the service returns a status of 202 Accepted. When returning a status code of 202, the response includes a Location header containing the URL of the Task Monitor. The response may also include the Retry-After header to specify the amount of time the client should wait before querying the operation status. The Task Monitor is an opaque URL that the client can query via a GET request. While the async operation is still in progress, the service returns a 202 Accepted status. The body of the response may be a Task resource or it may be empty. Once the operation completes, a GET on the Task Monitor will return a status that indicates the result of the operation (e.g., 200, 201, 4XX). A new TaskMonitor class is added to represent the Task Monitor and give the sushy user the ability to monitor the status of an async operation. 
The Connector class is updated allow an option to make HTTP requests blocking. This change utilizes the newly added TaskMonitor. Change-Id: Ice6700806710f37b590f6b879ec38656326b39c6 Story: 2003514 Task: 30719 --- ...task-monitor-support-21f711927ad6ec91.yaml | 5 + sushy/connector.py | 77 ++++++++--- sushy/resources/base.py | 3 +- sushy/resources/task_monitor.py | 114 ++++++++++++++++ .../tests/unit/resources/test_task_monitor.py | 122 ++++++++++++++++++ sushy/tests/unit/test_connector.py | 62 ++++++++- 6 files changed, 361 insertions(+), 22 deletions(-) create mode 100644 releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml create mode 100644 sushy/resources/task_monitor.py create mode 100644 sushy/tests/unit/resources/test_task_monitor.py diff --git a/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml b/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml new file mode 100644 index 0000000..557b5a2 --- /dev/null +++ b/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Add support for a Task Monitor resource to be able to monitor the state + of asynchronous operations. diff --git a/sushy/connector.py b/sushy/connector.py index 80e038f..9f43bf8 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -17,8 +17,10 @@ import logging from urllib import parse as urlparse import requests +import time from sushy import exceptions +from sushy.resources.task_monitor import TaskMonitor LOG = logging.getLogger(__name__) @@ -61,15 +63,17 @@ class Connector(object): """Close this connector and the associated HTTP session.""" self._session.close() - def _op(self, method, path='', data=None, headers=None, - **extra_session_req_kwargs): + def _op(self, method, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """Generic RESTful request handler. :param method: The HTTP method to be used, e.g: GET, POST, PUT, PATCH, etc... 
- :param path: The sub-URI path to the resource. + :param path: The sub-URI or absolute URL path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. :param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. @@ -77,16 +81,19 @@ class Connector(object): :raises: ConnectionError :raises: HTTPError """ - url = urlparse.urljoin(self._url, path) + url = path if urlparse.urlparse(path).netloc else urlparse.urljoin( + self._url, path) headers = headers or {} if not any(k.lower() == 'odata-version' for k in headers): headers['OData-Version'] = '4.0' # TODO(lucasagomes): We should mask the data to remove sensitive # information LOG.debug('HTTP request: %(method)s %(url)s; headers: %(headers)s; ' - 'body: %(data)s; session arguments: %(session)s;', + 'body: %(data)s; blocking: %(blocking)s; timeout: ' + '%(timeout)s; session arguments: %(session)s;', {'method': method, 'url': url, 'headers': headers, - 'data': data, 'session': extra_session_req_kwargs}) + 'data': data, 'blocking': blocking, 'timeout': timeout, + 'session': extra_session_req_kwargs}) try: response = self._session.request(method, url, json=data, headers=headers, @@ -110,6 +117,29 @@ class Connector(object): else: raise + if blocking and response.status_code == 202: + if not response.headers.get('location'): + m = ('HTTP response for %(method)s request to %(url)s ' + 'returned status 202, but no Location header' + % {'method': method, 'url': url}) + raise exceptions.ConnectionError(url=url, error=m) + timeout_at = time.time() + timeout + mon = (TaskMonitor(self, response.headers.get('location')) + .set_retry_after(response.headers.get('retry-after'))) + while mon.in_progress: + LOG.debug('Blocking for in-progress %(method)s call to ' + '%(url)s; 
sleeping for %(sleep)s seconds', + {'method': method, 'url': url, + 'sleep': mon.sleep_for}) + time.sleep(mon.sleep_for) + if time.time() >= timeout_at and mon.in_progress: + m = ('Timeout waiting for blocking %(method)s ' + 'request to %(url)s (timeout = %(timeout)s)' + % {'method': method, 'url': url, + 'timeout': timeout}) + raise exceptions.ConnectionError(url=url, error=m) + response = mon.response + LOG.debug('HTTP response for %(method)s %(url)s: ' 'status code: %(code)s', {'method': method, 'url': url, @@ -117,13 +147,15 @@ class Connector(object): return response - def get(self, path='', data=None, headers=None, - **extra_session_req_kwargs): + def get(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP GET method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. :param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. @@ -132,15 +164,18 @@ class Connector(object): :raises: HTTPError """ return self._op('GET', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, **extra_session_req_kwargs) - def post(self, path='', data=None, headers=None, - **extra_session_req_kwargs): + def post(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP POST method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. 
:param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. @@ -149,15 +184,18 @@ class Connector(object): :raises: HTTPError """ return self._op('POST', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, **extra_session_req_kwargs) - def patch(self, path='', data=None, headers=None, - **extra_session_req_kwargs): + def patch(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP PATCH method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. :param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. @@ -166,15 +204,18 @@ class Connector(object): :raises: HTTPError """ return self._op('PATCH', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, **extra_session_req_kwargs) - def put(self, path='', data=None, headers=None, - **extra_session_req_kwargs): + def put(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP PUT method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. :param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. 
@@ -183,15 +224,18 @@ class Connector(object): :raises: HTTPError """ return self._op('PUT', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, **extra_session_req_kwargs) - def delete(self, path='', data=None, headers=None, - **extra_session_req_kwargs): + def delete(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP DELETE method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. :param extra_session_req_kwargs: Optional keyword argument to pass requests library arguments which would pass on to requests session object. @@ -200,6 +244,7 @@ class Connector(object): :raises: HTTPError """ return self._op('DELETE', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, **extra_session_req_kwargs) def __enter__(self): diff --git a/sushy/resources/base.py b/sushy/resources/base.py index d33355b..1cda951 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -338,7 +338,8 @@ class JsonDataReader(AbstractJsonReader): def get_json(self): """Gets JSON file from URI directly""" - return self._conn.get(path=self._path).json() + data = self._conn.get(path=self._path) + return data.json() if data.content else {} class JsonPublicFileReader(AbstractJsonReader): diff --git a/sushy/resources/task_monitor.py b/sushy/resources/task_monitor.py new file mode 100644 index 0000000..cac9cba --- /dev/null +++ b/sushy/resources/task_monitor.py @@ -0,0 +1,114 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is described in Redfish specification section "Asynchronous operations" +# www.dmtf.org/sites/default/files/standards/documents/DSP0266_1.7.0.pdf + + +from datetime import datetime +from datetime import timedelta +import logging + +from dateutil import parser + +from sushy.resources import base + +LOG = logging.getLogger(__name__) + + +class TaskMonitor(base.ResourceBase): + + def __init__(self, + connector, + path='', + redfish_version=None): + """A class representing a Redfish Task Monitor + + :param connector: A Connector instance + :param path: sub-URI path to the resource. + :param redfish_version: The version of Redfish. Used to construct + the object according to schema of the given version. 
+ """ + super(TaskMonitor, self).__init__(connector, path, redfish_version) + self._retry_after = None + self._location_header = None + self._in_progress = True + self._response = None + + @staticmethod + def _to_datetime(retry_after): + if isinstance(retry_after, int) or retry_after.isdigit(): + # Retry-After: 120 + return datetime.now() + timedelta(seconds=int(retry_after)) + else: + # Retry-After: Fri, 31 Dec 1999 23:59:59 GMT + return parser.parse(retry_after) + + def set_retry_after(self, value): + """Set the time the client should wait before querying the task status + + :param value: The value of the Retry-After header, which can be the + number of seconds to wait or an `HTTP-date` string as + defined by RFC 7231 + :return: The TaskMonitor object + """ + self._retry_after = self._to_datetime(value or 1) + return self + + @property + def retry_after(self): + """Time the client should wait before querying the task status + + :return: The Retry-After time in `datetime` format + """ + return self._retry_after + + @property + def sleep_for(self): + """Seconds the client should wait before querying the operation status + + :return: The number of seconds to wait + """ + return max(0, (self._retry_after - datetime.now()).total_seconds()) + + @property + def location_header(self): + """The Location header returned from the GET on the Task Monitor + + :return: The Location header (an absolute URL) + """ + return self._location_header + + @property + def in_progress(self): + """Checks the status of the async task + + :return: True if the async task is still in progress, False otherwise + """ + if not self._in_progress: + return False + r = self._conn.get(self._path) + self._response = r + self._location_header = r.headers.get('location') + if r.status_code == 202: + self.set_retry_after(r.headers.get('retry-after')) + else: + self._in_progress = False + return self._in_progress + + @property + def response(self): + """The response from the last TaskMonitor in_progress 
check + + :return: The `requests` response object or None + """ + return self._response diff --git a/sushy/tests/unit/resources/test_task_monitor.py b/sushy/tests/unit/resources/test_task_monitor.py new file mode 100644 index 0000000..f18951f --- /dev/null +++ b/sushy/tests/unit/resources/test_task_monitor.py @@ -0,0 +1,122 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from datetime import timedelta + +from dateutil import parser +import mock + +from sushy.resources.task_monitor import TaskMonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + self.data = {'fake': 'data'} + self.http_date = 'Fri, 31 Dec 1999 23:59:59 GMT' + self.seconds = 120 + self.datetime = parser.parse(self.http_date) + self.req_headers = {'X-Fake': 'header'} + self.res_headers1 = {'location': 'https://sample.com/foo/bar', + 'retry-after': self.http_date} + self.res_headers2 = {'location': 'https://sample.com/foo/bar', + 'retry-after': str(self.seconds)} + + def test_task_in_progress(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', 
data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertTrue(tm.in_progress) + + def test_task_not_in_progress(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 201 + self.conn.get.return_value.json.return_value = self.data.copy() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertFalse(tm.in_progress) + + def test_retry_after_http_date(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertEqual(self.datetime, tm.retry_after) + + def test_retry_after_seconds(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.json.return_value = {} + start = datetime.now() + timedelta(seconds=self.seconds) + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + end = datetime.now() + 
timedelta(seconds=self.seconds) + self.assertIsNotNone(tm) + self.assertTrue(start <= tm.retry_after <= end) + + def test_sleep_for(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.json.return_value = {} + start = datetime.now() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + sleep_for = tm.sleep_for + elapsed = (datetime.now() - start).total_seconds() + self.assertTrue(self.seconds - elapsed <= sleep_for <= self.seconds) + + def test_response(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 201 + self.conn.get.return_value.json.return_value = self.data.copy() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertFalse(tm.in_progress) + response = tm.response + self.assertEqual(201, response.status_code) + self.assertEqual(self.data.copy(), response.json()) diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 4d84c40..184b72f 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -48,35 +48,80 @@ class ConnectorMethodsTestCase(base.TestCase): self.conn.get(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', - data=self.data, headers=self.headers) + data=self.data, headers=self.headers, + blocking=False, 
timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_get_blocking(self, mock__op): + self.conn.get(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_post(self, mock__op): self.conn.post(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', - data=self.data, headers=self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_post_blocking(self, mock__op): + self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True, timeout=120) + mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=120) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_patch(self, mock__op): self.conn.patch(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', - data=self.data, headers=self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_patch_blocking(self, mock__op): + self.conn.patch(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_put(self, mock__op): self.conn.put(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PUT', 
'fake/path', - data=self.data, headers=self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_put_blocking(self, mock__op): + self.conn.put(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'PUT', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_delete(self, mock__op): self.conn.delete(path='fake/path', data=self.data.copy(), headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', - data=self.data, headers=self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_delete_blocking(self, mock__op): + self.conn.delete(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) def test_set_auth(self): mock_auth = mock.MagicMock() @@ -286,3 +331,10 @@ class ConnectorOpTestCase(base.TestCase): self.conn._op('GET', 'http://foo.bar') exc = cm.exception self.assertEqual(http_client.FORBIDDEN, exc.status_code) + + def test_blocking_no_location_header(self): + self.request.return_value.status_code = http_client.ACCEPTED + self.request.return_value.headers = {'retry-after': 5} + with self.assertRaisesRegex(exceptions.ConnectionError, + 'status 202, but no Location header'): + self.conn._op('POST', 'http://foo.bar', blocking=True) -- GitLab From 9e12fbf109eff84ff31735a690dd010ecc4df24e Mon Sep 17 00:00:00 2001 From: Shivanand Tendulker Date: Sun, 14 Jul 2019 10:52:16 -0400 Subject: [PATCH 196/303] Handle incomplete messages in MessageRegistry Some messges in the MessageRegistry are not 
having fields like 'Description' and 'Severity'. Most of the time the string in 'Message' and 'Description' fields are same. This fix proposes to make 'Description' as not-required field and setting the 'Severity' field to default value of 'CRITICAL' if its missing in the messages of MessageRegistry. Example of a message in a MessageRegistry "UpdateFailed": { "Message": "The update failed with a component specific error (%1).", "NumberOfArgs": 1, "ParamTypes": [ "string" ], "Resolution": "Retry the update after remedying the component error." } Story: 2007216 Task: 38467 Change-Id: I3e9e2ee0867f59684c24e60772a5318801891521 --- ...6-fix-to-message-registry-cff37659f03ba815.yaml | 8 ++++++++ sushy/resources/registry/message_registry.py | 5 +++-- .../tests/unit/json_samples/message_registry.json | 9 +++++++++ .../resources/registry/test_message_registry.py | 14 +++++++++++++- 4 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml diff --git a/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml b/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml new file mode 100644 index 0000000..905fa8a --- /dev/null +++ b/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Handles incomplete messages in MessageRegistry that are not having fields + like 'Description' and 'Severity'. See story + `2007216 `_ for more + information. 
+ diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index 01b2e13..2b32ede 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -15,12 +15,13 @@ from sushy.resources import base +from sushy.resources import constants as res_cons from sushy.resources import mappings as res_maps class MessageDictionaryField(base.DictionaryField): - description = base.Field('Description', required=True) + description = base.Field('Description', required=False) """Indicates how and when the message is returned by the Redfish service""" message = base.Field('Message', required=True) @@ -46,7 +47,7 @@ class MessageDictionaryField(base.DictionaryField): severity = base.MappedField('Severity', res_maps.SEVERITY_VALUE_MAP, - required=True) + default=res_cons.SEVERITY_CRITICAL) """Mapped severity of the message""" diff --git a/sushy/tests/unit/json_samples/message_registry.json b/sushy/tests/unit/json_samples/message_registry.json index eb61b68..2468701 100644 --- a/sushy/tests/unit/json_samples/message_registry.json +++ b/sushy/tests/unit/json_samples/message_registry.json @@ -35,6 +35,15 @@ "number" ], "Resolution": "Try again" + }, + "MissingThings": { + "Message": "Property's %1 value cannot be less than %2.", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Try Later" } } } diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index af34760..e94c486 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -47,7 +47,7 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual('Test', self.registry.registry_prefix) self.assertEqual('1.1.1', self.registry.registry_version) self.assertEqual('sushy', self.registry.owning_entity) - self.assertEqual(3, 
len(self.registry.messages)) + self.assertEqual(4, len(self.registry.messages)) self.assertEqual('Everything OK', self.registry.messages['Success'].description) self.assertEqual('Everything done successfully.', @@ -61,6 +61,18 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual(res_cons.PARAMTYPE_NUMBER, self.registry.messages['TooBig'].param_types[1]) self.assertEqual('Panic', self.registry.messages['Failed'].resolution) + self.assertEqual( + 2, len(self.registry.messages['MissingThings'].param_types)) + self.assertEqual(res_cons.SEVERITY_CRITICAL, + self.registry.messages['MissingThings'].severity) + self.assertEqual( + res_cons.PARAMTYPE_STRING, + self.registry.messages['MissingThings'].param_types[0]) + self.assertEqual( + res_cons.PARAMTYPE_NUMBER, + self.registry.messages['MissingThings'].param_types[1]) + self.assertEqual( + 'Try Later', self.registry.messages['MissingThings'].resolution) def test__parse_attribtues_unknown_param_type(self): self.registry.json['Messages']['Failed']['ParamTypes'] = \ -- GitLab From c961d8fa804a0d9b44f037b4b05b7edb76e21ed6 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Fri, 7 Feb 2020 19:27:28 +0100 Subject: [PATCH 197/303] Automatically discover available system/manager/chassis The ``get_system``, ``get_manager`` and ``get_chassis`` methods have been modified not to require the ``identity`` parameter referring to a particular resource instance. If ``identity`` is omited, sushy will default to the only available resource for as long as it's single and therefore deterministic. The intent is to simplify user API by not requiring the consumer to discover available resources prior to requesting one. 
Change-Id: Ifbf8b05154b619d4831c26870ee97c3dce555fd2 Story: 2007258 Task: 38707 --- ...add-default-identity-10c5dd23bed0e915.yaml | 10 ++ sushy/exceptions.py | 4 + sushy/main.py | 50 +++++++- sushy/tests/unit/test_main.py | 108 ++++++++++++++++++ 4 files changed, 166 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml diff --git a/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml b/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml new file mode 100644 index 0000000..6aa2a8b --- /dev/null +++ b/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml @@ -0,0 +1,10 @@ +--- +features: + - | + The ``get_system``, ``get_manager`` and ``get_chassis`` methods modified + not to require the ``identity`` parameter referring to a particular + resource instance. If ``identity`` is omited, sushy will default to the + only available resource for as long as it's single and therefore + deterministic. + The intent is to simplify user API by not requiring the consumer to + discover available resources prior to requesting one. 
diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 9f1e01a..3d6d18b 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -60,6 +60,10 @@ class ArchiveParsingError(SushyError): message = 'Failed parsing archive "%(path)s": %(error)s' +class UnknownDefaultError(SushyError): + message = 'Failed at determining default for "%(entity)s": %(error)s' + + class ExtensionError(SushyError): message = ('Sushy Extension Error: %(error)s') diff --git a/sushy/main.py b/sushy/main.py index d397c32..d378e12 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -182,12 +182,25 @@ class Sushy(base.ResourceBase): redfish_version=self.redfish_version, registries=self.registries) - def get_system(self, identity): + def get_system(self, identity=None): """Given the identity return a System object - :param identity: The identity of the System resource + :param identity: The identity of the System resource. If not given, + sushy will default to the single available System or fail + if there appear to be more or less then one System listed. + :raises: `UnknownDefaultError` if default system can't be determined. :returns: The System object """ + if identity is None: + systems_collection = self.get_system_collection() + listed_systems = systems_collection.get_members() + if len(listed_systems) != 1: + raise exceptions.UnknownDefaultError( + entity='ComputerSystem', + error='System count is not exactly one') + + identity = listed_systems[0].path + return system.System(self._conn, identity, redfish_version=self.redfish_version, registries=self.registries) @@ -207,12 +220,25 @@ class Sushy(base.ResourceBase): redfish_version=self.redfish_version, registries=self.registries) - def get_chassis(self, identity): + def get_chassis(self, identity=None): """Given the identity return a Chassis object - :param identity: The identity of the Chassis resource + :param identity: The identity of the Chassis resource. 
If not given, + sushy will default to the single available chassis or fail + if there appear to be more or less then one Chassis listed. + :raises: `UnknownDefaultError` if default system can't be determined. :returns: The Chassis object """ + if identity is None: + chassis_collection = self.get_chassis_collection() + listed_chassis = chassis_collection.get_members() + if len(listed_chassis) != 1: + raise exceptions.UnknownDefaultError( + entity='Chassis', + error='Chassis count is not exactly one') + + identity = listed_chassis[0].path + return chassis.Chassis(self._conn, identity, redfish_version=self.redfish_version, registries=self.registries) @@ -257,12 +283,24 @@ class Sushy(base.ResourceBase): redfish_version=self.redfish_version, registries=self.registries) - def get_manager(self, identity): + def get_manager(self, identity=None): """Given the identity return a Manager object - :param identity: The identity of the Manager resource + :param identity: The identity of the Manager resource. If not given, + sushy will default to the single available Manager or fail + if there appear to be more or less then one Manager listed. 
:returns: The Manager object """ + if identity is None: + managers_collection = self.get_manager_collection() + listed_managers = managers_collection.get_members() + if len(listed_managers) != 1: + raise exceptions.UnknownDefaultError( + entity='Manager', + error='Manager count is not exactly one') + + identity = listed_managers[0].path + return manager.Manager(self._conn, identity, redfish_version=self.redfish_version, registries=self.registries) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 92b9e3d..9750b26 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -116,6 +116,41 @@ class MainTestCase(base.TestCase): redfish_version=self.root.redfish_version, registries=mock_registries) + @mock.patch.object(system, 'SystemCollection', autospec=True) + @mock.patch.object(system, 'System', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_system_default_ok( + self, mock_registries, mock_system, mock_system_collection): + self.root._standard_message_registries_path = None + mock_system.path = 'fake-system-id' + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [mock_system] + self.root.get_system() + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Systems', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + mock_system.assert_called_once_with( + self.root._conn, 'fake-system-id', + redfish_version=self.root.redfish_version, + registries=mock_registries) + + @mock.patch.object(system, 'SystemCollection', autospec=True) + @mock.patch.object(system, 'System', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_system_default_failure( + self, mock_registries, mock_system, mock_system_collection): + self.root._standard_message_registries_path = None + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [] 
+ self.assertRaises(exceptions.UnknownDefaultError, self.root.get_system) + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Systems', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + @mock.patch.object(chassis, 'Chassis', autospec=True) def test_get_chassis(self, mock_chassis): self.root.get_chassis('fake-chassis-id') @@ -123,6 +158,43 @@ class MainTestCase(base.TestCase): self.root._conn, 'fake-chassis-id', self.root.redfish_version, self.root.registries) + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + @mock.patch.object(chassis, 'Chassis', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_chassis_default_ok( + self, mock_registries, mock_chassis, mock_chassis_collection): + self.root._standard_message_registries_path = None + mock_chassis.path = 'fake-chassis-id' + mock_members = mock_chassis_collection.return_value.get_members + mock_members.return_value = [mock_chassis] + self.root.get_chassis() + mock_chassis_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + mock_chassis.assert_called_once_with( + self.root._conn, 'fake-chassis-id', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + @mock.patch.object(chassis, 'Chassis', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_chassis_default_failure( + self, mock_registries, mock_chassis, mock_chassis_collection): + self.root._standard_message_registries_path = None + mock_members = mock_chassis_collection.return_value.get_members + mock_members.return_value = [] + self.assertRaises( + exceptions.UnknownDefaultError, self.root.get_chassis) + mock_chassis_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + 
redfish_version=self.root.redfish_version, + registries=mock_registries + ) + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) def test_get_chassis_collection(self, chassis_collection_mock): self.root.get_chassis_collection() @@ -158,6 +230,42 @@ class MainTestCase(base.TestCase): self.root._conn, 'fake-manager-id', self.root.redfish_version, self.root.registries) + @mock.patch.object(manager, 'ManagerCollection', autospec=True) + @mock.patch.object(manager, 'Manager', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_manager_default_ok( + self, mock_registries, mock_manager, mock_manager_collection): + self.root._standard_message_registries_path = None + mock_manager.path = 'fake-manager-id' + mock_members = mock_manager_collection.return_value.get_members + mock_members.return_value = [mock_manager] + self.root.get_manager() + mock_manager_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Managers', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + mock_manager.assert_called_once_with( + self.root._conn, 'fake-manager-id', + redfish_version=self.root.redfish_version, + registries=mock_registries) + + @mock.patch.object(manager, 'ManagerCollection', autospec=True) + @mock.patch.object(manager, 'Manager', autospec=True) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_get_manager_default_failure( + self, mock_registries, mock_manager, mock_system_collection): + self.root._standard_message_registries_path = None + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [] + self.assertRaises( + exceptions.UnknownDefaultError, self.root.get_manager) + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Managers', + redfish_version=self.root.redfish_version, + registries=mock_registries + ) + @mock.patch.object(sessionservice, 'SessionService', autospec=True) def 
test_get_sessionservice(self, mock_sess_serv): self.root.get_session_service() -- GitLab From 1ec13aaba2d0b9c703a2564ec588e4d6de92697b Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 21 Jan 2020 17:31:11 +0100 Subject: [PATCH 198/303] Fix 'None' field value processing Change [1] introduced a conditional call to Field's "adapter" callable supposedly to work-around some OEM issues. Sadly, the exact problem that change [1] has been trying to solve was never explained and remains murky. However, fix [1] has also introduced a bug which makes sushy ignoring 'None' literals in the values. This sometimes leads to sushy failing to operate on perfectly valid JSON documents. This change removes most of the conditions guarding Field's "adapter" function calls. 1. https://review.opendev.org/#/c/669963/6 Change-Id: I8d1e1691a0bb2b6315894c85569a73633d34c1cb --- sushy/resources/base.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 1cda951..02a8c01 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -101,16 +101,12 @@ class Field(object): # Do not run the adapter on the default value return self._default - try: - # Get the value based on the name, defaulting to an empty dict - # Check to ensure that value is implemented by OEM - # TODO(etingof): we should revisit this logic/code - if (item is not None and item != {} and - str(item).lower() != 'none'): - value = self._adapter(item) + # NOTE(etingof): this is just to account for schema violation + if item is None: + return - else: - value = item + try: + return self._adapter(item) except (UnicodeError, ValueError, TypeError) as exc: path = (nested_in or []) + self._path @@ -119,8 +115,6 @@ class Field(object): resource=resource.path, error=exc) - return value - def _collect_fields(resource): """Collect fields from the JSON. 
-- GitLab From 8440eb2826f14e579e0892ed3566f43bd86a888c Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Wed, 22 Jan 2020 16:44:19 +0100 Subject: [PATCH 199/303] Relax required Redfish fields handling Redfish defines some of the fields in its JSON schemas as mandatory. However, some implementations ignore this requirement and occasionally omit some required fields in the Redfish document tree they produce. Failing the whole Redfish interaction basing solely on the absence of a required (bit non-essential) field makes sushy perfect, but not exactly practical. This patch changes to semantics of the `default` parameter in ``Field` constructor in a way that it can inhibit otherwise fatal failure when a select of required attributes are missing. Along this mis/feature, some non-essential fields in Redfish message registry have been made required and defaulted effectively making them non-required. Change-Id: I637f11ff9ceab398077eae19db83db396356c8dc Story: 2006641 Task: 38362 --- sushy/resources/base.py | 38 ++++++++++--------- sushy/resources/registry/message_registry.py | 5 ++- .../registry/test_message_registry.py | 12 +++++- 3 files changed, 35 insertions(+), 20 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 02a8c01..16af69e 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -40,10 +40,9 @@ class Field(object): :param path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested field. - :param required: whether this field is required. Missing required - fields result in MissingAttributeError. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. :param default: the default value to use when the field is missing. - Only has effect when the field is not required. :param adapter: a function to call to transform and/or validate the received value. 
UnicodeError, ValueError or TypeError from this call are reraised as MalformedAttributeError. @@ -80,7 +79,8 @@ class Field(object): :param resource: ResourceBase instance for which the field is loaded. :param nested_in: parent resource path (for error reporting only), must be a list of strings or None. - :raises: MissingAttributeError if a required field is missing. + :raises: MissingAttributeError if a required field is missing + and not defaulted. :raises: MalformedAttributeError on invalid field value or type. :returns: loaded and verified value """ @@ -94,12 +94,18 @@ class Field(object): except KeyError: if self._required: path = (nested_in or []) + self._path - raise exceptions.MissingAttributeError( - attribute='/'.join(path), - resource=resource.path) - else: - # Do not run the adapter on the default value - return self._default + + if self._default is None: + raise exceptions.MissingAttributeError( + attribute='/'.join(path), + resource=resource.path) + + logging.warning( + 'Applying default "%s" on required, but missing ' + 'attribute "%s"' % (self._default, path)) + + # Do not run the adapter on the default value + return self._default # NOTE(etingof): this is just to account for schema violation if item is None: @@ -251,11 +257,10 @@ class MappedField(Field): a string or a list of string. In the latter case, the value will be fetched from a nested object. :param mapping: a mapping to take values from. - :param required: whether this field is required. Missing required - fields result in MissingAttributeError. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. :param default: the default value to use when the field is missing. - Only has effect when the field is not required. This value is not - matched against the mapping. + This value is not matched against the mapping. 
""" if not isinstance(mapping, collections.abc.Mapping): raise TypeError("The mapping argument must be a mapping") @@ -278,10 +283,9 @@ class MappedListField(Field): :param field: JSON field to fetch the list of values from. :param mapping: a mapping for the list elements. - :param required: whether this field is required. Missing required - fields result in MissingAttributeError. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. :param default: the default value to use when the field is missing. - Only has effect when the field is not required. """ if not isinstance(mapping, collections.abc.Mapping): raise TypeError("The mapping argument must be a mapping") diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index 2b32ede..fc90022 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -21,7 +21,7 @@ from sushy.resources import mappings as res_maps class MessageDictionaryField(base.DictionaryField): - description = base.Field('Description', required=False) + description = base.Field('Description', required=True, default='') """Indicates how and when the message is returned by the Redfish service""" message = base.Field('Message', required=True) @@ -47,7 +47,8 @@ class MessageDictionaryField(base.DictionaryField): severity = base.MappedField('Severity', res_maps.SEVERITY_VALUE_MAP, - default=res_cons.SEVERITY_CRITICAL) + required=True, + default=res_cons.SEVERITY_WARNING) """Mapped severity of the message""" diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index e94c486..93e8671 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -63,7 +63,7 @@ class MessageRegistryTestCase(base.TestCase): 
self.assertEqual('Panic', self.registry.messages['Failed'].resolution) self.assertEqual( 2, len(self.registry.messages['MissingThings'].param_types)) - self.assertEqual(res_cons.SEVERITY_CRITICAL, + self.assertEqual(res_cons.SEVERITY_WARNING, self.registry.messages['MissingThings'].severity) self.assertEqual( res_cons.PARAMTYPE_STRING, @@ -74,6 +74,16 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual( 'Try Later', self.registry.messages['MissingThings'].resolution) + def test__parse_attributes_missing_msg_desc(self): + self.json_doc['Messages']['Success'].pop('Description') + self.registry._parse_attributes(self.json_doc) + self.assertEqual('', self.registry.messages['Success'].description) + + def test__parse_attributes_missing_msg_severity(self): + self.json_doc['Messages']['Success'].pop('Severity') + self.registry._parse_attributes(self.json_doc) + self.assertEqual('warning', self.registry.messages['Success'].severity) + def test__parse_attribtues_unknown_param_type(self): self.registry.json['Messages']['Failed']['ParamTypes'] = \ ['unknown_type'] -- GitLab From e5b8c8724edbc334478010214e5829ae8881943e Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Mon, 2 Mar 2020 16:36:53 +0100 Subject: [PATCH 200/303] Fix incorrect usage of assertRaisesRegex in unit tests This test passes None instead of a callable, which just happens to work in Python < 3.9 and breaks in 3.9. Since SessionTimeout is not actually required, I'm removing the wrong unit test. 
Change-Id: I64db7482c4879950dee2b020033d4d1101f61bb6 --- .../unit/resources/sessionservice/test_sessionservice.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index 84f07a0..8af89b4 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -56,12 +56,6 @@ class SessionServiceTestCase(base.TestCase): self.assertEqual(30, self.sess_serv_inst.session_timeout) self.assertEqual(exp_path, self.sess_serv_inst.path) - def test__parse_attributes_missing_timeout(self): - self.sess_serv_inst.json.pop('SessionTimeout') - self.assertRaisesRegex( - exceptions.MissingAttributeError, 'attribute SessionTimeout', - self.sess_serv_inst._parse_attributes(self.json_doc)) - def test__get_sessions_collection_path(self): self.sess_serv_inst.json.pop('Sessions') self.assertRaisesRegex( -- GitLab From 97cee86a2fb35dd3e12f152ee6c214dca895dbde Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 2 Mar 2020 13:09:21 +0100 Subject: [PATCH 201/303] Add `set_system_boot_options` method Some BMC implementations do not support changing all attributes of the Redfish `Boot` object. To address that, this patch adds a new `set_system_boot_options` method to the `System` object superseding the set_system_boot_source` method. The new method has all boot parameters optional to allow for more atomicity when PATCH'ing Redfish `Boot` object. When sending HTTP PATCH request, this new method will only include those items into the JSON document, that are explicitly passed by the user. This change might improve interoperability with BMCs that do not handle certain attributes of the `Boot` object. 
Change-Id: I4fa9f718de99def4da162acd4636a17d53ee9a51 Story: 2007355 Task: 38907 --- ...decouple-boot-params-c75e80f5951abb12.yaml | 10 +++ sushy/resources/system/system.py | 83 +++++++++++++------ .../unit/resources/system/test_system.py | 58 +++++++++++++ 3 files changed, 124 insertions(+), 27 deletions(-) create mode 100644 releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml diff --git a/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml b/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml new file mode 100644 index 0000000..7a5a502 --- /dev/null +++ b/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Adds a new ``set_system_boot_options`` method to the ``System`` object + superseding the ``set_system_boot_source`` method. The new method has + all boot parameters optional to allow for more atomicity when PATCH'ing + Redfish ``Boot`` object. The new method will only include those items in + the PATCH document, that are explicitly passed by the user. This change + might improve interoperability with BMCs that do not handle certain + attributes of the ``Boot`` object. diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 9423fdb..350cf7b 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -16,6 +16,7 @@ # This is referred from Redfish standard schema. # https://redfish.dmtf.org/schemas/ComputerSystem.v1_5_0.json +import collections import logging from sushy import exceptions @@ -207,41 +208,44 @@ class System(base.ResourceBase): set(sys_maps.BOOT_SOURCE_TARGET_MAP). intersection(self.boot.allowed_values)]) - def set_system_boot_source(self, target, - enabled=sys_cons.BOOT_SOURCE_ENABLED_ONCE, - mode=None): - """Set the boot source. + def set_system_boot_options(self, target=None, enabled=None, mode=None): + """Set boot source and/or boot frequency and/or boot mode. 
- Set the boot source to use on next reboot of the System. + Set the boot source and/or boot frequency and/or boot mode to use + on next reboot of the System. - :param target: The target boot source. + :param target: The target boot source, optional. :param enabled: The frequency, whether to set it for the next reboot only (BOOT_SOURCE_ENABLED_ONCE) or persistent to all future reboots (BOOT_SOURCE_ENABLED_CONTINUOUS) or disabled - (BOOT_SOURCE_ENABLED_DISABLED). - :param mode: The boot mode, UEFI (BOOT_SOURCE_MODE_UEFI) or - BIOS (BOOT_SOURCE_MODE_BIOS). + (BOOT_SOURCE_ENABLED_DISABLED), optional. + :param mode: The boot mode (UEFI: BOOT_SOURCE_MODE_UEFI or + BIOS: BOOT_SOURCE_MODE_BIOS), optional. :raises: InvalidParameterValueError, if any information passed is invalid. """ - valid_targets = self.get_allowed_system_boot_source_values() - if target not in valid_targets: - raise exceptions.InvalidParameterValueError( - parameter='target', value=target, valid_values=valid_targets) + data = collections.defaultdict(dict) - if enabled not in sys_maps.BOOT_SOURCE_ENABLED_MAP_REV: - raise exceptions.InvalidParameterValueError( - parameter='enabled', value=enabled, - valid_values=list(sys_maps.BOOT_SOURCE_ENABLED_MAP_REV)) + if target is not None: + valid_targets = self.get_allowed_system_boot_source_values() + if target not in valid_targets: + raise exceptions.InvalidParameterValueError( + parameter='target', value=target, + valid_values=valid_targets) - data = { - 'Boot': { - 'BootSourceOverrideTarget': - sys_maps.BOOT_SOURCE_TARGET_MAP_REV[target], - 'BootSourceOverrideEnabled': - sys_maps.BOOT_SOURCE_ENABLED_MAP_REV[enabled] - } - } + fishy_target = sys_maps.BOOT_SOURCE_TARGET_MAP_REV[target] + + data['Boot']['BootSourceOverrideTarget'] = fishy_target + + if enabled is not None: + if enabled not in sys_maps.BOOT_SOURCE_ENABLED_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='enabled', value=enabled, + 
valid_values=list(sys_maps.BOOT_SOURCE_ENABLED_MAP_REV)) + + fishy_freq = sys_maps.BOOT_SOURCE_ENABLED_MAP_REV[enabled] + + data['Boot']['BootSourceOverrideEnabled'] = fishy_freq if mode is not None: if mode not in sys_maps.BOOT_SOURCE_MODE_MAP_REV: @@ -249,13 +253,38 @@ class System(base.ResourceBase): parameter='mode', value=mode, valid_values=list(sys_maps.BOOT_SOURCE_MODE_MAP_REV)) - data['Boot']['BootSourceOverrideMode'] = ( - sys_maps.BOOT_SOURCE_MODE_MAP_REV[mode]) + fishy_mode = sys_maps.BOOT_SOURCE_MODE_MAP_REV[mode] + + data['Boot']['BootSourceOverrideMode'] = fishy_mode # TODO(lucasagomes): Check the return code and response body ? # Probably we should call refresh() as well. self._conn.patch(self.path, data=data) + # TODO(etingof): we should remove this method, eventually + def set_system_boot_source( + self, target, enabled=sys_cons.BOOT_SOURCE_ENABLED_ONCE, + mode=None): + """Set boot source and/or boot frequency and/or boot mode. + + Set the boot source and/or boot frequency and/or boot mode to use + on next reboot of the System. + + This method is obsoleted by `set_system_boot_options`. + + :param target: The target boot source. + :param enabled: The frequency, whether to set it for the next + reboot only (BOOT_SOURCE_ENABLED_ONCE) or persistent to all + future reboots (BOOT_SOURCE_ENABLED_CONTINUOUS) or disabled + (BOOT_SOURCE_ENABLED_DISABLED). + Default is `BOOT_SOURCE_ENABLED_ONCE`. + :param mode: The boot mode (UEFI: BOOT_SOURCE_MODE_UEFI or + BIOS: BOOT_SOURCE_MODE_BIOS), optional. + :raises: InvalidParameterValueError, if any information passed is + invalid. + """ + self.set_system_boot_options(target, enabled, mode) + def set_indicator_led(self, state): """Set IndicatorLED to the given state. 
diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 6d8dffc..dcfa487 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -226,6 +226,64 @@ class SystemTestCase(base.TestCase): self.assertIsInstance(values, set) self.assertEqual(1, mock_log.call_count) + def test_set_system_boot_options(self): + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_PXE, + enabled=sushy.BOOT_SOURCE_ENABLED_CONTINUOUS, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Continuous', + 'BootSourceOverrideTarget': 'Pxe', + 'BootSourceOverrideMode': 'UEFI'}}) + + def test_set_system_boot_options_no_mode_specified(self): + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_HDD, + enabled=sushy.BOOT_SOURCE_ENABLED_ONCE) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Once', + 'BootSourceOverrideTarget': 'Hdd'}}) + + def test_set_system_boot_options_no_target_specified(self): + self.sys_inst.set_system_boot_options( + enabled=sushy.BOOT_SOURCE_ENABLED_CONTINUOUS, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Continuous', + 'BootSourceOverrideMode': 'UEFI'}}) + + def test_set_system_boot_options_no_freq_specified(self): + self.sys_inst.set_system_boot_options( + target=sushy.BOOT_SOURCE_TARGET_PXE, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideTarget': 'Pxe', + 'BootSourceOverrideMode': 'UEFI'}}) + + def test_set_system_boot_options_nothing_specified(self): + 
self.sys_inst.set_system_boot_options() + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', data={}) + + def test_set_system_boot_options_invalid_target(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.sys_inst.set_system_boot_source, + 'invalid-target') + + def test_set_system_boot_options_invalid_enabled(self): + with self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + '"enabled" value.*{0}'.format( + list(sys_map.BOOT_SOURCE_ENABLED_MAP_REV))): + + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_HDD, + enabled='invalid-enabled') + def test_set_system_boot_source(self): self.sys_inst.set_system_boot_source( sushy.BOOT_SOURCE_TARGET_PXE, -- GitLab From 8177e66462ac83e527cc96f92e4ea6c391737570 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 9 Mar 2020 15:02:45 +0100 Subject: [PATCH 202/303] Make MessageRegistryFile.Registry attribute non-required According to Redfish schema, MessageRegistryFile.Registry attribute is required and should specify some prefix for the accompanying messages. It turned out, that certain vendor does not supply this attribute in Redfish response message. That makes sushy exploding on attempt to fetch any resource from BMC. This patch makes the above mentioned attribute defaulted to some nonsense what will make sushy logging the problem, but not exploding. It is not entirely clear if the client could make sense of the messages without this "Registry" prefix though. 
Story: 2006641 Task: 36864 Change-Id: Ie8f867c7d824db156af8b874acae45d6496014a7 --- sushy/resources/registry/message_registry_file.py | 2 +- .../resources/registry/test_message_registry_file.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 9eb638f..73f2476 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -66,7 +66,7 @@ class MessageRegistryFile(base.ResourceBase): languages = base.Field('Languages', required=True) """List of RFC 5646 language codes supported by this resource""" - registry = base.Field('Registry', required=True) + registry = base.Field('Registry', required=True, default='UNKNOWN.0.0') """Prefix for MessageId used for messages from this resource This attribute is in form Registry_name.Major_version.Minor_version diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index fd8f649..faa7e92 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -191,6 +191,16 @@ class MessageRegistryFileTestCase(base.TestCase): 'No message registry found for %(language)s or default', {'language': 'en'}) + @mock.patch('sushy.resources.base.logging.warning', + autospec=True) + def test__parse_attributes_missing_registry(self, mock_log): + self.json_doc.pop('Registry') + self.reg_file._parse_attributes(self.json_doc) + self.assertEqual('UNKNOWN.0.0', self.reg_file.registry) + mock_log.assert_called_with( + 'Applying default "UNKNOWN.0.0" on required, but missing ' + 'attribute "[\'Registry\']"') + class MessageRegistryFileCollectionTestCase(base.TestCase): -- GitLab From 5b07f04538e16a31f6e7a5dd69b558850ddcc126 Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Mon, 16 Mar 
2020 20:01:26 +0100 Subject: [PATCH 203/303] Add `Connector(..., response_callback=None)`` parameter Adds optional `response_callback` parameter to `Connector` class that can be used by the application to receive vanilla HTTP messages in the course of running Redfish call. The intention is to facilitate Redfish exchange debugging. Change-Id: I735a51a651d05b85c47a3c9a33e587f9c5b4c28f --- .../notes/add-response-cb-65d448ee2690d0b2.yaml | 7 +++++++ sushy/connector.py | 9 ++++++++- sushy/tests/unit/test_connector.py | 17 +++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml diff --git a/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml b/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml new file mode 100644 index 0000000..b1d80d1 --- /dev/null +++ b/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml @@ -0,0 +1,7 @@ +--- +features: + - | + Adds optional ``response_callback`` parameter to ``Connector`` class + that can be used by the application to receive vanilla HTTP messages + in the course of running Redfish call. The intention is to facilitate + Redfish exchange debugging. diff --git a/sushy/connector.py b/sushy/connector.py index 9f43bf8..504aac4 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -27,11 +27,14 @@ LOG = logging.getLogger(__name__) class Connector(object): - def __init__(self, url, username=None, password=None, verify=True): + def __init__( + self, url, username=None, password=None, verify=True, + response_callback=None): self._url = url self._verify = verify self._session = requests.Session() self._session.verify = self._verify + self._response_callback = response_callback # NOTE(etingof): field studies reveal that some BMCs choke at # long-running persistent HTTP connections (or TCP connections). 
@@ -100,6 +103,10 @@ class Connector(object): **extra_session_req_kwargs) except requests.ConnectionError as e: raise exceptions.ConnectionError(url=url, error=e) + + if self._response_callback: + self._response_callback(response) + # If we received an AccessError, and we # previously established a redfish session # there is a chance that the session has timed-out. diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 184b72f..1c81afa 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -43,6 +43,16 @@ class ConnectorMethodsTestCase(base.TestCase): password='password') self.assertEqual(conn._session.auth, ('admin', 'password')) + def test_init_with_callback(self): + def response_callback(response): + return + + conn = connector.Connector('http://foo.bar:1234', + username='admin', + password='password', + response_callback=response_callback) + self.assertIs(conn._response_callback, response_callback) + @mock.patch.object(connector.Connector, '_op', autospec=True) def test_get(self, mock__op): self.conn.get(path='fake/path', data=self.data.copy(), @@ -169,6 +179,13 @@ class ConnectorOpTestCase(base.TestCase): 'GET', 'http://foo.bar:1234/fake/path', headers=self.headers, json=None) + def test_response_callback(self): + mock_response_callback = mock.MagicMock() + self.conn._response_callback = mock_response_callback + + self.conn._op('GET', path='fake/path', headers=self.headers) + self.assertEqual(1, mock_response_callback.call_count) + def test_ok_get_url_redirect_false(self): self.conn._op('GET', path='fake/path', headers=self.headers, allow_redirects=False) -- GitLab From c737bf831cde4599796f84daf770159155ea2c1a Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Tue, 17 Mar 2020 19:44:30 +0100 Subject: [PATCH 204/303] Lazily load message registries Postpone (potentially very large) Redfish message registries download and processing up to the first access by the client. 
The goal is to reduce the amount of unnecessary traffic and CPU cycles. Change-Id: Ief77d3ccce7fda7e6656c494060e47fe27b20f2e Story: 2007442 Task: 39102 --- ...zily-load-registries-0e9441e435c2471d.yaml | 6 ++ sushy/main.py | 80 ++++++++++++++++--- sushy/tests/unit/test_main.py | 79 +++++++++--------- 3 files changed, 118 insertions(+), 47 deletions(-) create mode 100644 releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml diff --git a/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml b/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml new file mode 100644 index 0000000..0621193 --- /dev/null +++ b/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Postpones (potentially very large) Redfish message registries download and + processing up to the first access by the client. The goal is to reduce + the amount of unnecessary traffic and CPU cycles. diff --git a/sushy/main.py b/sushy/main.py index d378e12..05651a6 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -12,6 +12,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +import collections import logging import pkg_resources import requests @@ -55,6 +56,49 @@ class ProtocolFeaturesSupportedField(base.CompositeField): """The select query parameter is supported""" +class LazyRegistries(collections.MutableMapping): + """Download registries on demand. + + Redfish message registries can be very large. On top of that, + they are not used frequently. Thus, let's not pull them off + the BMC unless the consumer is actually trying to use them. 
+ + :param service_root: Redfish service root object + :type service_root: Sushy + """ + + def __init__(self, service_root): + self._service_root = service_root + self._registries = None + + def __getitem__(self, key): + registries = self.registries + return registries[key] + + def __setitem__(self, key, value): + registries = self.registries + registries[key] = value + + def __delitem__(self, key): + registries = self.registries + del registries[key] + + def __iter__(self): + registries = self.registries + return iter(registries or ()) + + def __len__(self): + registries = self.registries + return len(registries) + + @property + def registries(self): + if self._registries is None: + self._registries = self._service_root.registries + + return self._registries + + class Sushy(base.ResourceBase): identity = base.Field('Id', required=True) @@ -180,7 +224,7 @@ class Sushy(base.ResourceBase): return system.SystemCollection( self._conn, self._systems_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_system(self, identity=None): """Given the identity return a System object @@ -203,7 +247,7 @@ class Sushy(base.ResourceBase): return system.System(self._conn, identity, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_chassis_collection(self): """Get the ChassisCollection object @@ -218,7 +262,7 @@ class Sushy(base.ResourceBase): return chassis.ChassisCollection(self._conn, self._chassis_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_chassis(self, identity=None): """Given the identity return a Chassis object @@ -241,7 +285,7 @@ class Sushy(base.ResourceBase): return chassis.Chassis(self._conn, identity, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_fabric_collection(self): """Get the FabricCollection object @@ -256,7 
+300,7 @@ class Sushy(base.ResourceBase): return fabric.FabricCollection(self._conn, self._fabrics_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_fabric(self, identity): """Given the identity return a Fabric object @@ -266,7 +310,7 @@ class Sushy(base.ResourceBase): """ return fabric.Fabric(self._conn, identity, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_manager_collection(self): """Get the ManagerCollection object @@ -281,7 +325,7 @@ class Sushy(base.ResourceBase): return manager.ManagerCollection(self._conn, self._managers_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_manager(self, identity=None): """Given the identity return a Manager object @@ -303,7 +347,7 @@ class Sushy(base.ResourceBase): return manager.Manager(self._conn, identity, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def get_session_service(self): """Get the SessionService object @@ -328,7 +372,8 @@ class Sushy(base.ResourceBase): """ return session.Session( self._conn, identity, - redfish_version=self.redfish_version, registries=self.registries) + redfish_version=self.redfish_version, + registries=self.lazy_registries) def get_update_service(self): """Get the UpdateService object @@ -342,7 +387,7 @@ class Sushy(base.ResourceBase): return updateservice.UpdateService( self._conn, self._update_service_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def _get_registry_collection(self): """Get MessageRegistryFileCollection object @@ -374,7 +419,7 @@ class Sushy(base.ResourceBase): return compositionservice.CompositionService( self._conn, self._composition_service_path, redfish_version=self.redfish_version, - registries=self.registries) + registries=self.lazy_registries) def 
_get_standard_message_registry_collection(self): """Load packaged standard message registries @@ -421,3 +466,16 @@ class Sushy(base.ResourceBase): self._public_connector) for r in provided}) return registries + + @property + def lazy_registries(self): + """Gets and combines all message registries together + + Fetches all registries if any provided by Redfish service + and combines together with packaged standard registries. + + :returns: dict of combined message registries where key is + Registry_name.Major_version.Minor_version and value is registry + itself. + """ + return LazyRegistries(self) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 9750b26..a6c290c 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -95,32 +95,32 @@ class MainTestCase(base.TestCase): self.assertFalse(mock_Sushy_Connector.called) @mock.patch.object(system, 'SystemCollection', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_system_collection( - self, mock_registries, mock_system_collection): + self, mock_lazy_registries, mock_system_collection): self.root._standard_message_registries_path = None self.root.get_system_collection() mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Systems', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) @mock.patch.object(system, 'System', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) - def test_get_system(self, mock_registries, mock_system): + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_system(self, mock_lazy_registries, mock_system): self.root._standard_message_registries_path = None self.root.get_system('fake-system-id') mock_system.assert_called_once_with( self.root._conn, 'fake-system-id', redfish_version=self.root.redfish_version, - 
registries=mock_registries) + registries=mock_lazy_registries) @mock.patch.object(system, 'SystemCollection', autospec=True) @mock.patch.object(system, 'System', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_system_default_ok( - self, mock_registries, mock_system, mock_system_collection): + self, mock_lazy_registries, mock_system, mock_system_collection): self.root._standard_message_registries_path = None mock_system.path = 'fake-system-id' mock_members = mock_system_collection.return_value.get_members @@ -129,18 +129,18 @@ class MainTestCase(base.TestCase): mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Systems', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) mock_system.assert_called_once_with( self.root._conn, 'fake-system-id', redfish_version=self.root.redfish_version, - registries=mock_registries) + registries=mock_lazy_registries) @mock.patch.object(system, 'SystemCollection', autospec=True) @mock.patch.object(system, 'System', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_system_default_failure( - self, mock_registries, mock_system, mock_system_collection): + self, mock_lazy_registries, mock_system, mock_system_collection): self.root._standard_message_registries_path = None mock_members = mock_system_collection.return_value.get_members mock_members.return_value = [] @@ -148,7 +148,7 @@ class MainTestCase(base.TestCase): mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Systems', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) @mock.patch.object(chassis, 'Chassis', autospec=True) @@ -156,13 +156,13 @@ class MainTestCase(base.TestCase): self.root.get_chassis('fake-chassis-id') 
mock_chassis.assert_called_once_with( self.root._conn, 'fake-chassis-id', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(chassis, 'ChassisCollection', autospec=True) @mock.patch.object(chassis, 'Chassis', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_chassis_default_ok( - self, mock_registries, mock_chassis, mock_chassis_collection): + self, mock_lazy_registries, mock_chassis, mock_chassis_collection): self.root._standard_message_registries_path = None mock_chassis.path = 'fake-chassis-id' mock_members = mock_chassis_collection.return_value.get_members @@ -171,19 +171,19 @@ class MainTestCase(base.TestCase): mock_chassis_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Chassis', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) mock_chassis.assert_called_once_with( self.root._conn, 'fake-chassis-id', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) @mock.patch.object(chassis, 'ChassisCollection', autospec=True) @mock.patch.object(chassis, 'Chassis', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_chassis_default_failure( - self, mock_registries, mock_chassis, mock_chassis_collection): + self, mock_lazy_registries, mock_chassis, mock_chassis_collection): self.root._standard_message_registries_path = None mock_members = mock_chassis_collection.return_value.get_members mock_members.return_value = [] @@ -192,7 +192,7 @@ class MainTestCase(base.TestCase): mock_chassis_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Chassis', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) 
@mock.patch.object(chassis, 'ChassisCollection', autospec=True) @@ -200,41 +200,41 @@ class MainTestCase(base.TestCase): self.root.get_chassis_collection() chassis_collection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Chassis', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(fabric, 'Fabric', autospec=True) def test_get_fabric(self, mock_fabric): self.root.get_fabric('fake-fabric-id') mock_fabric.assert_called_once_with( self.root._conn, 'fake-fabric-id', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(fabric, 'FabricCollection', autospec=True) def test_get_fabric_collection(self, fabric_collection_mock): self.root.get_fabric_collection() fabric_collection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Fabrics', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(manager, 'ManagerCollection', autospec=True) def test_get_manager_collection(self, ManagerCollection_mock): self.root.get_manager_collection() ManagerCollection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Managers', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(manager, 'Manager', autospec=True) def test_get_manager(self, Manager_mock): self.root.get_manager('fake-manager-id') Manager_mock.assert_called_once_with( self.root._conn, 'fake-manager-id', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(manager, 'ManagerCollection', autospec=True) @mock.patch.object(manager, 'Manager', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_manager_default_ok( - self, mock_registries, mock_manager, 
mock_manager_collection): + self, mock_lazy_registries, mock_manager, mock_manager_collection): self.root._standard_message_registries_path = None mock_manager.path = 'fake-manager-id' mock_members = mock_manager_collection.return_value.get_members @@ -243,18 +243,18 @@ class MainTestCase(base.TestCase): mock_manager_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Managers', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) mock_manager.assert_called_once_with( self.root._conn, 'fake-manager-id', redfish_version=self.root.redfish_version, - registries=mock_registries) + registries=mock_lazy_registries) @mock.patch.object(manager, 'ManagerCollection', autospec=True) @mock.patch.object(manager, 'Manager', autospec=True) - @mock.patch('sushy.Sushy.registries', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) def test_get_manager_default_failure( - self, mock_registries, mock_manager, mock_system_collection): + self, mock_lazy_registries, mock_manager, mock_system_collection): self.root._standard_message_registries_path = None mock_members = mock_system_collection.return_value.get_members mock_members.return_value = [] @@ -263,7 +263,7 @@ class MainTestCase(base.TestCase): mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Managers', redfish_version=self.root.redfish_version, - registries=mock_registries + registries=mock_lazy_registries ) @mock.patch.object(sessionservice, 'SessionService', autospec=True) @@ -278,14 +278,14 @@ class MainTestCase(base.TestCase): self.root.get_session('asdf') mock_sess.assert_called_once_with( self.root._conn, 'asdf', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(updateservice, 'UpdateService', autospec=True) def test_get_update_service(self, mock_upd_serv): self.root.get_update_service() mock_upd_serv.assert_called_once_with( 
self.root._conn, '/redfish/v1/UpdateService', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(message_registry_file, 'MessageRegistryFileCollection', @@ -303,7 +303,7 @@ class MainTestCase(base.TestCase): self.root.get_composition_service() mock_comp_ser.assert_called_once_with( self.root._conn, '/redfish/v1/CompositionService', - self.root.redfish_version, self.root.registries) + self.root.redfish_version, self.root.lazy_registries) def test__get_standard_message_registry_collection(self): registries = self.root._get_standard_message_registry_collection() @@ -383,6 +383,13 @@ class MainTestCase(base.TestCase): registries = self.root.registries self.assertEqual({'RegistryA.2.0': mock_msg_reg1}, registries) + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_lazy_registries(self, mock_registries): + registries = self.root.lazy_registries + self.assertEqual(0, mock_registries.__getitem__.call_count) + registries[1] + self.assertEqual(1, mock_registries.__getitem__.call_count) + class BareMinimumMainTestCase(base.TestCase): -- GitLab From 4791814adf044120ebdc3927ec84af9522d63fac Mon Sep 17 00:00:00 2001 From: Ilya Etingof Date: Thu, 12 Mar 2020 12:22:34 +0100 Subject: [PATCH 205/303] Ignore failing message registry download Redfish agent (running inside BMC) can hyperlink message registries from other locations so that the client can download and use them. Trouble is, that sometimes these hyperlinked locations may not be available or the hyperlink itself can be malformed (e.g. iLO). This patch ignores essentially all errors caused by message registry download. That effectively renders failed message registry being absent. 
Change-Id: Iebe69ed0f93b6ba0c2b3905f751c1f12ed5eac88 Story: 2007445 Task: 39106 --- .../registry/message_registry_file.py | 22 +++++++++- .../registry/test_message_registry_file.py | 42 +++++++++++++++++++ 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 9eb638f..5730ab1 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -126,10 +126,28 @@ class MessageRegistryFile(base.ResourceBase): {'language': language}) continue - registry = RegistryType(*args, **kwargs) + try: + registry = RegistryType(*args, **kwargs) + + except Exception as exc: + LOG.warning( + 'Cannot load message registry type from location ' + '%(location)s: %(error)s', { + 'location': kwargs['path'], + 'error': exc}) + continue if registry._odata_type.endswith('MessageRegistry'): - return message_registry.MessageRegistry(*args, **kwargs) + try: + return message_registry.MessageRegistry(*args, **kwargs) + + except Exception as exc: + LOG.warning( + 'Cannot load message registry from location ' + '%(location)s: %(error)s', { + 'location': kwargs['path'], + 'error': exc}) + continue LOG.warning('Ignoring unsupported flavor of registry %(registry)s', {'registry': registry._odata_type}) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index fd8f649..a8cb8a8 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -150,6 +150,48 @@ class MessageRegistryFileTestCase(base.TestCase): 'No message registry found for %(language)s or default', {'language': 'en'}) + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + 
autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_invalid_uri( + self, mock_log, mock_msg_reg_type, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = {'extref': 'http://127.0.0.1/reg'} + mock_msg_reg.side_effect = TypeError('Wrong URL type') + mock_msg_reg_type.return_value._odata_type = mock.MagicMock( + endswith=mock.MagicMock(return_value=True)) + + registry = self.reg_file.get_message_registry('en', None) + + self.assertIsNone(registry) + + mock_msg_reg_type.assert_called_once_with( + mock.ANY, + path={'extref': 'http://127.0.0.1/reg'}, reader=None, + redfish_version='1.0.2') + + mock_msg_reg.assert_called_once_with( + mock.ANY, + path={'extref': 'http://127.0.0.1/reg'}, reader=None, + redfish_version='1.0.2') + + expected_calls = [ + mock.call( + 'Cannot load message registry from location %(location)s: ' + '%(error)s', + {'location': {'extref': 'http://127.0.0.1/reg'}, + 'error': mock.ANY}), + mock.call( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + ] + + mock_log.warning.assert_has_calls(expected_calls) + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', autospec=True) def test_get_message_registry_non_default_lang(self, mock_registry_type): -- GitLab From 91de44ff2c6fe006e0d8320e70d3fa75c37d41d0 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 23 Mar 2020 14:49:03 +0100 Subject: [PATCH 206/303] Explicitly set ramdisk type The sushy-tempest-ironic-partition-redfish-src job is based on ironic-base job which will default to dib. To prevent breaking the sushy CI we set the ramdisk type to tinyipa for now and we'll convert the jobs here later. 
Change-Id: Id58e612da796bdac633adb74156edb16d133ce10 --- zuul.d/sushy-jobs.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 69870fb..82a3dd1 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -9,6 +9,7 @@ - openstack/sushy vars: devstack_localrc: + IRONIC_RAMDISK_TYPE: tinyipa IRONIC_DEPLOY_DRIVER: redfish IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" -- GitLab From 2c41452bb2a399267c40943b8b64a47b57fbd96e Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 30 Mar 2020 11:23:42 +0200 Subject: [PATCH 207/303] Bump hacking to 3.0.0 The new version enables a lot of standard flake8 checks, so a few fixes are required. W503 is disabled as it conflicts with W504 and the latter seems to be preferred nowadays. Change-Id: I9834e1f2a31cee840c3260aaddfabd3331a86ba7 --- lower-constraints.txt | 2 +- sushy/auth.py | 4 ++-- sushy/main.py | 21 +++++++++++---------- test-requirements.txt | 2 +- tox.ini | 2 +- 5 files changed, 16 insertions(+), 15 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index c42d8c1..a0e75b2 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -7,7 +7,7 @@ dulwich==0.15.0 extras==1.0.0 fixtures==3.0.0 flake8==2.5.5 -hacking==1.0.0 +hacking==3.0.0 imagesize==0.7.1 iso8601==0.1.11 Jinja2==2.10 diff --git a/sushy/auth.py b/sushy/auth.py index beebdad..37ccf6f 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -159,8 +159,8 @@ class SessionAuth(AuthBase): def can_refresh_session(self): """Method to assert if session based refresh can be done.""" - return (self._session_key is not None and - self._session_resource_id is not None) + return (self._session_key is not None + and self._session_resource_id is not None) def refresh_session(self): """Method to refresh a session to a Redfish controller. 
diff --git a/sushy/main.py b/sushy/main.py index 05651a6..fcc2bac 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -171,8 +171,8 @@ class Sushy(base.ResourceBase): Defaults to 'en'. """ self._root_prefix = root_prefix - if (auth is not None and (password is not None or - username is not None)): + if (auth is not None and (password is not None + or username is not None)): msg = ('Username or Password were provided to Sushy ' 'when an authentication mechanism was specified.') raise ValueError(msg) @@ -430,12 +430,13 @@ class Sushy(base.ResourceBase): message_registries = [] resource_package_name = __name__ for json_file in pkg_resources.resource_listdir( - resource_package_name, STANDARD_REGISTRY_PATH): - # Not using path.join according to pkg_resources docs - mes_reg = message_registry.MessageRegistry( - None, STANDARD_REGISTRY_PATH + json_file, - reader=base.JsonPackagedFileReader(resource_package_name)) - message_registries.append(mes_reg) + resource_package_name, STANDARD_REGISTRY_PATH): + # Not using path.join according to pkg_resources docs + mes_reg = message_registry.MessageRegistry( + None, STANDARD_REGISTRY_PATH + json_file, + reader=base.JsonPackagedFileReader( + resource_package_name)) + message_registries.append(mes_reg) return message_registries @@ -453,8 +454,8 @@ class Sushy(base.ResourceBase): """ standard = self._get_standard_message_registry_collection() - registries = {r.registry_prefix + '.' + - r.registry_version.rsplit('.', 1)[0]: r + registries = {r.registry_prefix + '.' + + r.registry_version.rsplit('.', 1)[0]: r for r in standard if r.language == self._language} registry_col = self._get_registry_collection() diff --git a/test-requirements.txt b/test-requirements.txt index 2745da5..8a70ccd 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
-hacking>=1.0.0,<1.1.0 # Apache-2.0 +hacking>=3.0.0,<3.1.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD diff --git a/tox.ini b/tox.ini index ab282dc..7dc3067 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,7 @@ commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] # E123, E125 skipped as they are invalid PEP-8. show-source = True -ignore = E123,E125 +ignore = E123,E125,W503 # [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. # [H204] Use assert(Not)Equal to check for equality. -- GitLab From 27ebcd3c3e89f9130666b9dcf27a78b5fb35137e Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 30 Mar 2020 16:28:48 +0200 Subject: [PATCH 208/303] [trivial] add reason why we skip W503 in pep8 check Change-Id: I668ae52dba7dfe306a4ed5b1b6a6ff4f6b549669 --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7dc3067..876b50b 100644 --- a/tox.ini +++ b/tox.ini @@ -53,8 +53,9 @@ commands = commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] -# E123, E125 skipped as they are invalid PEP-8. show-source = True +# E123, E125 skipped as they are invalid PEP-8. +# [W503] Line break occurred before a binary operator. Conflicts with W504. ignore = E123,E125,W503 # [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. 
-- GitLab From f79fe38e08045594ada7d14f6674d79a74f85205 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Thu, 2 Apr 2020 15:50:15 +0200 Subject: [PATCH 209/303] Switch to the new canonical constraints URL on master Change-Id: Idd8fc16a96bb74836eda6c0536daee01b9a2b8a6 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7dc3067..38b1544 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ setenv = PYTHONWARNINGS=default::DeprecationWarning install_command = pip install {opts} {packages} deps = - -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} + -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = stestr run --slowest {posargs} -- GitLab From 9eae135fe1f23ef99c132b82af7983ec1da275da Mon Sep 17 00:00:00 2001 From: Andreas Jaeger Date: Sun, 5 Apr 2020 18:37:14 +0200 Subject: [PATCH 210/303] Cleanup py27 support Make a few cleanups: - Remove python 2.7 stanza from setup.py - Add requires on python >= 3.6 to setup.cfg so that pypi and pip know about the requirement - Remove obsolete sections from setup.cfg - Update classifiers - Update requirements, no need for python_version anymore - Switch to sphinx-build, use apidoc for this. Change-Id: I773d22103fb5d200d45ab8e9230cbf6a1703d607 --- doc/source/conf.py | 10 +++++++++- doc/source/reference/index.rst | 4 ++-- lower-constraints.txt | 1 + setup.cfg | 24 +++--------------------- setup.py | 9 --------- sushy/main.py | 2 +- test-requirements.txt | 8 ++++++-- tox.ini | 2 +- 8 files changed, 23 insertions(+), 37 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 5cbe95c..cae009f 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -20,7 +20,7 @@ sys.path.insert(0, os.path.abspath('../..')) # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ - 'sphinx.ext.autodoc', + 'sphinxcontrib.apidoc', #'sphinx.ext.intersphinx', 'openstackdocstheme' ] @@ -73,3 +73,11 @@ latex_documents = [ # Example configuration for intersphinx: refer to the Python standard library. #intersphinx_mapping = {'http://docs.python.org/': None} + +# -- sphinxcontrib.apidoc configuration -------------------------------------- + +apidoc_module_dir = '../../sushy' +apidoc_output_dir = 'reference/api' +apidoc_excluded_paths = [ + 'tests', +] diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index bf26477..998aacf 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -32,8 +32,8 @@ Sushy Python API Reference * :ref:`modindex` -.. # api/autoindex is hidden since it's in the modindex link above. +.. # api/modules is hidden since it's in the modindex link above. .. toctree:: :hidden: - api/autoindex + api/modules diff --git a/lower-constraints.txt b/lower-constraints.txt index a0e75b2..14abc6d 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -34,6 +34,7 @@ requests==2.14.2 requestsexceptions==1.2.0 snowballstemmer==1.2.1 Sphinx==1.6.2 +sphinxcontrib-apidoc==0.2.0 sphinxcontrib-websupport==1.0.1 stevedore==1.29.0 stestr==2.0.0 diff --git a/setup.cfg b/setup.cfg index 2416517..caf4810 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,6 +6,7 @@ description-file = author = OpenStack author-email = openstack-discuss@lists.openstack.org home-page = https://docs.openstack.org/sushy/latest/ +python-requires = >=3.6 classifier = Environment :: OpenStack Intended Audience :: Information Technology @@ -13,6 +14,8 @@ classifier = License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python + Programming Language :: Python :: Implementation :: CPython + Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3 
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 @@ -25,16 +28,6 @@ packages = sushy.resources.system.oems = contoso = sushy.resources.oem.fake:get_extension - -[build_sphinx] -source-dir = doc/source -build-dir = doc/build -all_files = 1 -warning-is-error = 1 - -[upload_sphinx] -upload-dir = doc/build/html - [compile_catalog] directory = sushy/locale domain = sushy @@ -48,14 +41,3 @@ input_file = sushy/locale/sushy.pot keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg output_file = sushy/locale/sushy.pot - -[build_releasenotes] -all_files = 1 -build-dir = releasenotes/build -source-dir = releasenotes/source - -[pbr] -autodoc_index_modules = True -api_doc_dir = reference/api -autodoc_exclude_modules = - sushy.tests.* diff --git a/setup.py b/setup.py index 566d844..cd35c3c 100644 --- a/setup.py +++ b/setup.py @@ -13,17 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools -# In python < 2.7.4, a lazy loading of package `pbr` will break -# setuptools if some other modules registered functions in `atexit`. -# solution from: http://bugs.python.org/issue15881#msg170215 -try: - import multiprocessing # noqa -except ImportError: - pass - setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) diff --git a/sushy/main.py b/sushy/main.py index fcc2bac..c1c2ae1 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -64,7 +64,7 @@ class LazyRegistries(collections.MutableMapping): the BMC unless the consumer is actually trying to use them. 
:param service_root: Redfish service root object - :type service_root: Sushy + :type service_root: sushy.main.Sushy """ def __init__(self, service_root): diff --git a/test-requirements.txt b/test-requirements.txt index 8a70ccd..05f9519 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,12 +6,16 @@ hacking>=3.0.0,<3.1.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 python-subunit>=1.0.0 # Apache-2.0/BSD -sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD -openstackdocstheme>=1.20.0 # Apache-2.0 + oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT +# docs +sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD +openstackdocstheme>=1.20.0 # Apache-2.0 +sphinxcontrib-apidoc>=0.2.0 # BSD + # releasenotes reno>=2.5.0 # Apache-2.0 diff --git a/tox.ini b/tox.ini index aa911e5..4677555 100644 --- a/tox.ini +++ b/tox.ini @@ -38,7 +38,7 @@ commands = coverage erase coverage xml -o cover/coverage.xml [testenv:docs] -commands = python setup.py build_sphinx +commands = sphinx-build -W -b html doc/source doc/build/html [testenv:pdf-docs] whitelist_externals = make -- GitLab From 14b887a414dc5eae31cc6a01935e12e862cf8df2 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 6 Apr 2020 22:57:27 +0200 Subject: [PATCH 211/303] Now packaging 3.2.0 --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index 6666dea..a3c4783 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.2.0-1) experimental; urgency=medium + + * New upstream release. + + -- Thomas Goirand Mon, 06 Apr 2020 22:57:07 +0200 + python-sushy (2.0.0-2) unstable; urgency=medium [ Ondřej Nový ] -- GitLab From 5c7add93902049e1632e15383a7a6c54f38b2e34 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 6 Apr 2020 22:58:06 +0200 Subject: [PATCH 212/303] Removed -six from build-depends. 
--- debian/changelog | 1 + debian/control | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index a3c4783..b4c1132 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,7 @@ python-sushy (3.2.0-1) experimental; urgency=medium * New upstream release. + * Removed -six from build-depends. -- Thomas Goirand Mon, 06 Apr 2020 22:57:07 +0200 diff --git a/debian/control b/debian/control index 1cf6de5..de8efd0 100644 --- a/debian/control +++ b/debian/control @@ -19,7 +19,6 @@ Build-Depends-Indep: python3-openstackdocstheme, python3-oslotest, python3-requests, - python3-six, python3-stestr, python3-stevedore, python3-testscenarios, @@ -53,7 +52,6 @@ Architecture: all Depends: python3-pbr (>= 2.0.0), python3-requests (>= 2.14.2), - python3-six, ${misc:Depends}, ${python3:Depends}, Suggests: -- GitLab From 226781af6143c28bb5b0acabfb37737569794f53 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Thu, 9 Apr 2020 10:44:28 +0200 Subject: [PATCH 213/303] Convert sushy tempest to dib Depends-On: https://review.opendev.org/718607 Change-Id: I1e3dabe9c6e37127cfba54940fbf1a832447bb6a --- zuul.d/sushy-jobs.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 82a3dd1..69870fb 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -9,7 +9,6 @@ - openstack/sushy vars: devstack_localrc: - IRONIC_RAMDISK_TYPE: tinyipa IRONIC_DEPLOY_DRIVER: redfish IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" -- GitLab From cc69cf531ead06cf33654de32b14354fbe139db1 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Fri, 10 Apr 2020 12:00:41 +0000 Subject: [PATCH 214/303] Update master for stable/ussuri Add file to the reno documentation build to show release notes for stable/ussuri. Use pbr instruction to increment the minor version number automatically so that master versions are higher than the versions on stable/ussuri. 
Change-Id: I15fca41fbe715469ccc22389de9cbf526ae3f211 Sem-Ver: feature --- releasenotes/source/index.rst | 1 + releasenotes/source/ussuri.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/ussuri.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index c66b1fa..f690a05 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,6 +6,7 @@ :maxdepth: 1 unreleased + ussuri train stein rocky diff --git a/releasenotes/source/ussuri.rst b/releasenotes/source/ussuri.rst new file mode 100644 index 0000000..e21e50e --- /dev/null +++ b/releasenotes/source/ussuri.rst @@ -0,0 +1,6 @@ +=========================== +Ussuri Series Release Notes +=========================== + +.. release-notes:: + :branch: stable/ussuri -- GitLab From bd4f15e1b716afeb10e71eae98bf69dd3f1a9e61 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Fri, 10 Apr 2020 12:00:42 +0000 Subject: [PATCH 215/303] Add Python3 victoria unit tests This is an automatically generated patch to ensure unit testing is in place for all the of the tested runtimes for victoria. See also the PTI in governance [1]. [1]: https://governance.openstack.org/tc/reference/project-testing-interface.html Change-Id: I3942ee259bb828cd6d2b63fecc78e723457e3edf --- zuul.d/project.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index a2b8a1d..68411bc 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -3,7 +3,7 @@ - check-requirements - openstack-cover-jobs - openstack-lower-constraints-jobs - - openstack-python3-ussuri-jobs + - openstack-python3-victoria-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: -- GitLab From 4c5ca0d30550c3a9687ef787480aa1de7fb56e21 Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Fri, 17 Apr 2020 20:13:59 +0200 Subject: [PATCH 216/303] Stop configuring install_command in tox. 
Currently, we are overriding 'install_command' to use 'pip'. This is considered poor behavior and 'python -m pip' should be used instead: https://snarky.ca/why-you-should-use-python-m-pip/ It turns out that this is the default value provided by tox: https://tox.readthedocs.io/en/latest/config.html#conf-install_command So we can remove the line and simply use the default value. openstack-discuss thread http://lists.openstack.org/pipermail/openstack-discuss/2020-April/014237.html Change-Id: Iace21f058714365115e2cd63c3a463af27540721 --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index 4677555..2fd073b 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,6 @@ usedevelop = True setenv = VIRTUAL_ENV={envdir} PYTHONWARNINGS=default::DeprecationWarning -install_command = pip install {opts} {packages} deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt -- GitLab From 010a288b156fa9b3ec11257ca8a2649c4f2305be Mon Sep 17 00:00:00 2001 From: Sean McGinnis Date: Sat, 18 Apr 2020 12:00:45 -0500 Subject: [PATCH 217/303] Use unittest.mock instead of third party mock Now that we no longer support py27, we can use the standard library unittest.mock module instead of the third party mock lib. 
Change-Id: Ide81a721c5a0f14250250fa82925bf300ba72c31 Signed-off-by: Sean McGinnis --- sushy/tests/unit/resources/chassis/test_chassis.py | 2 +- sushy/tests/unit/resources/chassis/test_power.py | 2 +- sushy/tests/unit/resources/chassis/test_thermal.py | 2 +- .../resources/compositionservice/test_compositionservice.py | 2 +- .../unit/resources/compositionservice/test_resourceblock.py | 2 +- .../unit/resources/compositionservice/test_resourcezone.py | 2 +- sushy/tests/unit/resources/fabric/test_endpoint.py | 2 +- sushy/tests/unit/resources/fabric/test_fabric.py | 2 +- sushy/tests/unit/resources/manager/test_manager.py | 2 +- sushy/tests/unit/resources/manager/test_virtual_media.py | 2 +- sushy/tests/unit/resources/oem/test_common.py | 2 +- sushy/tests/unit/resources/oem/test_fake.py | 2 +- sushy/tests/unit/resources/registry/test_message_registry.py | 2 +- .../tests/unit/resources/registry/test_message_registry_file.py | 2 +- sushy/tests/unit/resources/sessionservice/test_session.py | 2 +- .../tests/unit/resources/sessionservice/test_sessionservice.py | 2 +- sushy/tests/unit/resources/system/storage/test_drive.py | 2 +- sushy/tests/unit/resources/system/storage/test_storage.py | 2 +- sushy/tests/unit/resources/system/storage/test_volume.py | 2 +- sushy/tests/unit/resources/system/test_bios.py | 2 +- sushy/tests/unit/resources/system/test_ethernet_interfaces.py | 2 +- sushy/tests/unit/resources/system/test_processor.py | 2 +- sushy/tests/unit/resources/system/test_simple_storage.py | 2 +- sushy/tests/unit/resources/system/test_system.py | 2 +- sushy/tests/unit/resources/test_base.py | 2 +- sushy/tests/unit/resources/test_settings.py | 2 +- sushy/tests/unit/resources/test_task_monitor.py | 2 +- .../unit/resources/updateservice/test_softwareinventory.py | 2 +- sushy/tests/unit/resources/updateservice/test_updateservice.py | 2 +- sushy/tests/unit/test_auth.py | 2 +- sushy/tests/unit/test_connector.py | 2 +- sushy/tests/unit/test_main.py | 2 +- 
sushy/tests/unit/test_utils.py | 2 +- 33 files changed, 33 insertions(+), 33 deletions(-) diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index 718de53..b4436f5 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -13,8 +13,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy import exceptions diff --git a/sushy/tests/unit/resources/chassis/test_power.py b/sushy/tests/unit/resources/chassis/test_power.py index 27278ba..9438606 100644 --- a/sushy/tests/unit/resources/chassis/test_power.py +++ b/sushy/tests/unit/resources/chassis/test_power.py @@ -13,8 +13,8 @@ # under the License. import json +from unittest import mock -import mock from sushy.resources.chassis.power import power from sushy.tests.unit import base diff --git a/sushy/tests/unit/resources/chassis/test_thermal.py b/sushy/tests/unit/resources/chassis/test_thermal.py index 5500506..dc4cdc0 100644 --- a/sushy/tests/unit/resources/chassis/test_thermal.py +++ b/sushy/tests/unit/resources/chassis/test_thermal.py @@ -13,8 +13,8 @@ # under the License. import json +from unittest import mock -import mock from sushy.resources.chassis.thermal import thermal from sushy.tests.unit import base diff --git a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py index 878b36c..f6ba81f 100644 --- a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py +++ b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py @@ -12,7 +12,7 @@ # limitations under the License. 
import json -import mock +from unittest import mock from sushy.resources.compositionservice import compositionservice from sushy.resources import constants as res_cons diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py index f1fd0de..72b0819 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -12,7 +12,7 @@ # limitations under the License. import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources.compositionservice import constants as res_block_cons diff --git a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py index b19a2f8..0f0b4da 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py @@ -12,7 +12,7 @@ # limitations under the License. import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources.compositionservice import resourcezone diff --git a/sushy/tests/unit/resources/fabric/test_endpoint.py b/sushy/tests/unit/resources/fabric/test_endpoint.py index 3733727..0b3b1b7 100644 --- a/sushy/tests/unit/resources/fabric/test_endpoint.py +++ b/sushy/tests/unit/resources/fabric/test_endpoint.py @@ -11,8 +11,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy.resources.fabric import endpoint diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py index a94fa23..d6155e0 100644 --- a/sushy/tests/unit/resources/fabric/test_fabric.py +++ b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -13,8 +13,8 @@ # under the License. 
import json +from unittest import mock -import mock import sushy from sushy.resources.fabric import endpoint diff --git a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index e337c35..4de5f3d 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -11,8 +11,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy import exceptions diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 92fd564..284a0e2 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -14,8 +14,8 @@ from http import client as http_client import json +from unittest import mock -import mock import sushy from sushy import exceptions diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py index 4debd85..b5528bf 100644 --- a/sushy/tests/unit/resources/oem/test_common.py +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -10,8 +10,8 @@ # License for the specific language governing permissions and limitations # under the License. -import mock import stevedore +from unittest import mock from sushy import exceptions from sushy.resources import base as res_base diff --git a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py index ef451f5..e9bd7c2 100644 --- a/sushy/tests/unit/resources/oem/test_fake.py +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -11,8 +11,8 @@ # under the License. 
import json +from unittest import mock -import mock from sushy.resources.oem import fake from sushy.resources.system import system diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 93e8671..7c3953a 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -14,8 +14,8 @@ import json +from unittest import mock -import mock from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 8376dab..b36070f 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -14,7 +14,7 @@ import json -import mock +from unittest import mock from sushy.resources.registry import message_registry_file from sushy.tests.unit import base diff --git a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py index 7ead963..34492b5 100644 --- a/sushy/tests/unit/resources/sessionservice/test_session.py +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -14,7 +14,7 @@ # under the License. import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources.sessionservice import session diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index 8af89b4..7366d7c 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -14,8 +14,8 @@ # under the License. 
import json +from unittest import mock -import mock from sushy import exceptions from sushy.resources.sessionservice import session diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py index eae9cc4..a3978bc 100644 --- a/sushy/tests/unit/resources/system/storage/test_drive.py +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -11,8 +11,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy import exceptions diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 2112865..d62fb6c 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -11,8 +11,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy.resources.system.storage import drive diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 3c59ae9..1c0de83 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -11,7 +11,7 @@ # under the License. 
import json -import mock +from unittest import mock from dateutil import parser diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index ea52695..a452e71 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -14,7 +14,7 @@ from http import client as http_client import json -import mock +from unittest import mock from dateutil import parser diff --git a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py index 037af20..ffcde96 100644 --- a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -11,8 +11,8 @@ # under the License. import json +from unittest import mock -import mock from sushy.resources import constants as res_cons from sushy.resources.system import ethernet_interface diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index c5764c0..202e763 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -13,8 +13,8 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy.resources import constants as res_cons diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py index a4a25f9..f4e5c1a 100644 --- a/sushy/tests/unit/resources/system/test_simple_storage.py +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -11,8 +11,8 @@ # under the License. 
import json +from unittest import mock -import mock from sushy.resources import constants as res_cons from sushy.resources.system import simple_storage diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index dcfa487..105e0de 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -14,7 +14,7 @@ # under the License. import json -import mock +from unittest import mock from dateutil import parser diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 8e169a7..58f1fd1 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -17,7 +17,7 @@ import copy from http import client as http_client import io import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources import base as resource_base diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index d32b037..6343c78 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -14,7 +14,7 @@ # under the License. 
import json -import mock +from unittest import mock from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry diff --git a/sushy/tests/unit/resources/test_task_monitor.py b/sushy/tests/unit/resources/test_task_monitor.py index f18951f..d228010 100644 --- a/sushy/tests/unit/resources/test_task_monitor.py +++ b/sushy/tests/unit/resources/test_task_monitor.py @@ -14,9 +14,9 @@ from datetime import datetime from datetime import timedelta +from unittest import mock from dateutil import parser -import mock from sushy.resources.task_monitor import TaskMonitor from sushy.tests.unit import base diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py index 53238e4..b91a83d 100644 --- a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -12,7 +12,7 @@ # limitations under the License. import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources import constants as res_cons diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index 65b9e38..fbbcb7a 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -12,7 +12,7 @@ # limitations under the License. import json -import mock +from unittest import mock from sushy import exceptions from sushy.resources import constants as res_cons diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py index df6650b..e91b598 100644 --- a/sushy/tests/unit/test_auth.py +++ b/sushy/tests/unit/test_auth.py @@ -13,7 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. 
-import mock +from unittest import mock from sushy import auth from sushy import connector diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 1c81afa..542daf2 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -15,8 +15,8 @@ from http import client as http_client import json +from unittest import mock -import mock import requests from sushy import auth as sushy_auth diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index a6c290c..075c088 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -14,7 +14,7 @@ # under the License. import json -import mock +from unittest import mock from sushy import auth from sushy import connector diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index cbe341d..db3dfb8 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -14,8 +14,8 @@ # under the License. import json +from unittest import mock -import mock from sushy import exceptions from sushy.resources import base as resource_base -- GitLab From bab92f0902480a164d96ed21abcb358f59a94387 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 21 Apr 2020 15:28:08 +0200 Subject: [PATCH 218/303] Add import order check Set flake8-import-order version to 0.17.1 Full py3 compatible version. Add all Python3 modules to stdlib list. Also includes fix to an enum34 dependency bug. 
Change-Id: If4202cc95a2121c4e693aff6777ae19d85f34a82 --- lower-constraints.txt | 1 + sushy/connector.py | 2 +- sushy/main.py | 1 + sushy/resources/base.py | 4 ++-- sushy/resources/chassis/chassis.py | 3 ++- sushy/resources/fabric/fabric.py | 3 ++- sushy/resources/settings.py | 3 ++- .../unit/resources/compositionservice/test_resourceblock.py | 1 - sushy/tests/unit/resources/oem/test_common.py | 3 ++- sushy/tests/unit/resources/test_base.py | 2 +- sushy/utils.py | 1 - test-requirements.txt | 1 + tox.ini | 3 +++ 13 files changed, 18 insertions(+), 10 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 14abc6d..ca8169f 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -7,6 +7,7 @@ dulwich==0.15.0 extras==1.0.0 fixtures==3.0.0 flake8==2.5.5 +flake8-import-order==0.17.1 hacking==3.0.0 imagesize==0.7.1 iso8601==0.1.11 diff --git a/sushy/connector.py b/sushy/connector.py index 504aac4..208980d 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -14,10 +14,10 @@ # under the License. import logging +import time from urllib import parse as urlparse import requests -import time from sushy import exceptions from sushy.resources.task_monitor import TaskMonitor diff --git a/sushy/main.py b/sushy/main.py index c1c2ae1..be8b0ee 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -14,6 +14,7 @@ # under the License. 
import collections import logging + import pkg_resources import requests diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 16af69e..025a571 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -15,14 +15,14 @@ import abc import collections - import copy import io import json import logging -import pkg_resources import zipfile +import pkg_resources + from sushy import exceptions from sushy.resources import oem from sushy import utils diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py index 433b4db..0e1eae7 100644 --- a/sushy/resources/chassis/chassis.py +++ b/sushy/resources/chassis/chassis.py @@ -13,6 +13,8 @@ # This is referred from Redfish standard schema. # http://redfish.dmtf.org/schemas/v1/Chassis.v1_8_0.json +import logging + from sushy import exceptions from sushy.resources import base from sushy.resources.chassis import mappings as cha_maps @@ -23,7 +25,6 @@ from sushy.resources.manager import manager from sushy.resources import mappings as res_maps from sushy import utils -import logging LOG = logging.getLogger(__name__) diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py index 1bcc73a..cb91b1d 100644 --- a/sushy/resources/fabric/fabric.py +++ b/sushy/resources/fabric/fabric.py @@ -13,13 +13,14 @@ # This is referred from Redfish standard schema. # http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json +import logging + from sushy.resources import base from sushy.resources import common from sushy.resources.fabric import endpoint as fab_endpoint from sushy.resources import mappings as res_maps from sushy import utils -import logging LOG = logging.getLogger(__name__) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index e048fc2..e084545 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -13,9 +13,10 @@ # This is referred from Redfish standard schema. 
# https://redfish.dmtf.org/schemas/Settings.v1_2_0.json -from dateutil import parser import logging +from dateutil import parser + from sushy.resources import base from sushy.resources import common from sushy.resources import constants as res_cons diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py index 72b0819..ce03bdb 100644 --- a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -18,7 +18,6 @@ from sushy import exceptions from sushy.resources.compositionservice import constants as res_block_cons from sushy.resources.compositionservice import resourceblock from sushy.resources import constants as res_cons - from sushy.tests.unit import base diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py index b5528bf..c09cda5 100644 --- a/sushy/tests/unit/resources/oem/test_common.py +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -10,9 +10,10 @@ # License for the specific language governing permissions and limitations # under the License. 
-import stevedore from unittest import mock +import stevedore + from sushy import exceptions from sushy.resources import base as res_base from sushy.resources.oem import base as oem_base diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 58f1fd1..d45f395 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -18,11 +18,11 @@ from http import client as http_client import io import json from unittest import mock +import zipfile from sushy import exceptions from sushy.resources import base as resource_base from sushy.tests.unit import base -import zipfile BASE_RESOURCE_JSON = { diff --git a/sushy/utils.py b/sushy/utils.py index d0300d1..98d4596 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -15,7 +15,6 @@ import collections import functools - import logging import threading diff --git a/test-requirements.txt b/test-requirements.txt index 05f9519..23517dd 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -11,6 +11,7 @@ oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT +flake8-import-order>=0.17.1 # LGPLv3 # docs sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD diff --git a/tox.ini b/tox.ini index 4677555..8d1eff4 100644 --- a/tox.ini +++ b/tox.ini @@ -66,6 +66,9 @@ ignore = E123,E125,W503 enable-extensions=H106,H203,H204,H205,H210,H904 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build +import-order-style = pep8 +application-import-names = sushy +filename = *.py [testenv:lower-constraints] deps = -- GitLab From 8ece3146275a57aa316762538a663ef3cb2769d7 Mon Sep 17 00:00:00 2001 From: Sean McGinnis Date: Fri, 24 Apr 2020 08:23:22 -0500 Subject: [PATCH 219/303] Add py38 package metadata Now that we are running the Victoria tests that include a voting py38, we can now add the Python 3.8 metadata to the package information to reflect that support. 
Change-Id: I68f154415b4a9639b6c88eaaee883bff12869abd Signed-off-by: Sean McGinnis --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index caf4810..0c1be94 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,6 +19,7 @@ classifier = Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 [files] packages = -- GitLab From 4e8bb47ddf3eaa1da92800abdc2c11f7a5d0ec37 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 27 Apr 2020 16:08:52 +0200 Subject: [PATCH 220/303] Restore default netboot boot option We changed the default boot option to local but should be netboot for this job. Change-Id: I324044cbf4a91380564fd98d4756f504e31b98cb --- zuul.d/sushy-jobs.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 69870fb..e6c4d6a 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -9,6 +9,7 @@ - openstack/sushy vars: devstack_localrc: + IRONIC_DEFAULT_BOOT_OPTION: netboot IRONIC_DEPLOY_DRIVER: redfish IRONIC_ENABLED_HARDWARE_TYPES: redfish IRONIC_DEFAULT_RESCUE_INTERFACE: "" -- GitLab From 155d2293d341b98607dbe8d6593afd72b08ce3ba Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 8 May 2020 11:49:00 +0200 Subject: [PATCH 221/303] Uploading to unstable. --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index b4c1132..a3ec43c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.2.0-2) unstable; urgency=medium + + * Uploading to unstable. + + -- Thomas Goirand Fri, 08 May 2020 11:47:55 +0200 + python-sushy (3.2.0-1) experimental; urgency=medium * New upstream release. -- GitLab From 98e907ec5abf1aeeeea3a0231787e1bc61b75cf5 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 12 May 2020 11:30:58 +0200 Subject: [PATCH 222/303] Fix pep8 test Also adding E741 to ignore list. 
Change-Id: Iedd6daa7c45fdbe53e9fefcd84ce65f6cc33514d --- sushy/tests/unit/resources/system/storage/test_storage.py | 2 +- tox.ini | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index d62fb6c..00eb28b 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -62,7 +62,7 @@ class StorageTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa - ), self.storage.drives_identities) + ), self.storage.drives_identities) def test_get_drive(self): # | WHEN | diff --git a/tox.ini b/tox.ini index e464c2e..7d0e4a8 100644 --- a/tox.ini +++ b/tox.ini @@ -54,8 +54,9 @@ commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] show-source = True # E123, E125 skipped as they are invalid PEP-8. -# [W503] Line break occurred before a binary operator. Conflicts with W504. -ignore = E123,E125,W503 +# E741 ambiguous variable name. +# W503 Line break occurred before a binary operator. Conflicts with W504. +ignore = E123,E125,E741,W503 # [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. # [H204] Use assert(Not)Equal to check for equality. 
-- GitLab From 7098c665150010eca82e4988107d0a1025bb5219 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 12 May 2020 17:42:29 +0200 Subject: [PATCH 223/303] Update lower-constraints.txt We need to list all dependencies in lower-constraints Change-Id: If6daecb018cefda05aba9ee023d319f308fd33bb --- lower-constraints.txt | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index ca8169f..cf468a9 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -1,6 +1,8 @@ alabaster==0.7.10 appdirs==1.3.0 Babel==2.3.4 +cliff==3.1.0 +cmd2==0.8.9 coverage==4.0 docutils==0.11 dulwich==0.15.0 @@ -8,6 +10,7 @@ extras==1.0.0 fixtures==3.0.0 flake8==2.5.5 flake8-import-order==0.17.1 +future==0.18.2 hacking==3.0.0 imagesize==0.7.1 iso8601==0.1.11 @@ -23,8 +26,12 @@ os-client-config==1.28.0 oslotest==3.2.0 pbr==2.0.0 pep8==1.5.7 +prettytable==0.7.2 +pycodestyle==2.6.0 pyflakes==0.8.1 Pygments==2.2.0 +pyparsing==2.4.7 +pyperclip==1.8.0 python-dateutil==2.7.0 python-mimeparse==1.6.0 python-subunit==1.0.0 @@ -33,13 +40,17 @@ PyYAML==3.12 reno==2.5.0 requests==2.14.2 requestsexceptions==1.2.0 +six==1.14.0 snowballstemmer==1.2.1 Sphinx==1.6.2 sphinxcontrib-apidoc==0.2.0 sphinxcontrib-websupport==1.0.1 -stevedore==1.29.0 stestr==2.0.0 +stevedore==1.29.0 +testrepository==0.0.20 testscenarios==0.4 testtools==2.2.0 traceback2==1.4.0 unittest2==1.1.0 +voluptuous==0.11.7 +wcwidth==0.1.9 -- GitLab From 359f2ec2c11cf4e4d2677c2af4ed715e92ce33f3 Mon Sep 17 00:00:00 2001 From: Aija Jaunteva Date: Wed, 13 May 2020 14:52:09 +0300 Subject: [PATCH 224/303] Fix OEM extension loading for different servers Remove caching and re-using loaded extension objects as it loaded the same instance for different servers making it impossible to use it for more than 1 server. The caching of extension managers is still in place, consumer of loaded extension object can manage and reuse the instance as needed. 
Change-Id: I7834b49455258f4c3e5690708b23c67d6a361158 Story: 2007669 Task: 39767 --- .../fix-oem-loading-52da045252b6c33e.yaml | 6 +++ sushy/resources/oem/common.py | 27 ++-------- sushy/tests/unit/resources/oem/test_common.py | 53 ++++++++++--------- 3 files changed, 36 insertions(+), 50 deletions(-) create mode 100644 releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml diff --git a/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml b/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml new file mode 100644 index 0000000..8050478 --- /dev/null +++ b/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes Sushy OEM extension loading when using multiple servers that + caused loaded extensions to point to server for which the extension + was loaded first. diff --git a/sushy/resources/oem/common.py b/sushy/resources/oem/common.py index 10fb4db..5a0b9c0 100644 --- a/sushy/resources/oem/common.py +++ b/sushy/resources/oem/common.py @@ -85,26 +85,6 @@ def _get_extension_manager_of_resource(resource_name): return _global_extn_mgrs_by_resource[resource_name] -@utils.synchronized -def _get_resource_vendor_extension_obj(extension, resource, vendor): - """Get the object returned by extension's plugin() method. - - :param extension: stevedore Extension - :param resource: The Sushy resource instance - :param vendor: This is the OEM vendor string which is the vendor-specific - extensibility identifier. Examples are: 'Contoso', 'Hpe'. As a matter - of fact the lowercase of this string will be the plugin entry point - name. - :returns: The object returned by ``plugin(*args, **kwds)`` of extension. 
- """ - if extension.obj is None: - oem_resource = extension.plugin() - extension.obj = resource.clone_resource( - oem_resource).set_parent_resource(resource, vendor) - - return extension.obj - - def get_resource_extension_by_vendor( resource_name, vendor, resource): """Helper method to get Resource specific OEM extension object for vendor @@ -131,7 +111,6 @@ def get_resource_extension_by_vendor( raise exceptions.OEMExtensionNotFoundError( resource=resource_name, name=vendor.lower()) - if resource_vendor_extn.obj is None: - return _get_resource_vendor_extension_obj( - resource_vendor_extn, resource, vendor) - return resource_vendor_extn.obj + oem_resource = resource_vendor_extn.plugin() + return resource.clone_resource( + oem_resource).set_parent_resource(resource, vendor) diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py index c09cda5..79e35db 100644 --- a/sushy/tests/unit/resources/oem/test_common.py +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -115,32 +115,6 @@ class ResourceOEMCommonMethodsTestCase(base.TestCase): self.assertTrue(extension in (self.contoso_extn, self.faux_extn)) - def test__get_resource_vendor_extension_obj_lazy_plugin_invoke(self): - resource_instance_mock = mock.Mock() - extension_mock = mock.MagicMock() - extension_mock.obj = None - - mock_oem_resource = extension_mock.plugin.return_value - - result = oem_common._get_resource_vendor_extension_obj( - extension_mock, resource_instance_mock, 'fish-n-chips') - - mock_clone_resource = resource_instance_mock.clone_resource - mock_clone_resource.assert_called_once_with(mock_oem_resource) - mock_ext = mock_clone_resource.return_value - mock_ext.set_parent_resource.assert_called_once_with( - resource_instance_mock, 'fish-n-chips') - mock_ext = mock_ext.set_parent_resource.return_value - self.assertEqual(result, mock_ext) - - extension_mock.reset_mock() - - # extension_mock.obj is not None anymore - 
oem_common._get_resource_vendor_extension_obj( - extension_mock, resource_instance_mock, 'fish-n-chips') - - self.assertFalse(extension_mock.plugin.called) - @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) def test_get_resource_extension_by_vendor(self, ExtensionManager_mock): oem_resource_mock = mock.Mock() @@ -193,3 +167,30 @@ class ResourceOEMCommonMethodsTestCase(base.TestCase): 'by name "faux"', oem_common.get_resource_extension_by_vendor, 'sushy.resources.system.oems', 'Faux', resource_instance_mock) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor_different_resources( + self, ExtensionManager_mock): + oem_resource_mock = mock.Mock() + oem_resource_mock.set_parent_resource = lambda *x: oem_resource_mock + resource_instance_mock = mock.Mock() + resource_instance_mock.clone_resource = lambda *x: oem_resource_mock + oem_resource_mock2 = mock.Mock() + oem_resource_mock2.set_parent_resource = lambda *x: oem_resource_mock2 + resource_instance_mock2 = mock.Mock() + resource_instance_mock2.clone_resource = lambda *x: oem_resource_mock2 + ExtensionManager_mock.side_effect = [self.fake_ext_mgr] + + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + ExtensionManager_mock.assert_called_once_with( + 'sushy.resources.system.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result2 = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock2) + self.assertEqual(result2, oem_resource_mock2) + ExtensionManager_mock.assert_not_called() + ExtensionManager_mock.reset_mock() -- GitLab From 6caab81d4fe4442d58f784bb0683f5c0daa8338f Mon Sep 17 00:00:00 2001 From: melissaml Date: Thu, 14 May 2020 19:08:01 +0800 Subject: [PATCH 225/303] Remove translation sections from setup.cfg These translation sections are 
not needed anymore, Babel can generate translation files without them. Change-Id: Ie53669dcc1339ab33a9a2ed152d5230140f7c9b1 --- babel.cfg | 2 -- setup.cfg | 14 -------------- 2 files changed, 16 deletions(-) delete mode 100644 babel.cfg diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index 15cd6cb..0000000 --- a/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/setup.cfg b/setup.cfg index 0c1be94..b5c315c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,17 +28,3 @@ packages = [entry_points] sushy.resources.system.oems = contoso = sushy.resources.oem.fake:get_extension - -[compile_catalog] -directory = sushy/locale -domain = sushy - -[update_catalog] -domain = sushy -output_dir = sushy/locale -input_file = sushy/locale/sushy.pot - -[extract_messages] -keywords = _ gettext ngettext l_ lazy_gettext -mapping_file = babel.cfg -output_file = sushy/locale/sushy.pot -- GitLab From 9603ad83d923a39ada4f8c2da12167b4c1eda497 Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Thu, 14 May 2020 13:36:50 +0200 Subject: [PATCH 226/303] Fix pdf build openstackdocstheme 2.1.0 broke the pdf builds, adding a workaround that fix the problem and also works with older openstackdocstheme. Change-Id: I44149b0db1764c261e3081fbdf23958dc0df2c8f --- bindep.txt | 2 ++ doc/source/conf.py | 6 ++++++ 2 files changed, 8 insertions(+) create mode 100644 bindep.txt diff --git a/bindep.txt b/bindep.txt new file mode 100644 index 0000000..273e463 --- /dev/null +++ b/bindep.txt @@ -0,0 +1,2 @@ +# fonts-freefont-otf is needed for pdf docs builds with the 'xelatex' engine +fonts-freefont-otf [doc] diff --git a/doc/source/conf.py b/doc/source/conf.py index cae009f..5a3383d 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -59,6 +59,12 @@ html_theme = 'openstackdocs' # Output file base name for HTML help builder. htmlhelp_basename = 'sushydoc' +# The openstackdocstheme 2.1.0 extension stopped always overriding latex_engine +# to 'xelatex'. 
We need the 'xelatex' engine in order to handle some Unicode +# characters we use in our feature classification matrix, like the "X" mark, so +# we specify it here. +latex_engine = 'xelatex' + latex_use_xindy = False # Grouping the document tree into LaTeX files. List of tuples -- GitLab From 4fa057b57f3aa457bce804454c89f17932e2e047 Mon Sep 17 00:00:00 2001 From: Andreas Jaeger Date: Thu, 21 May 2020 20:29:28 +0200 Subject: [PATCH 227/303] Switch to newer openstackdocstheme and reno versions Switch to openstackdocstheme 2.2.1 and reno 3.1.0 versions. Using these versions will allow especially: * Linking from HTML to PDF document * Allow parallel building of documents * Fix some rendering problems Update Sphinx version as well. Set openstackdocs_pdf_link to link to PDF file. Note that the link to the published document only works on docs.openstack.org where the PDF file is placed in the top-level html directory. The site-preview places the PDF in a pdf directory. Change pygments_style to 'native' since old theme version always used 'native' and the theme now respects the setting and using 'sphinx' can lead to some strange rendering. openstackdocstheme renames some variables, so follow the renames before the next release removes them. A couple of variables are also not needed anymore, remove them. See also http://lists.openstack.org/pipermail/openstack-discuss/2020-May/014971.html Change-Id: I40845ab9b5acd762d659fca2d7fcf328f2f7cf2f --- doc/source/conf.py | 7 ++++++- lower-constraints.txt | 6 +++--- releasenotes/source/conf.py | 9 +++++---- test-requirements.txt | 6 +++--- 4 files changed, 17 insertions(+), 11 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 5a3383d..d226d48 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -46,7 +46,12 @@ add_function_parentheses = True add_module_names = True # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'sphinx' +pygments_style = 'native' + +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/sushy' +openstackdocs_use_storyboard = True +openstackdocs_pdf_link = True # -- Options for HTML output -------------------------------------------------- diff --git a/lower-constraints.txt b/lower-constraints.txt index cf468a9..0baff17 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -21,7 +21,7 @@ MarkupSafe==1.0 mccabe==0.2.1 mock==2.0.0 mox3==0.20.0 -openstackdocstheme==1.20.0 +openstackdocstheme==2.2.1 os-client-config==1.28.0 oslotest==3.2.0 pbr==2.0.0 @@ -37,12 +37,12 @@ python-mimeparse==1.6.0 python-subunit==1.0.0 pytz==2013.6 PyYAML==3.12 -reno==2.5.0 +reno==3.1.0 requests==2.14.2 requestsexceptions==1.2.0 six==1.14.0 snowballstemmer==1.2.1 -Sphinx==1.6.2 +Sphinx==2.0.0 sphinxcontrib-apidoc==0.2.0 sphinxcontrib-websupport==1.0.1 stestr==2.0.0 diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py index 03169f4..f7b8b09 100644 --- a/releasenotes/source/conf.py +++ b/releasenotes/source/conf.py @@ -12,9 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Glance Release Notes documentation build configuration file, created by -# sphinx-quickstart on Tue Nov 3 17:40:50 2015. -# # This file is execfile()d with the current directory set to its # containing dir. # @@ -98,7 +95,7 @@ exclude_patterns = [] # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -106,6 +103,10 @@ pygments_style = 'sphinx' # If true, keep warnings as "system message" paragraphs in the built documents. 
# keep_warnings = False +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/sushy' +openstackdocs_use_storyboard = True + # -- Options for HTML output ---------------------------------------------- diff --git a/test-requirements.txt b/test-requirements.txt index 23517dd..7bd12d1 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -14,9 +14,9 @@ testtools>=2.2.0 # MIT flake8-import-order>=0.17.1 # LGPLv3 # docs -sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD -openstackdocstheme>=1.20.0 # Apache-2.0 +sphinx>=2.0.0,!=2.1.0 # BSD +openstackdocstheme>=2.2.1 # Apache-2.0 sphinxcontrib-apidoc>=0.2.0 # BSD # releasenotes -reno>=2.5.0 # Apache-2.0 +reno>=3.1.0 # Apache-2.0 -- GitLab From 2b996673c9faf57c4e0832df46b93760c5282c82 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Tue, 26 May 2020 12:06:32 +0200 Subject: [PATCH 228/303] Remove python-subunit, testtools and testscenarios None of these dependencies are used explicitly. Change-Id: Ib32e37267eed12c367d3cdc784f8c2817c71d30d --- test-requirements.txt | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 7bd12d1..e0ca2ea 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,15 +2,13 @@ # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
-hacking>=3.0.0,<3.1.0 # Apache-2.0 - +# unit tests coverage!=4.4,>=4.0 # Apache-2.0 -python-subunit>=1.0.0 # Apache-2.0/BSD - oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 -testscenarios>=0.4 # Apache-2.0/BSD -testtools>=2.2.0 # MIT + +# linters +hacking>=3.0.0,<3.1.0 # Apache-2.0 flake8-import-order>=0.17.1 # LGPLv3 # docs -- GitLab From 461a962be9a396bf96f36238108dcddd0fb2aa67 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Tue, 26 May 2020 15:49:06 -0500 Subject: [PATCH 229/303] Make Volume/VolumeCollection operations blocking The operations in Volume and VolumeCollection (initialize_volume, delete_volume, and create_volume) may be handled asynchronously by the Redfish service. This patch will leverage the new blocking parameter in the sushy connector module to make these POST/DELETE calls blocking. The logic to do the task monitoring will then be handled in connector.py. Change-Id: Ib525de0e7638ca3affcd08512be1159cebacd38f Story: 2003514 Task: 39863 --- ...make-volume-ops-blocking-de5c2ae032041d5d.yaml | 4 ++++ sushy/resources/system/storage/volume.py | 15 ++++++--------- .../unit/resources/system/storage/test_volume.py | 8 ++++---- 3 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml diff --git a/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml b/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml new file mode 100644 index 0000000..40b9e8c --- /dev/null +++ b/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Make POST and DELETE operations in Volume and VolumeCollection blocking. 
diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 36b0693..9740e4c 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -96,7 +96,8 @@ class Volume(base.ResourceBase): parameter='value', value=value, valid_values=valid_values) value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value] target_uri = self._get_initialize_action_element().target_uri - self._conn.post(target_uri, data={'InitializeType': value}) + self._conn.post(target_uri, data={'InitializeType': value}, + blocking=True) def delete_volume(self, payload=None): """Delete the volume. @@ -105,7 +106,7 @@ class Volume(base.ResourceBase): :raises: ConnectionError :raises: HTTPError """ - self._conn.delete(self._path, data=payload) + self._conn.delete(self._path, data=payload, blocking=True) class VolumeCollection(base.ResourceCollectionBase): @@ -147,15 +148,11 @@ class VolumeCollection(base.ResourceCollectionBase): :param payload: The payload representing the new volume to create. 
:raises: ConnectionError :raises: HTTPError + :returns: Newly created Volume resource or None if no Location header """ - r = self._conn.post(self._path, data=payload) + r = self._conn.post(self._path, data=payload, blocking=True) location = r.headers.get('Location') - new_volume = None if r.status_code == 201: if location: self.refresh() - new_volume = self.get_member(location) - elif r.status_code == 202: - # TODO(billdodd): TaskMonitor support to be added in subsequent PR - pass - return new_volume + return self.get_member(location) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 1c0de83..90900fe 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -59,7 +59,7 @@ class VolumeTestCase(base.TestCase): 'Volumes/1/Actions/Volume.Initialize' self.stor_volume.initialize_volume('fast') self.stor_volume._conn.post.assert_called_once_with( - target_uri, data={'InitializeType': 'Fast'}) + target_uri, data={'InitializeType': 'Fast'}, blocking=True) def test_initialize_volume_bad_value(self): self.assertRaisesRegex( @@ -70,13 +70,13 @@ class VolumeTestCase(base.TestCase): def test_delete_volume(self): self.stor_volume.delete_volume() self.stor_volume._conn.delete.assert_called_once_with( - self.stor_volume._path, data=None) + self.stor_volume._path, data=None, blocking=True) def test_delete_volume_with_payload(self): payload = {'@Redfish.OperationApplyTime': 'OnReset'} self.stor_volume.delete_volume(payload=payload) self.stor_volume._conn.delete.assert_called_once_with( - self.stor_volume._path, data=payload) + self.stor_volume._path, data=payload, blocking=True) class VolumeCollectionTestCase(base.TestCase): @@ -190,7 +190,7 @@ class VolumeCollectionTestCase(base.TestCase): new_vol = self.stor_vol_col.create_volume(payload) self.stor_vol_col._conn.post.assert_called_once_with( 
'/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', - data=payload) + data=payload, blocking=True) self.stor_vol_col.refresh.assert_called_once() self.assertIsNotNone(new_vol) self.assertEqual('4', new_vol.identity) -- GitLab From 976d2acf5fbc38ab66f5271034b318ee642ec1eb Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Wed, 27 May 2020 14:24:10 -0500 Subject: [PATCH 230/303] Add RAIDType properties to storage resources Recent versions of the Redfish specification added a RAIDType property to the Volume resource and a SupportedRAIDTypes property to the Storage resource. This patch adds those properties to the sushy resource models. Change-Id: I1e77561fcb0d85bbee80457fbb053d866838c243 Story: 2003514 Task: 36274 --- ...raid-type-properties-2090da5bea37c660.yaml | 5 ++ sushy/resources/system/storage/constants.py | 76 +++++++++++++++++++ sushy/resources/system/storage/mappings.py | 19 +++++ sushy/resources/system/storage/storage.py | 5 ++ sushy/resources/system/storage/volume.py | 3 + sushy/tests/unit/json_samples/storage.json | 4 + sushy/tests/unit/json_samples/volume4.json | 1 + .../resources/system/storage/test_storage.py | 2 + .../resources/system/storage/test_volume.py | 2 + 9 files changed, 117 insertions(+) create mode 100644 releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml diff --git a/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml b/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml new file mode 100644 index 0000000..7ebf2ee --- /dev/null +++ b/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Add RAIDType property to the Volume resource and SupportedRAIDTypes + property to the Storage resource. 
diff --git a/sushy/resources/system/storage/constants.py b/sushy/resources/system/storage/constants.py index 92b9c9c..e896a4a 100644 --- a/sushy/resources/system/storage/constants.py +++ b/sushy/resources/system/storage/constants.py @@ -39,3 +39,79 @@ VOLUME_TYPE_SPANNED_MIRRORS = 'spannedmirrors' VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY = 'spannedstripeswithparity' """The volume is a spanned set of devices which uses parity to retain redundant information.""" + +# RAIDType Types +RAID_TYPE_RAID0 = 'RAID0' +"""A placement policy where consecutive logical blocks of data are uniformly +distributed across a set of independent storage devices without offering any +form of redundancy.""" + +RAID_TYPE_RAID1 = 'RAID1' +"""A placement policy where each logical block of data is stored on more than +one independent storage device.""" + +RAID_TYPE_RAID3 = 'RAID3' +"""A placement policy using parity-based protection where logical bytes of +data are uniformly distributed across a set of independent storage devices and +where the parity is stored on a dedicated independent storage device.""" + +RAID_TYPE_RAID4 = 'RAID4' +"""A placement policy using parity-based protection where logical blocks of +data are uniformly distributed across a set of independent storage devices and +where the parity is stored on a dedicated independent storage device.""" + +RAID_TYPE_RAID5 = 'RAID5' +"""A placement policy using parity-based protection for storing stripes of 'n' +logical blocks of data and one logical block of parity across a set of 'n+1' +independent storage devices where the parity and data blocks are interleaved +across the storage devices.""" + +RAID_TYPE_RAID6 = 'RAID6' +"""A placement policy using parity-based protection for storing stripes of 'n' +logical blocks of data and two logical blocks of independent parity across a +set of 'n+2' independent storage devices where the parity and data blocks are +interleaved across the storage devices.""" + +RAID_TYPE_RAID10 = 'RAID10' +"""A 
placement policy that creates a striped device (RAID 0) over a set of +mirrored devices (RAID 1).""" + +RAID_TYPE_RAID01 = 'RAID01' +"""A data placement policy that creates a mirrored device (RAID 1) over a set +of striped devices (RAID 0).""" + +RAID_TYPE_RAID6TP = 'RAID6TP' +"""A placement policy that uses parity-based protection for storing stripes of +'n' logical blocks of data and three logical blocks of independent parity +across a set of 'n+3' independent storage devices where the parity and data +blocks are interleaved across the storage devices.""" + +RAID_TYPE_RAID1E = 'RAID1E' +"""A placement policy that uses a form of mirroring implemented over a set of +independent storage devices where logical blocks are duplicated on a pair of +independent storage devices so that data is uniformly distributed across the +storage devices.""" + +RAID_TYPE_RAID50 = 'RAID50' +"""A placement policy that uses a RAID 0 stripe set over two or more RAID 5 +sets of independent storage devices.""" + +RAID_TYPE_RAID60 = 'RAID60' +"""A placement policy that uses a RAID 0 stripe set over two or more RAID 6 +sets of independent storage devices.""" + +RAID_TYPE_RAID00 = 'RAID00' +"""A placement policy that creates a RAID 0 stripe set over two or more RAID 0 +sets.""" + +RAID_TYPE_RAID10E = 'RAID10E' +"""A placement policy that uses a RAID 0 stripe set over two or more RAID 10 +sets.""" + +RAID_TYPE_RAID1Triple = 'RAID1Triple' +"""A placement policy where each logical block of data is mirrored three times +across a set of three independent storage devices.""" + +RAID_TYPE_RAID10Triple = 'RAID10Triple' +"""A placement policy that uses a striped device (RAID 0) over a set of triple +mirrored devices (RAID 1Triple).""" diff --git a/sushy/resources/system/storage/mappings.py b/sushy/resources/system/storage/mappings.py index 462f9a7..8fec80d 100644 --- a/sushy/resources/system/storage/mappings.py +++ b/sushy/resources/system/storage/mappings.py @@ -31,3 +31,22 @@ VOLUME_TYPE_TYPE_MAP = { 
'SpannedStripesWithParity': store_cons.VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY } + +RAID_TYPE_TYPE_MAP = { + 'RAID0': store_cons.RAID_TYPE_RAID0, + 'RAID1': store_cons.RAID_TYPE_RAID1, + 'RAID3': store_cons.RAID_TYPE_RAID3, + 'RAID4': store_cons.RAID_TYPE_RAID4, + 'RAID5': store_cons.RAID_TYPE_RAID5, + 'RAID6': store_cons.RAID_TYPE_RAID6, + 'RAID10': store_cons.RAID_TYPE_RAID10, + 'RAID01': store_cons.RAID_TYPE_RAID01, + 'RAID6TP': store_cons.RAID_TYPE_RAID6TP, + 'RAID1E': store_cons.RAID_TYPE_RAID1E, + 'RAID50': store_cons.RAID_TYPE_RAID50, + 'RAID60': store_cons.RAID_TYPE_RAID60, + 'RAID00': store_cons.RAID_TYPE_RAID00, + 'RAID10E': store_cons.RAID_TYPE_RAID10E, + 'RAID1Triple': store_cons.RAID_TYPE_RAID1Triple, + 'RAID10Triple': store_cons.RAID_TYPE_RAID10Triple, +} diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py index 4f42103..f658437 100644 --- a/sushy/resources/system/storage/storage.py +++ b/sushy/resources/system/storage/storage.py @@ -19,6 +19,7 @@ from sushy.resources import base from sushy.resources import common from sushy.resources import mappings as res_maps from sushy.resources.system.storage import drive +from sushy.resources.system.storage import mappings from sushy.resources.system.storage import volume from sushy import utils @@ -52,6 +53,10 @@ class StorageControllersListField(base.ListField): res_maps.PROTOCOL_TYPE_VALUE_MAP) """The protocols which the controller can use tocommunicate with devices""" + raid_types = base.MappedListField('SupportedRAIDTypes', + mappings.RAID_TYPE_TYPE_MAP) + """The set of RAID types supported by the storage controller.""" + class Storage(base.ResourceBase): """This class represents the storage subsystem resources. 
diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 36b0693..af2ec1e 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -44,6 +44,9 @@ class Volume(base.ResourceBase): store_maps.VOLUME_TYPE_TYPE_MAP) """The type of this volume.""" + raid_type = base.MappedField('RAIDType', store_maps.RAID_TYPE_TYPE_MAP) + """The RAID type of this volume.""" + encrypted = base.Field('Encrypted', adapter=bool) """Is this Volume encrypted.""" diff --git a/sushy/tests/unit/json_samples/storage.json b/sushy/tests/unit/json_samples/storage.json index b9cbe91..d3a6322 100644 --- a/sushy/tests/unit/json_samples/storage.json +++ b/sushy/tests/unit/json_samples/storage.json @@ -39,6 +39,10 @@ "SupportedDeviceProtocols": [ "SAS", "SATA" + ], + "SupportedRAIDTypes": [ + "RAID0", + "RAID1" ] } ], diff --git a/sushy/tests/unit/json_samples/volume4.json b/sushy/tests/unit/json_samples/volume4.json index 0d76a62..e5c5be4 100644 --- a/sushy/tests/unit/json_samples/volume4.json +++ b/sushy/tests/unit/json_samples/volume4.json @@ -9,6 +9,7 @@ }, "Encrypted": false, "VolumeType": "Mirrored", + "RAIDType": "RAID1", "CapacityBytes": 107374182400, "Identifiers": [ { diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py index 00eb28b..365dc80 100644 --- a/sushy/tests/unit/resources/system/storage/test_storage.py +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -128,6 +128,8 @@ class StorageTestCase(base.TestCase): controller.controller_protocols) self.assertEqual([sushy.PROTOCOL_TYPE_SAS, sushy.PROTOCOL_TYPE_SATA], controller.device_protocols) + self.assertEqual([sushy.RAID_TYPE_RAID0, sushy.RAID_TYPE_RAID1], + controller.raid_types) def test_drives_after_refresh(self): self.storage.refresh() diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py 
b/sushy/tests/unit/resources/system/storage/test_volume.py index 1c0de83..da1d4cd 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -179,6 +179,7 @@ class VolumeCollectionTestCase(base.TestCase): payload = { 'Name': 'My Volume 4', 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', 'CapacityBytes': 107374182400 } with open('sushy/tests/unit/json_samples/volume4.json') as f: @@ -197,3 +198,4 @@ class VolumeCollectionTestCase(base.TestCase): self.assertEqual('My Volume 4', new_vol.name) self.assertEqual(107374182400, new_vol.capacity_bytes) self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) -- GitLab From e15b97b0df802b76881c51f52582e9107e81c7ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Herv=C3=A9=20Beraud?= Date: Tue, 9 Jun 2020 12:13:19 +0200 Subject: [PATCH 231/303] drop mock from lower-constraints The mock third party library was needed for mock support in py2 runtimes. Since we now only support py36 and later, we don't need it in lower-constraints. These changes will help us to drop `mock` from openstack/requirements Change-Id: I98fb197054d892fd3614ef3cafb29c5d0a2c4cfd --- lower-constraints.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 0baff17..ccbe9d8 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -19,7 +19,6 @@ keystoneauth1==3.4.0 linecache2==1.0.0 MarkupSafe==1.0 mccabe==0.2.1 -mock==2.0.0 mox3==0.20.0 openstackdocstheme==2.2.1 os-client-config==1.28.0 -- GitLab From 619445ce5446c6bdcec4776823a76f895d18460c Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Wed, 15 Jul 2020 15:21:41 +0200 Subject: [PATCH 232/303] Set min version of tox to 3.2.1 As recommended, since version 3.2.0 tox switches pip invocations to use the module -m pip instead of direct invocation. 
We set min version to 3.2.1 [1] to also fix the behavior of --parallel--safe-build [1] https://tox.readthedocs.io/en/latest/changelog.html#v3-2-1-2018-08-10 Change-Id: I27cfec6d998c0c0ae1f8a78d07335a907a020588 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7d0e4a8..ccc2371 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -minversion = 3.1.0 +minversion = 3.2.1 envlist = py3,pep8 skipsdist = True ignore_basepython_conflict=true -- GitLab From 8df68d8f630b1fc1785586df8bb46af478d4de39 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Tue, 21 Jul 2020 18:20:58 +0200 Subject: [PATCH 233/303] Update system schema to 1.10 Updates corresponding unit tests. Change-Id: I9dc958b0159eff189456b1ba192ebda3471f2f18 --- sushy/resources/system/system.py | 2 +- sushy/tests/unit/json_samples/system.json | 27 ++++++++++++------- sushy/tests/unit/resources/oem/test_fake.py | 2 +- .../unit/resources/system/test_system.py | 25 +++++++++-------- 4 files changed, 33 insertions(+), 23 deletions(-) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 350cf7b..5bb30ac 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -14,7 +14,7 @@ # under the License. # This is referred from Redfish standard schema. 
-# https://redfish.dmtf.org/schemas/ComputerSystem.v1_5_0.json +# https://redfish.dmtf.org/schemas/v1/ComputerSystem.v1_10_0.json import collections import logging diff --git a/sushy/tests/unit/json_samples/system.json b/sushy/tests/unit/json_samples/system.json index 55847a9..5881393 100644 --- a/sushy/tests/unit/json_samples/system.json +++ b/sushy/tests/unit/json_samples/system.json @@ -1,11 +1,12 @@ { - "@odata.type": "#ComputerSystem.v1_5_0.ComputerSystem", + "@odata.type": "#ComputerSystem.v1_10_0.ComputerSystem", "Id": "437XR1138R2", "Name": "WebFrontEnd483", "SystemType": "Physical", "AssetTag": "Chicago-45Z-2381", "Manufacturer": "Contoso", - "Model": "3500RX", + "Model": "3500", + "SubModel": "RX", "SKU": "8675309", "SerialNumber": "437XR1138R2", "PartNumber": "224071-J23", @@ -17,6 +18,9 @@ "Health": "OK", "HealthRollup": "OK" }, + "HostingRoles": [ + "ApplicationServer" + ], "IndicatorLED": "Off", "PowerState": "On", "Boot": { @@ -49,18 +53,18 @@ ], "Oem": { "Contoso": { - "@odata.type": "http://Contoso.com/Schema#Contoso.ComputerSystem", + "@odata.type": "#Contoso.ComputerSystem", "ProductionLocation": { "FacilityName": "PacWest Production Facility", "Country": "USA" } }, "Chipwise": { - "@odata.type": "http://Chipwise.com/Schema#Chipwise.ComputerSystem", + "@odata.type": "#Chipwise.ComputerSystem", "Style": "Executive" } }, - "BiosVersion": "P79 v1.33 (02/28/2015)", + "BiosVersion": "P79 v1.45 (12/06/2017)", "ProcessorSummary": { "Count": 2, "ProcessorFamily": "Multi-Core Intel(R) Xeon(R) processor 7xxx Series", @@ -72,6 +76,8 @@ }, "MemorySummary": { "TotalSystemMemoryGiB": 96, + "TotalSystemPersistentMemoryGiB": 0, + "MemoryMirroring": "None", "Status": { "State": "Enabled", "Health": "OK", @@ -79,7 +85,10 @@ } }, "Bios": { - "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS" + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Bios" + }, + "SecureBoot": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot" }, "Processors": { "@odata.id": 
"/redfish/v1/Systems/437XR1138R2/Processors" @@ -93,9 +102,6 @@ "SimpleStorage": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage" }, - "Storage": { - "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage" - }, "LogServices": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/LogServices" }, @@ -121,7 +127,8 @@ "GracefulRestart", "ForceRestart", "Nmi", - "ForceOn" + "ForceOn", + "PushPowerButton" ], "@Redfish.OperationApplyTimeSupport": { "@odata.type": "#Settings.v1_2_0.OperationApplyTimeSupport", diff --git a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py index e9bd7c2..1d585eb 100644 --- a/sushy/tests/unit/resources/oem/test_fake.py +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -37,7 +37,7 @@ class FakeOEMSystemExtensionTestCase(base.TestCase): self.sys_instance, 'Contoso') def test__parse_oem_attributes(self): - self.assertEqual('http://Contoso.com/Schema#Contoso.ComputerSystem', + self.assertEqual('#Contoso.ComputerSystem', self.fake_sys_oem_extn.data_type) self.assertEqual('PacWest Production Facility', ( self.fake_sys_oem_extn.production_location.facility_name)) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 105e0de..fbaf70e 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -28,7 +28,6 @@ from sushy.resources.system import bios from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor from sushy.resources.system import simple_storage -from sushy.resources.system.storage import storage from sushy.resources.system import system from sushy.tests.unit import base @@ -51,7 +50,7 @@ class SystemTestCase(base.TestCase): self.sys_inst._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_inst.redfish_version) self.assertEqual('Chicago-45Z-2381', self.sys_inst.asset_tag) - self.assertEqual('P79 v1.33 
(02/28/2015)', self.sys_inst.bios_version) + self.assertEqual('P79 v1.45 (12/06/2017)', self.sys_inst.bios_version) self.assertEqual('Web Front End node', self.sys_inst.description) self.assertEqual('web483', self.sys_inst.hostname) self.assertEqual('437XR1138R2', self.sys_inst.identity) @@ -127,7 +126,8 @@ class SystemTestCase(base.TestCase): "GracefulRestart", "ForceRestart", "Nmi", - "ForceOn" + "ForceOn", + "PushPowerButton" ], value.allowed_values) @@ -145,7 +145,8 @@ class SystemTestCase(base.TestCase): sushy.RESET_FORCE_OFF, sushy.RESET_FORCE_ON, sushy.RESET_ON, - sushy.RESET_NMI]) + sushy.RESET_NMI, + sushy.RESET_PUSH_POWER_BUTTON]) self.assertEqual(expected, values) self.assertIsInstance(values, set) @@ -538,7 +539,7 @@ class SystemTestCase(base.TestCase): simple_storage.SimpleStorageCollection) def test_storage_for_missing_attr(self): - self.sys_inst.json.pop('Storage') + self.sys_inst.json.pop('SimpleStorage') with self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Storage'): self.sys_inst.storage @@ -550,16 +551,17 @@ class SystemTestCase(base.TestCase): 'storage_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) # | WHEN | - actual_storage = self.sys_inst.storage + actual_storage = self.sys_inst.simple_storage # | THEN | - self.assertIsInstance(actual_storage, storage.StorageCollection) + self.assertIsInstance(actual_storage, + simple_storage.SimpleStorageCollection) self.conn.get.return_value.json.assert_called_once_with() # reset mock self.conn.get.return_value.json.reset_mock() # | WHEN & THEN | # tests for same object on invoking subsequently - self.assertIs(actual_storage, self.sys_inst.storage) + self.assertIs(actual_storage, self.sys_inst.simple_storage) self.conn.get.return_value.json.assert_not_called() def test_storage_on_refresh(self): @@ -568,8 +570,8 @@ class SystemTestCase(base.TestCase): 'storage_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) # | WHEN 
& THEN | - self.assertIsInstance(self.sys_inst.storage, - storage.StorageCollection) + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) # On refreshing the system instance... with open('sushy/tests/unit/json_samples/system.json') as f: @@ -583,7 +585,8 @@ class SystemTestCase(base.TestCase): 'storage_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | - self.assertIsInstance(self.sys_inst.storage, storage.StorageCollection) + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) def test_managers(self): # | GIVEN | -- GitLab From 2029f820476c69443596608736a47f4c8d132f1e Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Thu, 23 Jul 2020 19:03:36 +0200 Subject: [PATCH 234/303] Update version of hacking Now with a safe version of flake8! Change-Id: I6600309a72ed63649f8d0be71b9038fdaab78dfd --- lower-constraints.txt | 2 +- test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index ccbe9d8..00ff338 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -11,7 +11,7 @@ fixtures==3.0.0 flake8==2.5.5 flake8-import-order==0.17.1 future==0.18.2 -hacking==3.0.0 +hacking==3.1.0 imagesize==0.7.1 iso8601==0.1.11 Jinja2==2.10 diff --git a/test-requirements.txt b/test-requirements.txt index e0ca2ea..01da614 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,7 +8,7 @@ oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 # linters -hacking>=3.0.0,<3.1.0 # Apache-2.0 +hacking>=3.1.0,<3.2.0 # Apache-2.0 flake8-import-order>=0.17.1 # LGPLv3 # docs -- GitLab From 3d4555771623e35fbf64cb40a41135d9aa8ed5b4 Mon Sep 17 00:00:00 2001 From: Christopher Dearborn Date: Mon, 3 Aug 2020 09:45:38 -0400 Subject: [PATCH 235/303] Fix retrieving software & firmware inventory This patch fixes the UpdateService software_inventory and firmware_inventory 
properties so that they don't error out and they return the correct data. Also overhauled the inventory related test cases. Change-Id: I4013f642b78ebec5b84ae1dd81f70c43e7060722 Story: 2007970 Task: 40472 --- ...e-firmware-inventory-3e0e79e052aa76d9.yaml | 5 ++ .../resources/updateservice/updateservice.py | 38 +++++---- .../firmwareinventory_collection.json | 19 +++++ .../softwareinventory_collection.json | 14 ---- .../json_samples/updateservice_no_inv.json | 24 ++++++ .../updateservice/test_softwareinventory.py | 33 +++++++- .../updateservice/test_updateservice.py | 84 ++++++++++--------- 7 files changed, 146 insertions(+), 71 deletions(-) create mode 100644 releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml create mode 100644 sushy/tests/unit/json_samples/firmwareinventory_collection.json delete mode 100644 sushy/tests/unit/json_samples/softwareinventory_collection.json create mode 100644 sushy/tests/unit/json_samples/updateservice_no_inv.json diff --git a/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml b/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml new file mode 100644 index 0000000..12ff8a1 --- /dev/null +++ b/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Fixes bugs in the ``UpdateService.software_inventory`` and + ``UpdateService.firmware_inventory`` properties making them operational. 
diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index c075d08..b13c30c 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -60,6 +60,10 @@ class UpdateService(base.ResourceBase): _actions = ActionsField('Actions', required=True) + _firmware_inventory_path = base.Field(['FirmwareInventory', '@odata.id']) + + _software_inventory_path = base.Field(['SoftwareInventory', '@odata.id']) + def __init__(self, connector, identity, redfish_version=None, registries=None): """A class representing a UpdateService @@ -142,26 +146,32 @@ class UpdateService(base.ResourceBase): data['Targets'] = targets self._conn.post(target_uri, data=data) - def _get_software_inventory_collection_path(self): - """Helper function to find the SoftwareInventoryCollections path""" - soft_inv_col = self.json.get('SoftwareInventory') - if not soft_inv_col: - raise exceptions.MissingAttributeError( - attribute='SoftwareInventory', resource=self._path) - return soft_inv_col.get('@odata.id') - @property @utils.cache_it def software_inventory(self): - """Property to reference SoftwareInventoryCollection instance""" + """Property to reference SoftwareInventory collection instance""" + if not self._software_inventory_path: + raise exceptions.MissingAttributeError( + attribute='SoftwareInventory/@odata.id', + resource=self._software_inventory_path) + return softwareinventory.SoftwareInventoryCollection( - self._conn, self._get_software_inventory_collection_path, - redfish_version=self.redfish_version, registries=self.registries) + self._conn, + self._software_inventory_path, + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it def firmware_inventory(self): - """Property to reference SoftwareInventoryCollection instance""" + """Property to reference FirmwareInventory collection instance""" + if not self._firmware_inventory_path: + raise 
exceptions.MissingAttributeError( + attribute='FirmwareInventory/@odata.id', + resource=self._firmware_inventory_path) + return softwareinventory.SoftwareInventoryCollection( - self._conn, self._get_software_inventory_collection_path, - redfish_version=self.redfish_version, registries=self.registries) + self._conn, + self._firmware_inventory_path, + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/tests/unit/json_samples/firmwareinventory_collection.json b/sushy/tests/unit/json_samples/firmwareinventory_collection.json new file mode 100644 index 0000000..36a7b81 --- /dev/null +++ b/sushy/tests/unit/json_samples/firmwareinventory_collection.json @@ -0,0 +1,19 @@ +{ + "@odata.context": "/redfish/v1/$metadata#SoftwareInventoryCollection.SoftwareInventoryCollection", + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory", + "@odata.type": "#SoftwareInventoryCollection.SoftwareInventoryCollection", + "Description": "Collection of Firmware Inventory", + "Members": [ + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Current-101560-25.5.6.0009" + }, + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Installed-101560-25.5.6.0009" + }, + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Previous-102302-18.8.9" + } + ], + "Members@odata.count": 3, + "Name": "Firmware Inventory Collection" +} diff --git a/sushy/tests/unit/json_samples/softwareinventory_collection.json b/sushy/tests/unit/json_samples/softwareinventory_collection.json deleted file mode 100644 index 71c58e4..0000000 --- a/sushy/tests/unit/json_samples/softwareinventory_collection.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "@odata.type": "#SoftwareInventoryCollection.v1_4_0.SoftwareInventoryCollection", - "@odata.id": "/redfish/v1/UpdateService/SoftwareInventory", - "Name": "Software Inventory Collection", - "Members@odata.count": 2, - "Members": [ - { - "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory" - }, - { - 
"@odata.id": "/redfish/v1/UpdateService/SoftwareInventory" - } - ] -} diff --git a/sushy/tests/unit/json_samples/updateservice_no_inv.json b/sushy/tests/unit/json_samples/updateservice_no_inv.json new file mode 100644 index 0000000..c26a79f --- /dev/null +++ b/sushy/tests/unit/json_samples/updateservice_no_inv.json @@ -0,0 +1,24 @@ +{ + "@odata.type": "#UpdateService.v1_2_1.UpdateService", + "Id": "UpdateService", + "Name": "Update service", + "Status": { + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "ServiceEnabled": true, + "HttpPushUri": "/FWUpdate", + "HttpPushUriTargets": ["/FWUpdate"], + "HttpPushUriTargetsBusy": false, + "Actions": { + "#UpdateService.SimpleUpdate": { + "target": "/redfish/v1/UpdateService/Actions/SimpleUpdate", + "@Redfish.ActionInfo": "/redfish/v1/UpdateService/SimpleUpdateActionInfo" + }, + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#UpdateService.UpdateService", + "@odata.id": "/redfish/v1/UpdateService" +} diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py index b91a83d..1601579 100644 --- a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -68,27 +68,52 @@ class SoftwareInventoryCollectionTestCase(base.TestCase): super(SoftwareInventoryCollectionTestCase, self).setUp() conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'softwareinventory_collection.json') as f: + 'firmwareinventory_collection.json') as f: self.json_doc = json.load(f) conn.get.return_value.json.return_value = self.json_doc self.soft_inv_col = softwareinventory.SoftwareInventoryCollection( - conn, '/redfish/v1/UpdateService/SoftwareInventory', + conn, '/redfish/v1/UpdateService/FirmwareInventory', redfish_version='1.3.0') def test__parse_attributes(self): self.soft_inv_col._parse_attributes(self.json_doc) 
self.assertEqual('1.3.0', self.soft_inv_col.redfish_version) self.assertEqual( - 'Software Inventory Collection', + 'Firmware Inventory Collection', self.soft_inv_col.name) @mock.patch.object( softwareinventory, 'SoftwareInventory', autospec=True) def test_get_member(self, mock_softwareinventory): - path = '/redfish/v1/UpdateService/SoftwareInventory/1' + path = ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Current-102303-19.0.12') self.soft_inv_col.get_member(path) mock_softwareinventory.assert_called_once_with( self.soft_inv_col._conn, path, self.soft_inv_col.redfish_version, None) + + @mock.patch.object( + softwareinventory, 'SoftwareInventory', autospec=True) + def test_get_members(self, mock_softwareinventory): + members = self.soft_inv_col.get_members() + calls = [ + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Current-101560-25.5.6.0009'), + self.soft_inv_col.redfish_version, None), + + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Installed-101560-25.5.6.0009'), + self.soft_inv_col.redfish_version, None), + + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Previous-102302-18.8.9'), + self.soft_inv_col.redfish_version, None) + ] + mock_softwareinventory.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(3, len(members)) diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index fbbcb7a..64b8f26 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -98,42 +98,48 @@ class UpdateServiceTestCase(base.TestCase): targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', transfer_protocol='ROYAL') - def test_software_inventory(self): - # | GIVEN | - self.conn.get.return_value.json.reset_mock() - with 
open('sushy/tests/unit/json_samples/' - 'softwareinventory_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) - # | WHEN | - actual_software_inventory = self.upd_serv.software_inventory - # | THEN | - self.assertIsInstance(actual_software_inventory, - softwareinventory.SoftwareInventoryCollection) - self.conn.get.return_value.json.assert_called_once_with() - - # reset mock - self.conn.get.return_value.json.reset_mock() - # | WHEN & THEN | - self.assertIs(actual_software_inventory, - self.upd_serv.software_inventory) - self.conn.get.return_value.json.assert_not_called() - - def test_firmware_inventory(self): - # | GIVEN | - self.conn.get.return_value.json.reset_mock() - with open('sushy/tests/unit/json_samples/' - 'softwareinventory_collection.json') as f: - self.conn.get.return_value.json.return_value = json.load(f) - # | WHEN | - actual_firmware_inventory = self.upd_serv.firmware_inventory - # | THEN | - self.assertIsInstance(actual_firmware_inventory, - softwareinventory.SoftwareInventoryCollection) - self.conn.get.return_value.json.assert_called_once_with() - - # reset mock - self.conn.get.return_value.json.reset_mock() - # | WHEN & THEN | - self.assertIs(actual_firmware_inventory, - self.upd_serv.firmware_inventory) - self.conn.get.return_value.json.assert_not_called() + @mock.patch.object(softwareinventory, 'SoftwareInventoryCollection', + autospec=True) + def test_software_inventory(self, software_inventory_collection_mock): + self.upd_serv.software_inventory + software_inventory_collection_mock.assert_called_once_with( + self.conn, '/redfish/v1/UpdateService/SoftwareInventory', + self.upd_serv.redfish_version, + self.upd_serv._registries) + + @mock.patch.object(softwareinventory, 'SoftwareInventoryCollection', + autospec=True) + def test_firmware_inventory(self, software_inventory_collection_mock): + self.upd_serv.firmware_inventory + software_inventory_collection_mock.assert_called_once_with( + self.conn, 
'/redfish/v1/UpdateService/FirmwareInventory', + self.upd_serv.redfish_version, + self.upd_serv._registries) + + +class UpdateServiceNoInvTestCase(base.TestCase): + + def setUp(self): + super(UpdateServiceNoInvTestCase, self).setUp() + self.conn = mock.Mock() + no_inv_json = 'sushy/tests/unit/json_samples/updateservice_no_inv.json' + with open(no_inv_json) as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.upd_serv = updateservice.UpdateService( + self.conn, '/redfish/v1/UpdateService/UpdateService', + redfish_version='1.3.0') + + def test_software_inventory_when_sw_inv_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'SoftwareInventory/@odata.id', + getattr, self.upd_serv, 'software_inventory') + + def test_firmware_inventory_when_fw_inv_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'FirmwareInventory/@odata.id', + getattr, self.upd_serv, 'firmware_inventory') -- GitLab From d73c2af910de789cb0cf906f59904423f9a197e7 Mon Sep 17 00:00:00 2001 From: Julia Kreger Date: Wed, 12 Aug 2020 11:12:40 -0700 Subject: [PATCH 236/303] Include extended information in debugging output In order to help developers and operators kind of gain a better understanding of what might be happening, we should log the extended information. OEM libraries extending upon sushy should be returning this information in the errors surfaced, as applicable, however that is not always possible so debug does seem a moderately reasonable middle ground. 
Change-Id: I3e28a3c11160fbe6fa6fe21c0f087f33d832839c --- sushy/exceptions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 3d6d18b..e01a8af 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -106,7 +106,8 @@ class HTTPError(SushyError): ext_info = self.body.get('@Message.ExtendedInfo', [{}]) index = self._get_most_severe_msg_index(ext_info) self.detail = ext_info[index].get('Message', self.detail) - error = '%s: %s' % (self.code, self.detail or 'unknown error') + error = '%s: %s extended: %s' % ( + self.code, self.detail or 'unknown error', ext_info or None) kwargs = {'method': method, 'url': url, 'code': self.status_code, 'error': error} -- GitLab From 0e27eba2f55b027a5ec1a648753926d1e874eb85 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Wed, 19 Aug 2020 17:34:47 +0200 Subject: [PATCH 237/303] Add a CI job with UEFI+vmedia and clean up the job definitions Reuse parent jobs from ironic and rename the jobs accordingly. 
Change-Id: Ic64536c0ce51820461d82f1d0d498e2754a58ed7 --- zuul.d/project.yaml | 6 ++++-- zuul.d/sushy-jobs.yaml | 21 +++++++++++----------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 68411bc..edc2d25 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -8,7 +8,9 @@ - release-notes-jobs-python3 check: jobs: - - sushy-tempest-ironic-partition-redfish-src + - sushy-tempest-partition-bios-redfish-pxe + - sushy-tempest-partition-uefi-redfish-vmedia gate: jobs: - - sushy-tempest-ironic-partition-redfish-src + - sushy-tempest-partition-bios-redfish-pxe + - sushy-tempest-partition-uefi-redfish-vmedia diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index e6c4d6a..7914583 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -1,16 +1,17 @@ - job: - name: sushy-tempest-ironic-partition-redfish-src - parent: ironic-base + name: sushy-tempest-partition-bios-redfish-pxe + parent: ironic-tempest-partition-bios-redfish-pxe + irrelevant-files: + - ^test-requirements.txt$ + - ^sushy/tests/.*$ + required-projects: + - openstack/sushy + +- job: + name: sushy-tempest-partition-uefi-redfish-vmedia + parent: ironic-tempest-partition-uefi-redfish-vmedia irrelevant-files: - ^test-requirements.txt$ - ^sushy/tests/.*$ - timeout: 5400 required-projects: - openstack/sushy - vars: - devstack_localrc: - IRONIC_DEFAULT_BOOT_OPTION: netboot - IRONIC_DEPLOY_DRIVER: redfish - IRONIC_ENABLED_HARDWARE_TYPES: redfish - IRONIC_DEFAULT_RESCUE_INTERFACE: "" - EBTABLES_RACE_FIX: True -- GitLab From ec5ceefd678b7c7ec3585ae6cd6d63d06a048e62 Mon Sep 17 00:00:00 2001 From: Julia Kreger Date: Tue, 18 Aug 2020 08:34:19 -0700 Subject: [PATCH 238/303] Remove auth token header completely when error occurs While investigating a report indicating session incompatibility with Cisco BMCs, I noticed we didn't completely invalidate the session token, and only set it to None for the request to obtain a new token. 
This may not actually fix anything, but an authentication error should completely invalidate any cached authentication data and force us through re-authentication. Additionally, we have a bit of a lack of debug and error logging around the session retry logic in general. Without additional logging, we have no way to really figure out exactly what is occurring with the BMC when we attempt to reauthenticate the session. Change-Id: I9eb238d1bbaf522d03ee3fce12fc0ab80c1b69b4 --- sushy/auth.py | 10 ++++++++ sushy/connector.py | 24 +++++++++++++++---- .../sessionservice/sessionservice.py | 5 ++-- .../sessionservice/test_sessionservice.py | 4 +--- sushy/tests/unit/test_auth.py | 18 ++++++++++++++ sushy/tests/unit/test_connector.py | 1 + 6 files changed, 51 insertions(+), 11 deletions(-) diff --git a/sushy/auth.py b/sushy/auth.py index 37ccf6f..ccb6df5 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -199,6 +199,16 @@ class SessionAuth(AuthBase): """Reset active session related attributes.""" self._session_key = None self._session_resource_id = None + # Requests session object data is merged with user submitted data + # per https://requests.readthedocs.io/en/master/user/advanced/ + # so we need to clear data explicitly set on the session too. + self._connector._session.auth = None + if 'X-Auth-Token' in self._connector._session.headers: + # Delete the token value that was saved to the session + # as otherwise we would end up with a dictionary containing + # a {'X-Auth-Token': null} being sent across to the remote + # bmc. + del self._connector._session.headers['X-Auth-Token'] class SessionOrBasicAuth(SessionAuth): diff --git a/sushy/connector.py b/sushy/connector.py index 208980d..d9ffe14 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -113,15 +113,29 @@ class Connector(object): # Attempt to re-establish a session. 
try: exceptions.raise_for_response(method, url, response) - except exceptions.AccessError: + except exceptions.AccessError as e: if self._auth.can_refresh_session(): - self._auth.refresh_session() + try: + self._auth.refresh_session() + except exceptions.AccessError as refresh_exc: + LOG.error("A failure occured while attempting to refresh " + "the session. Error: %s", refresh_exc.message) + raise LOG.debug("Authentication refreshed successfully, " "retrying the call.") - response = self._session.request(method, url, json=data, - headers=headers, - **extra_session_req_kwargs) + try: + response = self._session.request( + method, url, json=data, + headers=headers, + **extra_session_req_kwargs) + except exceptions.HTTPError as retry_exc: + LOG.error("Failure occured while attempting to retry " + "request after refreshing the session. Error: " + "%s", retry_exc.message) + raise else: + LOG.error("Authentication error detected. Cannot proceed: " + "%s", e.message) raise if blocking and response.status_code == 202: diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index aea60e0..ccc69ac 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -106,9 +106,8 @@ class SessionService(base.ResourceBase): target_uri = self.path + '/Sessions' data = {'UserName': username, 'Password': password} - headers = {'X-Auth-Token': None} - - rsp = self._conn.post(target_uri, data=data, headers=headers) + LOG.debug("Requesting new session from %s.", target_uri) + rsp = self._conn.post(target_uri, data=data) session_key = rsp.headers.get('X-Auth-Token') if session_key is None: raise exceptions.MissingXAuthToken( diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index 7366d7c..1218688 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ 
b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -96,10 +96,8 @@ class SessionServiceTestCase(base.TestCase): session_uri) uri = self.sess_serv_inst.path + '/Sessions' data = {'UserName': 'foo', 'Password': 'secret'} - headers = {'X-Auth-Token': None} self.conn.post.assert_called_once_with(uri, - data=data, - headers=headers) + data=data) def test_create_session_missing_x_auth_token(self): with open('sushy/tests/unit/json_samples/' diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py index e91b598..e477a72 100644 --- a/sushy/tests/unit/test_auth.py +++ b/sushy/tests/unit/test_auth.py @@ -15,6 +15,8 @@ from unittest import mock +import requests + from sushy import auth from sushy import connector from sushy import exceptions @@ -85,6 +87,9 @@ class SessionAuthTestCase(base.TestCase): self.sess_auth = auth.SessionAuth(self.username, self.password) self.conn = mock_connector.return_value + self.conn._session = mock.Mock(spec=requests.Session) + self.conn._session.headers = {} + self.conn._session.auth = None self.root = mock_root.return_value def test_init(self): @@ -108,8 +113,10 @@ class SessionAuthTestCase(base.TestCase): self.sess_auth.get_session_resource_id()) def test_reset_session_attrs(self): + self.sess_auth.set_context(self.root, self.conn) self.sess_auth._session_key = self.sess_key self.sess_auth._session_resource_id = self.sess_uri + self.conn._session.headers = {'X-Auth-Token': 'meow'} self.assertEqual(self.sess_uri, self.sess_auth.get_session_resource_id()) self.assertEqual(self.sess_key, @@ -117,6 +124,7 @@ class SessionAuthTestCase(base.TestCase): self.sess_auth.reset_session_attrs() self.assertIsNone(self.sess_auth.get_session_resource_id()) self.assertIsNone(self.sess_auth.get_session_key()) + self.assertNotIn('X-Auth-Token', self.conn._session.headers) def test_set_context(self): self.sess_auth.set_context(self.root, self.conn) @@ -162,6 +170,7 @@ class SessionAuthTestCase(base.TestCase): 
mock_sess_serv.create_session.return_value = (self.sess_key, self.sess_uri) self.root.get_session_service.return_value = mock_sess_serv + self._session = mock.Mock(spec=requests.Session) self.sess_auth.set_context(self.root, self.conn) self.sess_auth.refresh_session() self.assertEqual(self.sess_uri, @@ -220,7 +229,11 @@ class SessionOrBasicAuthTestCase(base.TestCase): self.sess_uri = ('https://testing:8000/redfish/v1/' 'SessionService/Sessions/testing') self.conn = mock_connector.return_value + self.conn._session = mock.Mock(spec=requests.Session) + self.conn._session.headers = {} + self.conn._session.auth = None self.root = mock_root.return_value + self.sess_basic_auth = auth.SessionOrBasicAuth(self.username, self.password) @@ -245,8 +258,11 @@ class SessionOrBasicAuthTestCase(base.TestCase): self.sess_basic_auth.get_session_resource_id()) def test_reset_session_attrs(self): + self.sess_basic_auth.set_context(self.root, self.conn) self.sess_basic_auth._session_key = self.sess_key self.sess_basic_auth._session_resource_id = self.sess_uri + self.conn._session.auth = 'meow' + self.conn._session.headers = {'X-Auth-Token': 'meow'} self.assertEqual(self.sess_uri, self.sess_basic_auth.get_session_resource_id()) self.assertEqual(self.sess_key, @@ -254,6 +270,8 @@ class SessionOrBasicAuthTestCase(base.TestCase): self.sess_basic_auth.reset_session_attrs() self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.assertNotIn('X-Auth-Token', self.conn._session.headers) + self.assertIsNone(self.conn._session.auth) def test_set_context(self): self.sess_basic_auth.set_context(self.root, self.conn) diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 542daf2..462be6f 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -285,6 +285,7 @@ class ConnectorOpTestCase(base.TestCase): response = self.conn._op('POST', path='fake/path', 
data=self.data, headers=self.headers) self.auth.refresh_session.assert_called_with() + self.auth.can_refresh_session.assert_called_with() self.assertEqual(response.json, second_response.json) def test_connection_error(self): -- GitLab From 5ffe8ee3b5ae569e5a2241cac5f25fae1bddad3e Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Tue, 25 Aug 2020 15:39:18 +0200 Subject: [PATCH 239/303] Do not log passwords and auth tokens when using SessionService Change-Id: Id8c5533d884eaf23b802122f7550791c5012d830 --- .../notes/no-passwords-295207ac891d27ab.yaml | 5 +++++ sushy/connector.py | 7 +++++-- sushy/tests/unit/test_utils.py | 7 +++++++ sushy/utils.py | 12 ++++++++++++ 4 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 releasenotes/notes/no-passwords-295207ac891d27ab.yaml diff --git a/releasenotes/notes/no-passwords-295207ac891d27ab.yaml b/releasenotes/notes/no-passwords-295207ac891d27ab.yaml new file mode 100644 index 0000000..d53e5a5 --- /dev/null +++ b/releasenotes/notes/no-passwords-295207ac891d27ab.yaml @@ -0,0 +1,5 @@ +--- +security: + - | + No longer logs passwords and auth tokens in DEBUG mode when using + SessionService for authentication. 
diff --git a/sushy/connector.py b/sushy/connector.py index d9ffe14..34a730f 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -21,6 +21,7 @@ import requests from sushy import exceptions from sushy.resources.task_monitor import TaskMonitor +from sushy import utils LOG = logging.getLogger(__name__) @@ -94,8 +95,10 @@ class Connector(object): LOG.debug('HTTP request: %(method)s %(url)s; headers: %(headers)s; ' 'body: %(data)s; blocking: %(blocking)s; timeout: ' '%(timeout)s; session arguments: %(session)s;', - {'method': method, 'url': url, 'headers': headers, - 'data': data, 'blocking': blocking, 'timeout': timeout, + {'method': method, 'url': url, + 'headers': utils.sanitize(headers), + 'data': utils.sanitize(data), + 'blocking': blocking, 'timeout': timeout, 'session': extra_session_req_kwargs}) try: response = self._session.request(method, url, json=data, diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index db3dfb8..f8cd84e 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -231,3 +231,10 @@ class CacheTestCase(base.TestCase): def test_cache_clear_failure(self): self.assertRaises( TypeError, utils.cache_clear, self.res, False, only_these=10) + + def test_sanitize(self): + orig = {'UserName': 'admin', 'Password': 'pwd', + 'nested': {'answer': 42, 'password': 'secret'}} + expected = {'UserName': 'admin', 'Password': '***', + 'nested': {'answer': 42, 'password': '***'}} + self.assertEqual(expected, utils.sanitize(orig)) diff --git a/sushy/utils.py b/sushy/utils.py index 98d4596..7835c98 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -328,3 +328,15 @@ def synchronized(wrapped): return wrapped(*args, **kwargs) return wrapper + + +_REMOVE = frozenset(['password', 'x-auth-token']) + + +def sanitize(item): + """Remove passwords from the item.""" + if isinstance(item, dict): + return {key: ('***' if key.lower() in _REMOVE else sanitize(value)) + for key, value in item.items()} + else: + return 
item -- GitLab From 03f5818ee2e5e5e2e74ab0dc7f7c5d50e13cfbe2 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Tue, 25 Aug 2020 16:10:36 +0200 Subject: [PATCH 240/303] Less scary warning when GET /redfish/v1/SessionService fails It seems normal (at least de facto) to have this endpoint authenticated. Avoid confusing operators by a warning, explain the situation better instead. Change-Id: I5b69c3f732476a9c25db6d09b47ca42ecce06114 --- sushy/resources/sessionservice/sessionservice.py | 7 ++++--- .../unit/resources/sessionservice/test_sessionservice.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index ccc69ac..6825aa0 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -59,9 +59,10 @@ class SessionService(base.ResourceBase): connector, identity, redfish_version, registries) except exceptions.AccessError as ae: - LOG.warning('Received access error "%(ae)s". ' - 'Unable to refresh SessionService.', - {'ae': ae}) + LOG.debug('Received access error "%s" when trying to refresh the ' + 'SessionService. 
If this happens before ' + 'authentication, we\'ll have to guess the Sessions URL.', + ae) def _get_sessions_collection_path(self): """Helper function to find the SessionCollections path""" diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py index 1218688..61a102f 100644 --- a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -45,7 +45,7 @@ class SessionServiceTestCase(base.TestCase): 'GET', 'any_url', mock.MagicMock())) sessionservice.SessionService( self.conn, '/redfish/v1/SessionService', redfish_version='1.0.2') - self.assertTrue(mock_LOG.warning.called) + self.assertTrue(mock_LOG.debug.called) def test__parse_attributes(self): self.sess_serv_inst._parse_attributes(self.json_doc) -- GitLab From e730987a186be68de9555dfef66661f68b10c57a Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Wed, 26 Aug 2020 16:40:28 +0200 Subject: [PATCH 241/303] Use Sessions URL from root service if it's provided If the root endpoint provides the link to Sessions, we should just use it. 
Change-Id: I825930623c84d3b6e5994e29f963b4f5af7afe4d Story: 2008059 Task: 40742 --- ...essions-url-from-root-8b8eca57dc450705.yaml | 8 ++++++++ sushy/auth.py | 10 +++++++++- sushy/main.py | 10 ++++++++++ .../resources/sessionservice/sessionservice.py | 18 ++++++++++++------ sushy/tests/unit/test_main.py | 12 ++++++++++-- 5 files changed, 49 insertions(+), 9 deletions(-) create mode 100644 releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml diff --git a/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml b/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml new file mode 100644 index 0000000..82b1758 --- /dev/null +++ b/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Instead of trying to GET /redfish/v1/SessionService, which is usually + reachable via authentication, fail, and then guess + /redfish/v1/SessionService/Sessions as Sessions URL, we try first to use + directly the Sessions URL provided by the root service, if available. 
+ diff --git a/sushy/auth.py b/sushy/auth.py index ccb6df5..479aa26 100644 --- a/sushy/auth.py +++ b/sushy/auth.py @@ -149,10 +149,18 @@ class SessionAuth(AuthBase): :raises: AccessError :raises: HTTPError """ + target_uri = None + try: + target_uri = self._root_resource.get_sessions_path() + except exceptions.MissingAttributeError: + LOG.debug('Missing Sessions attribute under Links in Root ' + 'Service, we\'ll try to determine it from Session ' + 'Service') session_service = self._root_resource.get_session_service() session_auth_token, session_uri = ( session_service.create_session(self._username, - self._password)) + self._password, + target_uri=target_uri)) self._session_key = session_auth_token self._session_resource_id = session_uri self._connector.set_http_session_auth(session_auth_token) diff --git a/sushy/main.py b/sushy/main.py index be8b0ee..75777d7 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -365,6 +365,16 @@ class Sushy(base.ResourceBase): self._conn, self._session_service_path, redfish_version=self.redfish_version) + def get_sessions_path(self): + """Returns the Sessions url""" + + try: + links_url = self.json.get('Links') + return links_url['Sessions']['@odata.id'] + except (TypeError, KeyError): + raise exceptions.MissingAttributeError( + attribute='Links/Sessions/@data.id', resource=self.path) + def get_session(self, identity): """Given the identity return a Session object diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py index 6825aa0..24f5276 100644 --- a/sushy/resources/sessionservice/sessionservice.py +++ b/sushy/resources/sessionservice/sessionservice.py @@ -91,20 +91,26 @@ class SessionService(base.ResourceBase): """ self._conn.delete(session_uri) - def create_session(self, username, password): + def create_session(self, username, password, target_uri=None): """This function will try to create a session. 
+ :param username: the username of the user requesting a new session + :param password: the password associated to the user requesting + a new session + :param target_uri: the "Sessions" uri, usually in the form: + '/redfish/v1/SessionService/Sessions' :returns: A session key and uri in the form of a tuple :raises: MissingXAuthToken :raises: ConnectionError :raises: AccessError :raises: HTTPError """ - try: - target_uri = self._get_sessions_collection_path() - except Exception: - # Defaulting to /Sessions - target_uri = self.path + '/Sessions' + if not target_uri: + try: + target_uri = self._get_sessions_collection_path() + except Exception: + # Defaulting to /Sessions + target_uri = self.path + '/Sessions' data = {'UserName': username, 'Password': password} LOG.debug("Requesting new session from %s.", target_uri) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 075c088..4a75e79 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -390,6 +390,10 @@ class MainTestCase(base.TestCase): registries[1] self.assertEqual(1, mock_registries.__getitem__.call_count) + def test_get_sessions_path(self): + expected = '/redfish/v1/SessionService/Sessions' + self.assertEqual(expected, self.root.get_sessions_path()) + class BareMinimumMainTestCase(base.TestCase): @@ -432,11 +436,15 @@ class BareMinimumMainTestCase(base.TestCase): exceptions.MissingAttributeError, 'UpdateService/@odata.id', self.root.get_update_service) - def test_get_composition_service_when_compositionservice_attr_absent( - self): + def test_get_composition_service_when_compositionservice_attr_absent(self): self.assertRaisesRegex( exceptions.MissingAttributeError, 'CompositionService/@odata.id', self.root.get_composition_service) def test__get_registry_collection_when_registries_attr_absent(self): self.assertIsNone(self.root._get_registry_collection()) + + def test_get_sessions_path_fail(self): + self.assertRaisesRegex( + 
exceptions.MissingAttributeError, + 'Links/Sessions/@data.id', self.root.get_sessions_path) -- GitLab From 46b5d38d8417d64e45ed0b949647da10e64f695f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Tue, 1 Sep 2020 06:31:12 -0400 Subject: [PATCH 242/303] Add BIOS update apply time and maintenance window Add option to specify BIOS attribute apply time and maintenance window. Expose maintenance_window to see if any default maintenance window set. Story: 2008100 Task: 40804 Change-Id: Iebf78dd71efd9dec65ce7ff0503a0d481532277c Co-Authored-By: Eric Barrera Eric_Barrera@Dell.com Co-Authored-By: Richard Pioso richard.pioso@dell.com --- ...time-support-to-bios-315ebad429dcab3d.yaml | 12 +++ sushy/resources/constants.py | 7 ++ sushy/resources/mappings.py | 11 +++ sushy/resources/settings.py | 5 ++ sushy/resources/system/bios.py | 71 ++++++++++++++- sushy/tests/unit/json_samples/bios.json | 11 ++- sushy/tests/unit/json_samples/settings.json | 6 +- .../tests/unit/resources/system/test_bios.py | 86 +++++++++++++++++++ sushy/tests/unit/resources/test_settings.py | 3 + 9 files changed, 206 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml diff --git a/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml new file mode 100644 index 0000000..d49dfef --- /dev/null +++ b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml @@ -0,0 +1,12 @@ +--- +features: + - | + Adds support for ``bios`` resource to allow specifying BIOS attribute + update time and maintenance window when updating BIOS attributes using + ``set_attribute`` or ``set_attributes``. + + The update is backward compatible and when new parameters not passed, they + default to ``None``. 
diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index f0572b5..72c0892 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -143,3 +143,10 @@ DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' + +# Apply time constants + +APPLY_TIME_IMMEDIATE = 'immediate' +APPLY_TIME_ON_RESET = 'on reset' +APPLY_TIME_MAINT_START = 'at maintenance window start' +APPLY_TIME_MAINT_RESET = 'in maintenance window on reset' diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index afe9fe7..c81fed2 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -115,3 +115,14 @@ DUR_NAME_FORMAT_VALUE_MAP = { 'UUID': res_cons.DURABLE_NAME_FORMAT_UUID, 'iQN': res_cons.DURABLE_NAME_FORMAT_iQN, } + +APPLY_TIME_VALUE_MAP = { + 'Immediate': res_cons.APPLY_TIME_IMMEDIATE, + 'OnReset': res_cons.APPLY_TIME_ON_RESET, + 'AtMaintenanceWindowStart': + res_cons.APPLY_TIME_MAINT_START, + 'InMaintenanceWindowOnReset': + res_cons.APPLY_TIME_MAINT_RESET, +} + +APPLY_TIME_VALUE_MAP_REV = utils.revert_dictionary(APPLY_TIME_VALUE_MAP) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index e084545..37b63f3 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -154,6 +154,11 @@ class SettingsField(base.CompositeField): to change this resource """ + _supported_apply_times = base.MappedListField( + 'SupportedApplyTimes', + res_maps.APPLY_TIME_VALUE_MAP) + """List of supported apply times""" + @property def maintenance_window(self): """MaintenanceWindow field diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index b1ae6fa..666cd0b 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -19,6 +19,7 @@ import logging from sushy import exceptions from sushy.resources import 
base from sushy.resources import common +from sushy.resources import mappings as res_maps from sushy.resources import settings from sushy import utils @@ -67,6 +68,11 @@ class Bios(base.ResourceBase): To update use :py:func:`~set_attribute` or :py:func:`~set_attributes` """ + maintenance_window = settings.MaintenanceWindowField( + '@Redfish.MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" + _actions = ActionsField('Actions') _apply_time_settings = settings.SettingsApplyTimeField() @@ -93,7 +99,9 @@ class Bios(base.ResourceBase): def apply_time_settings(self): return self._pending_settings_resource._apply_time_settings - def set_attribute(self, key, value): + def set_attribute(self, key, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): """Update an attribute Attribute update is not immediate but requires system restart. @@ -102,10 +110,25 @@ class Bios(base.ResourceBase): :param key: Attribute name :param value: Attribute value + :param apply_time: When to update the attribute. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
""" - self.set_attributes({key: value}) + self.set_attributes({key: value}, apply_time, maint_window_start_time, + maint_window_duration) - def set_attributes(self, value): + def set_attributes(self, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): """Update many attributes at once Attribute update is not immediate but requires system restart. @@ -113,9 +136,41 @@ class Bios(base.ResourceBase): property :param value: Key-value pairs for attribute name and value + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
""" + payload = {'Attributes': value} + if (not apply_time + and (maint_window_start_time or maint_window_duration)): + raise ValueError('"apply_time" missing when passing maintenance ' + 'window settings') + if apply_time: + prop = '@Redfish.SettingsApplyTime' + payload[prop] = { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + } + if maint_window_start_time and not maint_window_duration: + raise ValueError('"maint_window_duration" missing') + if not maint_window_start_time and maint_window_duration: + raise ValueError('"maint_window_start_time" missing') + if maint_window_start_time and maint_window_duration: + payload[prop]['MaintenanceWindowStartTime'] =\ + maint_window_start_time.isoformat() + payload[prop]['MaintenanceWindowDurationInSeconds'] =\ + maint_window_duration self._settings.commit(self._conn, - {'Attributes': value}) + payload) utils.cache_clear(self, force_refresh=False, only_these=['_pending_settings_resource']) @@ -183,3 +238,11 @@ class Bios(base.ResourceBase): containing status and any messages """ return self._settings.get_status(self._registries) + + @property + def supported_apply_times(self): + """List of supported BIOS update apply times + + :returns: List of supported update apply time names + """ + return self._settings._supported_apply_times diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json index 8063b3d..f7a3b7a 100644 --- a/sushy/tests/unit/json_samples/bios.json +++ b/sushy/tests/unit/json_samples/bios.json @@ -32,7 +32,16 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44.30-05:00" + "Time": "2016-03-07T14:44.30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] + }, + "@Redfish.MaintenanceWindow": { + "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", + "MaintenanceWindowDurationInSeconds": 600, + 
"MaintenanceWindowStartTime": "2020-09-01T04:30:00-06:00" }, "Actions": { "#Bios.ResetBios": { diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 75e2d92..69de0c6 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -17,6 +17,10 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44:30-05:00" + "Time": "2016-03-07T14:44:30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] } } diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index a452e71..da26f73 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -12,6 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. +import datetime from http import client as http_client import json from unittest import mock @@ -19,6 +20,7 @@ from unittest import mock from dateutil import parser from sushy import exceptions +from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry from sushy.resources import settings from sushy.resources.system import bios @@ -64,6 +66,14 @@ class BiosTestCase(base.TestCase): self.assertEqual('', self.sys_bios.attributes['AdminPhone']) self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + self.sys_bios.supported_apply_times) + self.assertEqual(600, self.sys_bios.maintenance_window + .maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2020-09-01T04:30:00-06:00'), + self.sys_bios.maintenance_window + .maintenance_window_start_time) # testing here if settings subfield parsed by checking ETag, # other settings 
fields tested in specific settings test self.assertEqual('9234ac83b9700123cc32', @@ -79,6 +89,30 @@ class BiosTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', data={'Attributes': {'ProcTurboMode': 'Disabled'}}) + def test_set_attribute_apply_time(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_ON_RESET) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'OnReset'}}) + + def test_set_attribute_apply_time_with_maintenance_window(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_MAINT_RESET, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'InMaintenanceWindowOnReset', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + def test_set_attribute_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes @@ -103,6 +137,58 @@ class BiosTestCase(base.TestCase): data={'Attributes': {'ProcTurboMode': 'Disabled', 'UsbControl': 'UsbDisabled'}}) + def test_set_attributes_apply_time(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_IMMEDIATE) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'Immediate'}}) + + def 
test_set_attributes_apply_time_with_maintenance_window(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'AtMaintenanceWindowStart', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + + def test_set_attributes_apply_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + maint_window_start_time=datetime.datetime.now(), + maint_window_duration=600) + + def test_set_attributes_maint_window_start_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + maint_window_duration=600) + + def test_set_attributes_maint_window_duration_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime.now()) + def test_set_attributes_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 6343c78..90474df 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -63,6 +63,9 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0]._related_properties[0]) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', 
instance._settings_object_idref.resource_uri) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + instance._supported_apply_times) self.assertIsNone(instance.maintenance_window) mock_LOG.warning.assert_called_once() mock_LOG.reset_mock() -- GitLab From 37990a9bcf9cae5574d600afd40e8ba43a58bde4 Mon Sep 17 00:00:00 2001 From: "Richard G. Pioso" Date: Fri, 4 Sep 2020 18:57:45 +0000 Subject: [PATCH 243/303] Revert "Add BIOS update apply time and maintenance window" This reverts commit 46b5d38d8417d64e45ed0b949647da10e64f695f. Mike Raineri's contribution as a co-author was accidentally overlooked. Change-Id: I8d93659e5f9e3db68b8e2523de98de1022200031 --- ...time-support-to-bios-315ebad429dcab3d.yaml | 12 --- sushy/resources/constants.py | 7 -- sushy/resources/mappings.py | 11 --- sushy/resources/settings.py | 5 -- sushy/resources/system/bios.py | 71 +-------------- sushy/tests/unit/json_samples/bios.json | 11 +-- sushy/tests/unit/json_samples/settings.json | 6 +- .../tests/unit/resources/system/test_bios.py | 86 ------------------- sushy/tests/unit/resources/test_settings.py | 3 - 9 files changed, 6 insertions(+), 206 deletions(-) delete mode 100644 releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml diff --git a/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml deleted file mode 100644 index d49dfef..0000000 --- a/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -features: - - | - Adds support for ``bios`` resource to allow specifying BIOS attribute - update time and maintenance window when updating BIOS attributes using - ``set_attribute`` or ``set_attributes``. - - The update is backward compatible and when new parameters not passed, they - default to ``None``. 
- - Also adds ``maintenance_window`` for ``bios`` resource to expose default - maintenance window set by the system if any. diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index 72c0892..f0572b5 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -143,10 +143,3 @@ DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' - -# Apply time constants - -APPLY_TIME_IMMEDIATE = 'immediate' -APPLY_TIME_ON_RESET = 'on reset' -APPLY_TIME_MAINT_START = 'at maintenance window start' -APPLY_TIME_MAINT_RESET = 'in maintenance window on reset' diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index c81fed2..afe9fe7 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -115,14 +115,3 @@ DUR_NAME_FORMAT_VALUE_MAP = { 'UUID': res_cons.DURABLE_NAME_FORMAT_UUID, 'iQN': res_cons.DURABLE_NAME_FORMAT_iQN, } - -APPLY_TIME_VALUE_MAP = { - 'Immediate': res_cons.APPLY_TIME_IMMEDIATE, - 'OnReset': res_cons.APPLY_TIME_ON_RESET, - 'AtMaintenanceWindowStart': - res_cons.APPLY_TIME_MAINT_START, - 'InMaintenanceWindowOnReset': - res_cons.APPLY_TIME_MAINT_RESET, -} - -APPLY_TIME_VALUE_MAP_REV = utils.revert_dictionary(APPLY_TIME_VALUE_MAP) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 37b63f3..e084545 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -154,11 +154,6 @@ class SettingsField(base.CompositeField): to change this resource """ - _supported_apply_times = base.MappedListField( - 'SupportedApplyTimes', - res_maps.APPLY_TIME_VALUE_MAP) - """List of supported apply times""" - @property def maintenance_window(self): """MaintenanceWindow field diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 666cd0b..b1ae6fa 100644 --- a/sushy/resources/system/bios.py +++ 
b/sushy/resources/system/bios.py @@ -19,7 +19,6 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common -from sushy.resources import mappings as res_maps from sushy.resources import settings from sushy import utils @@ -68,11 +67,6 @@ class Bios(base.ResourceBase): To update use :py:func:`~set_attribute` or :py:func:`~set_attributes` """ - maintenance_window = settings.MaintenanceWindowField( - '@Redfish.MaintenanceWindow') - """Indicates if a given resource has a maintenance window assignment - for applying settings or operations""" - _actions = ActionsField('Actions') _apply_time_settings = settings.SettingsApplyTimeField() @@ -99,9 +93,7 @@ class Bios(base.ResourceBase): def apply_time_settings(self): return self._pending_settings_resource._apply_time_settings - def set_attribute(self, key, value, apply_time=None, - maint_window_start_time=None, - maint_window_duration=None): + def set_attribute(self, key, value): """Update an attribute Attribute update is not immediate but requires system restart. @@ -110,25 +102,10 @@ class Bios(base.ResourceBase): :param key: Attribute name :param value: Attribute value - :param apply_time: When to update the attribute. Optional. - APPLY_TIME_IMMEDIATE - Immediate, - APPLY_TIME_ON_RESET - On reset, - APPLY_TIME_MAINT_START - During specified maintenance time - APPLY_TIME_MAINT_RESET - On reset during specified maintenance time - :param maint_window_start_time: The start time of a maintenance window, - datetime. Required when updating during maintenance window and - default maintenance window not set by the system. - :param maint_window_duration: Duration of maintenance time since - maintenance window start time in seconds. Required when updating - during maintenance window and default maintenance window not - set by the system. 
""" - self.set_attributes({key: value}, apply_time, maint_window_start_time, - maint_window_duration) + self.set_attributes({key: value}) - def set_attributes(self, value, apply_time=None, - maint_window_start_time=None, - maint_window_duration=None): + def set_attributes(self, value): """Update many attributes at once Attribute update is not immediate but requires system restart. @@ -136,41 +113,9 @@ class Bios(base.ResourceBase): property :param value: Key-value pairs for attribute name and value - :param apply_time: When to update the attributes. Optional. - APPLY_TIME_IMMEDIATE - Immediate, - APPLY_TIME_ON_RESET - On reset, - APPLY_TIME_MAINT_START - During specified maintenance time - APPLY_TIME_MAINT_RESET - On reset during specified maintenance time - :param maint_window_start_time: The start time of a maintenance window, - datetime. Required when updating during maintenance window and - default maintenance window not set by the system. - :param maint_window_duration: Duration of maintenance time since - maintenance window start time in seconds. Required when updating - during maintenance window and default maintenance window not - set by the system. 
""" - payload = {'Attributes': value} - if (not apply_time - and (maint_window_start_time or maint_window_duration)): - raise ValueError('"apply_time" missing when passing maintenance ' - 'window settings') - if apply_time: - prop = '@Redfish.SettingsApplyTime' - payload[prop] = { - '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', - 'ApplyTime': res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] - } - if maint_window_start_time and not maint_window_duration: - raise ValueError('"maint_window_duration" missing') - if not maint_window_start_time and maint_window_duration: - raise ValueError('"maint_window_start_time" missing') - if maint_window_start_time and maint_window_duration: - payload[prop]['MaintenanceWindowStartTime'] =\ - maint_window_start_time.isoformat() - payload[prop]['MaintenanceWindowDurationInSeconds'] =\ - maint_window_duration self._settings.commit(self._conn, - payload) + {'Attributes': value}) utils.cache_clear(self, force_refresh=False, only_these=['_pending_settings_resource']) @@ -238,11 +183,3 @@ class Bios(base.ResourceBase): containing status and any messages """ return self._settings.get_status(self._registries) - - @property - def supported_apply_times(self): - """List of supported BIOS update apply times - - :returns: List of supported update apply time names - """ - return self._settings._supported_apply_times diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json index f7a3b7a..8063b3d 100644 --- a/sushy/tests/unit/json_samples/bios.json +++ b/sushy/tests/unit/json_samples/bios.json @@ -32,16 +32,7 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44.30-05:00", - "SupportedApplyTimes": [ - "OnReset", - "InMaintenanceWindowOnReset" - ] - }, - "@Redfish.MaintenanceWindow": { - "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", - "MaintenanceWindowDurationInSeconds": 600, - "MaintenanceWindowStartTime": "2020-09-01T04:30:00-06:00" + 
"Time": "2016-03-07T14:44.30-05:00" }, "Actions": { "#Bios.ResetBios": { diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 69de0c6..75e2d92 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -17,10 +17,6 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44:30-05:00", - "SupportedApplyTimes": [ - "OnReset", - "InMaintenanceWindowOnReset" - ] + "Time": "2016-03-07T14:44:30-05:00" } } diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index da26f73..a452e71 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -12,7 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. -import datetime from http import client as http_client import json from unittest import mock @@ -20,7 +19,6 @@ from unittest import mock from dateutil import parser from sushy import exceptions -from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry from sushy.resources import settings from sushy.resources.system import bios @@ -66,14 +64,6 @@ class BiosTestCase(base.TestCase): self.assertEqual('', self.sys_bios.attributes['AdminPhone']) self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) - self.assertEqual([res_cons.APPLY_TIME_ON_RESET, - res_cons.APPLY_TIME_MAINT_RESET], - self.sys_bios.supported_apply_times) - self.assertEqual(600, self.sys_bios.maintenance_window - .maintenance_window_duration_in_seconds) - self.assertEqual(parser.parse('2020-09-01T04:30:00-06:00'), - self.sys_bios.maintenance_window - .maintenance_window_start_time) # testing here if settings subfield parsed by checking ETag, # other settings fields tested in 
specific settings test self.assertEqual('9234ac83b9700123cc32', @@ -89,30 +79,6 @@ class BiosTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', data={'Attributes': {'ProcTurboMode': 'Disabled'}}) - def test_set_attribute_apply_time(self): - self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', - res_cons.APPLY_TIME_ON_RESET) - self.sys_bios._conn.patch.assert_called_once_with( - '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'ProcTurboMode': 'Disabled'}, - '@Redfish.SettingsApplyTime': { - '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', - 'ApplyTime': 'OnReset'}}) - - def test_set_attribute_apply_time_with_maintenance_window(self): - self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', - res_cons.APPLY_TIME_MAINT_RESET, - datetime.datetime(2020, 9, 1, 4, 30, 0), - 600) - self.sys_bios._conn.patch.assert_called_once_with( - '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'ProcTurboMode': 'Disabled'}, - '@Redfish.SettingsApplyTime': { - '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', - 'ApplyTime': 'InMaintenanceWindowOnReset', - 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', - 'MaintenanceWindowDurationInSeconds': 600}}) - def test_set_attribute_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes @@ -137,58 +103,6 @@ class BiosTestCase(base.TestCase): data={'Attributes': {'ProcTurboMode': 'Disabled', 'UsbControl': 'UsbDisabled'}}) - def test_set_attributes_apply_time(self): - self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - res_cons.APPLY_TIME_IMMEDIATE) - self.sys_bios._conn.patch.assert_called_once_with( - '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - '@Redfish.SettingsApplyTime': { - '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', - 'ApplyTime': 'Immediate'}}) - - def 
test_set_attributes_apply_time_with_maintenance_window(self): - self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - res_cons.APPLY_TIME_MAINT_START, - datetime.datetime(2020, 9, 1, 4, 30, 0), - 600) - self.sys_bios._conn.patch.assert_called_once_with( - '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', - data={'Attributes': {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - '@Redfish.SettingsApplyTime': { - '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', - 'ApplyTime': 'AtMaintenanceWindowStart', - 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', - 'MaintenanceWindowDurationInSeconds': 600}}) - - def test_set_attributes_apply_time_missing(self): - self.assertRaises(ValueError, - self.sys_bios.set_attributes, - {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - maint_window_start_time=datetime.datetime.now(), - maint_window_duration=600) - - def test_set_attributes_maint_window_start_time_missing(self): - self.assertRaises(ValueError, - self.sys_bios.set_attributes, - {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - res_cons.APPLY_TIME_MAINT_START, - maint_window_duration=600) - - def test_set_attributes_maint_window_duration_missing(self): - self.assertRaises(ValueError, - self.sys_bios.set_attributes, - {'ProcTurboMode': 'Disabled', - 'UsbControl': 'UsbDisabled'}, - res_cons.APPLY_TIME_MAINT_START, - datetime.datetime.now()) - def test_set_attributes_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 90474df..6343c78 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -63,9 +63,6 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0]._related_properties[0]) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', 
instance._settings_object_idref.resource_uri) - self.assertEqual([res_cons.APPLY_TIME_ON_RESET, - res_cons.APPLY_TIME_MAINT_RESET], - instance._supported_apply_times) self.assertIsNone(instance.maintenance_window) mock_LOG.warning.assert_called_once() mock_LOG.reset_mock() -- GitLab From 2d2015a885877a051047475235e76739f70fd811 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Tue, 1 Sep 2020 06:31:12 -0400 Subject: [PATCH 244/303] Add BIOS update apply time and maintenance window Add option to speficy BIOS attribute apply time and maintenance window. Expose maintenance_window to see if any default maintenance window set. Co-Authored-By: Eric Barrera Co-Authored-By: Richard G. Pioso Co-Authored-By: Mike Raineri Story: 2008100 Task: 40804 Change-Id: I96be596e1a4935f13d63019fa5f8b0ea38c3cfe7 --- ...time-support-to-bios-315ebad429dcab3d.yaml | 12 +++ sushy/resources/constants.py | 7 ++ sushy/resources/mappings.py | 11 +++ sushy/resources/settings.py | 5 ++ sushy/resources/system/bios.py | 71 ++++++++++++++- sushy/tests/unit/json_samples/bios.json | 11 ++- sushy/tests/unit/json_samples/settings.json | 6 +- .../tests/unit/resources/system/test_bios.py | 86 +++++++++++++++++++ sushy/tests/unit/resources/test_settings.py | 3 + 9 files changed, 206 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml diff --git a/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml new file mode 100644 index 0000000..d49dfef --- /dev/null +++ b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml @@ -0,0 +1,12 @@ +--- +features: + - | + Adds support for ``bios`` resource to allow specifying BIOS attribute + update time and maintenance window when updating BIOS attributes using + ``set_attribute`` or ``set_attributes``. 
+ + The update is backward compatible and when new parameters not passed, they + default to ``None``. + + Also adds ``maintenance_window`` for ``bios`` resource to expose default + maintenance window set by the system if any. diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index f0572b5..72c0892 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -143,3 +143,10 @@ DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' + +# Apply time constants + +APPLY_TIME_IMMEDIATE = 'immediate' +APPLY_TIME_ON_RESET = 'on reset' +APPLY_TIME_MAINT_START = 'at maintenance window start' +APPLY_TIME_MAINT_RESET = 'in maintenance window on reset' diff --git a/sushy/resources/mappings.py b/sushy/resources/mappings.py index afe9fe7..c81fed2 100644 --- a/sushy/resources/mappings.py +++ b/sushy/resources/mappings.py @@ -115,3 +115,14 @@ DUR_NAME_FORMAT_VALUE_MAP = { 'UUID': res_cons.DURABLE_NAME_FORMAT_UUID, 'iQN': res_cons.DURABLE_NAME_FORMAT_iQN, } + +APPLY_TIME_VALUE_MAP = { + 'Immediate': res_cons.APPLY_TIME_IMMEDIATE, + 'OnReset': res_cons.APPLY_TIME_ON_RESET, + 'AtMaintenanceWindowStart': + res_cons.APPLY_TIME_MAINT_START, + 'InMaintenanceWindowOnReset': + res_cons.APPLY_TIME_MAINT_RESET, +} + +APPLY_TIME_VALUE_MAP_REV = utils.revert_dictionary(APPLY_TIME_VALUE_MAP) diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index e084545..37b63f3 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -154,6 +154,11 @@ class SettingsField(base.CompositeField): to change this resource """ + _supported_apply_times = base.MappedListField( + 'SupportedApplyTimes', + res_maps.APPLY_TIME_VALUE_MAP) + """List of supported apply times""" + @property def maintenance_window(self): """MaintenanceWindow field diff --git a/sushy/resources/system/bios.py 
b/sushy/resources/system/bios.py index b1ae6fa..666cd0b 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -19,6 +19,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources import mappings as res_maps from sushy.resources import settings from sushy import utils @@ -67,6 +68,11 @@ class Bios(base.ResourceBase): To update use :py:func:`~set_attribute` or :py:func:`~set_attributes` """ + maintenance_window = settings.MaintenanceWindowField( + '@Redfish.MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" + _actions = ActionsField('Actions') _apply_time_settings = settings.SettingsApplyTimeField() @@ -93,7 +99,9 @@ class Bios(base.ResourceBase): def apply_time_settings(self): return self._pending_settings_resource._apply_time_settings - def set_attribute(self, key, value): + def set_attribute(self, key, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): """Update an attribute Attribute update is not immediate but requires system restart. @@ -102,10 +110,25 @@ class Bios(base.ResourceBase): :param key: Attribute name :param value: Attribute value + :param apply_time: When to update the attribute. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
""" - self.set_attributes({key: value}) + self.set_attributes({key: value}, apply_time, maint_window_start_time, + maint_window_duration) - def set_attributes(self, value): + def set_attributes(self, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): """Update many attributes at once Attribute update is not immediate but requires system restart. @@ -113,9 +136,41 @@ class Bios(base.ResourceBase): property :param value: Key-value pairs for attribute name and value + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
""" + payload = {'Attributes': value} + if (not apply_time + and (maint_window_start_time or maint_window_duration)): + raise ValueError('"apply_time" missing when passing maintenance ' + 'window settings') + if apply_time: + prop = '@Redfish.SettingsApplyTime' + payload[prop] = { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + } + if maint_window_start_time and not maint_window_duration: + raise ValueError('"maint_window_duration" missing') + if not maint_window_start_time and maint_window_duration: + raise ValueError('"maint_window_start_time" missing') + if maint_window_start_time and maint_window_duration: + payload[prop]['MaintenanceWindowStartTime'] =\ + maint_window_start_time.isoformat() + payload[prop]['MaintenanceWindowDurationInSeconds'] =\ + maint_window_duration self._settings.commit(self._conn, - {'Attributes': value}) + payload) utils.cache_clear(self, force_refresh=False, only_these=['_pending_settings_resource']) @@ -183,3 +238,11 @@ class Bios(base.ResourceBase): containing status and any messages """ return self._settings.get_status(self._registries) + + @property + def supported_apply_times(self): + """List of supported BIOS update apply times + + :returns: List of supported update apply time names + """ + return self._settings._supported_apply_times diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json index 8063b3d..f7a3b7a 100644 --- a/sushy/tests/unit/json_samples/bios.json +++ b/sushy/tests/unit/json_samples/bios.json @@ -32,7 +32,16 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44.30-05:00" + "Time": "2016-03-07T14:44.30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] + }, + "@Redfish.MaintenanceWindow": { + "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", + "MaintenanceWindowDurationInSeconds": 600, + 
"MaintenanceWindowStartTime": "2020-09-01T04:30:00-06:00" }, "Actions": { "#Bios.ResetBios": { diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json index 75e2d92..69de0c6 100644 --- a/sushy/tests/unit/json_samples/settings.json +++ b/sushy/tests/unit/json_samples/settings.json @@ -17,6 +17,10 @@ "SettingsObject": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" }, - "Time": "2016-03-07T14:44:30-05:00" + "Time": "2016-03-07T14:44:30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] } } diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index a452e71..da26f73 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -12,6 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. +import datetime from http import client as http_client import json from unittest import mock @@ -19,6 +20,7 @@ from unittest import mock from dateutil import parser from sushy import exceptions +from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry from sushy.resources import settings from sushy.resources.system import bios @@ -64,6 +66,14 @@ class BiosTestCase(base.TestCase): self.assertEqual('', self.sys_bios.attributes['AdminPhone']) self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + self.sys_bios.supported_apply_times) + self.assertEqual(600, self.sys_bios.maintenance_window + .maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2020-09-01T04:30:00-06:00'), + self.sys_bios.maintenance_window + .maintenance_window_start_time) # testing here if settings subfield parsed by checking ETag, # other settings 
fields tested in specific settings test self.assertEqual('9234ac83b9700123cc32', @@ -79,6 +89,30 @@ class BiosTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', data={'Attributes': {'ProcTurboMode': 'Disabled'}}) + def test_set_attribute_apply_time(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_ON_RESET) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'OnReset'}}) + + def test_set_attribute_apply_time_with_maintenance_window(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_MAINT_RESET, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'InMaintenanceWindowOnReset', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + def test_set_attribute_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes @@ -103,6 +137,58 @@ class BiosTestCase(base.TestCase): data={'Attributes': {'ProcTurboMode': 'Disabled', 'UsbControl': 'UsbDisabled'}}) + def test_set_attributes_apply_time(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_IMMEDIATE) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'Immediate'}}) + + def 
test_set_attributes_apply_time_with_maintenance_window(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'AtMaintenanceWindowStart', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + + def test_set_attributes_apply_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + maint_window_start_time=datetime.datetime.now(), + maint_window_duration=600) + + def test_set_attributes_maint_window_start_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + maint_window_duration=600) + + def test_set_attributes_maint_window_duration_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime.now()) + def test_set_attributes_on_refresh(self): self.conn.get.reset_mock() # make it to instantiate pending attributes diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py index 6343c78..90474df 100644 --- a/sushy/tests/unit/resources/test_settings.py +++ b/sushy/tests/unit/resources/test_settings.py @@ -63,6 +63,9 @@ class SettingsFieldTestCase(base.TestCase): instance.messages[0]._related_properties[0]) self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', 
instance._settings_object_idref.resource_uri) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + instance._supported_apply_times) self.assertIsNone(instance.maintenance_window) mock_LOG.warning.assert_called_once() mock_LOG.reset_mock() -- GitLab From 3f052a34982628d5dd2ffe5c4da742e5aa65dc75 Mon Sep 17 00:00:00 2001 From: Christopher Dearborn Date: Wed, 12 Aug 2020 14:53:47 -0400 Subject: [PATCH 245/303] Allow monitoring progress of a firmware update This patch updates the UpdateService.simple_update method so that it returns a TaskMonitor object. This allows the firmware update to be monitored for completion. New TaskMonitor and Task objects are added according to the Redfish Spec. Co-Authored-By: Aija Jaunteva Change-Id: I485d56a9804af723ddb55f8bc26f28a5ebefccc7 --- ...itor_firmware_update-664b0c6c1a0307cf.yaml | 5 + sushy/exceptions.py | 4 + sushy/main.py | 2 +- sushy/resources/base.py | 128 +++++++++++--- sushy/resources/constants.py | 15 ++ sushy/resources/settings.py | 31 +--- sushy/resources/taskservice/mappings.py | 33 ++++ sushy/resources/taskservice/task.py | 89 ++++++++++ sushy/resources/taskservice/taskmonitor.py | 143 +++++++++++++++ .../resources/updateservice/updateservice.py | 34 +++- sushy/tests/unit/json_samples/task.json | 26 +++ .../registry/test_message_registry.py | 6 +- .../registry/test_message_registry_file.py | 13 +- .../unit/resources/taskservice/__init__.py | 0 .../unit/resources/taskservice/test_task.py | 74 ++++++++ .../resources/taskservice/test_taskmonitor.py | 167 ++++++++++++++++++ .../updateservice/test_updateservice.py | 49 ++++- sushy/tests/unit/test_utils.py | 6 + sushy/utils.py | 12 ++ 19 files changed, 771 insertions(+), 66 deletions(-) create mode 100644 releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml create mode 100644 sushy/resources/taskservice/mappings.py create mode 100644 sushy/resources/taskservice/task.py create mode 100644 
sushy/resources/taskservice/taskmonitor.py create mode 100644 sushy/tests/unit/json_samples/task.json create mode 100644 sushy/tests/unit/resources/taskservice/__init__.py create mode 100644 sushy/tests/unit/resources/taskservice/test_task.py create mode 100644 sushy/tests/unit/resources/taskservice/test_taskmonitor.py diff --git a/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml b/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml new file mode 100644 index 0000000..0af0eaf --- /dev/null +++ b/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Added the ability to monitor the progress of a firmware update by changing + the ``simple_update`` operation to return a task monitor object. diff --git a/sushy/exceptions.py b/sushy/exceptions.py index e01a8af..50d25e4 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -72,6 +72,10 @@ class OEMExtensionNotFoundError(SushyError): message = 'No %(resource)s OEM extension found by name "%(name)s".' 
+class MissingHeaderError(SushyError): + message = 'Response to %(target_uri)s did not contain a %(header)s header' + + class HTTPError(SushyError): """Basic exception for HTTP errors""" diff --git a/sushy/main.py b/sushy/main.py index 75777d7..08bb2b2 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -180,6 +180,7 @@ class Sushy(base.ResourceBase): if auth is None: auth = sushy_auth.SessionOrBasicAuth(username=username, password=password) + self._auth = auth super(Sushy, self).__init__( connector or sushy_connector.Connector(base_url, verify=verify), @@ -187,7 +188,6 @@ class Sushy(base.ResourceBase): self._public_connector = public_connector or requests self._language = language self._base_url = base_url - self._auth = auth self._auth.set_context(self, self._conn) self._auth.authenticate() diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 025a571..574ec52 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -24,6 +24,7 @@ import zipfile import pkg_resources from sushy import exceptions +from sushy.resources import mappings as res_maps from sushy.resources import oem from sushy import utils @@ -315,7 +316,61 @@ class MappedListField(Field): return instances -class AbstractJsonReader(object, metaclass=abc.ABCMeta): +class MessageListField(ListField): + """List of messages with details of settings update status""" + + message_id = Field('MessageId', required=True) + """The key for this message which can be used + to look up the message in a message registry + """ + + message = Field('Message') + """Human readable message, if provided""" + + severity = MappedField('Severity', + res_maps.SEVERITY_VALUE_MAP) + """Severity of the error""" + + resolution = Field('Resolution') + """Used to provide suggestions on how to resolve + the situation that caused the error + """ + + _related_properties = Field('RelatedProperties') + """List of properties described by the message""" + + message_args = Field('MessageArgs') + """List of message 
substitution arguments for the message + referenced by `message_id` from the message registry + """ + + +class FieldData(object): + """Contains data to be used when constructing Fields""" + + def __init__(self, status_code, headers, json_doc): + """Initializes the FieldData instance""" + self._status_code = status_code + self._headers = headers + self._json_doc = json_doc + + @property + def status_code(self): + """The status code""" + return self._status_code + + @property + def headers(self): + """The headers""" + return self._headers + + @property + def json_doc(self): + """The parsed JSON body""" + return self._json_doc + + +class AbstractDataReader(object, metaclass=abc.ABCMeta): def set_connection(self, connector, path): """Sets mandatory connection parameters @@ -327,28 +382,33 @@ class AbstractJsonReader(object, metaclass=abc.ABCMeta): self._path = path @abc.abstractmethod - def get_json(self): + def get_data(self): """Based on data source get data and parse to JSON""" -class JsonDataReader(AbstractJsonReader): +class JsonDataReader(AbstractDataReader): """Gets the data from HTTP response given by path""" - def get_json(self): + def get_data(self): """Gets JSON file from URI directly""" data = self._conn.get(path=self._path) - return data.json() if data.content else {} + json_data = data.json() if data.content else {} + + return FieldData(data.status_code, data.headers, json_data) -class JsonPublicFileReader(AbstractJsonReader): + +class JsonPublicFileReader(AbstractDataReader): """Loads the data from the Internet""" - def get_json(self): + def get_data(self): """Get JSON file from full URI""" - return self._conn.get(self._path).json() + data = self._conn.get(self._path) + + return FieldData(data.status_code, data.headers, data.json()) -class JsonArchiveReader(AbstractJsonReader): +class JsonArchiveReader(AbstractDataReader): """Gets the data from JSON file in archive""" def __init__(self, archive_file): @@ -358,15 +418,16 @@ class 
JsonArchiveReader(AbstractJsonReader): """ self._archive_file = archive_file - def get_json(self): + def get_data(self): """Gets JSON file from archive. Currently supporting ZIP only""" data = self._conn.get(path=self._path) if data.headers.get('content-type') == 'application/zip': try: archive = zipfile.ZipFile(io.BytesIO(data.content)) - return json.loads(archive.read(self._archive_file) - .decode(encoding='utf-8')) + json_data = json.loads(archive.read(self._archive_file) + .decode(encoding='utf-8')) + return FieldData(data.status_code, data.headers, json_data) except (zipfile.BadZipfile, ValueError) as e: raise exceptions.ArchiveParsingError( path=self._path, error=e) @@ -374,8 +435,10 @@ class JsonArchiveReader(AbstractJsonReader): LOG.error('Support for %(type)s not implemented', {'type': data.headers['content-type']}) + return FieldData(data.status_code, data.headers, None) -class JsonPackagedFileReader(AbstractJsonReader): + +class JsonPackagedFileReader(AbstractDataReader): """Gets the data from packaged file given by path""" def __init__(self, resource_package_name): @@ -385,12 +448,28 @@ class JsonPackagedFileReader(AbstractJsonReader): """ self._resource_package_name = resource_package_name - def get_json(self): + def get_data(self): """Gets JSON file from packaged file denoted by path""" with pkg_resources.resource_stream(self._resource_package_name, self._path) as resource: - return json.loads(resource.read().decode(encoding='utf-8')) + json_data = json.loads(resource.read().decode(encoding='utf-8')) + return FieldData(None, None, json_data) + + +def get_reader(connector, path, reader=None): + """Create and configure the reader. + + :param connector: A Connector instance + :param path: sub-URI path to the resource. + :param reader: Reader to use to fetch JSON data. 
+ :returns: the reader + """ + if reader is None: + reader = JsonDataReader() + reader.set_connection(connector, path) + + return reader class ResourceBase(object, metaclass=abc.ABCMeta): @@ -406,7 +485,8 @@ class ResourceBase(object, metaclass=abc.ABCMeta): path='', redfish_version=None, registries=None, - reader=None): + reader=None, + json_doc=None): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first @@ -418,6 +498,7 @@ class ResourceBase(object, metaclass=abc.ABCMeta): :param registries: Dict of Redfish Message Registry objects to be used in any resource that needs registries to parse messages :param reader: Reader to use to fetch JSON data. + :param json_doc: parsed JSON document in form of Python types. """ self._conn = connector self._path = path @@ -429,12 +510,9 @@ class ResourceBase(object, metaclass=abc.ABCMeta): # attribute values are fetched. self._is_stale = True - if reader is None: - reader = JsonDataReader() - reader.set_connection(connector, path) - self._reader = reader + self._reader = get_reader(connector, path, reader) - self.refresh() + self.refresh(json_doc=json_doc) def _parse_attributes(self, json_doc): """Parse the attributes of a resource. @@ -447,7 +525,7 @@ class ResourceBase(object, metaclass=abc.ABCMeta): # Hide the Field object behind the real value setattr(self, attr, field._load(json_doc, self)) - def refresh(self, force=True): + def refresh(self, force=True, json_doc=None): """Refresh the resource Freshly retrieves/fetches the resource attributes and invokes @@ -460,6 +538,7 @@ class ResourceBase(object, metaclass=abc.ABCMeta): :param force: if set to False, will only refresh if the resource is marked as stale, otherwise neither it nor its subresources will be refreshed. + :param json_doc: parsed JSON document in form of Python types. 
:raises: ResourceNotFoundError :raises: ConnectionError :raises: HTTPError @@ -469,7 +548,10 @@ class ResourceBase(object, metaclass=abc.ABCMeta): if not self._is_stale and not force: return - self._json = self._reader.get_json() + if json_doc: + self._json = json_doc + else: + self._json = self._reader.get_data().json_doc LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py index 72c0892..497d218 100644 --- a/sushy/resources/constants.py +++ b/sushy/resources/constants.py @@ -32,6 +32,21 @@ STATE_DEFERRING = 'deferring' STATE_QUIESCED = 'quiesced' STATE_UPDATING = 'updating' +# Task state related constants +TASK_STATE_NEW = 'new' +TASK_STATE_STARTING = 'starting' +TASK_STATE_RUNNING = 'running' +TASK_STATE_SUSPENDED = 'suspended' +TASK_STATE_INTERRUPTED = 'interrupted' +TASK_STATE_PENDING = 'pending' +TASK_STATE_STOPPING = 'stopping' +TASK_STATE_COMPLETED = 'completed' +TASK_STATE_KILLED = 'killed' +TASK_STATE_EXCEPTION = 'exception' +TASK_STATE_SERVICE = 'service' +TASK_STATE_CANCELLING = 'cancelling' +TASK_STATE_CANCELLED = 'cancelled' + # Message Registry message parameter type related constants. 
PARAMTYPE_STRING = 'string' PARAMTYPE_NUMBER = 'number' diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py index 37b63f3..3b18f6e 100644 --- a/sushy/resources/settings.py +++ b/sushy/resources/settings.py @@ -62,35 +62,6 @@ class SettingsUpdate(object): LOG = logging.getLogger(__name__) -class MessageListField(base.ListField): - """List of messages with details of settings update status""" - - message_id = base.Field('MessageId', required=True) - """The key for this message which can be used - to look up the message in a message registry - """ - - message = base.Field('Message') - """Human readable message, if provided""" - - severity = base.MappedField('Severity', - res_maps.SEVERITY_VALUE_MAP) - """Severity of the error""" - - resolution = base.Field('Resolution') - """Used to provide suggestions on how to resolve - the situation that caused the error - """ - - _related_properties = base.Field('RelatedProperties') - """List of properties described by the message""" - - message_args = base.Field('MessageArgs') - """List of message substitution arguments for the message - referenced by `message_id` from the message registry - """ - - class MaintenanceWindowField(base.CompositeField): maintenance_window_duration_in_seconds = base.Field( @@ -172,7 +143,7 @@ class SettingsField(base.CompositeField): '(e.g. System resource)') return None - messages = MessageListField("Messages") + messages = base.MessageListField("Messages") """Represents the results of the last time the values of the Settings resource were applied to the server""" diff --git a/sushy/resources/taskservice/mappings.py b/sushy/resources/taskservice/mappings.py new file mode 100644 index 0000000..20ee281 --- /dev/null +++ b/sushy/resources/taskservice/mappings.py @@ -0,0 +1,33 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from sushy.resources import constants as res_cons + + +TASK_STATE_VALUE_MAP = { + 'New': res_cons.TASK_STATE_NEW, + 'Starting': res_cons.TASK_STATE_STARTING, + 'Running': res_cons.TASK_STATE_RUNNING, + 'Suspended': res_cons.TASK_STATE_SUSPENDED, + 'Interrupted': res_cons.TASK_STATE_INTERRUPTED, + 'Pending': res_cons.TASK_STATE_PENDING, + 'Stopping': res_cons.TASK_STATE_STOPPING, + 'Completed': res_cons.TASK_STATE_COMPLETED, + 'Killed': res_cons.TASK_STATE_KILLED, + 'Exception': res_cons.TASK_STATE_EXCEPTION, + 'Service': res_cons.TASK_STATE_SERVICE, + 'Cancelling': res_cons.TASK_STATE_CANCELLING, + 'Cancelled': res_cons.TASK_STATE_CANCELLED +} diff --git a/sushy/resources/taskservice/task.py b/sushy/resources/taskservice/task.py new file mode 100644 index 0000000..3867f2b --- /dev/null +++ b/sushy/resources/taskservice/task.py @@ -0,0 +1,89 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Task.v1_4_3.json + +from http import client as http_client +import logging + +from sushy.resources import base +from sushy.resources import mappings as res_maps +from sushy.resources.registry import message_registry +from sushy.resources.taskservice import mappings as task_maps +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class Task(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The Task identity""" + + name = base.Field('Name', required=True) + """The Task name""" + + description = base.Field('Description') + """The Task description""" + + task_monitor = base.Field('TaskMonitor') + """An opaque URL that the client can use to monitor an asynchronous + operation""" + + start_time = base.Field('StartTime') + """Start time of the Task""" + + end_time = base.Field('EndTime') + """End time of the Task""" + + percent_complete = base.Field('PercentComplete', adapter=utils.int_or_none) + """Percentage complete of the Task""" + + task_state = base.MappedField('TaskState', task_maps.TASK_STATE_VALUE_MAP) + """The Task state""" + + task_status = base.MappedField('TaskStatus', res_maps.HEALTH_VALUE_MAP) + """The Task status""" + + messages = base.MessageListField("Messages") + """List of :class:`.MessageListField` with messages from the Task""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None, json_doc=None): + """A class representing a Task + + :param connector: A Connector instance + :param identity: The identity of the task + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + :param field_data: the data to use populating the fields + """ + super(Task, self).__init__( + connector, identity, redfish_version, registries, + json_doc=json_doc) + + @property + def is_processing(self): + """Indicates if the Task is processing""" + return self.status_code == http_client.ACCEPTED + + def parse_messages(self): + """Parses the messages""" + for m in self.messages: + message_registry.parse_message(self._registries, m) diff --git a/sushy/resources/taskservice/taskmonitor.py b/sushy/resources/taskservice/taskmonitor.py new file mode 100644 index 0000000..3cad221 --- /dev/null +++ b/sushy/resources/taskservice/taskmonitor.py @@ -0,0 +1,143 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Task.v1_4_3.json + +from http import client as http_client + +from sushy.resources import base +from sushy.resources.taskservice import task +from sushy import utils + + +class TaskMonitor(object): + def __init__(self, + connector, + task_monitor, + redfish_version=None, + registries=None, + field_data=None): + """A class representing a task monitor + + :param connector: A Connector instance + :param task_monitor: The task monitor + :param retry_after: The amount of time to wait in seconds before + calling is_processing. + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. + """ + self._connector = connector + self._task_monitor = task_monitor + self._redfish_version = redfish_version + self._registries = registries + self._field_data = field_data + self._reader = base.get_reader(connector, task_monitor) + self._task = None + + if self._field_data: + # If a body was returned, assume it's a Task on a 202 status code + content_length = int(self._field_data.headers.get( + 'Content-Length')) + if (self._field_data.status_code == http_client.ACCEPTED + and content_length > 0): + self._task = task.Task(self._connector, self._task_monitor, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._field_data.json_doc) + else: + self.refresh() + + def refresh(self): + """Refresh the Task + + Freshly retrieves/fetches the Task. 
+ :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + self._field_data = self._reader.get_data() + + if self._field_data.status_code == http_client.ACCEPTED: + # A Task should have been returned, but wasn't + if int(self._field_data.headers.get('Content-Length')) == 0: + self._task = None + return + + # Assume that the body contains a Task since we got a 202 + if not self._task: + self._task = task.Task(self._connector, self._task_monitor, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._field_data.json_doc) + else: + self._task.refresh(json_doc=self._field_data.json_doc) + else: + self._task = None + + @property + def task_monitor(self): + """The TaskMonitor URI + + :returns: The TaskMonitor URI. + """ + return self._task_monitor + + @property + def is_processing(self): + """Indicates if the task is still processing + + :returns: A boolean indicating if the task is still processing. + """ + return self._field_data.status_code == http_client.ACCEPTED + + @property + def retry_after(self): + """The amount of time to sleep before retrying + + :returns: The amount of time in seconds to wait before calling + is_processing. + """ + return utils.int_or_none(self._field_data.headers.get('Retry-After')) + + @property + def cancellable(self): + """The amount of time to sleep before retrying + + :returns: A Boolean indicating if the Task is cancellable. + """ + allow = self._field_data.headers.get('Allow') + + cancellable = False + if allow and allow.upper() == 'DELETE': + cancellable = True + + return cancellable + + @property + def task(self): + """The executing task + + :returns: The Task being executed. 
+ """ + + return self._task + + def get_task(self): + return task.Task(self._connector, self._task_monitor, + redfish_version=self._redfish_version, + registries=self._registries) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index b13c30c..37a83c1 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -19,6 +19,7 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources.taskservice import taskmonitor from sushy.resources.updateservice import constants as up_cons from sushy.resources.updateservice import mappings as up_maps from sushy.resources.updateservice import softwareinventory @@ -115,7 +116,10 @@ class UpdateService(base.ResourceBase): def simple_update(self, image_uri, targets=None, transfer_protocol=up_cons.UPDATE_PROTOCOL_HTTP): - """Simple Update is used to update software components""" + """Simple Update is used to update software components. + + :returns: A task monitor. 
+ """ valid_transfer_protocols = self.get_allowed_transfer_protocols() if transfer_protocol in valid_transfer_protocols: @@ -144,7 +148,33 @@ class UpdateService(base.ResourceBase): data = {'ImageURI': image_uri, 'TransferProtocol': transfer_protocol} if targets: data['Targets'] = targets - self._conn.post(target_uri, data=data) + rsp = self._conn.post(target_uri, data=data) + + json_data = rsp.json() if rsp.content else {} + field_data = base.FieldData(rsp.status_code, rsp.headers, json_data) + + header = 'Location' + task_monitor = rsp.headers.get(header) + if not task_monitor: + raise exceptions.MissingHeaderError(target_uri=target_uri, + header=header) + + return taskmonitor.TaskMonitor(self._conn, + task_monitor, + redfish_version=self.redfish_version, + registries=self.registries, + field_data=field_data) + + def get_task_monitor(self, task_monitor): + """Used to retrieve a TaskMonitor. + + :returns: A task monitor. + """ + return taskmonitor.TaskMonitor( + self._conn, + task_monitor, + redfish_version=self.redfish_version, + registries=self.registries) @property @utils.cache_it diff --git a/sushy/tests/unit/json_samples/task.json b/sushy/tests/unit/json_samples/task.json new file mode 100644 index 0000000..306a2d7 --- /dev/null +++ b/sushy/tests/unit/json_samples/task.json @@ -0,0 +1,26 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"545", + "Name":"Task 545", + "Description": "Task description", + "TaskMonitor":"/taskmon/545", + "TaskState":"Completed", + "StartTime":"2012-03-07T14:44+06:00", + "EndTime":"2012-03-07T14:45+06:00", + "TaskStatus":"OK", + "PercentComplete": 100, + "Messages":[ + { + "MessageId":"Base.1.0.PropertyNotWriteable", + "RelatedProperties":[ + "SKU" + ], + "Message":"Property %1 is read only.", + "MessageArgs":[ + "SKU" + ], + "Severity":"Warning" + } + ], + "@odata.id":"/redfish/v1/TaskService/Tasks/545" + } \ No newline at end of file diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py 
b/sushy/tests/unit/resources/registry/test_message_registry.py index 7c3953a..534e930 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -17,9 +17,9 @@ import json from unittest import mock +from sushy.resources import base as sushy_base from sushy.resources import constants as res_cons from sushy.resources.registry import message_registry -from sushy.resources import settings from sushy.tests.unit import base @@ -99,7 +99,7 @@ class MessageRegistryTestCase(base.TestCase): conn, '/redfish/v1/Registries/Test', redfish_version='1.0.2') registries = {'Test.1.0.0': registry} - message_field = settings.MessageListField('Foo') + message_field = sushy_base.MessageListField('Foo') message_field.message_id = 'Test.1.0.0.TooBig' message_field.message_args = ['arg1', 10] message_field.severity = None @@ -120,7 +120,7 @@ class MessageRegistryTestCase(base.TestCase): conn, '/redfish/v1/Registries/Test', redfish_version='1.0.2') registries = {'Test.1.0.0': registry} - message_field = settings.MessageListField('Foo') + message_field = sushy_base.MessageListField('Foo') message_field.message_id = 'Test.1.0.0.Success' message_field.severity = res_cons.SEVERITY_OK message_field.resolution = 'Do nothing' diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index b36070f..1afa407 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -16,6 +16,7 @@ import json from unittest import mock +from sushy.resources.base import FieldData from sushy.resources.registry import message_registry_file from sushy.tests.unit import base @@ -59,9 +60,9 @@ class MessageRegistryFileTestCase(base.TestCase): def test_get_message_registry_uri(self, mock_reader, mock_msg_reg): mock_reader_rv = mock.Mock() 
mock_reader.return_value = mock_reader_rv - mock_reader_rv.get_json.return_value = { + mock_reader_rv.get_data.return_value = FieldData(200, {}, { "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", - } + }) mock_msg_reg_rv = mock.Mock() mock_msg_reg.return_value = mock_msg_reg_rv @@ -78,9 +79,9 @@ class MessageRegistryFileTestCase(base.TestCase): mock_reader_rv = mock.Mock() mock_reader.return_value = mock_reader_rv mock_msg_reg_rv = mock.Mock() - mock_reader_rv.get_json.return_value = { + mock_reader_rv.get_data.return_value = FieldData(200, {}, { "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", - } + }) mock_msg_reg.return_value = mock_msg_reg_rv self.reg_file.location[0].uri = None @@ -100,9 +101,9 @@ class MessageRegistryFileTestCase(base.TestCase): mock_reader_rv = mock.Mock() mock_reader.return_value = mock_reader_rv mock_msg_reg_rv = mock.Mock() - mock_reader_rv.get_json.return_value = { + mock_reader_rv.get_data.return_value = FieldData(200, {}, { "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", - } + }) mock_msg_reg.return_value = mock_msg_reg_rv self.reg_file.location[0].uri = None self.reg_file.location[0].archive_uri = None diff --git a/sushy/tests/unit/resources/taskservice/__init__.py b/sushy/tests/unit/resources/taskservice/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sushy/tests/unit/resources/taskservice/test_task.py b/sushy/tests/unit/resources/taskservice/test_task.py new file mode 100644 index 0000000..0b9830d --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_task.py @@ -0,0 +1,74 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from http import client as http_client +import json +from unittest import mock + +from sushy.resources import constants as res_cons +from sushy.resources.taskservice import task +from sushy.tests.unit import base + + +class TaskTestCase(base.TestCase): + + def setUp(self): + super(TaskTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + message_registry = mock.Mock() + message = mock.Mock() + message.message = "Property %1 is read only." 
+ message.number_of_args = 1 + message_registry.messages = {"PropertyNotWriteable": message} + + self.task = task.Task( + self.conn, '/redfish/v1/TaskService/Tasks/545', + redfish_version='1.4.3', + registries={'Base.1.0': message_registry}) + + def test__parse_attributes(self): + self.task._parse_attributes(self.json_doc) + self.assertEqual('545', self.task.identity) + self.assertEqual('Task 545', self.task.name) + self.assertEqual('Task description', self.task.description) + self.assertEqual('/taskmon/545', self.task.task_monitor) + self.assertEqual('2012-03-07T14:44+06:00', self.task.start_time) + self.assertEqual('2012-03-07T14:45+06:00', self.task.end_time) + self.assertEqual(100, self.task.percent_complete) + self.assertEqual(res_cons.TASK_STATE_COMPLETED, self.task.task_state) + self.assertEqual(res_cons.HEALTH_OK, self.task.task_status) + self.assertEqual(1, len(self.task.messages)) + self.assertEqual('Base.1.0.PropertyNotWriteable', + self.task.messages[0].message_id) + self.assertEqual('Property %1 is read only.', + self.task.messages[0].message) + self.assertEqual(res_cons.SEVERITY_WARNING, + self.task.messages[0].severity) + + def test_is_processing_true(self): + self.task.status_code = http_client.ACCEPTED + self.assertTrue(self.task.is_processing) + + def test_is_processing_false(self): + self.task.status_code = http_client.OK + self.assertFalse(self.task.is_processing) + + def test_parse_messages(self): + self.task.parse_messages() + self.assertEqual('Property SKU is read only.', + self.task.messages[0].message) diff --git a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py new file mode 100644 index 0000000..399f809 --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py @@ -0,0 +1,167 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from http import client as http_client +import json +from unittest import mock + +from sushy.resources import base as resource_base +from sushy.resources.taskservice import task +from sushy.resources.taskservice import taskmonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + self.task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=self.field_data + ) + + def test_init_accepted_no_content(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 0, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + None) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data) + + self.assertIsNone(task_monitor.task) + + def test_init_accepted_content(self): + self.assertIsNotNone(self.task_monitor._task) + + def test_init_no_field_data(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + task_monitor = taskmonitor.TaskMonitor(self.conn, '/Task/545') + + 
self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(task_monitor._task) + + def test_refresh_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 0} + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNone(self.task_monitor._task) + + def test_refresh_content_no_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + self.task_monitor._task = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor._task) + + def test_refresh_content_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor._task) + + def test_refresh_done(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 200 + + self.task_monitor.refresh() + + self.conn.get.assert_called_once_with(path='/Task/545') + self.assertIsNone(self.task_monitor._task) + + def test_task_monitor(self): + self.assertEqual('/Task/545', self.task_monitor.task_monitor) + + def test_is_processing(self): + self.assertTrue(self.task_monitor.is_processing) + + def test_retry_after(self): + self.assertEqual(20, self.task_monitor.retry_after) + + def test_cancellable(self): + self.assertTrue(self.task_monitor.cancellable) + + def test_not_cancellable_no_header(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', 
+ 'Retry-After': 20}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data + ) + + self.assertFalse(task_monitor.cancellable) + + def test_not_cancellable(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'GET'}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data + ) + + self.assertFalse(task_monitor.cancellable) + + def test_task(self): + tm_task = self.task_monitor.task + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index 64b8f26..daf8a02 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -16,6 +16,7 @@ from unittest import mock from sushy import exceptions from sushy.resources import constants as res_cons +from sushy.resources.taskservice import taskmonitor from sushy.resources.updateservice import constants as ups_cons from sushy.resources.updateservice import softwareinventory from sushy.resources.updateservice import updateservice @@ -57,10 +58,23 @@ class UpdateServiceTestCase(base.TestCase): self.upd_serv._parse_attributes, self.json_doc) def test_simple_update(self): - self.upd_serv.simple_update( + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted + + tm = self.upd_serv.simple_update( image_uri='local.server/update.exe', targets=['/redfish/v1/UpdateService/FirmwareInventory/BMC'], 
transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor) + self.upd_serv._conn.post.assert_called_once_with( '/redfish/v1/UpdateService/Actions/SimpleUpdate', data={ @@ -68,7 +82,32 @@ class UpdateServiceTestCase(base.TestCase): 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], 'TransferProtocol': 'HTTPS'}) + def test_simple_update_missing_location(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Allow': 'GET'} + self.conn.post.return_value = task_submitted + + self.assertRaises( + exceptions.MissingHeaderError, + self.upd_serv.simple_update, + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='HTTPS') + def test_simple_update_backward_compatible_protocol(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted + self.upd_serv.simple_update( image_uri='local.server/update.exe', targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', @@ -81,6 +120,14 @@ class UpdateServiceTestCase(base.TestCase): 'TransferProtocol': 'HTTPS'}) def test_simple_update_without_target(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted self.upd_serv.simple_update( 
image_uri='local.server/update.exe', transfer_protocol='HTTPS') diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index f8cd84e..83aafb9 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -44,6 +44,12 @@ class UtilsTestCase(base.TestCase): self.assertEqual(1, utils.int_or_none('1')) self.assertIsNone(None, utils.int_or_none(None)) + def test_bool_or_none_none(self): + self.assertIsNone(utils.bool_or_none(None)) + + def test_bool_or_none_bool(self): + self.assertEqual(True, utils.bool_or_none(True)) + def setUp(self): super(UtilsTestCase, self).setUp() self.conn = mock.MagicMock() diff --git a/sushy/utils.py b/sushy/utils.py index 7835c98..958e9a1 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -66,6 +66,18 @@ def int_or_none(x): return int(x) +def bool_or_none(x): + """Given a value x this method returns either a bool or None + + :param x: The value to transform and return + :returns: Either None or x cast to a bool + + """ + if x is None: + return None + return bool(x) + + def get_sub_resource_path_by(resource, subresource_name, is_collection=False): """Helper function to find the subresource path -- GitLab From 0f5a3233c2addbed971fbc9f0b8ce986ec6fee41 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Mon, 7 Sep 2020 12:26:39 +0000 Subject: [PATCH 246/303] Update .gitreview for stable/victoria Change-Id: Ic4edfa1b93012d59e972ba31f296bc21c38b0e1e --- .gitreview | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitreview b/.gitreview index 9ca57a4..e0bba0f 100644 --- a/.gitreview +++ b/.gitreview @@ -2,3 +2,4 @@ host=review.opendev.org port=29418 project=openstack/sushy.git +defaultbranch=stable/victoria -- GitLab From 3849988679dc21bd69134d2d2facfb4adaabbe34 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Mon, 7 Sep 2020 12:26:40 +0000 Subject: [PATCH 247/303] Update TOX_CONSTRAINTS_FILE for stable/victoria Update the URL to the upper-constraints file to point to the redirect rule 
on releases.openstack.org so that anyone working on this branch will
switch to the correct upper-constraints list automatically when the
requirements repository branches.

Until the requirements repository has a stable/victoria branch, tests
will continue to use the upper-constraints list on master.

Change-Id: I95557aa9bc80bb5e367bc7178495b80f6296f53a
---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index ccc2371..b4fb75c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,7 +11,7 @@ setenv = VIRTUAL_ENV={envdir}
    PYTHONWARNINGS=default::DeprecationWarning
 deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
+    -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/victoria}
     -r{toxinidir}/test-requirements.txt
     -r{toxinidir}/requirements.txt
 commands = stestr run --slowest {posargs}
--
GitLab


From dce8f135ecfdcb40476d2d67b8346e63a880ef7f Mon Sep 17 00:00:00 2001
From: OpenStack Release Bot
Date: Mon, 7 Sep 2020 12:26:41 +0000
Subject: [PATCH 248/303] Update master for stable/victoria

Add file to the reno documentation build to show release notes for
stable/victoria.

Use pbr instruction to increment the minor version number
automatically so that master versions are higher than the versions on
stable/victoria.
Change-Id: I03c7fa276e5eb621aa6f2113c9f74af10e9753a5 Sem-Ver: feature --- releasenotes/source/index.rst | 1 + releasenotes/source/victoria.rst | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 releasenotes/source/victoria.rst diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index f690a05..d0a327e 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,6 +6,7 @@ :maxdepth: 1 unreleased + victoria ussuri train stein diff --git a/releasenotes/source/victoria.rst b/releasenotes/source/victoria.rst new file mode 100644 index 0000000..4efc7b6 --- /dev/null +++ b/releasenotes/source/victoria.rst @@ -0,0 +1,6 @@ +============================= +Victoria Series Release Notes +============================= + +.. release-notes:: + :branch: stable/victoria -- GitLab From 5c1fdc162279afdf8d5786846124ac1bf3f844fb Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Mon, 7 Sep 2020 12:26:43 +0000 Subject: [PATCH 249/303] Add Python3 wallaby unit tests This is an automatically generated patch to ensure unit testing is in place for all the of the tested runtimes for wallaby. See also the PTI in governance [1]. 
[1]: https://governance.openstack.org/tc/reference/project-testing-interface.html Change-Id: Ifcadb664016ea47619ae19c1d1fd0b25efd40827 --- zuul.d/project.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index edc2d25..110f52f 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -3,7 +3,7 @@ - check-requirements - openstack-cover-jobs - openstack-lower-constraints-jobs - - openstack-python3-victoria-jobs + - openstack-python3-wallaby-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: -- GitLab From 4317d2580ea8f9e161a2fffea708a23d03622bbe Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Tue, 8 Sep 2020 08:41:03 +0200 Subject: [PATCH 250/303] Now packaging 3.4.0 --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index a3ec43c..d1ab019 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.4.0-1) experimental; urgency=medium + + * New upstream release. + + -- Thomas Goirand Tue, 08 Sep 2020 08:40:48 +0200 + python-sushy (3.2.0-2) unstable; urgency=medium * Uploading to unstable. -- GitLab From 7fb241306027335807a511a26fb89b9b4c622536 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Tue, 8 Sep 2020 08:43:28 +0200 Subject: [PATCH 251/303] Fixed (build-)depends for this release. --- debian/changelog | 1 + debian/control | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index d1ab019..196135e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,7 @@ python-sushy (3.4.0-1) experimental; urgency=medium * New upstream release. + * Fixed (build-)depends for this release. 
 -- Thomas Goirand  Tue, 08 Sep 2020 08:40:48 +0200

diff --git a/debian/control b/debian/control
index de8efd0..f9ad60b 100644
--- a/debian/control
+++ b/debian/control
@@ -21,8 +21,7 @@ Build-Depends-Indep:
     python3-requests,
     python3-stestr,
     python3-stevedore,
-    python3-testscenarios,
-    python3-testtools,
+    python3-sphinxcontrib.apidoc,
     subunit,
 Standards-Version: 4.4.1
 Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy
--
GitLab


From 96a1f1976f06e60c9a0c32a50d3ae8efc6cb21fa Mon Sep 17 00:00:00 2001
From: Ghanshyam Mann
Date: Fri, 11 Sep 2020 08:39:20 -0500
Subject: [PATCH 252/303] Fix l-c job

Bumping a few lower constraints to pass it on Focal node.

Change-Id: Ib605b1a1ed4f94a1f0975de1bf387c8ab621d029
---
 lower-constraints.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lower-constraints.txt b/lower-constraints.txt
index 00ff338..5510af7 100644
--- a/lower-constraints.txt
+++ b/lower-constraints.txt
@@ -17,7 +17,7 @@ iso8601==0.1.11
 Jinja2==2.10
 keystoneauth1==3.4.0
 linecache2==1.0.0
-MarkupSafe==1.0
+MarkupSafe==1.1.1
 mccabe==0.2.1
 mox3==0.20.0
 openstackdocstheme==2.2.1
@@ -35,7 +35,7 @@ python-dateutil==2.7.0
 python-mimeparse==1.6.0
 python-subunit==1.0.0
 pytz==2013.6
-PyYAML==3.12
+PyYAML==3.13
 reno==3.1.0
 requests==2.14.2
 requestsexceptions==1.2.0
--
GitLab


From 7ec9021c3a37282e24ed537607919b50186b1725 Mon Sep 17 00:00:00 2001
From: Ghanshyam Mann
Date: Fri, 11 Sep 2020 08:39:20 -0500
Subject: [PATCH 253/303] Fix l-c job

Bumping a few lower constraints to pass it on Focal node.
Change-Id: Ib605b1a1ed4f94a1f0975de1bf387c8ab621d029 (cherry picked from commit 96a1f1976f06e60c9a0c32a50d3ae8efc6cb21fa) --- lower-constraints.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 00ff338..5510af7 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -17,7 +17,7 @@ iso8601==0.1.11 Jinja2==2.10 keystoneauth1==3.4.0 linecache2==1.0.0 -MarkupSafe==1.0 +MarkupSafe==1.1.1 mccabe==0.2.1 mox3==0.20.0 openstackdocstheme==2.2.1 @@ -35,7 +35,7 @@ python-dateutil==2.7.0 python-mimeparse==1.6.0 python-subunit==1.0.0 pytz==2013.6 -PyYAML==3.12 +PyYAML==3.13 reno==3.1.0 requests==2.14.2 requestsexceptions==1.2.0 -- GitLab From 9562db9c527ebe604bc5fa64c147df2060617495 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Mon, 14 Sep 2020 15:59:31 -0500 Subject: [PATCH 254/303] Make Actions field in Volume resource optional In the Volume resource implementation, the Actions field was incorrectly specified as required. It is normal for Redfish services to not have this field in the Volume resource. This would cause an error when reading the Volume resource on any such Redfish service. Updates the Actions field in the Volume resource to be optional. 
Change-Id: I1399d2996d8651f6166a4909f6bdf43994ebee23 Story: 2003514 Task: 40783 --- .../fix-volume-actions-not-required-730fd637dd2587ce.yaml | 5 +++++ sushy/resources/system/storage/volume.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml diff --git a/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml b/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml new file mode 100644 index 0000000..077a09b --- /dev/null +++ b/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + The ``Actions`` field in the ``Volume`` resource was incorrectly specified + as being required. This fix makes the field optional. diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 0b78fd2..78e9b21 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -60,7 +60,7 @@ class Volume(base.ResourceBase): """Indicates if a client is allowed to request for a specific apply time of a create, delete, or action operation of a given resource""" - _actions = ActionsField('Actions', required=True) + _actions = ActionsField('Actions') def _get_initialize_action_element(self): initialize_action = self._actions.initialize -- GitLab From bad68e1e60ba2ea561e36394d1176e2315e3a907 Mon Sep 17 00:00:00 2001 From: Christopher Dearborn Date: Sun, 30 Aug 2020 17:36:24 -0400 Subject: [PATCH 255/303] Make message parsing more resilient Firmware from some vendors only includes the MessageKey in the MessageID. In this case, fall back to finding the MessageKey first in the Messages MessageRegistryFile and then in the BaseMessages MessageRegistryFile. If the message can't be found, then set the parsed message to 'unknown' instead of throwing an exception. 
If there are not enough arguments to populate the message, then populate the missing arguments with 'unknown'. Change-Id: I045f82167a415c92d6b67532222aef89ab1d68ef --- ...e-parsing-resilience-534da532515a15da.yaml | 10 ++ sushy/main.py | 17 ++- sushy/resources/registry/message_registry.py | 42 +++++- .../registry/test_message_registry.py | 127 ++++++++++++++++++ sushy/tests/unit/test_main.py | 8 +- 5 files changed, 192 insertions(+), 12 deletions(-) create mode 100644 releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml diff --git a/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml new file mode 100644 index 0000000..408938e --- /dev/null +++ b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Makes message parsing more resilient by handling the case where the message + ID only contains a message key and no registry name. In this case, fall + back to the ``Messages`` message registry file and then to the + ``BaseMessages`` message registry file. If the message ID cannot be found, + then set the message to ``unknown``. When parsing messages, if not enough + arguments were supplied, then fill in the remaining arguments with + ``unknown``. diff --git a/sushy/main.py b/sushy/main.py index 08bb2b2..2625133 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -459,9 +459,10 @@ class Sushy(base.ResourceBase): Fetches all registries if any provided by Redfish service and combines together with packaged standard registries. - :returns: dict of combined message registries where key is - Registry_name.Major_version.Minor_version and value is registry - itself. + :returns: dict of combined message registries keyed by both the + registry name (Registry_name.Major_version.Minor_version) and the + message registry file identity, with the value being the actual + registry itself. 
""" standard = self._get_standard_message_registry_collection() @@ -473,9 +474,13 @@ class Sushy(base.ResourceBase): if registry_col: provided = registry_col.get_members() - registries.update({r.registry: r.get_message_registry( - self._language, - self._public_connector) for r in provided}) + for r in provided: + message_registry = r.get_message_registry( + self._language, + self._public_connector) + registries[r.registry] = message_registry + if r.identity not in registries: + registries[r.identity] = message_registry return registries diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index fc90022..86f458c 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -13,11 +13,14 @@ # This is referred from Redfish standard schema. # https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json +import logging from sushy.resources import base from sushy.resources import constants as res_cons from sushy.resources import mappings as res_maps +LOG = logging.getLogger(__name__) + class MessageDictionaryField(base.DictionaryField): @@ -92,13 +95,44 @@ def parse_message(message_registries, message_field): :returns: parsed settings.MessageListField with missing attributes filled """ - registry, msg_key = message_field.message_id.rsplit('.', 1) - - reg_msg = message_registries[registry].messages[msg_key] + reg_msg = None + if '.' in message_field.message_id: + registry, msg_key = message_field.message_id.rsplit('.', 1) + + if (registry in message_registries and msg_key + in message_registries[registry].messages): + reg_msg = message_registries[registry].messages[msg_key] + else: + # Some firmware only reports the MessageKey and no RegistryName. 
+ # Fall back to the MessageRegistryFile with Id of Messages next, and + # BaseMessages as a last resort + registry = 'unknown' + msg_key = message_field.message_id + + mrf_ids = ['Messages', 'BaseMessages'] + for mrf_id in mrf_ids: + if (mrf_id in message_registries and msg_key in + message_registries[mrf_id].messages): + reg_msg = message_registries[mrf_id].messages[msg_key] + break + + if not reg_msg: + LOG.warning( + 'Unable to find message for registry %(registry), ' + 'message ID %(msg_key)', { + 'registry': registry, + 'msg_key': msg_key}) + if message_field.message is None: + message_field.message = 'unknown' + return message_field msg = reg_msg.message for i in range(1, reg_msg.number_of_args + 1): - msg = msg.replace('%%%i' % i, str(message_field.message_args[i - 1])) + if i <= len(message_field.message_args): + msg = msg.replace('%%%i' % i, + str(message_field.message_args[i - 1])) + else: + msg = msg.replace('%%%i' % i, 'unknown') message_field.message = msg if not message_field.severity: diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 534e930..5b4360a 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -131,3 +131,130 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) self.assertEqual('Everything done successfully.', parsed_msg.message) + + def test_parse_message_bad_registry(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadRegistry.TooBig' + + parsed_msg = 
message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field, parsed_msg) + + def test_parse_message_bad_message_key_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = 'Message' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'Message') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_bad_message_key_no_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_fallback_to_messages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Messages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = 
res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_to_basemessages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'BaseMessages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_failed(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadMessageKey' + message_field.message = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_not_enough_args(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + 
conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.TooBig' + message_field.message_args = ['arg1'] + message_field.severity = None + message_field.resolution = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Try again', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_WARNING, parsed_msg.severity) + self.assertEqual('Property\'s arg1 value cannot be greater than ' + 'unknown.', parsed_msg.message) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 4a75e79..096f7db 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -325,13 +325,15 @@ class MainTestCase(base.TestCase): mock_msg_reg2.registry_prefix = 'RegistryB' mock_msg_reg2.registry_version = '1.0.0' mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' mock_msg_reg_file.registry = 'RegistryB.1.0' mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 mock_col.return_value.get_members.return_value = [mock_msg_reg_file] registries = self.root.registries self.assertEqual({'RegistryA.2.0': mock_msg_reg1, - 'RegistryB.1.0': mock_msg_reg2}, registries) + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2}, registries) @mock.patch('sushy.Sushy._get_standard_message_registry_collection', autospec=True) @@ -347,6 +349,7 @@ class MainTestCase(base.TestCase): mock_msg_reg2.registry_prefix = 'RegistryB' mock_msg_reg2.registry_version = '1.0.0' mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' mock_msg_reg_file.registry = 'RegistryB.1.0' mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 mock_col.return_value.get_members.return_value = [mock_msg_reg_file] @@ -363,7 +366,8 @@ 
class MainTestCase(base.TestCase): expected = { 'RegistryA.2.0': mock_msg_reg1, - 'RegistryB.1.0': mock_msg_reg2 + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2 } self.assertEqual(expected, registries) -- GitLab From aab6812af4ba790d79a837c578aa761bced33d81 Mon Sep 17 00:00:00 2001 From: Christopher Dearborn Date: Sun, 30 Aug 2020 17:36:24 -0400 Subject: [PATCH 256/303] Make message parsing more resilient Firmware from some vendors only includes the MessageKey in the MessageID. In this case, fall back to finding the MessageKey first in the Messages MessageRegistryFile and then in the BaseMessages MessageRegistryFile. If the message can't be found, then set the parsed message to 'unknown' instead of throwing an exception. If there are not enough arguments to populate the message, then populate the missing arguments with 'unknown'. Change-Id: I045f82167a415c92d6b67532222aef89ab1d68ef (cherry picked from commit bad68e1e60ba2ea561e36394d1176e2315e3a907) --- ...e-parsing-resilience-534da532515a15da.yaml | 10 ++ sushy/main.py | 17 ++- sushy/resources/registry/message_registry.py | 42 +++++- .../registry/test_message_registry.py | 127 ++++++++++++++++++ sushy/tests/unit/test_main.py | 8 +- 5 files changed, 192 insertions(+), 12 deletions(-) create mode 100644 releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml diff --git a/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml new file mode 100644 index 0000000..408938e --- /dev/null +++ b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Makes message parsing more resilient by handling the case where the message + ID only contains a message key and no registry name. In this case, fall + back to the ``Messages`` message registry file and then to the + ``BaseMessages`` message registry file. 
If the message ID cannot be found, + then set the message to ``unknown``. When parsing messages, if not enough + arguments were supplied, then fill in the remaining arguments with + ``unknown``. diff --git a/sushy/main.py b/sushy/main.py index 08bb2b2..2625133 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -459,9 +459,10 @@ class Sushy(base.ResourceBase): Fetches all registries if any provided by Redfish service and combines together with packaged standard registries. - :returns: dict of combined message registries where key is - Registry_name.Major_version.Minor_version and value is registry - itself. + :returns: dict of combined message registries keyed by both the + registry name (Registry_name.Major_version.Minor_version) and the + message registry file identity, with the value being the actual + registry itself. """ standard = self._get_standard_message_registry_collection() @@ -473,9 +474,13 @@ class Sushy(base.ResourceBase): if registry_col: provided = registry_col.get_members() - registries.update({r.registry: r.get_message_registry( - self._language, - self._public_connector) for r in provided}) + for r in provided: + message_registry = r.get_message_registry( + self._language, + self._public_connector) + registries[r.registry] = message_registry + if r.identity not in registries: + registries[r.identity] = message_registry return registries diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index fc90022..86f458c 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -13,11 +13,14 @@ # This is referred from Redfish standard schema. 
# https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json +import logging from sushy.resources import base from sushy.resources import constants as res_cons from sushy.resources import mappings as res_maps +LOG = logging.getLogger(__name__) + class MessageDictionaryField(base.DictionaryField): @@ -92,13 +95,44 @@ def parse_message(message_registries, message_field): :returns: parsed settings.MessageListField with missing attributes filled """ - registry, msg_key = message_field.message_id.rsplit('.', 1) - - reg_msg = message_registries[registry].messages[msg_key] + reg_msg = None + if '.' in message_field.message_id: + registry, msg_key = message_field.message_id.rsplit('.', 1) + + if (registry in message_registries and msg_key + in message_registries[registry].messages): + reg_msg = message_registries[registry].messages[msg_key] + else: + # Some firmware only reports the MessageKey and no RegistryName. + # Fall back to the MessageRegistryFile with Id of Messages next, and + # BaseMessages as a last resort + registry = 'unknown' + msg_key = message_field.message_id + + mrf_ids = ['Messages', 'BaseMessages'] + for mrf_id in mrf_ids: + if (mrf_id in message_registries and msg_key in + message_registries[mrf_id].messages): + reg_msg = message_registries[mrf_id].messages[msg_key] + break + + if not reg_msg: + LOG.warning( + 'Unable to find message for registry %(registry), ' + 'message ID %(msg_key)', { + 'registry': registry, + 'msg_key': msg_key}) + if message_field.message is None: + message_field.message = 'unknown' + return message_field msg = reg_msg.message for i in range(1, reg_msg.number_of_args + 1): - msg = msg.replace('%%%i' % i, str(message_field.message_args[i - 1])) + if i <= len(message_field.message_args): + msg = msg.replace('%%%i' % i, + str(message_field.message_args[i - 1])) + else: + msg = msg.replace('%%%i' % i, 'unknown') message_field.message = msg if not message_field.severity: diff --git 
a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 534e930..5b4360a 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -131,3 +131,130 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) self.assertEqual('Everything done successfully.', parsed_msg.message) + + def test_parse_message_bad_registry(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadRegistry.TooBig' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field, parsed_msg) + + def test_parse_message_bad_message_key_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = 'Message' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'Message') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_bad_message_key_no_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + 
registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_fallback_to_messages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Messages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_to_basemessages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'BaseMessages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + 
self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_failed(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadMessageKey' + message_field.message = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_not_enough_args(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.TooBig' + message_field.message_args = ['arg1'] + message_field.severity = None + message_field.resolution = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Try again', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_WARNING, parsed_msg.severity) + self.assertEqual('Property\'s arg1 value cannot be greater than ' + 'unknown.', parsed_msg.message) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 4a75e79..096f7db 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -325,13 +325,15 @@ class MainTestCase(base.TestCase): mock_msg_reg2.registry_prefix = 'RegistryB' mock_msg_reg2.registry_version = '1.0.0' 
mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' mock_msg_reg_file.registry = 'RegistryB.1.0' mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 mock_col.return_value.get_members.return_value = [mock_msg_reg_file] registries = self.root.registries self.assertEqual({'RegistryA.2.0': mock_msg_reg1, - 'RegistryB.1.0': mock_msg_reg2}, registries) + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2}, registries) @mock.patch('sushy.Sushy._get_standard_message_registry_collection', autospec=True) @@ -347,6 +349,7 @@ class MainTestCase(base.TestCase): mock_msg_reg2.registry_prefix = 'RegistryB' mock_msg_reg2.registry_version = '1.0.0' mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' mock_msg_reg_file.registry = 'RegistryB.1.0' mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 mock_col.return_value.get_members.return_value = [mock_msg_reg_file] @@ -363,7 +366,8 @@ class MainTestCase(base.TestCase): expected = { 'RegistryA.2.0': mock_msg_reg1, - 'RegistryB.1.0': mock_msg_reg2 + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2 } self.assertEqual(expected, registries) -- GitLab From 69ca7e06f00f1694b7d6c73336b8b069e5b36ae6 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 24 Sep 2020 11:05:11 +0200 Subject: [PATCH 257/303] Log extended error information in addition to returning it Change-Id: I8bd805ca344d8643cccc276355c53d94e980a5fb --- sushy/exceptions.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 50d25e4..209349d 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -91,7 +91,8 @@ class HTTPError(SushyError): detail = None """Error message defined in the Redfish specification, if present.""" - message = ('HTTP %(method)s %(url)s returned code %(code)s. %(error)s') + message = ('HTTP %(method)s %(url)s returned code %(code)s. 
%(error)s ' + 'Extended information: %(ext_info)s') def __init__(self, method, url, response): self.status_code = response.status_code @@ -103,6 +104,7 @@ class HTTPError(SushyError): {'method': method, 'url': url, 'code': self.status_code}) error = 'unknown error' + ext_info = 'none' else: self.body = body.get('error', {}) self.code = self.body.get('code', 'Base.1.0.GeneralError') @@ -110,13 +112,13 @@ class HTTPError(SushyError): ext_info = self.body.get('@Message.ExtendedInfo', [{}]) index = self._get_most_severe_msg_index(ext_info) self.detail = ext_info[index].get('Message', self.detail) - error = '%s: %s extended: %s' % ( - self.code, self.detail or 'unknown error', ext_info or None) + error = '%s: %s' % (self.code, self.detail or 'unknown error.') kwargs = {'method': method, 'url': url, 'code': self.status_code, - 'error': error} + 'error': error, 'ext_info': ext_info} LOG.debug('HTTP response for %(method)s %(url)s: ' - 'status code: %(code)s, error: %(error)s', kwargs) + 'status code: %(code)s, error: %(error)s, ' + 'extended: %(ext_info)s', kwargs) super(HTTPError, self).__init__(**kwargs) @staticmethod -- GitLab From 7e368db4912194f9832c06a944432a308d30c339 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 24 Sep 2020 11:22:42 +0200 Subject: [PATCH 258/303] Trivial: add missing __init__.py to sushy/resources/taskservice Change-Id: Iebf54b6101e6e9e1fd300d4386d556ab37405688 --- sushy/resources/taskservice/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sushy/resources/taskservice/__init__.py diff --git a/sushy/resources/taskservice/__init__.py b/sushy/resources/taskservice/__init__.py new file mode 100644 index 0000000..e69de29 -- GitLab From bb704a79c0fc00da1cf3344e155dabc888d27e3b Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 24 Sep 2020 11:22:42 +0200 Subject: [PATCH 259/303] Trivial: add missing __init__.py to sushy/resources/taskservice Change-Id: Iebf54b6101e6e9e1fd300d4386d556ab37405688 (cherry 
picked from commit 7e368db4912194f9832c06a944432a308d30c339) --- sushy/resources/taskservice/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sushy/resources/taskservice/__init__.py diff --git a/sushy/resources/taskservice/__init__.py b/sushy/resources/taskservice/__init__.py new file mode 100644 index 0000000..e69de29 -- GitLab From ff9d0887d20b36e03173bc4578501cf6297c5854 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Thu, 24 Sep 2020 11:28:47 +0200 Subject: [PATCH 260/303] * Add missing sushy/resources/taskservice/__init__.py so that the package also contains the missing files in Python 3.7. --- debian/changelog | 7 +++++++ debian/rules | 2 ++ 2 files changed, 9 insertions(+) diff --git a/debian/changelog b/debian/changelog index 196135e..acfed8d 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +python-sushy (3.4.0-2) experimental; urgency=medium + + * Add missing sushy/resources/taskservice/__init__.py so that the package + also contains the missing files in Python 3.7. + + -- Thomas Goirand Thu, 24 Sep 2020 11:28:09 +0200 + python-sushy (3.4.0-1) experimental; urgency=medium * New upstream release. diff --git a/debian/rules b/debian/rules index 71309ed..5729792 100755 --- a/debian/rules +++ b/debian/rules @@ -8,11 +8,13 @@ include /usr/share/openstack-pkg-tools/pkgos.make override_dh_auto_clean: echo "Do nothing..." + rm -f sushy/resources/taskservice/__init__.py override_dh_auto_build: echo "Do nothing..." 
override_dh_auto_install: + touch $(CURDIR)/sushy/resources/taskservice/__init__.py for i in $(PYTHON3S) ; do \ python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp ; \ done -- GitLab From 5e1d63c4b00b983192d9c9fc91b4176c39cee156 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 25 Sep 2020 08:31:05 +0200 Subject: [PATCH 261/303] Now packaging 3.4.1 --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index acfed8d..07c005b 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.4.1-1) experimental; urgency=medium + + * New upstream release. + + -- Thomas Goirand Fri, 25 Sep 2020 08:30:40 +0200 + python-sushy (3.4.0-2) experimental; urgency=medium * Add missing sushy/resources/taskservice/__init__.py so that the package -- GitLab From f9b679239a4baeeaeb344c07250aad597597ed4b Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Fri, 25 Sep 2020 08:32:08 +0200 Subject: [PATCH 262/303] * Removed taskservice/__init__.py hack since after my report, this was fixed upstream. --- debian/changelog | 2 ++ debian/rules | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 07c005b..ab0c417 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,8 @@ python-sushy (3.4.1-1) experimental; urgency=medium * New upstream release. + * Removed taskservice/__init__.py hack since after my report, this was fixed + upstream. -- Thomas Goirand Fri, 25 Sep 2020 08:30:40 +0200 diff --git a/debian/rules b/debian/rules index 5729792..71309ed 100755 --- a/debian/rules +++ b/debian/rules @@ -8,13 +8,11 @@ include /usr/share/openstack-pkg-tools/pkgos.make override_dh_auto_clean: echo "Do nothing..." - rm -f sushy/resources/taskservice/__init__.py override_dh_auto_build: echo "Do nothing..." 
override_dh_auto_install: - touch $(CURDIR)/sushy/resources/taskservice/__init__.py for i in $(PYTHON3S) ; do \ python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp ; \ done -- GitLab From ee85feda89026875296c0be763e045e25641d6a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Fri, 25 Sep 2020 03:38:39 -0400 Subject: [PATCH 263/303] Fix #Bios.ResetBios for HTTP 400 Bad request error Different BMCs return different HTTP errors when encountering missing POST body for BIOS factory reset. This fixes for iDRAC 2.75.75.75 that returns HTTP 400 Bad request. Newer iDRAC versions can handle missing POST body without errors. Change-Id: I2427820e7586b559ae81c3b623a4c1871b561516 Story: 2008198 Task: 40978 --- ...ctory-reset-400-bad-request-3f4a7a2aada0835b.yaml | 7 +++++++ sushy/resources/system/bios.py | 3 ++- sushy/tests/unit/resources/system/test_bios.py | 12 ++++++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml diff --git a/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml b/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml new file mode 100644 index 0000000..b7df556 --- /dev/null +++ b/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Fixes an issue in performing action ``#Bios.ResetBios`` when no body in + POST request provided and BMC responds with HTTP 400 Bad request, for + example, Dell R630 having iDRAC 2.75.75.75. See `story 2008198 + `__ for details. 
diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py index 666cd0b..bb26596 100644 --- a/sushy/resources/system/bios.py +++ b/sushy/resources/system/bios.py @@ -212,7 +212,8 @@ class Bios(base.ResourceBase): self._conn.post(target_uri) except exceptions.HTTPError as resp: # Send empty payload, if BMC expects body - if resp.status_code == http_client.UNSUPPORTED_MEDIA_TYPE: + if resp.status_code in [http_client.UNSUPPORTED_MEDIA_TYPE, + http_client.BAD_REQUEST]: self._conn.post(target_uri, data={}) else: raise diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index da26f73..a34aa49 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -256,6 +256,18 @@ class BiosTestCase(base.TestCase): self.sys_bios.reset_bios() self.sys_bios._conn.post.assert_has_calls(post_calls) + def test_reset_bios_handle_http_error_400(self): + + target_uri = ( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.BAD_REQUEST)), '200'] + post_calls = [ + mock.call(target_uri), mock.call(target_uri, data={})] + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_has_calls(post_calls) + def test_reset_bios_handle_http_error_405(self): target_uri = ( -- GitLab From ed53860f36e5b16cce9b307186a155095812a091 Mon Sep 17 00:00:00 2001 From: Bob Fournier Date: Tue, 29 Sep 2020 11:38:30 -0400 Subject: [PATCH 264/303] Log only fields set in redfish response, not entire json Currently the entire json response received via redfish is being logged. This fills up the logs and can result in log messages being truncated, hiding useful data. Instead only log the attributes that have been set from the response. 
Change-Id: I88a426acf5b000ace0041ce43e91b376bdb7b1c7 Story: 2008177 Task: 40933 --- ...redfish-response-log-294f3f10b770e356.yaml | 5 ++ sushy/resources/base.py | 42 ++++++++++++++- .../unit/resources/chassis/test_chassis.py | 16 ++++++ .../unit/resources/chassis/test_power.py | 53 +++++++++++++++++++ .../unit/resources/chassis/test_thermal.py | 45 ++++++++++++++++ .../resources/manager/test_virtual_media.py | 14 ++++- .../registry/test_message_registry.py | 33 ++++++++++++ .../registry/test_message_registry_file.py | 9 ++++ .../tests/unit/resources/system/test_bios.py | 22 ++++++++ .../unit/resources/system/test_system.py | 33 ++++++++++++ .../updateservice/test_softwareinventory.py | 11 ++++ 11 files changed, 280 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml diff --git a/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml b/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml new file mode 100644 index 0000000..12604f0 --- /dev/null +++ b/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Reduce the logging from sushy by logging only attributes and values + set in the redfish response, not the entire json. 
diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 574ec52..8628b56 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -214,6 +214,9 @@ class ListField(Field): return instances + def __getitem__(self, key): + return getattr(self, key) + class DictionaryField(Field): """Base class for fields consisting of dictionary of several sub-fields.""" @@ -247,6 +250,9 @@ class DictionaryField(Field): return instances + def __getitem__(self, key): + return getattr(self, key) + class MappedField(Field): """Field taking real value from a mapping.""" @@ -514,17 +520,49 @@ class ResourceBase(object, metaclass=abc.ABCMeta): self.refresh(json_doc=json_doc) + def _get_value(self, val): + """Iterate through the input to get values for all attributes + + :param val: Either a value or a resource + :returns: Attribute value, which may be a dictionary + """ + if isinstance(val, dict): + subfields = {} + for key, s_val in val.items(): + subfields[key] = self._get_value(s_val) + return subfields + + elif isinstance(val, list): + return [self._get_value(val[i]) for i in range(len(val))] + + elif (isinstance(val, DictionaryField) + or isinstance(val, CompositeField) + or isinstance(val, ListField)): + subfields = {} + for attr, field in val._subfields.items(): + subfields[attr] = self._get_value(val.__getitem__(attr)) + return subfields + + return val + def _parse_attributes(self, json_doc): """Parse the attributes of a resource. Parsed JSON fields are set to `self` as declared in the class. 
:param json_doc: parsed JSON document in form of Python types + :returns: dictionary of attribute/values after parsing """ + settings = {} for attr, field in _collect_fields(self): # Hide the Field object behind the real value setattr(self, attr, field._load(json_doc, self)) + # Get the attribute/value pairs that have been parsed + settings[attr] = self._get_value(getattr(self, attr)) + + return settings + def refresh(self, force=True, json_doc=None): """Refresh the resource @@ -553,10 +591,10 @@ class ResourceBase(object, metaclass=abc.ABCMeta): else: self._json = self._reader.get_data().json_doc + attributes = self._parse_attributes(self._json) LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, - 'path': self._path, 'json': self._json}) - self._parse_attributes(self._json) + 'path': self._path, 'json': attributes}) self._do_refresh(force) # Mark it fresh diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py index b4436f5..7bdbc13 100644 --- a/sushy/tests/unit/resources/chassis/test_chassis.py +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -74,6 +74,22 @@ class ChassisTestCase(base.TestCase): self.chassis.physical_security.intrusion_sensor_re_arm ) + def test__parse_attributes_return(self): + attributes = self.chassis._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Blade', attributes.get('name')) + self.assertEqual(sushy.INDICATOR_LED_OFF, + attributes.get('indicator_led')) + self.assertEqual(sushy.POWER_STATE_ON, attributes.get('power_state')) + self.assertEqual({'intrusion_sensor': + sushy.CHASSIS_INTRUSION_SENSOR_NORMAL, + 'intrusion_sensor_number': + 123, + 'intrusion_sensor_re_arm': + 'manual re arm chassis intrusion sensor'}, + attributes.get('physical_security')) + def test_get_allowed_reset_chasis_values(self): # | GIVEN | expected = {sushy.RESET_TYPE_POWER_CYCLE, diff --git 
a/sushy/tests/unit/resources/chassis/test_power.py b/sushy/tests/unit/resources/chassis/test_power.py index 9438606..ffe5727 100644 --- a/sushy/tests/unit/resources/chassis/test_power.py +++ b/sushy/tests/unit/resources/chassis/test_power.py @@ -135,3 +135,56 @@ class PowerTestCase(base.TestCase): self.power.power_supplies[1].part_number) self.assertEqual('425-591-654', self.power.power_supplies[1].spare_part_number) + + def test__parse_attributes_return(self): + attributes = self.power._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Quad Blade Chassis Power', attributes.get('name')) + self.assertEqual([{'firmware_version': '2.20', + 'identity': '0', + 'indicator_led': None, + 'input_ranges': + [{'input_type': 'ac', + 'maximum_frequency_hz': 63, + 'maximum_voltage': 250, + 'minimum_frequency_hz': 47, + 'minimum_voltage': 185, + 'output_wattage': 1450}], + 'last_power_output_watts': 650, + 'line_input_voltage': 220, + 'line_input_voltage_type': 'ac240v', + 'manufacturer': 'Cyberdyne', + 'model': '325457-A06', + 'name': 'Power Supply 0', + 'part_number': '425-591-654', + 'power_capacity_watts': 1450, + 'power_supply_type': 'ac', + 'serial_number': '1S0000523', + 'spare_part_number': '425-591-654', + 'status': {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}}, + {'firmware_version': '2.20', + 'identity': '1', + 'indicator_led': None, + 'input_ranges': + [{'input_type': 'ac', + 'maximum_frequency_hz': 63, + 'maximum_voltage': 250, + 'minimum_frequency_hz': 47, + 'minimum_voltage': 185, + 'output_wattage': 1450}], + 'last_power_output_watts': 635, + 'line_input_voltage': 222, + 'line_input_voltage_type': 'ac240v', + 'manufacturer': 'Cyberdyne', + 'model': '325457-A06', + 'name': 'Power Supply 1', + 'part_number': '425-591-654', + 'power_capacity_watts': 1450, + 'power_supply_type': 'ac', + 'serial_number': '1S0000524', + 'spare_part_number': '425-591-654', + 'status': {'health': 'ok', 
'health_rollup': None, + 'state': 'enabled'}}], + attributes.get('power_supplies')) diff --git a/sushy/tests/unit/resources/chassis/test_thermal.py b/sushy/tests/unit/resources/chassis/test_thermal.py index dc4cdc0..830e6cc 100644 --- a/sushy/tests/unit/resources/chassis/test_thermal.py +++ b/sushy/tests/unit/resources/chassis/test_thermal.py @@ -73,3 +73,48 @@ class ThermalTestCase(base.TestCase): self.assertEqual(120, self.thermal.temperatures[0].max_reading_range_temp) self.assertEqual('CPU', self.thermal.temperatures[0].physical_context) + + def test__parse_attributes_return(self): + attributes = self.thermal._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual([{'identity': '0', + 'indicator_led': None, + 'lower_threshold_critical': None, + 'lower_threshold_fatal': 2000, + 'lower_threshold_non_critical': None, + 'manufacturer': None, + 'max_reading_range': 10000, + 'min_reading_range': 0, + 'model': None, + 'name': 'CPU Fan', + 'part_number': None, + 'physical_context': 'CPU', + 'reading': 6000, + 'reading_units': 'RPM', + 'serial_number': None, + 'status': + {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}, + 'upper_threshold_critical': None, + 'upper_threshold_fatal': None, + 'upper_threshold_non_critical': None}], + attributes.get('fans')) + self.assertEqual([{'identity': '0', + 'lower_threshold_critical': None, + 'lower_threshold_fatal': None, + 'lower_threshold_non_critical': None, + 'max_allowable_operating_value': None, + 'max_reading_range_temp': 120, + 'min_allowable_operating_value': None, + 'min_reading_range_temp': 0, + 'name': 'CPU Temp', + 'physical_context': 'CPU', + 'reading_celsius': 62, + 'sensor_number': None, + 'status': {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}, + 'upper_threshold_critical': 90, + 'upper_threshold_fatal': 95, + 'upper_threshold_non_critical': 75}], + attributes.get('temperatures')) diff --git 
a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 284a0e2..6bbb8d2 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -38,7 +38,7 @@ class VirtualMediaTestCase(base.TestCase): self.conn, '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1', redfish_version='1.0.2') - def test__parse_atrtributes(self): + def test__parse_attributes(self): self.sys_virtual_media._parse_attributes(self.json_doc) self.assertEqual('Virtual Removable Media', self.sys_virtual_media.name) @@ -55,6 +55,18 @@ class VirtualMediaTestCase(base.TestCase): self.assertEqual(True, self.sys_virtual_media.inserted) self.assertEqual(False, self.sys_virtual_media.write_protected) + def test__parse_attributes_return(self): + attributes = self.sys_virtual_media._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('https://www.dmtf.org/freeImages/Sardine.img', + attributes.get('image')) + self.assertEqual(sushy.CONNECTED_VIA_URI, + attributes.get('connected_via')) + self.assertEqual([sushy.VIRTUAL_MEDIA_FLOPPY, + sushy.VIRTUAL_MEDIA_USBSTICK], + attributes.get('media_types')) + def test_insert_media_none(self): self.sys_virtual_media._actions.insert_media = None self.assertRaisesRegex( diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py index 5b4360a..fea7e7c 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry.py +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -74,6 +74,39 @@ class MessageRegistryTestCase(base.TestCase): self.assertEqual( 'Try Later', self.registry.messages['MissingThings'].resolution) + def test__parse_attributes_return(self): + attributes = self.registry._parse_attributes(self.json_doc) + + self.assertEqual({'Failed': + {'description': 'Nothing is OK', + 
'message': 'The property %1 broke everything.', + 'number_of_args': 1, + 'param_types': ['string'], + 'resolution': 'Panic', + 'severity': 'critical'}, + 'MissingThings': + {'description': '', + 'message': + "Property's %1 value cannot be less than %2.", + 'number_of_args': 2, + 'param_types': ['string', 'number'], + 'resolution': 'Try Later', + 'severity': 'warning'}, + 'Success': + {'description': 'Everything OK', + 'message': 'Everything done successfully.', + 'number_of_args': 0, 'param_types': None, + 'resolution': 'None', 'severity': 'ok'}, + 'TooBig': + {'description': 'Value too big', + 'message': + "Property's %1 value cannot be greater than %2.", + 'number_of_args': 2, + 'param_types': ['string', 'number'], + 'resolution': 'Try again', + 'severity': 'warning'}}, + attributes.get('messages')) + def test__parse_attributes_missing_msg_desc(self): self.json_doc['Messages']['Success'].pop('Description') self.registry._parse_attributes(self.json_doc) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 1afa407..2dc858f 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -54,6 +54,15 @@ class MessageRegistryFileTestCase(base.TestCase): self.assertEqual('Test.1.0.json', self.reg_file.location[0].archive_file) + def test__parse_attributes_return(self): + attributes = self.reg_file._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Test Message Registry File', attributes.get('name')) + self.assertEqual('Test', attributes.get('identity')) + self.assertEqual(['en'], attributes.get('languages')) + self.assertEqual('Test.1.0', attributes.get('registry')) + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', autospec=True) @mock.patch('sushy.resources.base.JsonDataReader', autospec=True) diff 
--git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py index da26f73..eb8e18d 100644 --- a/sushy/tests/unit/resources/system/test_bios.py +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -83,6 +83,28 @@ class BiosTestCase(base.TestCase): self.assertEqual(settings.UPDATE_FAILURE, self.sys_bios.update_status.status) + def test__parse_attributes_return(self): + attributes = self.sys_bios._parse_attributes(self.bios_json) + + # Test that various types are returned correctly + self.assertEqual('BIOS Configuration Current Settings', + attributes.get('name')) + self.assertEqual({'AdminPhone': '', + 'BootMode': 'Uefi', + 'EmbeddedSata': 'Raid', + 'NicBoot1': 'NetworkBoot', + 'NicBoot2': 'Disabled', + 'PowerProfile': 'MaxPerf', + 'ProcCoreDisable': 0, + 'ProcHyperthreading': 'Enabled', + 'ProcTurboMode': 'Enabled', + 'UsbControl': 'UsbEnabled'}, + attributes.get('attributes')) + self.assertEqual({'maintenance_window_duration_in_seconds': 600, + 'maintenance_window_start_time': + parser.parse('2020-09-01T04:30:00-06:00')}, + attributes.get('maintenance_window')) + def test_set_attribute(self): self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') self.sys_bios._conn.patch.assert_called_once_with( diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index fbaf70e..acb21e8 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -82,6 +82,39 @@ class SystemTestCase(base.TestCase): for oem_vendor in self.sys_inst.oem_vendors: self.assertIn(oem_vendor, ('Contoso', 'Chipwise')) + def test__parse_attributes_return(self): + attributes = self.sys_inst._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Chicago-45Z-2381', attributes.get('asset_tag')) + self.assertEqual(sushy.INDICATOR_LED_OFF, + attributes.get('indicator_led')) + 
self.assertEqual({'health': res_cons.HEALTH_OK, + 'health_rollup': res_cons.HEALTH_OK, + 'state': res_cons.STATE_ENABLED}, + attributes.get('status')) + self.assertEqual({'maintenance_window_duration_in_seconds': 1, + 'maintenance_window_start_time': + parser.parse('2016-03-07T14:44:30-05:05')}, + attributes.get('maintenance_window')) + self.assertEqual({'reset': {'allowed_values': + ['On', 'ForceOff', 'GracefulShutdown', + 'GracefulRestart', 'ForceRestart', 'Nmi', + 'ForceOn', 'PushPowerButton'], + 'operation_apply_time_support': + {'_maintenance_window_resource': + {'resource_uri': + '/redfish/v1/Systems/437XR1138R2'}, + 'maintenance_window_duration_in_seconds': 600, + 'maintenance_window_start_time': + parser.parse('2017-05-03T23:12:37-05:00'), + 'supported_values': + ['Immediate', 'AtMaintenanceWindowStart']}, + 'target_uri': + '/redfish/v1/Systems/437XR1138R2/Actions/' + 'ComputerSystem.Reset'}}, + attributes.get('_actions')) + def test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') self.assertRaisesRegex( diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py index 1601579..b365fb4 100644 --- a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -55,6 +55,17 @@ class SoftwareInventoryTestCase(base.TestCase): self.assertTrue(self.soft_inv.updateable) self.assertEqual('1.45.455b66-rev4', self.soft_inv.version) + def test__parse_attributes_return(self): + attributes = self.soft_inv._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('BMC', attributes.get('identity')) + self.assertEqual({'health': res_cons.HEALTH_OK, + 'health_rollup': None, + 'state': res_cons.STATE_ENABLED}, + attributes.get('status')) + self.assertEqual(True, attributes.get('updateable')) + def 
test__parse_attributes_missing_identity(self): self.soft_inv.json.pop('Id') self.assertRaisesRegex( -- GitLab From f91033cae06c1a078c9a1f375efbe3dc555aec6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Wed, 7 Oct 2020 10:09:50 -0400 Subject: [PATCH 265/303] Lower log severity for unknown type registries When unknown type encountered return and do not try another location as it is not expected to have different type. This is done to avoid another LOG.warning about not finding registry at all. Also fix mocking for test `test_get_message_registry_unknown_type` to take the intended path. And add `test_get_message_registry_loading_type_fails` to cover missing lines due to changes. Change-Id: I5a8190c55837f1e09ebdd3be34aae1fccd5a8229 Story: 2008094 Task: 40795 --- .../registry/message_registry_file.py | 5 ++- .../registry/test_message_registry_file.py | 40 ++++++++++++++----- 2 files changed, 32 insertions(+), 13 deletions(-) diff --git a/sushy/resources/registry/message_registry_file.py b/sushy/resources/registry/message_registry_file.py index 4d7d8d9..248c8d1 100644 --- a/sushy/resources/registry/message_registry_file.py +++ b/sushy/resources/registry/message_registry_file.py @@ -149,8 +149,9 @@ class MessageRegistryFile(base.ResourceBase): 'error': exc}) continue - LOG.warning('Ignoring unsupported flavor of registry %(registry)s', - {'registry': registry._odata_type}) + LOG.debug('Ignoring unsupported flavor of registry %(registry)s', + {'registry': registry._odata_type}) + return LOG.warning('No message registry found for %(language)s or ' 'default', {'language': language}) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py index 2dc858f..79367a2 100644 --- a/sushy/tests/unit/resources/registry/test_message_registry_file.py +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -125,22 +125,20 @@ class 
MessageRegistryFileTestCase(base.TestCase): reader=mock_reader_rv) self.assertEqual(mock_msg_reg_rv, registry) - @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', autospec=True) - @mock.patch('sushy.resources.base.JsonDataReader', autospec=True) def test_get_message_registry_unknown_type( - self, mock_reader, mock_msg_reg): - mock_reader_rv = mock.Mock() - mock_reader.return_value = mock_reader_rv - mock_reader_rv.get_json.return_value = { - "@odata.type": "#FishingRegistry.v1_1_1.FishingRegistry", - } - mock_msg_reg_rv = mock.Mock() - mock_msg_reg.return_value = mock_msg_reg_rv + self, mock_log, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' registry = self.reg_file.get_message_registry('en', None) - self.assertFalse(mock_msg_reg.called) self.assertIsNone(registry) + mock_log.debug.assert_called_with( + 'Ignoring unsupported flavor of registry %(registry)s', + {'registry': 'FishingRegistry'}) @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', autospec=True) @@ -214,6 +212,26 @@ class MessageRegistryFileTestCase(base.TestCase): reader=None, redfish_version=self.reg_file.redfish_version) self.assertIsNone(registry) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + def test_get_message_registry_loading_type_fails( + self, mock_reg_type, mock_log): + mock_reg_type.side_effect = TypeError('Something wrong') + + registry = self.reg_file.get_message_registry('en', None) + self.assertTrue(mock_reg_type.called) + self.assertIsNone(registry) + mock_log.warning.assert_any_call( + 'Cannot load message registry type from location ' + '%(location)s: 
%(error)s', + {'location': '/redfish/v1/Registries/Test/Test.1.0.json', + 'error': mock.ANY}) + mock_log.warning.assert_called_with( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', autospec=True) def test_get_message_registry_strangely_cased_lang( -- GitLab From 060597fd1f90947bc3b373269f16f7492aeab37f Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 14 Oct 2020 14:13:45 +0200 Subject: [PATCH 266/303] Fixed debian/watch. --- debian/changelog | 6 ++++++ debian/watch | 5 ++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/debian/changelog b/debian/changelog index ab0c417..752cf5c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.4.1-2) UNRELEASED; urgency=medium + + * Fixed debian/watch. + + -- Thomas Goirand Wed, 14 Oct 2020 14:13:39 +0200 + python-sushy (3.4.1-1) experimental; urgency=medium * New upstream release. diff --git a/debian/watch b/debian/watch index 5e22772..e08351e 100644 --- a/debian/watch +++ b/debian/watch @@ -1,4 +1,3 @@ version=3 -opts=uversionmangle=s/(rc|a|b|c)/~$1/ \ -https://pypi.debian.net/sushy/sushy-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) - +opts="uversionmangle=s/\.0rc/~rc/;s/\.0b1/~b1/;s/\.0b2/~b2/;s/\.0b3/~b3/" \ +https://github.com/openstack/sushy/tags .*/(\d[brc\d\.]+)\.tar\.gz -- GitLab From 1646aab7a7965966483d285db0b80530c3a1ad56 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 14 Oct 2020 14:14:03 +0200 Subject: [PATCH 267/303] Uploading to unstable. --- debian/changelog | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index 752cf5c..3f90dde 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,6 +1,7 @@ -python-sushy (3.4.1-2) UNRELEASED; urgency=medium +python-sushy (3.4.1-2) unstable; urgency=medium * Fixed debian/watch. + * Uploading to unstable. 
-- Thomas Goirand Wed, 14 Oct 2020 14:13:39 +0200 -- GitLab From 37425c95853697ad4450a877e8931bf035ab60d9 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Wed, 14 Oct 2020 14:15:10 +0200 Subject: [PATCH 268/303] Add a debian/salsa-ci.yml. --- debian/changelog | 1 + debian/salsa-ci.yml | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 debian/salsa-ci.yml diff --git a/debian/changelog b/debian/changelog index 3f90dde..46a8e4c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -2,6 +2,7 @@ python-sushy (3.4.1-2) unstable; urgency=medium * Fixed debian/watch. * Uploading to unstable. + * Add a debian/salsa-ci.yml. -- Thomas Goirand Wed, 14 Oct 2020 14:13:39 +0200 diff --git a/debian/salsa-ci.yml b/debian/salsa-ci.yml new file mode 100644 index 0000000..0c22dc4 --- /dev/null +++ b/debian/salsa-ci.yml @@ -0,0 +1,3 @@ +include: + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/salsa-ci.yml + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/pipeline-jobs.yml -- GitLab From 6e27fe156e4d52a080bd74f991042c099cc7818d Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Thu, 15 Oct 2020 16:20:33 +0200 Subject: [PATCH 269/303] Set safe version of hacking Versions of hacking from 3.1.0 until 4.0.0 NOT included are supposed to require a safe version of flake8, so besides normale issues related to minor version upgrades, we can safely use those versions. Also forcing pycodestyle versions to be compatible with flake8 installed by hacking. 
Change-Id: Ibcfe45c0618b17796c47296c018e5b6c650edf82 --- test-requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 01da614..686a2b6 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,8 +8,9 @@ oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 # linters -hacking>=3.1.0,<3.2.0 # Apache-2.0 +hacking>=3.1.0,<4.0.0 # Apache-2.0 flake8-import-order>=0.17.1 # LGPLv3 +pycodestyle>=2.0.0,<2.7.0 # MIT # docs sphinx>=2.0.0,!=2.1.0 # BSD -- GitLab From 9d9dae7d019c6a55a2c78281bd349f1ebc45d90a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20Sch=C3=B6nlaub?= Date: Thu, 8 Oct 2020 18:48:30 +0200 Subject: [PATCH 270/303] Adds basic support for TaskService to retrieve task information. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Change-Id: I3e266902da9346119a0ea1b94c49a71fb2da0f7c Signed-off-by: Manuel Schönlaub --- .../add-task-service-c751ce51e0b8dc11.yaml | 5 + sushy/__init__.py | 1 + sushy/main.py | 11 +++ sushy/resources/taskservice/constants.py | 19 ++++ sushy/resources/taskservice/mappings.py | 10 ++ sushy/resources/taskservice/task.py | 21 ++++ sushy/resources/taskservice/taskservice.py | 76 ++++++++++++++ sushy/tests/unit/json_samples/task2.json | 11 +++ .../unit/json_samples/task_collection.json | 16 +++ .../tests/unit/json_samples/taskservice.json | 19 ++++ .../unit/resources/taskservice/test_task.py | 98 +++++++++++++++++++ .../resources/taskservice/test_taskservice.py | 54 ++++++++++ 12 files changed, 341 insertions(+) create mode 100644 releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml create mode 100644 sushy/resources/taskservice/constants.py create mode 100644 sushy/resources/taskservice/taskservice.py create mode 100644 sushy/tests/unit/json_samples/task2.json create mode 100644 sushy/tests/unit/json_samples/task_collection.json create mode 100644 sushy/tests/unit/json_samples/taskservice.json create 
mode 100644 sushy/tests/unit/resources/taskservice/test_taskservice.py diff --git a/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml b/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml new file mode 100644 index 0000000..68a697f --- /dev/null +++ b/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds initial support for the TaskService resource to the library. + `TaskService` is responsible for managing tasks. \ No newline at end of file diff --git a/sushy/__init__.py b/sushy/__init__.py index 8fb461c..4f46183 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -24,6 +24,7 @@ from sushy.resources.manager.constants import * # noqa from sushy.resources.system.constants import * # noqa from sushy.resources.system.storage.constants import * # noqa from sushy.resources.updateservice.constants import * # noqa +from sushy.resources.taskservice.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/main.py b/sushy/main.py index 2625133..eecb204 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -31,6 +31,7 @@ from sushy.resources.registry import message_registry_file from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +from sushy.resources.taskservice import taskservice from sushy.resources.updateservice import updateservice from sushy import utils @@ -400,6 +401,16 @@ class Sushy(base.ResourceBase): redfish_version=self.redfish_version, registries=self.lazy_registries) + def get_task_service(self): + """Get the TaskService object + + :returns: The TaskService object + """ + return taskservice.TaskService( + self._conn, utils.get_sub_resource_path_by(self, 'Tasks'), + redfish_version=self.redfish_version, + registries=self.lazy_registries) + def _get_registry_collection(self): """Get MessageRegistryFileCollection object diff --git 
a/sushy/resources/taskservice/constants.py b/sushy/resources/taskservice/constants.py new file mode 100644 index 0000000..a185c57 --- /dev/null +++ b/sushy/resources/taskservice/constants.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish UpdateService json-schema. +# https://redfish.dmtf.org/schemas/v1/TaskService.v1_1_5.json#/definitions/OverWritePolicy + +# Overwrite Policy constants + +OVERWRITE_POLICY_OLDEST = 'oldest completed' +OVERWRITE_POLICY_MANUAL = 'manual only' diff --git a/sushy/resources/taskservice/mappings.py b/sushy/resources/taskservice/mappings.py index 20ee281..138422b 100644 --- a/sushy/resources/taskservice/mappings.py +++ b/sushy/resources/taskservice/mappings.py @@ -14,6 +14,8 @@ # limitations under the License. 
from sushy.resources import constants as res_cons +from sushy.resources.taskservice import constants as ts_cons +from sushy import utils TASK_STATE_VALUE_MAP = { @@ -31,3 +33,11 @@ TASK_STATE_VALUE_MAP = { 'Cancelling': res_cons.TASK_STATE_CANCELLING, 'Cancelled': res_cons.TASK_STATE_CANCELLED } + +OVERWRITE_POLICY_VALUE_MAP = { + 'Oldest': ts_cons.OVERWRITE_POLICY_OLDEST, + 'Manual': ts_cons.OVERWRITE_POLICY_MANUAL, +} + +OVERWRITE_POLICY_VALUE_MAP_REV = ( + utils.revert_dictionary(OVERWRITE_POLICY_VALUE_MAP)) diff --git a/sushy/resources/taskservice/task.py b/sushy/resources/taskservice/task.py index 3867f2b..445c119 100644 --- a/sushy/resources/taskservice/task.py +++ b/sushy/resources/taskservice/task.py @@ -87,3 +87,24 @@ class Task(base.ResourceBase): """Parses the messages""" for m in self.messages: message_registry.parse_message(self._registries, m) + + +class TaskCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Task + + @property + @utils.cache_it + def summary(self): + """Summary of task ids and corresponding state + + :returns: dictionary in the format + {'jid_123456789': sushy.TASK_STATE_NEW, + 'jid_123454321': sushy.TASK_STATE_RUNNING} + """ + task_dict = {} + for task in self.get_members(): + task_dict[task.identity] = task.task_state + return task_dict diff --git a/sushy/resources/taskservice/taskservice.py b/sushy/resources/taskservice/taskservice.py new file mode 100644 index 0000000..3d7f1d3 --- /dev/null +++ b/sushy/resources/taskservice/taskservice.py @@ -0,0 +1,76 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/v1/TaskService.v1_1_5.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.taskservice import mappings as ts_maps +from sushy.resources.taskservice import task +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class TaskService(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The task service identity""" + + name = base.Field('Name', required=True) + """The task service name""" + + service_enabled = base.Field('ServiceEnabled') + """The status of whether this service is enabled""" + + status = common.StatusField('Status') + """The status of the task service""" + + overwrite_policy = base.MappedField( + 'CompletedTaskOverWritePolicy', ts_maps.OVERWRITE_POLICY_VALUE_MAP) + """The overwrite policy for completed tasks""" + + event_on_task_state_change = base.Field( + 'LifeCycleEventOnTaskStateChange', adapter=bool) + """Whether a task state change sends an event""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a TaskService + + :param connector: A Connector instance + :param identity: The identity of the TaskService resource + :param redfish_version: The version of RedFish. 
Used to construct + the object according to schema of given version + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(TaskService, self).__init__( + connector, identity, redfish_version, registries) + + @property + @utils.cache_it + def tasks(self): + """Property to reference `TaskCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ + return task.TaskCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Tasks'), + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/tests/unit/json_samples/task2.json b/sushy/tests/unit/json_samples/task2.json new file mode 100644 index 0000000..fd4af6f --- /dev/null +++ b/sushy/tests/unit/json_samples/task2.json @@ -0,0 +1,11 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"546", + "Name":"Task 546", + "Description": "Task description", + "TaskMonitor":"/taskmon/546", + "TaskState":"Pending", + "TaskStatus":"OK", + "PercentComplete": 55, + "@odata.id":"/redfish/v1/TaskService/Tasks/546" + } \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/task_collection.json b/sushy/tests/unit/json_samples/task_collection.json new file mode 100644 index 0000000..6c15a61 --- /dev/null +++ b/sushy/tests/unit/json_samples/task_collection.json @@ -0,0 +1,16 @@ +{ + "@odata.context": "/redfish/v1/$metadata#TaskCollection.TaskCollection", + "@odata.id": "/redfish/v1/TaskService/Tasks", + "@odata.type": "#TaskCollection.TaskCollection", + "Description": "Collection of Tasks", + "Members": [ + { + "@odata.id": "/redfish/v1/TaskService/Tasks/545" + }, + { + "@odata.id": "/redfish/v1/TaskService/Tasks/546" + } + ], + "Members@odata.count": 2, + "Name": "Task Collection" +} \ No newline at end of file diff --git 
a/sushy/tests/unit/json_samples/taskservice.json b/sushy/tests/unit/json_samples/taskservice.json new file mode 100644 index 0000000..8b89c95 --- /dev/null +++ b/sushy/tests/unit/json_samples/taskservice.json @@ -0,0 +1,19 @@ +{ + "@odata.type": "#TaskService.v1_1_2.TaskService", + "Id": "TaskService", + "Name": "Tasks Service", + "DateTime": "2015-03-13T04:14:33+06:00", + "CompletedTaskOverWritePolicy": "Manual", + "LifeCycleEventOnTaskStateChange": true, + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "Tasks": { + "@odata.id": "/redfish/v1/TaskService/Tasks" + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#TaskService.TaskService", + "@odata.id": "/redfish/v1/TaskService" +} \ No newline at end of file diff --git a/sushy/tests/unit/resources/taskservice/test_task.py b/sushy/tests/unit/resources/taskservice/test_task.py index 0b9830d..ecf568c 100644 --- a/sushy/tests/unit/resources/taskservice/test_task.py +++ b/sushy/tests/unit/resources/taskservice/test_task.py @@ -72,3 +72,101 @@ class TaskTestCase(base.TestCase): self.task.parse_messages() self.assertEqual('Property SKU is read only.', self.task.messages[0].message) + + +class TaskCollectionTestCase(base.TestCase): + + def setUp(self): + super(TaskCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'task_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.task_col = task.TaskCollection( + self.conn, '/redfish/v1/TaskService/Tasks', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.task_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.task_col.redfish_version) + self.assertEqual('Task Collection', self.task_col.name) + self.assertEqual(('/redfish/v1/TaskService/Tasks/545', + '/redfish/v1/TaskService/Tasks/546'), + self.task_col.members_identities) + + @mock.patch.object(task, 'Task', 
autospec=True) + def test_get_member(self, mock_task): + self.task_col.get_member( + '/redfish/v1/TaskService/Tasks/545') + mock_task.assert_called_once_with( + self.task_col._conn, + '/redfish/v1/TaskService/Tasks/545', + self.task_col.redfish_version, None) + + @mock.patch.object(task, 'Task', autospec=True) + def test_get_members(self, mock_task): + members = self.task_col.get_members() + calls = [ + mock.call(self.task_col._conn, + '/redfish/v1/TaskService/Tasks/545', + self.task_col.redfish_version, None), + mock.call(self.task_col._conn, + '/redfish/v1/TaskService/Tasks/546', + self.task_col.redfish_version, None), + ] + mock_task.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(2, len(members)) + + def _setUp_task_summary(self): + self.conn.get.return_value.json.reset_mock() + successive_return_values = [] + file_names = ['sushy/tests/unit/json_samples/task.json', + 'sushy/tests/unit/json_samples/task2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) + + self.conn.get.return_value.json.side_effect = successive_return_values + + def test_summary(self): + # | GIVEN | + self._setUp_task_summary() + # | WHEN | + actual_summary = self.task_col.summary + # | THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + actual_summary) + + # reset mock + self.conn.get.return_value.json.reset_mock() + + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_summary, + self.task_col.summary) + self.conn.get.return_value.json.assert_not_called() + + def test_summary_on_refresh(self): + # | GIVEN | + self._setUp_task_summary() + # | WHEN & THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + self.task_col.summary) + + self.conn.get.return_value.json.side_effect = None + # On refreshing the task_col instance... 
+ with open('sushy/tests/unit/json_samples/' + 'task_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.task_col.invalidate() + self.task_col.refresh(force=False) + + # | GIVEN | + self._setUp_task_summary() + # | WHEN & THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + self.task_col.summary) diff --git a/sushy/tests/unit/resources/taskservice/test_taskservice.py b/sushy/tests/unit/resources/taskservice/test_taskservice.py new file mode 100644 index 0000000..1c13d8a --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_taskservice.py @@ -0,0 +1,54 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from unittest import mock + +from sushy.resources import constants as res_cons +from sushy.resources.taskservice import constants as ts_cons +from sushy.resources.taskservice import task +from sushy.resources.taskservice import taskservice +from sushy.tests.unit import base + + +class TaskServiceTestCase(base.TestCase): + + def setUp(self): + super(TaskServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/taskservice.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.tsk_serv = taskservice.TaskService( + self.conn, '/redfish/v1/TaskService/TaskService', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.tsk_serv._parse_attributes(self.json_doc) + self.assertEqual('TaskService', self.tsk_serv.identity) + self.assertTrue(self.tsk_serv.service_enabled) + self.assertTrue(self.tsk_serv.event_on_task_state_change) + self.assertEqual(res_cons.STATE_ENABLED, self.tsk_serv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.tsk_serv.status.health) + self.assertEqual(self.tsk_serv.overwrite_policy, + ts_cons.OVERWRITE_POLICY_MANUAL) + + @mock.patch.object(task, 'TaskCollection', autospec=True) + def test_tasks(self, task_collection_mock): + self.tsk_serv.tasks + task_collection_mock.assert_called_once_with( + self.conn, '/redfish/v1/TaskService/Tasks', + self.tsk_serv.redfish_version, + self.tsk_serv._registries) -- GitLab From d5a09e965935a856b8f18a44b33099df9d68a5e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20Sch=C3=B6nlaub?= Date: Wed, 30 Sep 2020 17:18:02 +0200 Subject: [PATCH 271/303] Add support to expose oem_vendors from resource links MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Some BMC implementations (such as Dell G13 iDrac) report the OEM moniker in the Links attribute and do not expose OEM info at the resource's root level. 
Change-Id: If03458858ddffeb1d3ef76621e096782e0d4d318 Signed-off-by: Manuel Schönlaub Story: 2008210 Task: 40995 --- sushy/resources/base.py | 15 ++++++++++++++- sushy/tests/unit/resources/test_base.py | 20 ++++++++++++++++++-- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 8628b56..963efb0 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -218,6 +218,11 @@ class ListField(Field): return getattr(self, key) +class LinksField(CompositeField): + """Reference to linked resources.""" + oem_vendors = Field('Oem', adapter=list) + + class DictionaryField(Field): """Base class for fields consisting of dictionary of several sub-fields.""" @@ -483,9 +488,11 @@ class ResourceBase(object, metaclass=abc.ABCMeta): redfish_version = None """The Redfish version""" - oem_vendors = Field('Oem', adapter=list) + _oem_vendors = Field('Oem', adapter=list) """The list of OEM extension names for this resource.""" + links = LinksField('Links') + def __init__(self, connector, path='', @@ -638,6 +645,12 @@ class ResourceBase(object, metaclass=abc.ABCMeta): if force_refresh: self.refresh() + @property + def oem_vendors(self): + return list( + set((self._oem_vendors or []) + (self.links.oem_vendors or [])) + ) + @property def json(self): return self._json diff --git a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index d45f395..0ee4b93 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -40,7 +40,21 @@ BASE_RESOURCE_JSON = { "@odata.type": "http://AnotherStandardsBody/schemas.v1_0_1#styleInfoExt", # noqa "Style": "Executive" } + }, + "Links": { + "Oem": { + "Contoso": { + "@odata.type": "http://contoso.com/schemas/extensions.v1_2_1#contoso.AnvilTypes1", # noqa + "slogan": "Contoso never fail", + "disclaimer": "* Most of the time" + }, + "EID_420_ASB_345": { + "@odata.type": 
"http://AnotherStandardsBody/schemas.v1_0_1#styleInfoExt", # noqa + "Style": "Executive" + } + } } + } @@ -139,8 +153,10 @@ class ResourceBaseTestCase(base.TestCase): self.assertIsNot(resource_a._reader, resource_b._reader) def test__parse_attributes(self): - for oem_vendor in self.base_resource2.oem_vendors: - self.assertTrue(oem_vendor in ('Contoso', 'EID_412_ASB_123')) + expected_oem_vendors = ['Contoso', 'EID_412_ASB_123', + 'EID_420_ASB_345'] + actual_oem_vendors = sorted(self.base_resource2.oem_vendors) + self.assertEqual(expected_oem_vendors, actual_oem_vendors) self.assertEqual('base_resource2', self.base_resource2.resource_name) def test_refresh_local(self): -- GitLab From 5be884b3df4676ecc4166d5c78c6d810e64ff61d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20Sch=C3=B6nlaub?= Date: Wed, 28 Oct 2020 09:47:28 +0100 Subject: [PATCH 272/303] Fixes a typo when accessing the connector in CompositionService MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Change-Id: I2e062894c2654266051bfe240720d99b4a59568b Signed-off-by: Manuel Schönlaub --- .../compositionservice/compositionservice.py | 4 ++-- .../test_compositionservice.py | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/sushy/resources/compositionservice/compositionservice.py b/sushy/resources/compositionservice/compositionservice.py index 73f9dad..b733165 100644 --- a/sushy/resources/compositionservice/compositionservice.py +++ b/sushy/resources/compositionservice/compositionservice.py @@ -85,7 +85,7 @@ class CompositionService(base.ResourceBase): def resource_blocks(self): """Property to reference `ResourceBlockCollection` instance""" return resourceblock.ResourceBlockCollection( - self.conn, self._get_resource_blocks_collection_path, + self._conn, self._get_resource_blocks_collection_path, self.redfish_version, self.registries) @property @@ -93,5 +93,5 @@ class CompositionService(base.ResourceBase): def resource_zones(self): 
"""Property to reference `ResourceZoneCollection` instance""" return resourcezone.ResourceZoneCollection( - self.conn, self._get_resource_zones_collection_path, + self._conn, self._get_resource_zones_collection_path, self.redfish_version, self.registries) diff --git a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py index f6ba81f..ad5f3b7 100644 --- a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py +++ b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py @@ -15,6 +15,8 @@ import json from unittest import mock from sushy.resources.compositionservice import compositionservice +from sushy.resources.compositionservice import resourceblock +from sushy.resources.compositionservice import resourcezone from sushy.resources import constants as res_cons from sushy.tests.unit import base @@ -49,3 +51,19 @@ class CompositionServiceTestCase(base.TestCase): self.assertEqual(res_cons.STATE_ENABLED, self.comp_ser.status.state) self.assertEqual(res_cons.HEALTH_OK, self.comp_ser.status.health) self.assertTrue(self.comp_ser.service_enabled) + + @mock.patch.object(resourceblock, 'ResourceBlockCollection', autospec=True) + def test_get_resource_blocks(self, mock_resourceblock_col): + _ = self.comp_ser.resource_blocks + mock_resourceblock_col.assert_called_once_with( + self.comp_ser._conn, + self.comp_ser._get_resource_blocks_collection_path, + self.comp_ser.redfish_version, None) + + @mock.patch.object(resourcezone, 'ResourceZoneCollection', autospec=True) + def test_get_resource_zones(self, mock_resourcezone_col): + _ = self.comp_ser.resource_zones + mock_resourcezone_col.assert_called_once_with( + self.comp_ser._conn, + self.comp_ser._get_resource_zones_collection_path, + self.comp_ser.redfish_version, None) -- GitLab From 8fe28a6882abb209897aa5ab18ad397005773a35 Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Thu, 29 Oct 
2020 20:42:55 +0100 Subject: [PATCH 273/303] Avoid running functional jobs for doc changes Change-Id: Ie618831c40a70c97f2012f6228546b7d94e05f91 --- zuul.d/sushy-jobs.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml index 7914583..96da6fb 100644 --- a/zuul.d/sushy-jobs.yaml +++ b/zuul.d/sushy-jobs.yaml @@ -2,6 +2,8 @@ name: sushy-tempest-partition-bios-redfish-pxe parent: ironic-tempest-partition-bios-redfish-pxe irrelevant-files: + - ^.*\.rst$ + - ^doc/.*$ - ^test-requirements.txt$ - ^sushy/tests/.*$ required-projects: @@ -11,6 +13,8 @@ name: sushy-tempest-partition-uefi-redfish-vmedia parent: ironic-tempest-partition-uefi-redfish-vmedia irrelevant-files: + - ^.*\.rst$ + - ^doc/.*$ - ^test-requirements.txt$ - ^sushy/tests/.*$ required-projects: -- GitLab From d293dba6287cd331de3c3afc4b2631476d053d6a Mon Sep 17 00:00:00 2001 From: Kafilat Adeleke Date: Tue, 27 Oct 2020 02:18:06 -0700 Subject: [PATCH 274/303] Adds sushy library overview Change-Id: I4c2d5eb427a7b76a1bfe69551cf0c9937567550d Story: #2006610 Task: #36858 --- README.rst | 4 ++-- doc/source/index.rst | 18 ++++++++++++++++++ doc/source/reference/index.rst | 21 ++------------------- 3 files changed, 22 insertions(+), 21 deletions(-) diff --git a/README.rst b/README.rst index ef3ec98..7ade1e1 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -About Sushy -=========== +Overview +======== Sushy is a Python library to communicate with `Redfish`_ based systems. diff --git a/doc/source/index.rst b/doc/source/index.rst index 6c23168..e3bcc48 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -9,6 +9,24 @@ Welcome to Sushy's documentation! .. 
include:: ../../README.rst +Features +======== + +* Abstraction around the SystemCollection and System resources (Basic + server identification and asset information) +* RAID in Redfish based Systems +* Redfish Ethernet Interface +* System mappings +* System processor +* Storage management +* Systems power management (Both soft and hard; Including NMI injection) +* Changing systems boot device, frequency (Once or permanently) and mode + (UEFI or BIOS) +* Chassis management +* OEM extention +* Virtual media management +* Session Management + Documentation ============= diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index 998aacf..ba410c6 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -2,31 +2,14 @@ Sushy Library Reference ======================= -Features -======== - -* Abstraction around the SystemCollection and System resources (Basic - server identification and asset information) -* Systems power management (Both soft and hard; Including NMI injection) -* Changing systems boot device, frequency (Once or permanently) and mode - (UEFI or BIOS) -* Virtual media management -* SessionManagement +Usage +===== .. toctree:: :maxdepth: 2 usage -Missing Features -================ - -These are some features that sushy is presently missing. - -* Collect sensor data (Health state, temperature, fans etc...) -* System disk size -* Serial console - Sushy Python API Reference ========================== -- GitLab From 1f4f654267f5ac9cba9c5dbc8f623133fb316f93 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Wed, 25 Nov 2020 12:30:35 -0600 Subject: [PATCH 275/303] Raise exception when async operation fails In the _op() method of class Connector, an exception is raised when a synchronous operation fails (status_code >= 400). But no exception is raised when a blocking asynchronous operation fails. This patch fixes the asynchronous case to raise the appropriate exception if the operation fails. 
Change-Id: I5f7c57881ad1c576cd338454292b5eb828bf1e12 Story: 2003514 Task: 41299 --- ...n-async-task-failure-b67c7bc189a4d6ca.yaml | 5 +++++ sushy/connector.py | 1 + sushy/tests/unit/test_connector.py | 20 +++++++++++++++++++ 3 files changed, 26 insertions(+) create mode 100644 releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml diff --git a/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml b/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml new file mode 100644 index 0000000..ac1bc5e --- /dev/null +++ b/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Fixes an issue in the ``Connector`` class where an exception is not raised + when an asynchronous operations fails. diff --git a/sushy/connector.py b/sushy/connector.py index 34a730f..b646590 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -163,6 +163,7 @@ class Connector(object): 'timeout': timeout}) raise exceptions.ConnectionError(url=url, error=m) response = mon.response + exceptions.raise_for_response(method, url, response) LOG.debug('HTTP response for %(method)s %(url)s: ' 'status code: %(code)s', diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 462be6f..798a20e 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -356,3 +356,23 @@ class ConnectorOpTestCase(base.TestCase): with self.assertRaisesRegex(exceptions.ConnectionError, 'status 202, but no Location header'): self.conn._op('POST', 'http://foo.bar', blocking=True) + + @mock.patch('sushy.connector.time.sleep', autospec=True) + def test_blocking_task_fails(self, mock_sleep): + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'retry-after': 5, + 'location': '/redfish/v1/taskmon/1' + } + response2 = mock.MagicMock(spec=requests.Response) + response2.status_code 
= http_client.BAD_REQUEST + message = 'Unable to create Volume with given parameters' + response2.json.return_value = { + 'error': { + 'message': message + } + } + self.request.side_effect = [response1, response1, response2] + with self.assertRaisesRegex(exceptions.BadRequestError, message): + self.conn._op('POST', 'http://foo.bar', blocking=True) -- GitLab From c4580134f5a0d03d2d1ae6d08f6e8a451fdedf2e Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Tue, 8 Dec 2020 19:05:26 +0100 Subject: [PATCH 276/303] Fix lower-constraints with the new pip resolver * move pep8 dependencies from test-requirements to tox.ini, they're not needed there and are hard to constraint properly. * bump docutils required by sphinx Change-Id: I349056b953878fa53ac43f37428e72411376038b --- lower-constraints.txt | 2 +- test-requirements.txt | 5 ----- tox.ini | 4 ++++ 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/lower-constraints.txt b/lower-constraints.txt index 5510af7..2c3c5a7 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -4,7 +4,7 @@ Babel==2.3.4 cliff==3.1.0 cmd2==0.8.9 coverage==4.0 -docutils==0.11 +docutils==0.12 dulwich==0.15.0 extras==1.0.0 fixtures==3.0.0 diff --git a/test-requirements.txt b/test-requirements.txt index 686a2b6..8f73969 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,11 +7,6 @@ coverage!=4.4,>=4.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 -# linters -hacking>=3.1.0,<4.0.0 # Apache-2.0 -flake8-import-order>=0.17.1 # LGPLv3 -pycodestyle>=2.0.0,<2.7.0 # MIT - # docs sphinx>=2.0.0,!=2.1.0 # BSD openstackdocstheme>=2.2.1 # Apache-2.0 diff --git a/tox.ini b/tox.ini index ccc2371..842bc68 100644 --- a/tox.ini +++ b/tox.ini @@ -17,6 +17,10 @@ deps = commands = stestr run --slowest {posargs} [testenv:pep8] +deps= + hacking>=3.1.0,<4.0.0 # Apache-2.0 + flake8-import-order>=0.17.1 # LGPLv3 + pycodestyle>=2.0.0,<2.7.0 # MIT commands = flake8 {posargs} [testenv:venv] -- GitLab From 
74a8505e4fbcb44248f0f4501b71c3d3eb481266 Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Mon, 14 Dec 2020 18:53:03 +0100 Subject: [PATCH 277/303] Remove lower-constraints job As discussed during the upstream ironic community meeting on Monday Dec 14 2020, the lower-constraints job is being removed. Change-Id: Ie71603d57eef5f56f9a9fabac2a5283f472b135e --- lower-constraints.txt | 55 ------------------------------------------- zuul.d/project.yaml | 1 - 2 files changed, 56 deletions(-) delete mode 100644 lower-constraints.txt diff --git a/lower-constraints.txt b/lower-constraints.txt deleted file mode 100644 index 2c3c5a7..0000000 --- a/lower-constraints.txt +++ /dev/null @@ -1,55 +0,0 @@ -alabaster==0.7.10 -appdirs==1.3.0 -Babel==2.3.4 -cliff==3.1.0 -cmd2==0.8.9 -coverage==4.0 -docutils==0.12 -dulwich==0.15.0 -extras==1.0.0 -fixtures==3.0.0 -flake8==2.5.5 -flake8-import-order==0.17.1 -future==0.18.2 -hacking==3.1.0 -imagesize==0.7.1 -iso8601==0.1.11 -Jinja2==2.10 -keystoneauth1==3.4.0 -linecache2==1.0.0 -MarkupSafe==1.1.1 -mccabe==0.2.1 -mox3==0.20.0 -openstackdocstheme==2.2.1 -os-client-config==1.28.0 -oslotest==3.2.0 -pbr==2.0.0 -pep8==1.5.7 -prettytable==0.7.2 -pycodestyle==2.6.0 -pyflakes==0.8.1 -Pygments==2.2.0 -pyparsing==2.4.7 -pyperclip==1.8.0 -python-dateutil==2.7.0 -python-mimeparse==1.6.0 -python-subunit==1.0.0 -pytz==2013.6 -PyYAML==3.13 -reno==3.1.0 -requests==2.14.2 -requestsexceptions==1.2.0 -six==1.14.0 -snowballstemmer==1.2.1 -Sphinx==2.0.0 -sphinxcontrib-apidoc==0.2.0 -sphinxcontrib-websupport==1.0.1 -stestr==2.0.0 -stevedore==1.29.0 -testrepository==0.0.20 -testscenarios==0.4 -testtools==2.2.0 -traceback2==1.4.0 -unittest2==1.1.0 -voluptuous==0.11.7 -wcwidth==0.1.9 diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml index 110f52f..debeca9 100644 --- a/zuul.d/project.yaml +++ b/zuul.d/project.yaml @@ -2,7 +2,6 @@ templates: - check-requirements - openstack-cover-jobs - - openstack-lower-constraints-jobs - openstack-python3-wallaby-jobs - 
publish-openstack-docs-pti - release-notes-jobs-python3 -- GitLab From f3535506013c9530b23ff0e917ffa3e5760a2882 Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Mon, 19 Oct 2020 17:38:35 -0500 Subject: [PATCH 278/303] Add OperationApplyTime support to Volume methods The supported_values property in the OperationApplyTimeSupportField class returns an unmapped list of OperationApplyTime values supported by the service. The convention in Sushy for properties that return a list of schema-defined enumeration values is to return a MappedListField. This type of field performs mapping between Redfish schema-defined enumeration values and constants exposed by the Sushy package. This update adds the mapped_supported_values property to return a MappedListField of OperationApplyTime values supported by the service. The supported_values property is deprecated. The create_volume method in the VolumeCollection class and the delete_volume and initialize_volume methods in the Volume class are updated to take optional apply_time and timeout keyword parameters. This allows the caller of those volume methods to specify a preferred OperationApplyTime annotation and a maximum timeout for synchronous operations. If the specified apply_time is 'Immediate', the operation is called with blocking enabled. Otherwise blocking is disabled. For asynchronous operations, those three methods will now return a TaskMonitor instance that the caller can use to monitor the state of the task. 
Change-Id: I8ba2b9ff1e80fa0c2edc3a11ab80844d732e4394 Story: 2003514 Task: 41087 --- ...pport-for-volume-ops-f2ebc412e3b4290a.yaml | 19 +++++ sushy/resources/common.py | 14 ++- sushy/resources/system/storage/volume.py | 80 +++++++++++++++-- .../resources/system/storage/test_volume.py | 85 +++++++++++++++++-- .../unit/resources/system/test_system.py | 12 ++- 5 files changed, 188 insertions(+), 22 deletions(-) create mode 100644 releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml diff --git a/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml b/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml new file mode 100644 index 0000000..e211d78 --- /dev/null +++ b/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml @@ -0,0 +1,19 @@ +--- +deprecations: + - | + The ``supported_values`` property in the + ``OperationApplyTimeSupportField`` class is deprecated. Use the + ``mapped_supported_values`` property instead. The + ``mapped_supported_values`` property uses the ``MappedListField`` type + to map the Redfish schema-defined enumeration values to constants exposed + by the Sushy package. +features: + - | + Update the ``create_volume`` method in the ``VolumeCollection`` class and + the ``delete_volume`` and ``initialize_volume`` methods in the ``Volume`` + class to take optional ``apply_time`` and ``timeout`` keyword parameters. + This allows the caller of those volume methods to specify a preferred + ``OperationApplyTime`` annotation and a maximum timeout for synchronous + operations. For asynchronous operations, those three methods will now + return a ``TaskMonitor`` instance that the caller can use to monitor the + state of the task. 
diff --git a/sushy/resources/common.py b/sushy/resources/common.py index df2fc93..8d1e470 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -42,8 +42,18 @@ class OperationApplyTimeSupportField(base.CompositeField): supported_values = base.Field('SupportedValues', required=True, adapter=list) - """The client is allowed request when performing a create, delete, or - action operation""" + """The types of apply times that the client is allowed request when + performing a create, delete, or action operation returned as an unmapped + list + + Deprecated: Use `mapped_supported_values`. + """ + + mapped_supported_values = base.MappedListField( + 'SupportedValues', res_maps.APPLY_TIME_VALUE_MAP, required=True) + """The types of apply times that the client is allowed request when + performing a create, delete, or action operation returned as a mapped + list""" class ActionField(base.CompositeField): diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 78e9b21..e7f9d57 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -18,7 +18,11 @@ import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common +from sushy.resources import constants as res_cons +from sushy.resources import mappings as res_maps +from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import mappings as store_maps +from sushy.resources.task_monitor import TaskMonitor from sushy import utils LOG = logging.getLogger(__name__) @@ -86,30 +90,71 @@ class Volume(base.ResourceBase): set(store_maps.VOLUME_INIT_TYPE_MAP). intersection(action.allowed_values)]) - def initialize_volume(self, value): + def initialize_volume(self, value=store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=None, timeout=500): """Initialize the volume. :param value: The InitializeType value. 
+ :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. :raises: InvalidParameterValueError, if the target value is not allowed. + :raises: ConnectionError + :raises: HTTPError + :returns: TaskMonitor if async task or None if successful init """ valid_values = self.get_allowed_initialize_volume_values() if value not in valid_values: raise exceptions.InvalidParameterValueError( parameter='value', value=value, valid_values=valid_values) value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value] + payload = {'InitializeType': value} + blocking = False + oat_prop = '@Redfish.OperationApplyTime' + if apply_time: + payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + if (payload and payload.get(oat_prop) == res_maps. + APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): + blocking = True target_uri = self._get_initialize_action_element().target_uri - self._conn.post(target_uri, data={'InitializeType': value}, - blocking=True) + r = self._conn.post(target_uri, data=payload, blocking=blocking, + timeout=timeout) + if r.status_code == 202: + return (TaskMonitor(self, r.headers.get('location')) + .set_retry_after(r.headers.get('retry-after'))) - def delete_volume(self, payload=None): + def delete_volume(self, payload=None, apply_time=None, timeout=500): """Delete the volume. :param payload: May contain @Redfish.OperationApplyTime property + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. 
:raises: ConnectionError :raises: HTTPError + :returns: TaskMonitor if async task or None if successful deletion """ - self._conn.delete(self._path, data=payload, blocking=True) + blocking = False + oat_prop = '@Redfish.OperationApplyTime' + if apply_time: + if payload is None: + payload = {} + payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + if (payload and payload.get(oat_prop) == res_maps. + APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): + blocking = True + r = self._conn.delete(self._path, data=payload, blocking=blocking, + timeout=timeout) + if r.status_code == 202: + return (TaskMonitor(self, r.headers.get('location')) + .set_retry_after(r.headers.get('retry-after'))) class VolumeCollection(base.ResourceCollectionBase): @@ -145,17 +190,36 @@ class VolumeCollection(base.ResourceCollectionBase): """Indicates if a client is allowed to request for a specific apply time of a create, delete, or action operation of a given resource""" - def create_volume(self, payload): + def create_volume(self, payload, apply_time=None, timeout=500): """Create a volume. :param payload: The payload representing the new volume to create. + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. :raises: ConnectionError :raises: HTTPError - :returns: Newly created Volume resource or None if no Location header + :returns: Newly created Volume resource or TaskMonitor if async task """ - r = self._conn.post(self._path, data=payload, blocking=True) + blocking = False + oat_prop = '@Redfish.OperationApplyTime' + if apply_time: + if payload is None: + payload = {} + payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + if (payload and payload.get(oat_prop) == res_maps. 
+ APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): + blocking = True + r = self._conn.post(self._path, data=payload, blocking=blocking, + timeout=timeout) location = r.headers.get('Location') if r.status_code == 201: if location: self.refresh() return self.get_member(location) + elif r.status_code == 202: + return (TaskMonitor(self, location) + .set_retry_after(r.headers.get('retry-after'))) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index e6fe9a0..9fdc093 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -17,6 +17,8 @@ from dateutil import parser import sushy from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import volume from sushy.tests.unit import base @@ -54,12 +56,27 @@ class VolumeTestCase(base.TestCase): identifier.durable_name) self.assertIsNone(self.stor_volume.block_size_bytes) - def test_initialize_volume(self): + def test_initialize_volume_immediate(self): target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ 'Volumes/1/Actions/Volume.Initialize' - self.stor_volume.initialize_volume('fast') + self.stor_volume.initialize_volume( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_IMMEDIATE) self.stor_volume._conn.post.assert_called_once_with( - target_uri, data={'InitializeType': 'Fast'}, blocking=True) + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'Immediate'}, + blocking=True, timeout=500) + + def test_initialize_volume_on_reset(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize_volume( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_ON_RESET) + 
self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'OnReset'}, + blocking=False, timeout=500) def test_initialize_volume_bad_value(self): self.assertRaisesRegex( @@ -70,13 +87,28 @@ class VolumeTestCase(base.TestCase): def test_delete_volume(self): self.stor_volume.delete_volume() self.stor_volume._conn.delete.assert_called_once_with( - self.stor_volume._path, data=None, blocking=True) + self.stor_volume._path, data=None, blocking=False, timeout=500) def test_delete_volume_with_payload(self): - payload = {'@Redfish.OperationApplyTime': 'OnReset'} + payload = {'@Redfish.OperationApplyTime': 'Immediate'} self.stor_volume.delete_volume(payload=payload) self.stor_volume._conn.delete.assert_called_once_with( - self.stor_volume._path, data=payload, blocking=True) + self.stor_volume._path, data=payload, blocking=True, timeout=500) + + def test_delete_volume_immediate(self): + payload = {} + self.stor_volume.delete_volume( + payload=payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=True, timeout=500) + + def test_delete_volume_on_reset(self): + payload = {} + self.stor_volume.delete_volume( + payload=payload, apply_time=res_cons.APPLY_TIME_ON_RESET, + timeout=250) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=False, timeout=250) class VolumeCollectionTestCase(base.TestCase): @@ -113,6 +145,10 @@ class VolumeCollectionTestCase(base.TestCase): support._maintenance_window_resource.resource_uri) self.assertEqual(['Immediate', 'OnReset', 'AtMaintenanceWindowStart'], support.supported_values) + self.assertEqual([res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_START], + support.mapped_supported_values) @mock.patch.object(volume, 'Volume', autospec=True) def test_get_member(self, Volume_mock): 
@@ -175,23 +211,54 @@ class VolumeCollectionTestCase(base.TestCase): self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) - def test_create_volume(self): + def test_create_volume_immediate(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'Immediate' + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 201 + self.conn.post.return_value.headers.return_value = { + 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + } + new_vol = self.stor_vol_col.create_volume( + payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=True, timeout=500) + self.stor_vol_col.refresh.assert_called_once() + self.assertIsNotNone(new_vol) + self.assertEqual('4', new_vol.identity) + self.assertEqual('My Volume 4', new_vol.name) + self.assertEqual(107374182400, new_vol.capacity_bytes) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) + + def test_create_volume_on_reset(self): payload = { 'Name': 'My Volume 4', 'VolumeType': 'Mirrored', 'RAIDType': 'RAID1', 'CapacityBytes': 107374182400 } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'OnReset' with open('sushy/tests/unit/json_samples/volume4.json') as f: self.conn.get.return_value.json.return_value = json.load(f) self.conn.post.return_value.status_code = 201 self.conn.post.return_value.headers.return_value = { 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' } - new_vol = self.stor_vol_col.create_volume(payload) + new_vol = self.stor_vol_col.create_volume( + 
payload, apply_time=res_cons.APPLY_TIME_ON_RESET) self.stor_vol_col._conn.post.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', - data=payload, blocking=True) + data=expected_payload, blocking=False, timeout=500) self.stor_vol_col.refresh.assert_called_once() self.assertIsNotNone(new_vol) self.assertEqual('4', new_vol.identity) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index acb21e8..9bf8488 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -107,9 +107,12 @@ class SystemTestCase(base.TestCase): '/redfish/v1/Systems/437XR1138R2'}, 'maintenance_window_duration_in_seconds': 600, 'maintenance_window_start_time': - parser.parse('2017-05-03T23:12:37-05:00'), - 'supported_values': - ['Immediate', 'AtMaintenanceWindowStart']}, + parser.parse('2017-05-03T23:12:37-05:00'), + 'supported_values': + ['Immediate', 'AtMaintenanceWindowStart'], + 'mapped_supported_values': + [res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_MAINT_START]}, 'target_uri': '/redfish/v1/Systems/437XR1138R2/Actions/' 'ComputerSystem.Reset'}}, @@ -206,6 +209,9 @@ class SystemTestCase(base.TestCase): self.assertIsNotNone(support) self.assertEqual(['Immediate', 'AtMaintenanceWindowStart'], support.supported_values) + self.assertEqual([res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_MAINT_START], + support.mapped_supported_values) self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), support.maintenance_window_start_time) self.assertEqual(600, support.maintenance_window_duration_in_seconds) -- GitLab From e4a3e18de079968f23aa6473750ed017ff43fa4c Mon Sep 17 00:00:00 2001 From: Iury Gregory Melo Ferreira Date: Mon, 4 Jan 2021 18:35:55 +0100 Subject: [PATCH 279/303] Add doc/requirements We need to specify doc requirements in doc/requirements.txt to avoid problems with the pip resolver for the release team. 
Removed specific doc requirements from test-requirements.txt Change-Id: Ie401218013438b9de07d8dc9daa24485608b29a5 --- doc/requirements.txt | 4 ++++ test-requirements.txt | 8 -------- tox.ini | 9 +++++++++ 3 files changed, 13 insertions(+), 8 deletions(-) create mode 100644 doc/requirements.txt diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000..f5e3228 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,4 @@ +reno>=3.1.0 # Apache-2.0 +sphinx>=2.0.0,!=2.1.0 # BSD +openstackdocstheme>=2.2.1 # Apache-2.0 +sphinxcontrib-apidoc>=0.2.0 # BSD diff --git a/test-requirements.txt b/test-requirements.txt index 8f73969..16c5d48 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,11 +6,3 @@ coverage!=4.4,>=4.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 - -# docs -sphinx>=2.0.0,!=2.1.0 # BSD -openstackdocstheme>=2.2.1 # Apache-2.0 -sphinxcontrib-apidoc>=0.2.0 # BSD - -# releasenotes -reno>=3.1.0 # Apache-2.0 diff --git a/tox.ini b/tox.ini index 842bc68..f868a55 100644 --- a/tox.ini +++ b/tox.ini @@ -41,14 +41,23 @@ commands = coverage erase coverage xml -o cover/coverage.xml [testenv:docs] +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -r{toxinidir}/requirements.txt + -r{toxinidir}/doc/requirements.txt commands = sphinx-build -W -b html doc/source doc/build/html [testenv:pdf-docs] +usedevelop = False whitelist_externals = make +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -r{toxinidir}/doc/requirements.txt commands = sphinx-build -b latex doc/source doc/build/pdf make -C doc/build/pdf [testenv:releasenotes] +usedevelop = False +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -r{toxinidir}/doc/requirements.txt commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html -- GitLab From 
3dcffb9a76b894eeeb837007f137452beba8439b Mon Sep 17 00:00:00 2001 From: ankit Date: Tue, 1 Dec 2020 09:31:24 +0000 Subject: [PATCH 280/303] Fixes issue of redfish firmware update This patch fixes issue of redfish firmware update for ilo5 based hardware by adding a new attribute task_uri to monitor and validate the firmware update since task_monitor uri does not provide the appropriate response. Change-Id: Ida1d314f77b9f114e572639c7ad46545be1ce962 Story: #2008403 Task: #41339 --- ...irmware-update-issue-273862b2a11e3536.yaml | 6 ++++ sushy/resources/base.py | 2 +- sushy/resources/taskservice/taskmonitor.py | 9 +++--- .../resources/updateservice/updateservice.py | 6 ++++ .../tests/unit/json_samples/task_monitor.json | 25 ++++++++++++++++ .../resources/taskservice/test_taskmonitor.py | 5 +++- .../updateservice/test_updateservice.py | 29 +++++++++++++++++-- 7 files changed, 73 insertions(+), 9 deletions(-) create mode 100644 releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml create mode 100644 sushy/tests/unit/json_samples/task_monitor.json diff --git a/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml b/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml new file mode 100644 index 0000000..e129388 --- /dev/null +++ b/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adding a new attribute task_uri to monitor redfish firmware + update since some vendors(ilo) does not provide appropriate + response with task_monitor uri. 
diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 963efb0..6ecf4e8 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -330,7 +330,7 @@ class MappedListField(Field): class MessageListField(ListField): """List of messages with details of settings update status""" - message_id = Field('MessageId', required=True) + message_id = Field('MessageId') """The key for this message which can be used to look up the message in a message registry """ diff --git a/sushy/resources/taskservice/taskmonitor.py b/sushy/resources/taskservice/taskmonitor.py index 3cad221..5d4edc5 100644 --- a/sushy/resources/taskservice/taskmonitor.py +++ b/sushy/resources/taskservice/taskmonitor.py @@ -40,6 +40,7 @@ class TaskMonitor(object): the object according to schema of the given version. :param registries: Dict of Redfish Message Registry objects to be used in any resource that needs registries to parse messages. + :param field_data: the data to use populating the fields. """ self._connector = connector self._task_monitor = task_monitor @@ -50,11 +51,9 @@ class TaskMonitor(object): self._task = None if self._field_data: - # If a body was returned, assume it's a Task on a 202 status code - content_length = int(self._field_data.headers.get( - 'Content-Length')) - if (self._field_data.status_code == http_client.ACCEPTED - and content_length > 0): + # We do not check 'content-length' as it is not always present + # and will rely on task uri in those cases. 
+ if self._field_data.status_code == http_client.ACCEPTED: self._task = task.Task(self._connector, self._task_monitor, redfish_version=self._redfish_version, registries=self._registries, diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index 37a83c1..c6b630e 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -15,6 +15,7 @@ # https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json import logging +from urllib.parse import urljoin from sushy import exceptions from sushy.resources import base @@ -155,6 +156,11 @@ class UpdateService(base.ResourceBase): header = 'Location' task_monitor = rsp.headers.get(header) + task_uri_data = json_data.get('@odata.id') + + if task_uri_data: + task_monitor = urljoin(task_monitor, task_uri_data) + if not task_monitor: raise exceptions.MissingHeaderError(target_uri=target_uri, header=header) diff --git a/sushy/tests/unit/json_samples/task_monitor.json b/sushy/tests/unit/json_samples/task_monitor.json new file mode 100644 index 0000000..5ed2a04 --- /dev/null +++ b/sushy/tests/unit/json_samples/task_monitor.json @@ -0,0 +1,25 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"545", + "Name":"Task 545", + "Description": "Task description", + "TaskMonitor":"/taskmon/545", + "TaskState":"Completed", + "StartTime":"2012-03-07T14:44+06:00", + "EndTime":"2012-03-07T14:45+06:00", + "TaskStatus":"OK", + "PercentComplete": 100, + "Messages":[ + { + "MessageId":"Base.1.0.PropertyNotWriteable", + "RelatedProperties":[ + "SKU" + ], + "Message":"Property %1 is read only.", + "MessageArgs":[ + "SKU" + ], + "Severity":"Warning" + } + ] + } diff --git a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py index 399f809..3155a5c 100644 --- a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py +++ b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py 
@@ -58,7 +58,10 @@ class TaskMonitorTestCase(base.TestCase): self.conn, '/Task/545', field_data=field_data) - self.assertIsNone(task_monitor.task) + self.assertEqual(http_client.ACCEPTED, + task_monitor._field_data._status_code) + self.assertEqual( + 0, task_monitor._field_data._headers['Content-Length']) def test_init_accepted_content(self): self.assertIsNotNone(self.task_monitor._task) diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index daf8a02..90100e7 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -58,7 +58,7 @@ class UpdateServiceTestCase(base.TestCase): self.upd_serv._parse_attributes, self.json_doc) def test_simple_update(self): - with open('sushy/tests/unit/json_samples/task.json') as f: + with open('sushy/tests/unit/json_samples/task_monitor.json') as f: task_json = json.load(f) task_submitted = mock.Mock() task_submitted.json.return_value = task_json @@ -82,12 +82,37 @@ class UpdateServiceTestCase(base.TestCase): 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], 'TransferProtocol': 'HTTPS'}) - def test_simple_update_missing_location(self): + def test_simple_update_task_uri(self): with open('sushy/tests/unit/json_samples/task.json') as f: task_json = json.load(f) task_submitted = mock.Mock() task_submitted.json.return_value = task_json task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Taskmonitor/545'} + self.conn.post.return_value = task_submitted + + tm = self.upd_serv.simple_update( + image_uri='local.server/update.exe', + targets=['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/redfish/v1/TaskService/Tasks/545', tm.task_monitor) + + 
self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_missing_location(self): + with open('sushy/tests/unit/json_samples/task_monitor.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 task_submitted.headers = {'Allow': 'GET'} self.conn.post.return_value = task_submitted -- GitLab From 5425c005e27bcaa6a9ba0be5e03aa0b4946656e7 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Wed, 13 Jan 2021 13:03:40 +0100 Subject: [PATCH 281/303] Secure boot support: enabling/disabling and resetting keys Story: #2008270 Task: #41582 Change-Id: I72f8103fe9bb4613ef8c53a3ff08a710dc96c12d --- .../notes/secure-boot-76c5b80371ea85d1.yaml | 5 + sushy/exceptions.py | 4 +- sushy/resources/system/constants.py | 17 +++ sushy/resources/system/mappings.py | 24 ++++ sushy/resources/system/secure_boot.py | 123 ++++++++++++++++++ sushy/resources/system/system.py | 16 +++ .../tests/unit/json_samples/secure_boot.json | 22 ++++ .../unit/resources/system/test_secure_boot.py | 96 ++++++++++++++ .../unit/resources/system/test_system.py | 10 ++ 9 files changed, 316 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml create mode 100644 sushy/resources/system/secure_boot.py create mode 100644 sushy/tests/unit/json_samples/secure_boot.json create mode 100644 sushy/tests/unit/resources/system/test_secure_boot.py diff --git a/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml b/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml new file mode 100644 index 0000000..2ec653d --- /dev/null +++ b/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for UEFI secure boot: reading the current 
status, enabling or + disabling secure boot, resetting keys. diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 209349d..9acc559 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -25,7 +25,9 @@ class SushyError(Exception): message = None - def __init__(self, **kwargs): + def __init__(self, message=None, **kwargs): + if message is not None: + self.message = message if self.message and kwargs: self.message = self.message % kwargs diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 26c3866..4bb5251 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -173,3 +173,20 @@ SYSTEM_TYPE_VIRTUALLY_PARTITIONED = "VirtuallyPartitioned" """A virtual or software-based partition of a computer system""" SYSTEM_TYPE_COMPOSED = "Composed" """A computer system created by binding resource blocks together""" + +# Secure boot constants + +SECURE_BOOT_ENABLED = "Enabled" +"""UEFI secure boot is enabled.""" + +SECURE_BOOT_DISABLED = "Disabled" +"""UEFI secure boot is disabled.""" + +SECURE_BOOT_MODE_SETUP = "SetupMode" +SECURE_BOOT_MODE_USER = "UserMode" +SECURE_BOOT_MODE_AUDIT = "AuditMode" +SECURE_BOOT_MODE_DEPLOYED = "DeployedMode" + +SECURE_BOOT_RESET_KEYS_TO_DEFAULT = "ResetAllKeysToDefault" +SECURE_BOOT_RESET_KEYS_DELETE_ALL = "DeleteAllKeys" +SECURE_BOOT_RESET_KEYS_DELETE_PK = "DeletePK" diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index fa59a3f..3c35414 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -113,3 +113,27 @@ SYSTEM_TYPE_VALUE_MAP = { SYSTEM_TYPE_VALUE_MAP_REV = ( utils.revert_dictionary(SYSTEM_TYPE_VALUE_MAP)) + +SECURE_BOOT_STATE = { + 'Enabled': sys_cons.SECURE_BOOT_ENABLED, + 'Disabled': sys_cons.SECURE_BOOT_DISABLED, +} + +SECURE_BOOT_STATE_REV = utils.revert_dictionary(SECURE_BOOT_STATE) + +SECURE_BOOT_MODE = { + 'SetupMode': sys_cons.SECURE_BOOT_MODE_SETUP, + 'UserMode': 
sys_cons.SECURE_BOOT_MODE_USER, + 'AuditMode': sys_cons.SECURE_BOOT_MODE_AUDIT, + 'DeployedMode': sys_cons.SECURE_BOOT_MODE_DEPLOYED, +} + +SECURE_BOOT_MODE_REV = utils.revert_dictionary(SECURE_BOOT_MODE) + +SECURE_BOOT_RESET_KEYS = { + 'ResetAllKeysToDefault': sys_cons.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + 'DeleteAllKeys': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + 'DeletePK': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_PK, +} + +SECURE_BOOT_RESET_KEYS_REV = utils.revert_dictionary(SECURE_BOOT_RESET_KEYS) diff --git a/sushy/resources/system/secure_boot.py b/sushy/resources/system/secure_boot.py new file mode 100644 index 0000000..ab07762 --- /dev/null +++ b/sushy/resources/system/secure_boot.py @@ -0,0 +1,123 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/SecureBoot.v1_1_0.json
+
+import logging
+
+from sushy import exceptions
+from sushy.resources import base
+from sushy.resources import common
+from sushy.resources.system import mappings
+
+LOG = logging.getLogger(__name__)
+
+
+class ResetKeysActionField(common.ActionField):
+
+    allowed_values = base.Field('ResetKeysType@Redfish.AllowableValues',
+                                adapter=list)
+
+
+class ActionsField(base.CompositeField):
+
+    reset_keys = ResetKeysActionField('#SecureBoot.ResetKeys')
+    """Action that resets the UEFI Secure Boot keys."""
+
+
+class SecureBoot(base.ResourceBase):
+
+    identity = base.Field('Id', required=True)
+    """The SecureBoot resource identity string"""
+
+    name = base.Field('Name')
+    """The name of the resource"""
+
+    description = base.Field('Description')
+    """Human-readable description of the SecureBoot resource"""
+
+    current_boot = base.MappedField('SecureBootCurrentBoot',
+                                    mappings.SECURE_BOOT_STATE)
+    """The UEFI Secure Boot state during the current boot cycle."""
+
+    enabled = base.Field('SecureBootEnable')
+    """Whether the UEFI Secure Boot takes effect on next boot.
+
+    This property can be enabled in UEFI boot mode only.
+    """
+
+    mode = base.MappedField('SecureBootMode', mappings.SECURE_BOOT_MODE)
+    """The current UEFI Secure Boot Mode."""
+
+    # TODO(dtantsur): SecureBootDatabases
+
+    _actions = ActionsField('Actions')
+
+    def __init__(self, connector, path, redfish_version=None, registries=None):
+        """A class representing secure boot settings.
+
+        :param connector: A Connector instance
+        :param path: Sub-URI path to the SecureBoot resource
+        :param registries: Dict of message registries to be used when
+            parsing messages of attribute update status
+        """
+        super().__init__(connector, path, redfish_version, registries)
+
+    def _get_reset_action_element(self):
+        reset_action = self._actions.reset_keys
+        if not reset_action:
+            raise exceptions.MissingActionError(action='#SecureBoot.ResetKeys',
+                                                resource=self._path)
+        return reset_action
+
+    def get_allowed_reset_keys_values(self):
+        """Get the allowed values for resetting the keys.
+
+        :returns: A set with the allowed values.
+        """
+        reset_action = self._get_reset_action_element()
+
+        if not reset_action.allowed_values:
+            LOG.warning('Could not figure out the allowed values for the '
+                        'reset keys action for %s', self.identity)
+            return set(mappings.SECURE_BOOT_RESET_KEYS_REV)
+
+        return set([mappings.SECURE_BOOT_RESET_KEYS[v] for v in
+                    set(mappings.SECURE_BOOT_RESET_KEYS).
+                    intersection(reset_action.allowed_values)])
+
+    def reset_keys(self, reset_type):
+        """Reset secure boot keys.
+
+        :param reset_type: Reset type, one of `SECURE_BOOT_RESET_KEYS_*`
+            constants.
+        """
+        valid_resets = self.get_allowed_reset_keys_values()
+        if reset_type not in valid_resets:
+            raise exceptions.InvalidParameterValueError(
+                parameter='reset_type', value=reset_type,
+                valid_values=valid_resets)
+
+        target_uri = self._get_reset_action_element().target_uri
+        self._conn.post(target_uri, data={'ResetKeysType': reset_type})
+
+    def set_enabled(self, enabled):
+        """Enable/disable secure boot.
+
+        :param enabled: True, if secure boot is enabled for next boot.
+ """ + if not isinstance(enabled, bool): + raise exceptions.InvalidParameterValueError( + "Expected a boolean for 'enabled', got %r" % enabled) + + self._conn.patch(self.path, data={'SecureBootEnable': enabled}) diff --git a/sushy/resources/system/system.py b/sushy/resources/system/system.py index 5bb30ac..b7d3a80 100644 --- a/sushy/resources/system/system.py +++ b/sushy/resources/system/system.py @@ -31,6 +31,7 @@ from sushy.resources.system import constants as sys_cons from sushy.resources.system import ethernet_interface from sushy.resources.system import mappings as sys_maps from sushy.resources.system import processor +from sushy.resources.system import secure_boot from sushy.resources.system import simple_storage as sys_simple_storage from sushy.resources.system.storage import storage as sys_storage from sushy import utils @@ -436,6 +437,21 @@ class System(base.ResourceBase): registries=self.registries) for path in paths] + @property + @utils.cache_it + def secure_boot(self): + """Property to reference `SecureBoot` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. 
+ """ + return secure_boot.SecureBoot( + self._conn, + utils.get_sub_resource_path_by(self, 'SecureBoot'), + redfish_version=self.redfish_version, + registries=self.registries) + class SystemCollection(base.ResourceCollectionBase): diff --git a/sushy/tests/unit/json_samples/secure_boot.json b/sushy/tests/unit/json_samples/secure_boot.json new file mode 100644 index 0000000..ac4b495 --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot.json @@ -0,0 +1,22 @@ +{ + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot", + "@odata.type": "#SecureBoot.v1_1_0.SecureBoot", + "Id": "SecureBoot", + "Name": "UEFI Secure Boot", + "Actions": { + "#SecureBoot.ResetKeys": { + "target": "/redfish/v1/Systems/437XR1138R2/SecureBoot/Actions/SecureBoot.ResetKeys", + "ResetKeysType@Redfish.AllowableValues": [ + "ResetAllKeysToDefault", + "DeleteAllKeys", + "DeletePK" + ] + } + } , + "SecureBootEnable": false, + "SecureBootCurrentBoot": "Disabled", + "SecureBootMode": "DeployedMode", + "SecureBootDatabases": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases" + } +} diff --git a/sushy/tests/unit/resources/system/test_secure_boot.py b/sushy/tests/unit/resources/system/test_secure_boot.py new file mode 100644 index 0000000..2e61592 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_secure_boot.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.system import constants +from sushy.resources.system import secure_boot +from sushy.tests.unit import base + + +class SecureBootTestCase(base.TestCase): + + def setUp(self): + super(SecureBootTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/secure_boot.json') as f: + self.secure_boot_json = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + self.secure_boot_json + ] + self.secure_boot = secure_boot.SecureBoot( + self.conn, '/redfish/v1/Systems/437XR1138R2/SecureBoot', + registries={}, redfish_version='1.1.0') + + def test__parse_attributes(self): + self.secure_boot._parse_attributes(self.secure_boot_json) + self.assertEqual('1.1.0', self.secure_boot.redfish_version) + self.assertEqual('SecureBoot', self.secure_boot.identity) + self.assertEqual('UEFI Secure Boot', self.secure_boot.name) + self.assertIsNone(self.secure_boot.description) + self.assertIs(False, self.secure_boot.enabled) + self.assertEqual(constants.SECURE_BOOT_DISABLED, + self.secure_boot.current_boot) + self.assertEqual(constants.SECURE_BOOT_MODE_DEPLOYED, + self.secure_boot.mode) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values(self, mock_log): + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + constants.SECURE_BOOT_RESET_KEYS_DELETE_PK}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_no_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = None + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + constants.SECURE_BOOT_RESET_KEYS_DELETE_PK}, + self.secure_boot.get_allowed_reset_keys_values()) + 
self.assertTrue(mock_log.called) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_custom_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = [ + 'ResetAllKeysToDefault', + 'IamNotRedfishCompatible', + ] + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + def test_set_enabled(self): + self.secure_boot.set_enabled(True) + self.conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot', + data={'SecureBootEnable': True}) + + def test_set_enabled_wrong_type(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.set_enabled, 'banana') + + def test_reset_keys(self): + self.secure_boot.reset_keys( + constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT) + self.conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/Actions/SecureBoot.ResetKeys', + data={'ResetKeysType': 'ResetAllKeysToDefault'}) + + def test_reset_keys_wrong_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.reset_keys, 'DeleteEverything') diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index acb21e8..d4bea2c 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -27,6 +27,7 @@ from sushy.resources.oem import fake from sushy.resources.system import bios from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor +from sushy.resources.system import secure_boot from sushy.resources.system import simple_storage from sushy.resources.system import system from sushy.tests.unit import base @@ -520,6 +521,15 @@ class SystemTestCase(base.TestCase): self.assertEqual('BIOS Configuration Current Settings', self.sys_inst.bios.name) + def 
test_secure_boot(self): + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/secure_boot.json') as f: + self.conn.get.return_value.json.side_effect = [json.load(f)] + + self.assertIsInstance(self.sys_inst.secure_boot, + secure_boot.SecureBoot) + self.assertEqual('UEFI Secure Boot', self.sys_inst.secure_boot.name) + def test_simple_storage_for_missing_attr(self): self.sys_inst.json.pop('SimpleStorage') with self.assertRaisesRegex( -- GitLab From 9432172588772b2932e35dc5a11a9e73b542a83c Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 21 Jan 2021 15:44:08 +0100 Subject: [PATCH 282/303] Add release version to release notes Change-Id: Ie006b57eddefb36c776bceab4708d46ac4e81e38 --- releasenotes/source/ussuri.rst | 6 +++--- releasenotes/source/victoria.rst | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/releasenotes/source/ussuri.rst b/releasenotes/source/ussuri.rst index e21e50e..42409c6 100644 --- a/releasenotes/source/ussuri.rst +++ b/releasenotes/source/ussuri.rst @@ -1,6 +1,6 @@ -=========================== -Ussuri Series Release Notes -=========================== +=========================================== +Ussuri Series (3.0.0 - 3.2.x) Release Notes +=========================================== .. release-notes:: :branch: stable/ussuri diff --git a/releasenotes/source/victoria.rst b/releasenotes/source/victoria.rst index 4efc7b6..604382e 100644 --- a/releasenotes/source/victoria.rst +++ b/releasenotes/source/victoria.rst @@ -1,6 +1,6 @@ -============================= -Victoria Series Release Notes -============================= +============================================= +Victoria Series (3.3.0 - 3.4.x) Release Notes +============================================= .. 
release-notes:: :branch: stable/victoria -- GitLab From 9758d39969d73b812bb4aa702da1373fe0d40cce Mon Sep 17 00:00:00 2001 From: Riccardo Pittau Date: Fri, 22 Jan 2021 09:30:29 +0100 Subject: [PATCH 283/303] Update minversion of tox The minimum version since when tox accepts having inline comments in deps is 3.9.0 https://tox.readthedocs.io/en/latest/changelog.html#v3-9-0-2019-04-17 Change-Id: I6e0b2bb4be15c67fa64a5a415e7814be9f8243f2 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f868a55..56ba2fa 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -minversion = 3.2.1 +minversion = 3.9.0 envlist = py3,pep8 skipsdist = True ignore_basepython_conflict=true -- GitLab From 7ec04224b4e80225ff3db51d860210e8e160f02f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Fri, 22 Jan 2021 09:53:05 -0500 Subject: [PATCH 284/303] Fix ExtendedInfo error handling for non-list item Change-Id: I2d082762fcf59d7b9bc8adc7c5159520ac628043 --- ...nded-info-error-handling-73fecb6bf5c852ff.yaml | 7 +++++++ sushy/exceptions.py | 12 ++++++------ .../unit/json_samples/error_single_ext_info.json | 13 +++++++++++++ sushy/tests/unit/test_connector.py | 15 +++++++++++++++ 4 files changed, 41 insertions(+), 6 deletions(-) create mode 100644 releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml create mode 100644 sushy/tests/unit/json_samples/error_single_ext_info.json diff --git a/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml b/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml new file mode 100644 index 0000000..8ee28a4 --- /dev/null +++ b/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Fixes ``AttributeError: 'str' object has no attribute 'get'`` during error + handling. This occurs when BMC does not return a list of messages inside + ``@Message.ExtendedInfo``, but a single item. 
This has been observed with + iDRAC. diff --git a/sushy/exceptions.py b/sushy/exceptions.py index 9acc559..9ddf27e 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -112,10 +112,9 @@ class HTTPError(SushyError): self.code = self.body.get('code', 'Base.1.0.GeneralError') self.detail = self.body.get('message') ext_info = self.body.get('@Message.ExtendedInfo', [{}]) - index = self._get_most_severe_msg_index(ext_info) - self.detail = ext_info[index].get('Message', self.detail) + message = self._get_most_severe_msg(ext_info) + self.detail = message or self.detail error = '%s: %s' % (self.code, self.detail or 'unknown error.') - kwargs = {'method': method, 'url': url, 'code': self.status_code, 'error': error, 'ext_info': ext_info} LOG.debug('HTTP response for %(method)s %(url)s: ' @@ -124,13 +123,14 @@ class HTTPError(SushyError): super(HTTPError, self).__init__(**kwargs) @staticmethod - def _get_most_severe_msg_index(extended_info): + def _get_most_severe_msg(extended_info): + if not isinstance(extended_info, list): + return extended_info.get('Message', None) if len(extended_info) > 0: for sev in ['Critical', 'Warning']: for i, m in enumerate(extended_info): if m.get('Severity') == sev: - return i - return 0 + return m.get('Message') class BadRequestError(HTTPError): diff --git a/sushy/tests/unit/json_samples/error_single_ext_info.json b/sushy/tests/unit/json_samples/error_single_ext_info.json new file mode 100644 index 0000000..d56a0a6 --- /dev/null +++ b/sushy/tests/unit/json_samples/error_single_ext_info.json @@ -0,0 +1,13 @@ +{ + "error": { + "code": "Base.1.5.GeneralError", + "message": "A general error has occurred. See ExtendedInfo for more information.", + "@Message.ExtendedInfo": { + "@odata.type": "#Message.v1_0_0.Message", + "MessageId": "Base.1.5.GeneralError", + "Message": "A general error has occurred. See Resolution for information on how to resolve the error.", + "Resolution": "Redfish request contains unsupported media type. 
Correct the request body and resubmit.", + "Severity": "Warning" + } + } +} \ No newline at end of file diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 798a20e..b77ca2a 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -317,6 +317,21 @@ class ConnectorOpTestCase(base.TestCase): self.assertIsNotNone(exc.body) self.assertIn('body submitted was malformed JSON', exc.detail) + def test_known_http_error_nonlist_ext_info(self): + self.request.return_value.status_code =\ + http_client.UNSUPPORTED_MEDIA_TYPE + with open('sushy/tests/unit/json_samples/' + 'error_single_ext_info.json') as f: + self.request.return_value.json.return_value = json.load(f) + + with self.assertRaisesRegex(exceptions.HTTPError, + 'See Resolution for information') as cm: + self.conn._op('POST', 'http://foo.bar') + exc = cm.exception + self.assertEqual(http_client.UNSUPPORTED_MEDIA_TYPE, exc.status_code) + self.assertIsNotNone(exc.body) + self.assertIn('See Resolution for information', exc.detail) + def test_not_found_error(self): self.request.return_value.status_code = http_client.NOT_FOUND self.request.return_value.json.side_effect = ValueError('no json') -- GitLab From 0e912555bc6cc306ec2f51a09f0fc5158986b2aa Mon Sep 17 00:00:00 2001 From: Bill Dodd Date: Sun, 31 Jan 2021 18:03:05 -0600 Subject: [PATCH 285/303] Fix TaskMonitor constructor calls in volume.py In the volume.py module, the first parameter passed to the TaskMonitor constructor was incorrect. The parameter passed was the resource object (self), but it should have been the connector object (self._conn). This affected the create_volume() and delete_volume() methods. The constructor calls were updated to provide the correct connector parameter. And the unit tests were updated to test that the TaskMonitor objects are successfully created. 
Change-Id: I10e564185b4cd9faa24739766536d39646f8a7c1 Story: 2003514 Task: 41720 --- ...lls-in-volume-module-0f8a747acd0cfe3f.yaml | 8 +++++ sushy/resources/system/storage/volume.py | 4 +-- .../resources/system/storage/test_volume.py | 33 +++++++++++-------- 3 files changed, 30 insertions(+), 15 deletions(-) create mode 100644 releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml diff --git a/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml b/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml new file mode 100644 index 0000000..765dcbb --- /dev/null +++ b/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Fixes issues in the ``volume`` module where the first parameter passed to + the ``TaskMonitor`` constructor was incorrect. The parameter passed was + the resource object (self), but it should have been the connector object + (self._conn). This affected the ``create_volume()`` and + ``delete_volume()`` methods. 
diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index e7f9d57..8d075d4 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -153,7 +153,7 @@ class Volume(base.ResourceBase): r = self._conn.delete(self._path, data=payload, blocking=blocking, timeout=timeout) if r.status_code == 202: - return (TaskMonitor(self, r.headers.get('location')) + return (TaskMonitor(self._conn, r.headers.get('location')) .set_retry_after(r.headers.get('retry-after'))) @@ -221,5 +221,5 @@ class VolumeCollection(base.ResourceCollectionBase): self.refresh() return self.get_member(location) elif r.status_code == 202: - return (TaskMonitor(self, location) + return (TaskMonitor(self._conn, location) .set_retry_after(r.headers.get('retry-after'))) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 9fdc093..3245317 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -97,18 +97,28 @@ class VolumeTestCase(base.TestCase): def test_delete_volume_immediate(self): payload = {} - self.stor_volume.delete_volume( + self.conn.delete.return_value.status_code = 200 + resource = self.stor_volume.delete_volume( payload=payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) self.stor_volume._conn.delete.assert_called_once_with( self.stor_volume._path, data=payload, blocking=True, timeout=500) + self.assertIsNone(resource) def test_delete_volume_on_reset(self): payload = {} - self.stor_volume.delete_volume( + self.conn.delete.return_value.status_code = 202 + self.conn.delete.return_value.headers = { + 'location': '/redfish/v1/taskmon/4608f7e6', + 'retry-after': '120' + } + task_mon = self.stor_volume.delete_volume( payload=payload, apply_time=res_cons.APPLY_TIME_ON_RESET, timeout=250) self.stor_volume._conn.delete.assert_called_once_with( 
self.stor_volume._path, data=payload, blocking=False, timeout=250) + self.assertIsNotNone(task_mon) + self.assertEqual(task_mon.resource_name, 'task_monitor') + self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') class VolumeCollectionTestCase(base.TestCase): @@ -250,19 +260,16 @@ class VolumeCollectionTestCase(base.TestCase): expected_payload['@Redfish.OperationApplyTime'] = 'OnReset' with open('sushy/tests/unit/json_samples/volume4.json') as f: self.conn.get.return_value.json.return_value = json.load(f) - self.conn.post.return_value.status_code = 201 - self.conn.post.return_value.headers.return_value = { - 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'retry-after': '120' } - new_vol = self.stor_vol_col.create_volume( + task_mon = self.stor_vol_col.create_volume( payload, apply_time=res_cons.APPLY_TIME_ON_RESET) self.stor_vol_col._conn.post.assert_called_once_with( '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', data=expected_payload, blocking=False, timeout=500) - self.stor_vol_col.refresh.assert_called_once() - self.assertIsNotNone(new_vol) - self.assertEqual('4', new_vol.identity) - self.assertEqual('My Volume 4', new_vol.name) - self.assertEqual(107374182400, new_vol.capacity_bytes) - self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) - self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) + self.assertIsNotNone(task_mon) + self.assertEqual(task_mon.resource_name, 'task_monitor') + self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') -- GitLab From 4abea1879fb2296b42590b2235a219e5f0a81d76 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Mon, 1 Feb 2021 14:57:28 +0100 Subject: [PATCH 286/303] Initial support for secure boot databases Change-Id: Id35d5b3c1d09718d05f6b2e470412d76d60b5198 --- ...secure-boot-database-7fae673722d7cf4f.yaml | 5 + 
sushy/resources/system/constants.py | 14 ++ sushy/resources/system/mappings.py | 26 ++++ sushy/resources/system/secure_boot.py | 21 +++ .../resources/system/secure_boot_database.py | 112 ++++++++++++++ .../json_samples/secure_boot_database.json | 26 ++++ .../secure_boot_database_collection.json | 34 +++++ .../unit/resources/system/test_secure_boot.py | 22 ++- .../system/test_secure_boot_database.py | 138 ++++++++++++++++++ 9 files changed, 395 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml create mode 100644 sushy/resources/system/secure_boot_database.py create mode 100644 sushy/tests/unit/json_samples/secure_boot_database.json create mode 100644 sushy/tests/unit/json_samples/secure_boot_database_collection.json create mode 100644 sushy/tests/unit/resources/system/test_secure_boot_database.py diff --git a/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml b/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml new file mode 100644 index 0000000..6b60f94 --- /dev/null +++ b/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for fetching and resetting individual UEFI secure boot + databases. 
diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index 4bb5251..92a8be0 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -190,3 +190,17 @@ SECURE_BOOT_MODE_DEPLOYED = "DeployedMode" SECURE_BOOT_RESET_KEYS_TO_DEFAULT = "ResetAllKeysToDefault" SECURE_BOOT_RESET_KEYS_DELETE_ALL = "DeleteAllKeys" SECURE_BOOT_RESET_KEYS_DELETE_PK = "DeletePK" + +SECURE_BOOT_PLATFORM_KEY = "PK" +SECURE_BOOT_KEY_EXCHANGE_KEYS = "KEK" +SECURE_BOOT_ALLOWED_KEYS_DATABASE = "db" +SECURE_BOOT_DENIED_KEYS_DATABASE = "dbx" +SECURE_BOOT_RECOVERY_KEYS_DATABASE = "dbr" +SECURE_BOOT_TIMESTAMP_DATABASE = "dbt" + +SECURE_BOOT_DEFAULT_PLATFORM_KEY = "PKDefault" +SECURE_BOOT_DEFAULT_KEY_EXCHANGE_KEYS = "KEKDefault" +SECURE_BOOT_DEFAULT_ALLOWED_KEYS_DATABASE = "dbDefault" +SECURE_BOOT_DEFAULT_DENIED_KEYS_DATABASE = "dbxDefault" +SECURE_BOOT_DEFAULT_RECOVERY_KEYS_DATABASE = "dbrDefault" +SECURE_BOOT_DEFAULT_TIMESTAMP_DATABASE = "dbtDefault" diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index 3c35414..5e77b2b 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -137,3 +137,29 @@ SECURE_BOOT_RESET_KEYS = { } SECURE_BOOT_RESET_KEYS_REV = utils.revert_dictionary(SECURE_BOOT_RESET_KEYS) + +SECURE_BOOT_DATABASE_TYPE = { + 'PK': sys_cons.SECURE_BOOT_PLATFORM_KEY, + 'KEK': sys_cons.SECURE_BOOT_KEY_EXCHANGE_KEYS, + 'db': sys_cons.SECURE_BOOT_ALLOWED_KEYS_DATABASE, + 'dbx': sys_cons.SECURE_BOOT_DENIED_KEYS_DATABASE, + 'dbr': sys_cons.SECURE_BOOT_RECOVERY_KEYS_DATABASE, + 'dbt': sys_cons.SECURE_BOOT_TIMESTAMP_DATABASE, + 'PKDefault': sys_cons.SECURE_BOOT_DEFAULT_PLATFORM_KEY, + 'KEKDefault': sys_cons.SECURE_BOOT_DEFAULT_KEY_EXCHANGE_KEYS, + 'dbDefault': sys_cons.SECURE_BOOT_DEFAULT_ALLOWED_KEYS_DATABASE, + 'dbxDefault': sys_cons.SECURE_BOOT_DEFAULT_DENIED_KEYS_DATABASE, + 'dbrDefault': sys_cons.SECURE_BOOT_DEFAULT_RECOVERY_KEYS_DATABASE, + 'dbtDefault': 
sys_cons.SECURE_BOOT_DEFAULT_TIMESTAMP_DATABASE, +} + +SECURE_BOOT_DATABASE_TYPE_REV = utils.revert_dictionary( + SECURE_BOOT_DATABASE_TYPE) + +SECURE_BOOT_DATABASE_RESET_KEYS = { + 'ResetAllKeysToDefault': sys_cons.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + 'DeleteAllKeys': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_ALL, +} + +SECURE_BOOT_DATABASE_RESET_KEYS_REV = utils.revert_dictionary( + SECURE_BOOT_DATABASE_RESET_KEYS) diff --git a/sushy/resources/system/secure_boot.py b/sushy/resources/system/secure_boot.py index ab07762..aa6136f 100644 --- a/sushy/resources/system/secure_boot.py +++ b/sushy/resources/system/secure_boot.py @@ -19,6 +19,8 @@ from sushy import exceptions from sushy.resources import base from sushy.resources import common from sushy.resources.system import mappings +from sushy.resources.system import secure_boot_database +from sushy import utils LOG = logging.getLogger(__name__) @@ -73,6 +75,25 @@ class SecureBoot(base.ResourceBase): """ super().__init__(connector, path, redfish_version, registries) + @property + @utils.cache_it + def databases(self): + """A collection of secure boot databases. + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + + :raises: MissingAttributeError if 'SecureBootDatabases/@odata.id' field + is missing. 
+ :returns: `SecureBootDatabaseCollection` instance + """ + return secure_boot_database.SecureBootDatabaseCollection( + self._conn, utils.get_sub_resource_path_by( + self, "SecureBootDatabases"), + redfish_version=self.redfish_version, + registries=self.registries) + def _get_reset_action_element(self): reset_action = self._actions.reset_keys if not reset_action: diff --git a/sushy/resources/system/secure_boot_database.py b/sushy/resources/system/secure_boot_database.py new file mode 100644 index 0000000..aa4e9b1 --- /dev/null +++ b/sushy/resources/system/secure_boot_database.py @@ -0,0 +1,112 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + import logging + + from sushy import exceptions + from sushy.resources import base + from sushy.resources import common + from sushy.resources.system import mappings + + LOG = logging.getLogger(__name__) + + + class ResetKeysActionField(common.ActionField): + + allowed_values = base.Field('ResetKeysType@Redfish.AllowableValues', + adapter=list) + + + class ActionsField(base.CompositeField): + + reset_keys = ResetKeysActionField('#SecureBootDatabase.ResetKeys') + """Action that resets the UEFI Secure Boot keys.""" + + + class SecureBootDatabase(base.ResourceBase): + + # TODO(dtantsur): certificates + + database_id = base.MappedField('DatabaseId', + mappings.SECURE_BOOT_DATABASE_TYPE) + """Standard UEFI database type.""" + + description = base.Field('Description') + """The system description""" + + identity = base.Field('Id', required=True) + """The secure boot database identity string""" + + name = base.Field('Name') + """The secure boot database name""" + + # TODO(dtantsur): signatures + + _actions = ActionsField('Actions') + + def _get_reset_action_element(self): + reset_action = self._actions.reset_keys + if not reset_action: + raise exceptions.MissingActionError( + action='#SecureBootDatabase.ResetKeys', resource=self._path) + return reset_action + + def get_allowed_reset_keys_values(self): + """Get the allowed values for resetting the keys. + + :returns: A set with the allowed values. + """ + reset_action = self._get_reset_action_element() + + if not reset_action.allowed_values: + LOG.warning('Could not figure out the allowed values for the ' + 'reset keys action for %s', self.identity) + return set(mappings.SECURE_BOOT_DATABASE_RESET_KEYS_REV) + + return set([mappings.SECURE_BOOT_DATABASE_RESET_KEYS[v] for v in + set(mappings.SECURE_BOOT_DATABASE_RESET_KEYS). + intersection(reset_action.allowed_values)]) + + def reset_keys(self, reset_type): + """Reset secure boot keys. + + :param reset_type: Reset type, one of `SECURE_BOOT_RESET_KEYS_*` + constants. 
+ """ + valid_resets = self.get_allowed_reset_keys_values() + if reset_type not in valid_resets: + raise exceptions.InvalidParameterValueError( + parameter='reset_type', value=reset_type, + valid_values=valid_resets) + + target_uri = self._get_reset_action_element().target_uri + self._conn.post(target_uri, data={'ResetKeysType': reset_type}) + + +class SecureBootDatabaseCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return SecureBootDatabase + + def __init__(self, connector, path, redfish_version=None, registries=None): + """A class representing a ComputerSystemCollection + + :param connector: A Connector instance + :param path: The canonical path to the System collection resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(SecureBootDatabaseCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/tests/unit/json_samples/secure_boot_database.json b/sushy/tests/unit/json_samples/secure_boot_database.json new file mode 100644 index 0000000..53e7e94 --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot_database.json @@ -0,0 +1,26 @@ +{ + "@odata.type": "#SecureBootDatabase.v1_0_0.SecureBootDatabase", + "Id": "db", + "Name": "db - Authorized Signature Database", + "Description": "UEFI db Secure Boot Database", + "DatabaseId": "db", + "Certificates": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Certificates/" + }, + "Signatures": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Signatures/" + }, + "Actions": { + "#SecureBootDatabase.ResetKeys": { + "target": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Actions/SecureBootDatabase.ResetKeys", + 
"ResetKeysType@Redfish.AllowableValues": [ + "ResetAllKeysToDefault", + "DeleteAllKeys" + ] + }, + "Oem": {} + }, + "Oem": {}, + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db", + "@Redfish.Copyright": "Copyright 2014-2021 DMTF. For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/secure_boot_database_collection.json b/sushy/tests/unit/json_samples/secure_boot_database_collection.json new file mode 100644 index 0000000..ed6b22b --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot_database_collection.json @@ -0,0 +1,34 @@ +{ + "@odata.type": "#SecureBootDatabaseCollection.SecureBootDatabaseCollection", + "Name": "UEFI SecureBoot Database Collection", + "Members@odata.count": 8, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/PK" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/KEK" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbx" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/PKDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/KEKDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbxDefault" + } + ], + "Oem": {}, + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases", + "@Redfish.Copyright": "Copyright 2014-2021 DMTF. For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/resources/system/test_secure_boot.py b/sushy/tests/unit/resources/system/test_secure_boot.py index 2e61592..b189e72 100644 --- a/sushy/tests/unit/resources/system/test_secure_boot.py +++ b/sushy/tests/unit/resources/system/test_secure_boot.py @@ -16,6 +16,7 @@ from unittest import mock from sushy import exceptions from sushy.resources.system import constants from sushy.resources.system import secure_boot +from sushy.resources.system import secure_boot_database from sushy.tests.unit import base @@ -27,9 +28,7 @@ class SecureBootTestCase(base.TestCase): with open('sushy/tests/unit/json_samples/secure_boot.json') as f: self.secure_boot_json = json.load(f) - self.conn.get.return_value.json.side_effect = [ - self.secure_boot_json - ] + self.conn.get.return_value.json.return_value = self.secure_boot_json self.secure_boot = secure_boot.SecureBoot( self.conn, '/redfish/v1/Systems/437XR1138R2/SecureBoot', registries={}, redfish_version='1.1.0') @@ -94,3 +93,20 @@ class SecureBootTestCase(base.TestCase): def test_reset_keys_wrong_value(self): self.assertRaises(exceptions.InvalidParameterValueError, self.secure_boot.reset_keys, 'DeleteEverything') + + def test_databases(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + result = self.secure_boot.databases + self.assertIsInstance( + result, secure_boot_database.SecureBootDatabaseCollection) + self.conn.get.return_value.json.assert_called_once_with() + + self.conn.get.return_value.json.reset_mock() + + self.assertIs(result, self.secure_boot.databases) + self.conn.get.return_value.json.assert_not_called() diff --git a/sushy/tests/unit/resources/system/test_secure_boot_database.py b/sushy/tests/unit/resources/system/test_secure_boot_database.py new file mode 100644 index 0000000..714c8ef --- /dev/null +++ 
b/sushy/tests/unit/resources/system/test_secure_boot_database.py @@ -0,0 +1,138 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.system import constants +from sushy.resources.system import secure_boot_database +from sushy.tests.unit import base + + +class SecureBootDatabaseTestCase(base.TestCase): + + def setUp(self): + super(SecureBootDatabaseTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database.json') as f: + self.secure_boot_json = json.load(f) + + self.conn.get.return_value.json.return_value = self.secure_boot_json + self.secure_boot = secure_boot_database.SecureBootDatabase( + self.conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db', + registries={}, redfish_version='1.0.0') + + def test__parse_attributes(self): + self.secure_boot._parse_attributes(self.secure_boot_json) + self.assertEqual('1.0.0', self.secure_boot.redfish_version) + self.assertEqual('db', self.secure_boot.identity) + self.assertEqual('db - Authorized Signature Database', + self.secure_boot.name) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values(self, mock_log): + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL}, + self.secure_boot.get_allowed_reset_keys_values()) + 
self.assertFalse(mock_log.called) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_no_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = None + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertTrue(mock_log.called) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_custom_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = [ + 'ResetAllKeysToDefault', + 'IamNotRedfishCompatible', + ] + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + def test_reset_keys(self): + self.secure_boot.reset_keys( + constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT) + self.conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db' + '/Actions/SecureBootDatabase.ResetKeys', + data={'ResetKeysType': 'ResetAllKeysToDefault'}) + + def test_reset_keys_wrong_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.reset_keys, 'DeleteEverything') + + +class SecureBootDatabaseCollectionTestCase(base.TestCase): + + def setUp(self): + super(SecureBootDatabaseCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.collection = secure_boot_database.SecureBootDatabaseCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/SecureBootDatabases', + redfish_version='1.0.0') + + def test__parse_attributes(self): + self.collection._parse_attributes(self.json_doc) + self.assertEqual('1.0.0', 
self.collection.redfish_version) + self.assertEqual('UEFI SecureBoot Database Collection', + self.collection.name) + self.assertEqual(tuple( + '/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/' + + member + for member in ('PK', 'KEK', 'db', 'dbx', + 'PKDefault', 'KEKDefault', + 'dbDefault', 'dbxDefault') + ), self.collection.members_identities) + + @mock.patch.object(secure_boot_database, 'SecureBootDatabase', + autospec=True) + def test_get_member(self, mock_secure_boot_database): + self.collection.get_member( + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db') + mock_secure_boot_database.assert_called_once_with( + self.collection._conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db', + self.collection.redfish_version, None) + + @mock.patch.object(secure_boot_database, 'SecureBootDatabase', + autospec=True) + def test_get_members(self, mock_secure_boot_database): + members = self.collection.get_members() + calls = [ + mock.call(self.collection._conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/%s' % member, + self.collection.redfish_version, None) + for member in ('PK', 'KEK', 'db', 'dbx', + 'PKDefault', 'KEKDefault', + 'dbDefault', 'dbxDefault') + ] + mock_secure_boot_database.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(8, len(members)) -- GitLab From 09cee5cdc354c355a01a623bde479a03199dc1e2 Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Mon, 1 Feb 2021 14:04:46 +0100 Subject: [PATCH 287/303] Automatically retry HTTP 5xx on GET requests I have experienced this pretty often when testing secure boot on Dell R430. 
Story: #2008563 Task: #41702 Change-Id: I615e806196740af208ff7f68f814929a2fdc0332 --- .../notes/get-retry-9ca311caf8a0b7bb.yaml | 4 ++++ sushy/connector.py | 19 ++++++++++++++++++- sushy/tests/unit/test_connector.py | 10 ++++++++-- 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml diff --git a/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml b/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml new file mode 100644 index 0000000..6d820a1 --- /dev/null +++ b/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Automatically retries internal server errors from GET requests. diff --git a/sushy/connector.py b/sushy/connector.py index b646590..003f4b9 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -26,6 +26,10 @@ from sushy import utils LOG = logging.getLogger(__name__) +_SERVER_SIDE_RETRIES = 5 +_SERVER_SIDE_RETRY_DELAY = 3 + + class Connector(object): def __init__( @@ -68,7 +72,8 @@ class Connector(object): self._session.close() def _op(self, method, path='', data=None, headers=None, blocking=False, - timeout=60, **extra_session_req_kwargs): + timeout=60, server_side_retries=_SERVER_SIDE_RETRIES, + **extra_session_req_kwargs): """Generic RESTful request handler. :param method: The HTTP method to be used, e.g: GET, POST, @@ -140,6 +145,18 @@ class Connector(object): LOG.error("Authentication error detected. 
Cannot proceed: " "%s", e.message) raise + except exceptions.ServerSideError as e: + if method.lower() != 'get' or server_side_retries <= 0: + raise + else: + LOG.warning('Got server side error %s in response to a ' + 'GET request, retrying after %d seconds', + e, _SERVER_SIDE_RETRY_DELAY) + time.sleep(_SERVER_SIDE_RETRY_DELAY) + return self._op(method, path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + server_side_retries=server_side_retries - 1, + **extra_session_req_kwargs) if blocking and response.status_code == 202: if not response.headers.get('location'): diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index b77ca2a..0e131ab 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -332,7 +332,8 @@ class ConnectorOpTestCase(base.TestCase): self.assertIsNotNone(exc.body) self.assertIn('See Resolution for information', exc.detail) - def test_not_found_error(self): + @mock.patch('time.sleep', autospec=True) + def test_not_found_error(self, mock_sleep): self.request.return_value.status_code = http_client.NOT_FOUND self.request.return_value.json.side_effect = ValueError('no json') @@ -341,8 +342,11 @@ class ConnectorOpTestCase(base.TestCase): self.conn._op('GET', 'http://foo.bar') exc = cm.exception self.assertEqual(http_client.NOT_FOUND, exc.status_code) + self.assertFalse(mock_sleep.called) + self.assertEqual(1, self.request.call_count) - def test_server_error(self): + @mock.patch('time.sleep', autospec=True) + def test_server_error(self, mock_sleep): self.request.return_value.status_code = ( http_client.INTERNAL_SERVER_ERROR) self.request.return_value.json.side_effect = ValueError('no json') @@ -352,6 +356,8 @@ class ConnectorOpTestCase(base.TestCase): self.conn._op('GET', 'http://foo.bar') exc = cm.exception self.assertEqual(http_client.INTERNAL_SERVER_ERROR, exc.status_code) + self.assertEqual(5, mock_sleep.call_count) + self.assertEqual(6, self.request.call_count) 
def test_access_error(self): self.conn._auth.can_refresh_session.return_value = False -- GitLab From d9004ec7d541d139f94bdb6e1be86f7492bba1fa Mon Sep 17 00:00:00 2001 From: Javier Pena Date: Tue, 9 Feb 2021 09:25:42 +0100 Subject: [PATCH 288/303] Fix deprecation on collections.MutableMapping Python 3.10 removes the deprecated aliases to collections abstract base clases [1]. [1] - https://bugs.python.org/issue37324 Change-Id: I16eabdce9bb6e9303df8fb3baa48e490f4afa966 --- sushy/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sushy/main.py b/sushy/main.py index eecb204..a985558 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -58,7 +58,7 @@ class ProtocolFeaturesSupportedField(base.CompositeField): """The select query parameter is supported""" -class LazyRegistries(collections.MutableMapping): +class LazyRegistries(collections.abc.MutableMapping): """Download registries on demand. Redfish message registries can be very large. On top of that, -- GitLab From 07ca7f458b38654946f5e2e8c59fad24c24d61c1 Mon Sep 17 00:00:00 2001 From: Bob Fournier Date: Tue, 9 Feb 2021 21:29:13 -0500 Subject: [PATCH 289/303] Don't log ERROR if GET of /redfish/v1/SessionService fails Similar to https://review.opendev.org/c/openstack/sushy/+/747960, an ERROR is logged when the GET of this Redfish endpoint fails prior to authentication, which causes concern for operators. Log a debug message instead. Change-Id: I70f7a6ecbbe72cfa7b0a46206de9cebbd5aef23e --- sushy/connector.py | 9 +++++++-- sushy/tests/unit/test_connector.py | 19 +++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/sushy/connector.py b/sushy/connector.py index 003f4b9..4a49054 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -142,8 +142,13 @@ class Connector(object): "%s", retry_exc.message) raise else: - LOG.error("Authentication error detected. 
Cannot proceed: " - "%s", e.message) + if method == 'GET' and url.endswith('SessionService'): + LOG.debug('HTTP GET of SessionService failed %s, ' + 'this is expected prior to authentication', + e.message) + else: + LOG.error("Authentication error detected. Cannot proceed: " + "%s", e.message) raise except exceptions.ServerSideError as e: if method.lower() != 'get' or server_side_retries <= 0: diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 0e131ab..7616719 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -371,6 +371,25 @@ class ConnectorOpTestCase(base.TestCase): exc = cm.exception self.assertEqual(http_client.FORBIDDEN, exc.status_code) + @mock.patch.object(connector.LOG, 'debug', autospec=True) + def test_access_error_service_session(self, mock_log): + self.conn._auth.can_refresh_session.return_value = False + + self.request.return_value.status_code = http_client.FORBIDDEN + self.request.return_value.json.side_effect = ValueError('no json') + + with self.assertRaisesRegex(exceptions.AccessError, + 'unknown error') as cm: + self.conn._op('GET', 'http://redfish/v1/SessionService') + exc = cm.exception + mock_log.assert_called_with( + 'HTTP GET of SessionService failed %s, ' + 'this is expected prior to authentication', 'HTTP GET ' + 'http://redfish/v1/SessionService returned code ' + 'HTTPStatus.FORBIDDEN. 
unknown error Extended information: ' + 'none') + self.assertEqual(http_client.FORBIDDEN, exc.status_code) + def test_blocking_no_location_header(self): self.request.return_value.status_code = http_client.ACCEPTED self.request.return_value.headers = {'retry-after': 5} -- GitLab From 7a9a2b6020d9b16a671339e6cefaebd0baff99ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Mon, 8 Feb 2021 14:26:08 -0500 Subject: [PATCH 290/303] Refactor TaskMonitor and update Volume methods Refactor TaskMonitor-s: - deprecate ``resources.task_monitor.TaskMonitor`` - move TaskMonitor from ``resources.taskservice.taskmonitor`` to ``taskmonitor`` - add necessary methods from deprecated TaskMonitor to the moved one Update Volume methods to use remaining TaskMonitor. Add static helper method to TaskMonitor to initiate TaskMonitor instance from response. Replace TaskMonitor in connector with the remaining TaskMonitor. Add method get_task_monitor to Sushy class. Deprecate UpdateService.get_task_monitor. Uppercase Location header in connector. While headers are case insensitive, in unit tests where headers are mocked they are case sensitive. 
Change-Id: Idd4158d87d27b6358c6d7a2a7183427a494ee384 --- ...onitor-update-volume-ba99380188395852.yaml | 38 +++ sushy/connector.py | 11 +- sushy/main.py | 13 + sushy/resources/system/storage/volume.py | 161 ++++++--- sushy/resources/task_monitor.py | 3 + sushy/resources/taskservice/taskmonitor.py | 155 ++------- .../resources/updateservice/updateservice.py | 25 +- sushy/taskmonitor.py | 248 ++++++++++++++ .../resources/system/storage/test_volume.py | 107 ++++++ .../resources/taskservice/test_taskmonitor.py | 9 + .../tests/unit/resources/test_task_monitor.py | 16 + .../updateservice/test_updateservice.py | 2 +- sushy/tests/unit/test_connector.py | 6 +- sushy/tests/unit/test_main.py | 8 + sushy/tests/unit/test_taskmonitor.py | 313 ++++++++++++++++++ 15 files changed, 921 insertions(+), 194 deletions(-) create mode 100644 releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml create mode 100644 sushy/taskmonitor.py create mode 100644 sushy/tests/unit/test_taskmonitor.py diff --git a/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml b/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml new file mode 100644 index 0000000..422c9e4 --- /dev/null +++ b/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml @@ -0,0 +1,38 @@ +--- +features: + - | + Adds new method ``get_task_monitor`` to retrieve TaskMonitor instance by + task monitor URI. +deprecations: + - | + Existing two ``TaskMonitor``-s are deprecated and replaced with one + ``taskmonitor.TaskMonitor``. 
+ + For ``resources.task_monitor.TaskMonitor`` users changes include: + + * ``in_progress`` is replaced with method ``check_is_processing`` + * ``location_header`` is replaced with method ``task_monitor_uri`` + * there is no replacement for ``set_retry_after``, + ``taskmonitor.TaskMonitor`` sets this internally from Retry-After + header + + For ``resources.taskservice.taskmonitor.TaskMonitor`` users changes + include: + + * ``check_is_processing``, ``sleep_for`` and static ``get_task_monitor`` + added. + * in ``__init__`` parameter ``field_data`` is deprecated, use ``response`` + * in ``__init__`` parameter ``task_monitor`` is renamed to + ``task_monitor_uri`` + * ``task_monitor`` is deprecated, use ``task_monitor_uri`` + * ``retry_after`` is deprecated, use ``sleep_for`` + + Methods ``create_volume``, ``delete_volume``, ``initialize_volume`` in + volume module are deprecated and replaced with ones named ``create``, + ``delete`` and ``initialize``. New methods for asynchronous operations + return ``taskmonitor.TaskMonitor`` instead of + deprecated ``resources.task_monitor.TaskMonitor``. + + Method ``resources.updateservice.UpdateService.get_task_monitor`` is + deprecated, use ``Sushy.get_task_monitor`` instead. 
+ diff --git a/sushy/connector.py b/sushy/connector.py index 003f4b9..6bedd3d 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -20,7 +20,7 @@ from urllib import parse as urlparse import requests from sushy import exceptions -from sushy.resources.task_monitor import TaskMonitor +from sushy.taskmonitor import TaskMonitor from sushy import utils LOG = logging.getLogger(__name__) @@ -159,21 +159,20 @@ class Connector(object): **extra_session_req_kwargs) if blocking and response.status_code == 202: - if not response.headers.get('location'): + if not response.headers.get('Location'): m = ('HTTP response for %(method)s request to %(url)s ' 'returned status 202, but no Location header' % {'method': method, 'url': url}) raise exceptions.ConnectionError(url=url, error=m) timeout_at = time.time() + timeout - mon = (TaskMonitor(self, response.headers.get('location')) - .set_retry_after(response.headers.get('retry-after'))) - while mon.in_progress: + mon = TaskMonitor.get_task_monitor(self, response, path) + while mon.check_is_processing: LOG.debug('Blocking for in-progress %(method)s call to ' '%(url)s; sleeping for %(sleep)s seconds', {'method': method, 'url': url, 'sleep': mon.sleep_for}) time.sleep(mon.sleep_for) - if time.time() >= timeout_at and mon.in_progress: + if time.time() >= timeout_at and mon.check_is_processing: m = ('Timeout waiting for blocking %(method)s ' 'request to %(url)s (timeout = %(timeout)s)' % {'method': method, 'url': url, diff --git a/sushy/main.py b/sushy/main.py index eecb204..baa69e4 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -33,6 +33,7 @@ from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system from sushy.resources.taskservice import taskservice from sushy.resources.updateservice import updateservice +from sushy import taskmonitor from sushy import utils LOG = logging.getLogger(__name__) @@ -507,3 +508,15 @@ class Sushy(base.ResourceBase): itself. 
""" return LazyRegistries(self) + + def get_task_monitor(self, task_monitor_uri): + """Used to retrieve a TaskMonitor by task monitor URI. + + :param task_monitor_uri: Task monitor URI + :returns: A task monitor. + """ + return taskmonitor.TaskMonitor( + self._conn, + task_monitor_uri, + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 8d075d4..2e307ed 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -22,7 +22,8 @@ from sushy.resources import constants as res_cons from sushy.resources import mappings as res_maps from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import mappings as store_maps -from sushy.resources.task_monitor import TaskMonitor +from sushy.resources.task_monitor import TaskMonitor as TaskMonitorDepr +from sushy.taskmonitor import TaskMonitor from sushy import utils LOG = logging.getLogger(__name__) @@ -90,10 +91,55 @@ class Volume(base.ResourceBase): set(store_maps.VOLUME_INIT_TYPE_MAP). intersection(action.allowed_values)]) + def _initialize(self, value=store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=None, timeout=500): + valid_values = self.get_allowed_initialize_volume_values() + if value not in valid_values: + raise exceptions.InvalidParameterValueError( + parameter='value', value=value, valid_values=valid_values) + value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value] + payload = {'InitializeType': value} + blocking = False + oat_prop = '@Redfish.OperationApplyTime' + if apply_time: + payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + if (payload and payload.get(oat_prop) == res_maps. 
+ APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): + blocking = True + target_uri = self._get_initialize_action_element().target_uri + r = self._conn.post(target_uri, data=payload, blocking=blocking, + timeout=timeout) + return r, target_uri + + def initialize(self, value=store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=None, timeout=500): + """Initialize the volume. + + :param value: The InitializeType value. + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. + :raises: InvalidParameterValueError, if the target value is not + allowed. + :raises: ConnectionError + :raises: HTTPError + :returns: TaskMonitor if async task or None if successful init + """ + r, target_uri = self._initialize(value, apply_time, timeout) + if r.status_code == 202: + return TaskMonitor.get_task_monitor( + self._conn, r, target_uri, self.redfish_version, + self.registries) + def initialize_volume(self, value=store_cons.VOLUME_INIT_TYPE_FAST, apply_time=None, timeout=500): """Initialize the volume. + Deprecated: Use initialize + :param value: The InitializeType value. :param apply_time: When to update the attributes. Optional. 
APPLY_TIME_IMMEDIATE - Immediate, @@ -107,29 +153,50 @@ class Volume(base.ResourceBase): :raises: HTTPError :returns: TaskMonitor if async task or None if successful init """ - valid_values = self.get_allowed_initialize_volume_values() - if value not in valid_values: - raise exceptions.InvalidParameterValueError( - parameter='value', value=value, valid_values=valid_values) - value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value] - payload = {'InitializeType': value} + r, _ = self._initialize(value, apply_time, timeout) + if r.status_code == 202: + return (TaskMonitorDepr(self, r.headers.get('location')) + .set_retry_after(r.headers.get('retry-after'))) + + def _delete(self, payload=None, apply_time=None, timeout=500): blocking = False oat_prop = '@Redfish.OperationApplyTime' if apply_time: + if payload is None: + payload = {} payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] if (payload and payload.get(oat_prop) == res_maps. APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): blocking = True - target_uri = self._get_initialize_action_element().target_uri - r = self._conn.post(target_uri, data=payload, blocking=blocking, - timeout=timeout) + r = self._conn.delete(self._path, data=payload, blocking=blocking, + timeout=timeout) + return r + + def delete(self, payload=None, apply_time=None, timeout=500): + """Delete the volume. + + :param payload: May contain @Redfish.OperationApplyTime property + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. 
+ :raises: ConnectionError + :raises: HTTPError + :returns: TaskMonitor if async task or None if successful deletion + """ + r = self._delete(payload, apply_time, timeout) if r.status_code == 202: - return (TaskMonitor(self, r.headers.get('location')) - .set_retry_after(r.headers.get('retry-after'))) + return TaskMonitor.get_task_monitor( + self._conn, r, self._path, self.redfish_version, + self.registries) def delete_volume(self, payload=None, apply_time=None, timeout=500): """Delete the volume. + Deprecated: Use delete + :param payload: May contain @Redfish.OperationApplyTime property :param apply_time: When to update the attributes. Optional. APPLY_TIME_IMMEDIATE - Immediate, @@ -141,19 +208,9 @@ class Volume(base.ResourceBase): :raises: HTTPError :returns: TaskMonitor if async task or None if successful deletion """ - blocking = False - oat_prop = '@Redfish.OperationApplyTime' - if apply_time: - if payload is None: - payload = {} - payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] - if (payload and payload.get(oat_prop) == res_maps. - APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): - blocking = True - r = self._conn.delete(self._path, data=payload, blocking=blocking, - timeout=timeout) + r = self._delete(payload, apply_time, timeout) if r.status_code == 202: - return (TaskMonitor(self._conn, r.headers.get('location')) + return (TaskMonitorDepr(self._conn, r.headers.get('location')) .set_retry_after(r.headers.get('retry-after'))) @@ -190,9 +247,50 @@ class VolumeCollection(base.ResourceCollectionBase): """Indicates if a client is allowed to request for a specific apply time of a create, delete, or action operation of a given resource""" + def _create(self, payload, apply_time=None, timeout=500): + blocking = False + oat_prop = '@Redfish.OperationApplyTime' + if apply_time: + if payload is None: + payload = {} + payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + if (payload and payload.get(oat_prop) == res_maps. 
+ APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): + blocking = True + r = self._conn.post(self._path, data=payload, blocking=blocking, + timeout=timeout) + location = r.headers.get('Location') + return r, location + + def create(self, payload, apply_time=None, timeout=500): + """Create a volume. + + :param payload: The payload representing the new volume to create. + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param timeout: Max time in seconds to wait for blocking async call. + :raises: ConnectionError + :raises: HTTPError + :returns: Newly created Volume resource or TaskMonitor if async task + """ + r, location = self._create(payload, apply_time, timeout) + if r.status_code == 201: + if location: + self.refresh() + return self.get_member(location) + elif r.status_code == 202: + return TaskMonitor.get_task_monitor( + self._conn, r, self._path, self.redfish_version, + self.registries) + def create_volume(self, payload, apply_time=None, timeout=500): """Create a volume. + Deprecated: Use create. + :param payload: The payload representing the new volume to create. :param apply_time: When to update the attributes. Optional. APPLY_TIME_IMMEDIATE - Immediate, @@ -204,22 +302,11 @@ class VolumeCollection(base.ResourceCollectionBase): :raises: HTTPError :returns: Newly created Volume resource or TaskMonitor if async task """ - blocking = False - oat_prop = '@Redfish.OperationApplyTime' - if apply_time: - if payload is None: - payload = {} - payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] - if (payload and payload.get(oat_prop) == res_maps. 
- APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]): - blocking = True - r = self._conn.post(self._path, data=payload, blocking=blocking, - timeout=timeout) - location = r.headers.get('Location') + r, location = self._create(payload, apply_time, timeout) if r.status_code == 201: if location: self.refresh() return self.get_member(location) elif r.status_code == 202: - return (TaskMonitor(self._conn, location) + return (TaskMonitorDepr(self._conn, location) .set_retry_after(r.headers.get('retry-after'))) diff --git a/sushy/resources/task_monitor.py b/sushy/resources/task_monitor.py index cac9cba..f34b9db 100644 --- a/sushy/resources/task_monitor.py +++ b/sushy/resources/task_monitor.py @@ -26,6 +26,7 @@ LOG = logging.getLogger(__name__) class TaskMonitor(base.ResourceBase): + """Deprecated: Use sushy.taskmonitor.TaskMonitor""" def __init__(self, connector, @@ -38,6 +39,8 @@ class TaskMonitor(base.ResourceBase): :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. """ + LOG.warning('sushy.resources.task_monitor.TaskMonitor is deprecated. ' + 'Use sushy.taskmonitor.TaskMonitor') super(TaskMonitor, self).__init__(connector, path, redfish_version) self._retry_after = None self._location_header = None diff --git a/sushy/resources/taskservice/taskmonitor.py b/sushy/resources/taskservice/taskmonitor.py index 5d4edc5..7aabc29 100644 --- a/sushy/resources/taskservice/taskmonitor.py +++ b/sushy/resources/taskservice/taskmonitor.py @@ -13,130 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is referred from Redfish standard schema. 
-# https://redfish.dmtf.org/schemas/Task.v1_4_3.json - -from http import client as http_client - -from sushy.resources import base -from sushy.resources.taskservice import task -from sushy import utils - - -class TaskMonitor(object): - def __init__(self, - connector, - task_monitor, - redfish_version=None, - registries=None, - field_data=None): - """A class representing a task monitor - - :param connector: A Connector instance - :param task_monitor: The task monitor - :param retry_after: The amount of time to wait in seconds before - calling is_processing. - :param redfish_version: The version of RedFish. Used to construct - the object according to schema of the given version. - :param registries: Dict of Redfish Message Registry objects to be - used in any resource that needs registries to parse messages. - :param field_data: the data to use populating the fields. - """ - self._connector = connector - self._task_monitor = task_monitor - self._redfish_version = redfish_version - self._registries = registries - self._field_data = field_data - self._reader = base.get_reader(connector, task_monitor) - self._task = None - - if self._field_data: - # We do not check 'content-length' as it is not always present - # and will rely on task uri in those cases. - if self._field_data.status_code == http_client.ACCEPTED: - self._task = task.Task(self._connector, self._task_monitor, - redfish_version=self._redfish_version, - registries=self._registries, - json_doc=self._field_data.json_doc) - else: - self.refresh() - - def refresh(self): - """Refresh the Task - - Freshly retrieves/fetches the Task. 
- :raises: ResourceNotFoundError - :raises: ConnectionError - :raises: HTTPError - """ - self._field_data = self._reader.get_data() - - if self._field_data.status_code == http_client.ACCEPTED: - # A Task should have been returned, but wasn't - if int(self._field_data.headers.get('Content-Length')) == 0: - self._task = None - return - - # Assume that the body contains a Task since we got a 202 - if not self._task: - self._task = task.Task(self._connector, self._task_monitor, - redfish_version=self._redfish_version, - registries=self._registries, - json_doc=self._field_data.json_doc) - else: - self._task.refresh(json_doc=self._field_data.json_doc) - else: - self._task = None - - @property - def task_monitor(self): - """The TaskMonitor URI - - :returns: The TaskMonitor URI. - """ - return self._task_monitor - - @property - def is_processing(self): - """Indicates if the task is still processing - - :returns: A boolean indicating if the task is still processing. - """ - return self._field_data.status_code == http_client.ACCEPTED - - @property - def retry_after(self): - """The amount of time to sleep before retrying - - :returns: The amount of time in seconds to wait before calling - is_processing. - """ - return utils.int_or_none(self._field_data.headers.get('Retry-After')) - - @property - def cancellable(self): - """The amount of time to sleep before retrying - - :returns: A Boolean indicating if the Task is cancellable. - """ - allow = self._field_data.headers.get('Allow') - - cancellable = False - if allow and allow.upper() == 'DELETE': - cancellable = True - - return cancellable - - @property - def task(self): - """The executing task - - :returns: The Task being executed. 
- """ - - return self._task - - def get_task(self): - return task.Task(self._connector, self._task_monitor, - redfish_version=self._redfish_version, - registries=self._registries) +import logging + +from sushy import taskmonitor + +LOG = logging.getLogger(__name__) + + +def TaskMonitor(connector, + task_monitor, + redfish_version=None, + registries=None, + field_data=None): + """A class representing a task monitor + + Deprecated, use sushy.taskmonitor.TaskMonitor. + + :param connector: A Connector instance + :param task_monitor: The task monitor URI + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. + :param field_data: the data to use populating the fields. + """ + LOG.warning('sushy.resources.taskservice.taskmonitor.TaskMonitor ' + 'is deprecated. Use sushy.taskmonitor.TaskMonitor.') + return taskmonitor.TaskMonitor(connector, task_monitor, redfish_version, + registries, field_data) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index c6b630e..ae8f176 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -15,15 +15,14 @@ # https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json import logging -from urllib.parse import urljoin from sushy import exceptions from sushy.resources import base from sushy.resources import common -from sushy.resources.taskservice import taskmonitor from sushy.resources.updateservice import constants as up_cons from sushy.resources.updateservice import mappings as up_maps from sushy.resources.updateservice import softwareinventory +from sushy import taskmonitor from sushy import utils LOG = logging.getLogger(__name__) @@ -151,29 +150,13 @@ class UpdateService(base.ResourceBase): data['Targets'] = targets rsp = 
self._conn.post(target_uri, data=data) - json_data = rsp.json() if rsp.content else {} - field_data = base.FieldData(rsp.status_code, rsp.headers, json_data) - - header = 'Location' - task_monitor = rsp.headers.get(header) - task_uri_data = json_data.get('@odata.id') - - if task_uri_data: - task_monitor = urljoin(task_monitor, task_uri_data) - - if not task_monitor: - raise exceptions.MissingHeaderError(target_uri=target_uri, - header=header) - - return taskmonitor.TaskMonitor(self._conn, - task_monitor, - redfish_version=self.redfish_version, - registries=self.registries, - field_data=field_data) + return taskmonitor.TaskMonitor.get_task_monitor( + self._conn, rsp, target_uri, self.redfish_version, self.registries) def get_task_monitor(self, task_monitor): """Used to retrieve a TaskMonitor. + Deprecated: Use sushy.Sushy.get_task_monitor :returns: A task monitor. """ return taskmonitor.TaskMonitor( diff --git a/sushy/taskmonitor.py b/sushy/taskmonitor.py new file mode 100644 index 0000000..1560e2e --- /dev/null +++ b/sushy/taskmonitor.py @@ -0,0 +1,248 @@ +# Copyright (c) 2021 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from datetime import datetime +from http import client as http_client +import json +import logging +from urllib.parse import urljoin + +from dateutil import parser +import requests + +from sushy import exceptions +from sushy.resources.taskservice import task +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class TaskMonitor(object): + def __init__(self, + connector, + task_monitor_uri, + redfish_version=None, + registries=None, + field_data=None, + response=None): + """A class representing a task monitor + + :param connector: A Connector instance + :param task_monitor_uri: The task monitor URI + :param redfish_version: The version of Redfish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. + :param field_data: the data to use populating the fields. Deprecated + use response. + :param response: Raw response + """ + self._connector = connector + self._task_monitor_uri = task_monitor_uri + self._redfish_version = redfish_version + self._registries = registries + self._field_data = field_data + if self._field_data is not None: + LOG.warning('TaskMonitor field_data is deprecated in TaskMonitor. 
' + 'Use response.') + self._task = None + self._response = response + + # Backward compability for deprecated field_data + if self._field_data and not self._response: + self._response = requests.Response() + self._response.status_code = self._field_data.status_code + self._response.headers = self._field_data.headers + self._response._content = json.dumps( + self._field_data.json_doc).encode('utf-8') + + if (self._response and self._response.content + and self._response.status_code == http_client.ACCEPTED): + self._task = task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._response.json()) + else: + self.refresh() + + def refresh(self): + """Refresh the Task + + Freshly retrieves/fetches the Task. + :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + self._response = self._connector.get(path=self.task_monitor_uri) + if self._response.status_code == http_client.ACCEPTED: + # A Task should have been returned, but wasn't + if not self._response.content: + self._task = None + return + + # Assume that the body contains a Task since we got a 202 + if not self._task: + self._task = task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._response.json()) + else: + self._task.refresh(json_doc=self._response.json()) + else: + self._task = None + + @property + def task_monitor(self): + """The TaskMonitor URI + + Deprecated: Use task_monitor_uri + + :returns: The TaskMonitor URI. + """ + LOG.warning('task_monitor is deprecated in TaskMonitor. ' + 'Use task_monitor_uri.') + return self._task_monitor_uri + + @property + def task_monitor_uri(self): + """The TaskMonitor URI + + :returns: The TaskMonitor URI. 
+ """ + return self._task_monitor_uri + + @property + def is_processing(self): + """Indicates if the task is still processing + + :returns: A boolean indicating if the task is still processing. + """ + return self._response.status_code == http_client.ACCEPTED + + @property + def check_is_processing(self): + """Refreshes task and check if it is still processing + + :returns: A boolean indicating if the task is still processing. + """ + if not self.is_processing: + return False + + self.refresh() + + return self.is_processing + + @property + def retry_after(self): + """The amount of time to sleep before retrying + + Deprecated: use sleep_for. This is not working with Retry-After header + in date format. + + :returns: The amount of time in seconds to wait before calling + is_processing. + """ + LOG.warning('TaskMonitor retry_after is deprecated, use sleep_for.') + return utils.int_or_none(self._response.headers.get('Retry-After')) + + @property + def sleep_for(self): + """Seconds the client should wait before querying the operation status + + Defaults to 1 second if Retry-After not specified in response. + + :returns: The number of seconds to wait + """ + retry_after = self._response.headers.get('Retry-After') + if retry_after is None: + return 1 + + if isinstance(retry_after, int) or retry_after.isdigit(): + return retry_after + + return max(0, (parser.parse(retry_after) + - datetime.now().astimezone()).total_seconds()) + + @property + def cancellable(self): + """The amount of time to sleep before retrying + + :returns: A Boolean indicating if the Task is cancellable. + """ + allow = self._response.headers.get('Allow') + + cancellable = False + if allow and allow.upper() == 'DELETE': + cancellable = True + + return cancellable + + @property + def task(self): + """The executing task + + :returns: The Task being executed. + """ + + return self._task + + @property + def response(self): + """Unprocessed response. + + Intended to be used internally. 
+ :returns: Unprocessed response. + """ + return self._response + + def get_task(self): + """Construct Task instance from task monitor URI. + + :returns: Task instance. + """ + return task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries) + + @staticmethod + def get_task_monitor(conn, response, target_uri, redfish_version=None, + registries=None): + """Construct TaskMonitor instance from received response. + + :response: Unprocessed response + :target_uri: URI used to initiate async operation + :redfish_version: Redfish version. Optional when used internally. + :registries: Redfish registries. Optional when used internally. + :returns: TaskMonitor instance + :raises: MissingHeaderError if Location is missing in response + """ + json_data = response.json() if response.content else {} + + header = 'Location' + task_monitor_uri = response.headers.get(header) + task_uri_data = json_data.get('@odata.id') + + if task_uri_data: + task_monitor_uri = urljoin(task_monitor_uri, task_uri_data) + + if not task_monitor_uri: + raise exceptions.MissingHeaderError(target_uri=target_uri, + header=header) + + return TaskMonitor(conn, + task_monitor_uri, + redfish_version=redfish_version, + registries=registries, + response=response) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py index 3245317..44170f0 100644 --- a/sushy/tests/unit/resources/system/storage/test_volume.py +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -20,6 +20,7 @@ from sushy import exceptions from sushy.resources import constants as res_cons from sushy.resources.system.storage import constants as store_cons from sushy.resources.system.storage import volume +from sushy import taskmonitor from sushy.tests.unit import base @@ -84,6 +85,28 @@ class VolumeTestCase(base.TestCase): 'The parameter.*lazy.*invalid', self.stor_volume.initialize_volume, 'lazy') + 
def test_initialize_immediate(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'Immediate'}, + blocking=True, timeout=500) + + def test_initialize_on_reset(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'OnReset'}, + blocking=False, timeout=500) + def test_delete_volume(self): self.stor_volume.delete_volume() self.stor_volume._conn.delete.assert_called_once_with( @@ -120,6 +143,34 @@ class VolumeTestCase(base.TestCase): self.assertEqual(task_mon.resource_name, 'task_monitor') self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') + def test_delete_immediate(self): + payload = {} + self.conn.delete.return_value.status_code = 200 + resource = self.stor_volume.delete( + payload=payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=True, timeout=500) + self.assertIsNone(resource) + + def test_delete_on_reset(self): + payload = {} + self.conn.delete.return_value.status_code = 202 + self.conn.delete.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'Retry-After': '120' + } + self.conn.delete.return_value.json.return_value = {'Id': 3, + 'Name': 'Test'} + task_mon = self.stor_volume.delete( + payload=payload, apply_time=res_cons.APPLY_TIME_ON_RESET, + timeout=250) + self.stor_volume._conn.delete.assert_called_once_with( + 
self.stor_volume._path, data=payload, blocking=False, timeout=250) + self.assertIsNotNone(task_mon) + self.assertIsInstance(task_mon, taskmonitor.TaskMonitor) + self.assertEqual(task_mon.task_monitor_uri, + '/redfish/v1/taskmon/4608f7e6') + class VolumeCollectionTestCase(base.TestCase): @@ -273,3 +324,59 @@ class VolumeCollectionTestCase(base.TestCase): self.assertIsNotNone(task_mon) self.assertEqual(task_mon.resource_name, 'task_monitor') self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') + + def test_create_immediate(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'Immediate' + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 201 + self.conn.post.return_value.headers.return_value = { + 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + } + new_vol = self.stor_vol_col.create( + payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=True, timeout=500) + self.stor_vol_col.refresh.assert_called_once() + self.assertIsNotNone(new_vol) + self.assertEqual('4', new_vol.identity) + self.assertEqual('My Volume 4', new_vol.name) + self.assertEqual(107374182400, new_vol.capacity_bytes) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) + + def test_create_on_reset(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'OnReset' + with open('sushy/tests/unit/json_samples/task.json') as 
f: + self.conn.post.return_value.json.return_value = json.load(f) + + self.conn.post.return_value.content.return_value = "Something" + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'Retry-After': '120' + } + task_mon = self.stor_vol_col.create( + payload, apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=False, timeout=500) + self.assertIsNotNone(task_mon) + self.assertIsInstance(task_mon, taskmonitor.TaskMonitor) + self.assertEqual(task_mon.task_monitor_uri, + '/redfish/v1/TaskService/Tasks/545') diff --git a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py index 3155a5c..aa784d8 100644 --- a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py +++ b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py @@ -45,6 +45,14 @@ class TaskMonitorTestCase(base.TestCase): field_data=self.field_data ) + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_init_deprecation_warning(self, mock_log): + taskmonitor.TaskMonitor(self.conn, '/Task/545') + + mock_log.assert_called_once_with( + 'sushy.resources.taskservice.taskmonitor.TaskMonitor ' + 'is deprecated. 
Use sushy.taskmonitor.TaskMonitor.') + def test_init_accepted_no_content(self): field_data = resource_base.FieldData( http_client.ACCEPTED, @@ -81,6 +89,7 @@ class TaskMonitorTestCase(base.TestCase): self.conn.reset_mock() self.conn.get.return_value.status_code = 202 self.conn.get.return_value.headers = {'Content-Length': 0} + self.conn.get.return_value.content = None self.task_monitor.refresh() diff --git a/sushy/tests/unit/resources/test_task_monitor.py b/sushy/tests/unit/resources/test_task_monitor.py index d228010..ae81ec4 100644 --- a/sushy/tests/unit/resources/test_task_monitor.py +++ b/sushy/tests/unit/resources/test_task_monitor.py @@ -18,6 +18,7 @@ from unittest import mock from dateutil import parser +from sushy.resources import task_monitor from sushy.resources.task_monitor import TaskMonitor from sushy.tests.unit import base @@ -37,6 +38,21 @@ class TaskMonitorTestCase(base.TestCase): self.res_headers2 = {'location': 'https://sample.com/foo/bar', 'retry-after': str(self.seconds)} + @mock.patch.object(task_monitor.LOG, 'warning', autospec=True) + def test_init_deprecation_warning(self, mock_log): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + mock_log.assert_called_once_with( + 'sushy.resources.task_monitor.TaskMonitor is deprecated. 
' + 'Use sushy.taskmonitor.TaskMonitor') + def test_task_in_progress(self): self.conn.post.return_value.status_code = 202 self.conn.post.return_value.headers = self.res_headers1.copy() diff --git a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py index 90100e7..9d76522 100644 --- a/sushy/tests/unit/resources/updateservice/test_updateservice.py +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -16,10 +16,10 @@ from unittest import mock from sushy import exceptions from sushy.resources import constants as res_cons -from sushy.resources.taskservice import taskmonitor from sushy.resources.updateservice import constants as ups_cons from sushy.resources.updateservice import softwareinventory from sushy.resources.updateservice import updateservice +from sushy import taskmonitor from sushy.tests.unit import base diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 0e131ab..683d2ff 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -383,9 +383,11 @@ class ConnectorOpTestCase(base.TestCase): response1 = mock.MagicMock(spec=requests.Response) response1.status_code = http_client.ACCEPTED response1.headers = { - 'retry-after': 5, - 'location': '/redfish/v1/taskmon/1' + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} response2 = mock.MagicMock(spec=requests.Response) response2.status_code = http_client.BAD_REQUEST message = 'Unable to create Volume with given parameters' diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 096f7db..0b4769a 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -29,6 +29,7 @@ from sushy.resources.sessionservice import session from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system 
from sushy.resources.updateservice import updateservice +from sushy import taskmonitor from sushy.tests.unit import base @@ -398,6 +399,13 @@ class MainTestCase(base.TestCase): expected = '/redfish/v1/SessionService/Sessions' self.assertEqual(expected, self.root.get_sessions_path()) + @mock.patch.object(taskmonitor, 'TaskMonitor', autospec=True) + def test_get_task_monitor(self, mock_task_mon): + self.root.get_task_monitor('/TaskService/Task/123') + mock_task_mon.assert_called_once_with( + self.root._conn, '/TaskService/Task/123', + self.root.redfish_version, self.root.lazy_registries) + class BareMinimumMainTestCase(base.TestCase): diff --git a/sushy/tests/unit/test_taskmonitor.py b/sushy/tests/unit/test_taskmonitor.py new file mode 100644 index 0000000..c436648 --- /dev/null +++ b/sushy/tests/unit/test_taskmonitor.py @@ -0,0 +1,313 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from http import client as http_client +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources import base as resource_base +from sushy.resources.taskservice import task +from sushy import taskmonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + self.response = mock.Mock() + self.response.status_code = http_client.ACCEPTED + self.response.headers = {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'} + self.response.content = json.dumps(self.json_doc).encode('utf-8') + self.response.json.return_value = self.json_doc + + self.task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=self.response + ) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_init_field_data(self, mock_log): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data) + + self.assertEqual(field_data.status_code, + task_monitor.response.status_code) + self.assertEqual(field_data.headers, + task_monitor.response.headers) + self.assertEqual(field_data.json_doc, + task_monitor.response.json()) + mock_log.assert_called_once_with( + 'TaskMonitor field_data is deprecated in TaskMonitor. 
' + 'Use response.') + + def test_init_accepted_no_content(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = {'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'} + response.content = None + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response) + + self.assertIsNone(task_monitor.task) + + def test_init_accepted_content(self): + self.assertIsNotNone(self.task_monitor.task) + + def test_init_no_response(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + task_monitor = taskmonitor.TaskMonitor(self.conn, '/Task/545') + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(task_monitor.task) + + def test_refresh_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 0} + self.conn.get.return_value.content = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNone(self.task_monitor.task) + + def test_refresh_content_no_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + self.task_monitor._task = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor.task) + + def test_refresh_content_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + 
self.assertIsNotNone(self.task_monitor.task) + + def test_refresh_done(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 200 + + self.task_monitor.refresh() + + self.conn.get.assert_called_once_with(path='/Task/545') + self.assertIsNone(self.task_monitor.task) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_task_monitor(self, mock_log): + self.assertEqual('/Task/545', self.task_monitor.task_monitor) + mock_log.assert_called_once_with( + 'task_monitor is deprecated in TaskMonitor. Use task_monitor_uri.') + + def test_task_monitor_uri(self): + self.assertEqual('/Task/545', self.task_monitor.task_monitor_uri) + + def test_is_processing(self): + self.assertTrue(self.task_monitor.is_processing) + + def test_check_is_processing_not_processing(self): + response = mock.Mock() + response.status_code = http_client.OK + response.headers = {} + response.content = None + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response) + + self.assertEqual(False, task_monitor.check_is_processing) + + def test_check_is_processing_refreshing(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {} + self.conn.get.return_value.content = None + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545') + + self.assertEqual(True, task_monitor.check_is_processing) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_retry_after(self, mock_log): + self.assertEqual(20, self.task_monitor.retry_after) + mock_log.assert_called_once_with('TaskMonitor retry_after is ' + 'deprecated, use sleep_for.') + + def test_cancellable(self): + self.assertTrue(self.task_monitor.cancellable) + + def test_sleep_for_retry_after_empty(self): + self.task_monitor._response.headers["Retry-After"] = None + self.assertEqual(1, self.task_monitor.sleep_for) + + def test_sleep_for_retry_after_digit(self): + self.assertEqual(20, 
self.task_monitor.sleep_for) + + def test_sleep_for_retry_after_date_past(self): + self.task_monitor._response.headers["Retry-After"] =\ + 'Fri, 31 Dec 1999 23:59:59 GMT' + self.assertEqual(0, self.task_monitor.sleep_for) + + def test_not_cancellable_no_header(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = { + 'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20} + response.json.return_value = self.json_doc + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response + ) + + self.assertFalse(task_monitor.cancellable) + + def test_not_cancellable(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = { + 'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'GET'} + response.json.return_value = self.json_doc + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response + ) + + self.assertFalse(task_monitor.cancellable) + + def test_task(self): + tm_task = self.task_monitor.task + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) + + def test_get_task(self): + tm_task = self.task_monitor.get_task() + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) + + def test_get_task_monitor_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + response = mock.Mock() + response.content = None + response.headers = {'Location': '/Task/545'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.get_task_monitor( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) + + def test_get_task_monitor_odata_id(self): + response = mock.Mock() + response.content 
= "something" + response.json.return_value = {'Id': '545', 'Name': 'test', + '@odata.id': '545'} + response.headers = {'Location': '/TaskMonitor/'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.get_task_monitor( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/TaskMonitor/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) + + def test_get_task_monitor_location_header_missing(self): + response = mock.Mock() + response.content = "something" + response.json.return_value = {'Id': '545', 'Name': 'test'} + response.headers = {} + response.status_code = http_client.ACCEPTED + + self.assertRaises(exceptions.MissingHeaderError, + taskmonitor.TaskMonitor.get_task_monitor, + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + def test_get_task_monitor(self): + response = mock.Mock() + response.content = "something" + response.json.return_value = {'Id': '545', 'Name': 'test'} + response.headers = {'Location': '/Task/545'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.get_task_monitor( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) -- GitLab From 375b29b87f0251aa43752c15855c929bd4e1b9ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aija=20Jaunt=C4=93va?= Date: Fri, 26 Feb 2021 04:49:38 -0500 Subject: [PATCH 291/303] Follow up TaskMonitor refactor Follow up to change Idd4158d87d27b6358c6d7a2a7183427a494ee384 Add wait method. 
Rename TaskMonitor.get_task_monitor to TaskMonitor.from_response Change-Id: I1b14bb1eddbf6eff82b0bd06e36c667bcaaf217d --- sushy/connector.py | 17 +---- sushy/resources/system/storage/volume.py | 6 +- .../resources/updateservice/updateservice.py | 2 +- sushy/taskmonitor.py | 29 ++++++++- sushy/tests/unit/test_taskmonitor.py | 62 ++++++++++++++++--- 5 files changed, 88 insertions(+), 28 deletions(-) diff --git a/sushy/connector.py b/sushy/connector.py index bec6406..22817a6 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -169,20 +169,9 @@ class Connector(object): 'returned status 202, but no Location header' % {'method': method, 'url': url}) raise exceptions.ConnectionError(url=url, error=m) - timeout_at = time.time() + timeout - mon = TaskMonitor.get_task_monitor(self, response, path) - while mon.check_is_processing: - LOG.debug('Blocking for in-progress %(method)s call to ' - '%(url)s; sleeping for %(sleep)s seconds', - {'method': method, 'url': url, - 'sleep': mon.sleep_for}) - time.sleep(mon.sleep_for) - if time.time() >= timeout_at and mon.check_is_processing: - m = ('Timeout waiting for blocking %(method)s ' - 'request to %(url)s (timeout = %(timeout)s)' - % {'method': method, 'url': url, - 'timeout': timeout}) - raise exceptions.ConnectionError(url=url, error=m) + + mon = TaskMonitor.from_response(self, response, path) + mon.wait(timeout) response = mon.response exceptions.raise_for_response(method, url, response) diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py index 2e307ed..29522b1 100644 --- a/sushy/resources/system/storage/volume.py +++ b/sushy/resources/system/storage/volume.py @@ -130,7 +130,7 @@ class Volume(base.ResourceBase): """ r, target_uri = self._initialize(value, apply_time, timeout) if r.status_code == 202: - return TaskMonitor.get_task_monitor( + return TaskMonitor.from_response( self._conn, r, target_uri, self.redfish_version, self.registries) @@ -188,7 +188,7 @@ class 
Volume(base.ResourceBase): """ r = self._delete(payload, apply_time, timeout) if r.status_code == 202: - return TaskMonitor.get_task_monitor( + return TaskMonitor.from_response( self._conn, r, self._path, self.redfish_version, self.registries) @@ -282,7 +282,7 @@ class VolumeCollection(base.ResourceCollectionBase): self.refresh() return self.get_member(location) elif r.status_code == 202: - return TaskMonitor.get_task_monitor( + return TaskMonitor.from_response( self._conn, r, self._path, self.redfish_version, self.registries) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py index ae8f176..fbdc491 100644 --- a/sushy/resources/updateservice/updateservice.py +++ b/sushy/resources/updateservice/updateservice.py @@ -150,7 +150,7 @@ class UpdateService(base.ResourceBase): data['Targets'] = targets rsp = self._conn.post(target_uri, data=data) - return taskmonitor.TaskMonitor.get_task_monitor( + return taskmonitor.TaskMonitor.from_response( self._conn, rsp, target_uri, self.redfish_version, self.registries) def get_task_monitor(self, task_monitor): diff --git a/sushy/taskmonitor.py b/sushy/taskmonitor.py index 1560e2e..1e05a3e 100644 --- a/sushy/taskmonitor.py +++ b/sushy/taskmonitor.py @@ -17,6 +17,7 @@ from datetime import datetime from http import client as http_client import json import logging +import time from urllib.parse import urljoin from dateutil import parser @@ -86,6 +87,7 @@ class TaskMonitor(object): :raises: HTTPError """ self._response = self._connector.get(path=self.task_monitor_uri) + if self._response.status_code == http_client.ACCEPTED: # A Task should have been returned, but wasn't if not self._response.content: @@ -216,9 +218,32 @@ class TaskMonitor(object): redfish_version=self._redfish_version, registries=self._registries) + def wait(self, timeout_sec): + """Waits until task is completed or it times out. 
+ + :param timeout_sec: Timeout to wait + :raises: ConnectionError when times out + """ + timeout_at = time.time() + timeout_sec + + while self.check_is_processing: + + LOG.debug('Waiting for task monitor %(url)s; sleeping for ' + '%(sleep)s seconds', + {'url': self.task_monitor_uri, + 'sleep': self.sleep_for}) + time.sleep(self.sleep_for) + if time.time() >= timeout_at and self.check_is_processing: + m = ('Timeout waiting for task monitor %(url)s ' + '(timeout = %(timeout)s)' + % {'url': self.task_monitor_uri, + 'timeout': timeout_sec}) + raise exceptions.ConnectionError(url=self.task_monitor_uri, + error=m) + @staticmethod - def get_task_monitor(conn, response, target_uri, redfish_version=None, - registries=None): + def from_response(conn, response, target_uri, redfish_version=None, + registries=None): """Construct TaskMonitor instance from received response. :response: Unprocessed response diff --git a/sushy/tests/unit/test_taskmonitor.py b/sushy/tests/unit/test_taskmonitor.py index c436648..80d897d 100644 --- a/sushy/tests/unit/test_taskmonitor.py +++ b/sushy/tests/unit/test_taskmonitor.py @@ -15,6 +15,8 @@ from http import client as http_client import json from unittest import mock +import requests + from sushy import exceptions from sushy.resources import base as resource_base from sushy.resources.taskservice import task @@ -250,7 +252,51 @@ class TaskMonitorTestCase(base.TestCase): self.assertIsInstance(tm_task, task.Task) self.assertEqual('545', tm_task.identity) - def test_get_task_monitor_no_content(self): + @mock.patch('time.sleep', autospec=True) + def test_wait(self, mock_time): + self.conn.reset_mock() + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} + + response2 = mock.MagicMock(spec=requests.Response) + response2.status_code = 
http_client.OK + response2.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response2.json.return_value = {'Id': 3, 'Name': 'Test'} + + self.conn.get.side_effect = [response1, response2] + self.task_monitor.wait(60) + + self.assertFalse(self.task_monitor.is_processing) + self.assertEqual(response2, self.task_monitor.response) + + @mock.patch('time.sleep', autospec=True) + def test_wait_timeout(self, mock_time): + self.conn.reset_mock() + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} + + self.conn.get.side_effect = [response1, response1] + + self.assertRaises(exceptions.ConnectionError, + self.task_monitor.wait, -10) + + def test_from_response_no_content(self): self.conn.reset_mock() self.conn.get.return_value.status_code = 202 response = mock.Mock() @@ -258,7 +304,7 @@ class TaskMonitorTestCase(base.TestCase): response.headers = {'Location': '/Task/545'} response.status_code = http_client.ACCEPTED - tm = taskmonitor.TaskMonitor.get_task_monitor( + tm = taskmonitor.TaskMonitor.from_response( self.conn, response, '/redfish/v1/UpdateService/Actions/SimpleUpdate') @@ -267,7 +313,7 @@ class TaskMonitorTestCase(base.TestCase): self.assertIsNotNone(tm.task) self.assertEqual('545', tm.task.identity) - def test_get_task_monitor_odata_id(self): + def test_from_response_odata_id(self): response = mock.Mock() response.content = "something" response.json.return_value = {'Id': '545', 'Name': 'test', @@ -275,7 +321,7 @@ class TaskMonitorTestCase(base.TestCase): response.headers = {'Location': '/TaskMonitor/'} response.status_code = http_client.ACCEPTED - tm = taskmonitor.TaskMonitor.get_task_monitor( + tm = taskmonitor.TaskMonitor.from_response( self.conn, response, 
'/redfish/v1/UpdateService/Actions/SimpleUpdate') @@ -284,7 +330,7 @@ class TaskMonitorTestCase(base.TestCase): self.assertIsNotNone(tm.task) self.assertEqual('545', tm.task.identity) - def test_get_task_monitor_location_header_missing(self): + def test_from_response_location_header_missing(self): response = mock.Mock() response.content = "something" response.json.return_value = {'Id': '545', 'Name': 'test'} @@ -292,18 +338,18 @@ class TaskMonitorTestCase(base.TestCase): response.status_code = http_client.ACCEPTED self.assertRaises(exceptions.MissingHeaderError, - taskmonitor.TaskMonitor.get_task_monitor, + taskmonitor.TaskMonitor.from_response, self.conn, response, '/redfish/v1/UpdateService/Actions/SimpleUpdate') - def test_get_task_monitor(self): + def test_from_response(self): response = mock.Mock() response.content = "something" response.json.return_value = {'Id': '545', 'Name': 'test'} response.headers = {'Location': '/Task/545'} response.status_code = http_client.ACCEPTED - tm = taskmonitor.TaskMonitor.get_task_monitor( + tm = taskmonitor.TaskMonitor.from_response( self.conn, response, '/redfish/v1/UpdateService/Actions/SimpleUpdate') -- GitLab From f8c55b784d12fb6043af779970dc7170eba61061 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 14 Mar 2021 16:34:40 +0100 Subject: [PATCH 292/303] Now packaging 3.7.0 --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index 46a8e4c..a2cdb73 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.7.0-1) experimental; urgency=medium + + * New upstream release. + + -- Thomas Goirand Sun, 14 Mar 2021 16:34:25 +0100 + python-sushy (3.4.1-2) unstable; urgency=medium * Fixed debian/watch. -- GitLab From c6eee81d24e024ad249c43af4c1331716afdfdc5 Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Sun, 14 Mar 2021 16:35:30 +0100 Subject: [PATCH 293/303] Fix diff with upstream tag. 
--- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 83224c1..56ba2fa 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ setenv = VIRTUAL_ENV={envdir} PYTHONWARNINGS=default::DeprecationWarning deps = - -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/victoria} + -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = stestr run --slowest {posargs} -- GitLab From a27fc6a2f08d7f376675cb269b110c68aa861276 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 18 Mar 2021 10:26:51 +0000 Subject: [PATCH 294/303] Update .gitreview for stable/wallaby Change-Id: I55ab789b71159d19c8f31ace510b47c903b1a8e2 --- .gitreview | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitreview b/.gitreview index 9ca57a4..8f8aacc 100644 --- a/.gitreview +++ b/.gitreview @@ -2,3 +2,4 @@ host=review.opendev.org port=29418 project=openstack/sushy.git +defaultbranch=stable/wallaby -- GitLab From ee2b83ee67b0653fe936d7f375c3054818e7a9e8 Mon Sep 17 00:00:00 2001 From: OpenStack Release Bot Date: Thu, 18 Mar 2021 10:27:02 +0000 Subject: [PATCH 295/303] Update TOX_CONSTRAINTS_FILE for stable/wallaby Update the URL to the upper-constraints file to point to the redirect rule on releases.openstack.org so that anyone working on this branch will switch to the correct upper-constraints list automatically when the requirements repository branches. Until the requirements repository has as stable/wallaby branch, tests will continue to use the upper-constraints list on master. 
Change-Id: Ieef76c74907986dd8ab3db6f8c71a30720819442 --- tox.ini | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 56ba2fa..34b07b1 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ setenv = VIRTUAL_ENV={envdir} PYTHONWARNINGS=default::DeprecationWarning deps = - -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt commands = stestr run --slowest {posargs} @@ -41,7 +41,7 @@ commands = coverage erase coverage xml -o cover/coverage.xml [testenv:docs] -deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} -r{toxinidir}/requirements.txt -r{toxinidir}/doc/requirements.txt commands = sphinx-build -W -b html doc/source doc/build/html @@ -49,14 +49,14 @@ commands = sphinx-build -W -b html doc/source doc/build/html [testenv:pdf-docs] usedevelop = False whitelist_externals = make -deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} -r{toxinidir}/doc/requirements.txt commands = sphinx-build -b latex doc/source doc/build/pdf make -C doc/build/pdf [testenv:releasenotes] usedevelop = False -deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} -r{toxinidir}/doc/requirements.txt commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html -- GitLab From ab3a97e6e89e626e28d91f956dd828a12f7fd116 Mon Sep 17 00:00:00 2001 From: Steve Baker Date: Mon, 8 Mar 2021 20:20:07 +1300 Subject: [PATCH 296/303] 
Ensure Content-Type header is set when required This used to be set to application/json when there was request data, but this was removed in change I6bd9a0719acfb839fcf137c58bcf03254b1af5ad without any particular explanation. There have been reports of some redfish implementations returning error responses (400 or 415) when this header is not set, specifically HPE Gen9. It is good practice to always set Content-Type on requests with data anyway. This change adds that header back. Change-Id: I13107bbea4d422fdfe620ade735a2e150cb51bf2 (cherry picked from commit f52aac46cd0a77f9282591f4d888a241b6eb0a39) --- sushy/connector.py | 5 ++++- sushy/tests/unit/test_connector.py | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/sushy/connector.py b/sushy/connector.py index 22817a6..90b8334 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -93,7 +93,10 @@ class Connector(object): url = path if urlparse.urlparse(path).netloc else urlparse.urljoin( self._url, path) headers = headers or {} - if not any(k.lower() == 'odata-version' for k in headers): + lc_headers = [k.lower() for k in headers] + if data is not None and 'content-type' not in lc_headers: + headers['Content-Type'] = 'application/json' + if 'odata-version' not in lc_headers: headers['OData-Version'] = '4.0' # TODO(lucasagomes): We should mask the data to remove sensitive # information diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 8cebecc..d0ebbbe 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -220,6 +220,7 @@ class ConnectorOpTestCase(base.TestCase): self.conn._session.headers['X-Auth-Token'] = 'asdf1234' expected_headers = self.headers.copy() expected_headers['OData-Version'] = '4.0' + expected_headers['Content-Type'] = 'application/json' self.conn._op('POST', path='fake/path', headers=self.headers, data=self.data) self.request.assert_called_once_with( -- GitLab From 
bc4987800906d8413f964c9a3f5c7eb580de55e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Cmit=20Seren?= Date: Sat, 24 Apr 2021 21:57:26 +0200 Subject: [PATCH 297/303] Implement fallback method for virtual media Some vendors implement the original method of inserting and ejecting virtual media using a PATCH request to the target URI instead of using a POST request to the insert_media and eject_media action URI. Additionally the actions attribute might set to None This implements the fallback method, if there are no actions and the PATCH method is sent back in the Allow header Change-Id: I2046c4e7c8739edd4aaac3e7facc378b82276ca4 (cherry picked from commit 9e5a103565e204b9358e10580cd0346391be9bec) --- ...rtual-media-fallback-15a559414a65c014.yaml | 7 ++ sushy/resources/base.py | 30 +++++++ sushy/resources/manager/virtual_media.py | 63 +++++++++++---- .../resources/manager/test_virtual_media.py | 81 +++++++++++++++++++ 4 files changed, 165 insertions(+), 16 deletions(-) create mode 100644 releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml diff --git a/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml b/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml new file mode 100644 index 0000000..caf129c --- /dev/null +++ b/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Adds a fallback for inserting and ejecting virtual media + using the PATCH HTTP request instead of the explicit action URIs. + The fallback is required for Lenovo ThinkSystem machines (i.e. SD530, ..) + that only implement the PATCH method. 
\ No newline at end of file diff --git a/sushy/resources/base.py b/sushy/resources/base.py index 6ecf4e8..b2b40c1 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -19,6 +19,7 @@ import copy import io import json import logging +import re import zipfile import pkg_resources @@ -570,6 +571,35 @@ class ResourceBase(object, metaclass=abc.ABCMeta): return settings + def _get_etag(self): + """Returns the ETag of the HTTP request if any was specified. + + :returns ETag or None + """ + pattern = re.compile(r'^(W\/)?("\w*")$') + match = pattern.match(self._get_headers().get('ETag', '')) + if match: + return match.group(2) + return None + + def _get_headers(self): + """Returns the HTTP headers of the request for the resource. + + :returns: dict of HTTP headers + """ + return self._reader.get_data()._headers + + def _allow_patch(self): + """Returns if the resource supports the PATCH HTTP method. + + If the resource supports the PATCH HTTP method for updates, + it will return it in the Allow HTTP header. 
+ :returns: Boolean flag if PATCH is supported or not + """ + allow_header = self._get_headers().get('Allow', '') + methods = set([h.strip().upper() for h in allow_header.split(',')]) + return "PATCH" in methods + def refresh(self, force=True, json_doc=None): """Refresh the resource diff --git a/sushy/resources/manager/virtual_media.py b/sushy/resources/manager/virtual_media.py index 150e7d0..a100022 100644 --- a/sushy/resources/manager/virtual_media.py +++ b/sushy/resources/manager/virtual_media.py @@ -67,19 +67,31 @@ class VirtualMedia(base.ResourceBase): _actions = ActionsField('Actions') """Insert/eject action for virtual media""" - def _get_insert_media_element(self): - insert_media = self._actions.insert_media + def _get_insert_media_uri(self): + insert_media = self._actions.insert_media if self._actions else None + use_patch = False if not insert_media: - raise exceptions.MissingActionError( - action='#VirtualMedia.InsertMedia', resource=self._path) - return insert_media - - def _get_eject_media_element(self): - eject_media = self._actions.eject_media + insert_uri = self.path + use_patch = self._allow_patch() + if not use_patch: + raise exceptions.MissingActionError( + action='#VirtualMedia.InsertMedia', resource=self._path) + else: + insert_uri = insert_media.target_uri + return insert_uri, use_patch + + def _get_eject_media_uri(self): + eject_media = self._actions.eject_media if self._actions else None + use_patch = False if not eject_media: - raise exceptions.MissingActionError( - action='#VirtualMedia.EjectMedia', resource=self._path) - return eject_media + eject_uri = self.path + use_patch = self._allow_patch() + if not use_patch: + raise exceptions.MissingActionError( + action='#VirtualMedia.EjectMedia', resource=self._path) + else: + eject_uri = eject_media.target_uri + return eject_uri, use_patch def insert_media(self, image, inserted=True, write_protected=False): """Attach remote media to virtual media @@ -89,9 +101,17 @@ class 
VirtualMedia(base.ResourceBase): completion of the action. :param write_protected: indicates the media is write protected """ - target_uri = self._get_insert_media_element().target_uri - self._conn.post(target_uri, data={"Image": image, "Inserted": inserted, - "WriteProtected": write_protected}) + target_uri, use_patch = self._get_insert_media_uri() + payload = {"Image": image, "Inserted": inserted, + "WriteProtected": write_protected} + if use_patch: + headers = None + etag = self._get_etag() + if etag is not None: + headers = {"If-Match": etag} + self._conn.patch(target_uri, data=payload, headers=headers) + else: + self._conn.post(target_uri, data=payload) self.invalidate() def eject_media(self): @@ -101,8 +121,19 @@ class VirtualMedia(base.ResourceBase): empty. """ try: - target_uri = self._get_eject_media_element().target_uri - self._conn.post(target_uri) + target_uri, use_patch = self._get_eject_media_uri() + if use_patch: + payload = { + "Image": None, + "Inserted": False + } + headers = None + etag = self._get_etag() + if etag is not None: + headers = {"If-Match": etag} + self._conn.patch(target_uri, data=payload, headers=headers) + else: + self._conn.post(target_uri) except exceptions.HTTPError as response: # Some vendors like HPE iLO has this kind of implementation. # It needs to pass an empty dict. 
diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py index 6bbb8d2..fd52af8 100644 --- a/sushy/tests/unit/resources/manager/test_virtual_media.py +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -28,6 +28,7 @@ class VirtualMediaTestCase(base.TestCase): def setUp(self): super(VirtualMediaTestCase, self).setUp() self.conn = mock.Mock() + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD'} with open('sushy/tests/unit/json_samples/' 'virtual_media.json') as f: self.json_doc = json.load(f) @@ -74,6 +75,12 @@ class VirtualMediaTestCase(base.TestCase): self.sys_virtual_media.insert_media, "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._actions = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.InsertMedia', + self.sys_virtual_media.insert_media, + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + def test_insert_media(self): self.assertFalse(self.sys_virtual_media._is_stale) self.sys_virtual_media.insert_media( @@ -86,12 +93,55 @@ class VirtualMediaTestCase(base.TestCase): ) self.assertTrue(self.sys_virtual_media._is_stale) + def test_insert_media_fallback(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH'} + self.sys_virtual_media._actions.insert_media = None + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers=None) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_insert_media_fallback_with_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': '"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.insert_media = None + 
self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_insert_media_fallback_with_weak_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': 'W/"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.insert_media = None + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + def test_eject_media_none(self): self.sys_virtual_media._actions.eject_media = None self.assertRaisesRegex( exceptions.MissingActionError, 'action #VirtualMedia.EjectMedia', self.sys_virtual_media.eject_media) + self.sys_virtual_media._actions = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.EjectMedia', + self.sys_virtual_media.eject_media) + def test_eject_media(self): self.assertFalse(self.sys_virtual_media._is_stale) self.sys_virtual_media.eject_media() @@ -100,6 +150,37 @@ class VirtualMediaTestCase(base.TestCase): "/VirtualMedia.EjectMedia")) self.assertTrue(self.sys_virtual_media._is_stale) + def test_eject_media_fallback(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + 
("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, headers=None) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_fallback_with_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': '"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_fallback_with_weak_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': 'W/"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + def test_eject_media_pass_empty_dict_415(self): target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" "/VirtualMedia.EjectMedia") -- GitLab From 1b299c64ab8ea3f5e842c9a9b4d9fcfaf9cbf5ea Mon Sep 17 00:00:00 2001 From: Dmitry Tantsur Date: Thu, 10 Jun 2021 19:32:25 +0200 Subject: [PATCH 298/303] Fix incorrect formatting and a Python 3.10 failure While testing on Python 3.10 two issues were spotted: 1) Missing formatting specifiers in registry logging 2) Reliance on the exact way HTTP status are converted to string Fedora bug https://bugzilla.redhat.com/show_bug.cgi?id=1969148 Change-Id: I22d0033691ee0ba34fca14ac14904e33342a584a (cherry picked from commit 5b38ec591e2ea8df8f49fc956a2aecb7776a6656) --- sushy/resources/registry/message_registry.py | 4 ++-- sushy/tests/unit/test_connector.py | 4 ++-- 2 files 
changed, 4 insertions(+), 4 deletions(-) diff --git a/sushy/resources/registry/message_registry.py b/sushy/resources/registry/message_registry.py index 86f458c..d4ea811 100644 --- a/sushy/resources/registry/message_registry.py +++ b/sushy/resources/registry/message_registry.py @@ -118,8 +118,8 @@ def parse_message(message_registries, message_field): if not reg_msg: LOG.warning( - 'Unable to find message for registry %(registry), ' - 'message ID %(msg_key)', { + 'Unable to find message for registry %(registry)s, ' + 'message ID %(msg_key)s', { 'registry': registry, 'msg_key': msg_key}) if message_field.message is None: diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index d0ebbbe..40d3164 100644 --- a/sushy/tests/unit/test_connector.py +++ b/sushy/tests/unit/test_connector.py @@ -387,8 +387,8 @@ class ConnectorOpTestCase(base.TestCase): 'HTTP GET of SessionService failed %s, ' 'this is expected prior to authentication', 'HTTP GET ' 'http://redfish/v1/SessionService returned code ' - 'HTTPStatus.FORBIDDEN. unknown error Extended information: ' - 'none') + '%s. 
unknown error Extended information: ' + 'none' % http_client.FORBIDDEN) self.assertEqual(http_client.FORBIDDEN, exc.status_code) def test_blocking_no_location_header(self): -- GitLab From fee19b2e09eecbe7b4af2f317179214a350a6db1 Mon Sep 17 00:00:00 2001 From: Michal Arbet Date: Sat, 26 Jun 2021 12:58:42 +0200 Subject: [PATCH 299/303] Add me to d/copyright --- debian/copyright | 1 + 1 file changed, 1 insertion(+) diff --git a/debian/copyright b/debian/copyright index c773807..a0d8b66 100644 --- a/debian/copyright +++ b/debian/copyright @@ -11,6 +11,7 @@ License: Apache-2 Files: debian/* Copyright: (c) 2017, Thomas Goirand + (c) 2021, Michal Arbet License: Apache-2 License: Apache-2 -- GitLab From 777ce6ded1a8bcd0d5790e7c0d8779a367800fc8 Mon Sep 17 00:00:00 2001 From: Michal Arbet Date: Sat, 26 Jun 2021 12:58:56 +0200 Subject: [PATCH 300/303] Add me to uploaders field --- debian/control | 1 + 1 file changed, 1 insertion(+) diff --git a/debian/control b/debian/control index f9ad60b..d3df720 100644 --- a/debian/control +++ b/debian/control @@ -4,6 +4,7 @@ Priority: optional Maintainer: Debian OpenStack Uploaders: Thomas Goirand , + Michal Arbet , Build-Depends: debhelper-compat (= 10), dh-python, -- GitLab From a48bee53caeb504d5437e1a73a499f406d756c8c Mon Sep 17 00:00:00 2001 From: Michal Arbet Date: Sat, 26 Jun 2021 12:59:48 +0200 Subject: [PATCH 301/303] Release to experimental --- debian/changelog | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/debian/changelog b/debian/changelog index a2cdb73..eb6ad70 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,11 @@ +python-sushy (3.7.2-1) experimental; urgency=medium + + * New upstream version + * d/copyright: Add me to copyright file + * d/control: Add me to uploaders field + + -- Michal Arbet Sat, 26 Jun 2021 12:59:00 +0200 + python-sushy (3.7.0-1) experimental; urgency=medium * New upstream release. 
-- GitLab From 8220d68427d27964aa3df82a43dfb9bf36bd157b Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 16 Aug 2021 09:45:25 +0200 Subject: [PATCH 302/303] Upload to unstable. --- debian/changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/debian/changelog b/debian/changelog index eb6ad70..ba32629 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +python-sushy (3.7.2-2) unstable; urgency=medium + + * Upload to unstable. + + -- Thomas Goirand Mon, 16 Aug 2021 09:43:15 +0200 + python-sushy (3.7.2-1) experimental; urgency=medium * New upstream version -- GitLab From e74c54106562905ef67b16bd6999879d9489bbc1 Mon Sep 17 00:00:00 2001 From: Debian Janitor Date: Sun, 22 Aug 2021 02:46:12 +0000 Subject: [PATCH 303/303] Apply multi-arch hints. + python-sushy-doc: Add Multi-Arch: foreign. Changes-By: apply-multiarch-hints --- debian/changelog | 7 +++++++ debian/control | 1 + 2 files changed, 8 insertions(+) diff --git a/debian/changelog b/debian/changelog index ba32629..8dcfbcc 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +python-sushy (3.7.2-3) UNRELEASED; urgency=medium + + * Apply multi-arch hints. + + python-sushy-doc: Add Multi-Arch: foreign. + + -- Debian Janitor Sun, 22 Aug 2021 02:46:12 -0000 + python-sushy (3.7.2-2) unstable; urgency=medium * Upload to unstable. diff --git a/debian/control b/debian/control index d3df720..197a80c 100644 --- a/debian/control +++ b/debian/control @@ -35,6 +35,7 @@ Architecture: all Depends: ${misc:Depends}, ${sphinxdoc:Depends}, +Multi-Arch: foreign Description: small library to communicate with Redfish based systems - doc Sushy is a Python library to communicate with Redfish based systems. The goal of the library is to be extremely simple, small, have as few dependencies as -- GitLab