diff --git a/.coveragerc b/.coveragerc index 3532cb58985e7cd9c03b11cff90ada3b9c2b522b..0affbccdca6a9853bfc499a204009508b01c0c02 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,9 +1,13 @@ [run] branch = True source = sushy +omit = + *tests* [report] ignore_errors = True +omit = + *tests* [html] directory = cover diff --git a/.gitignore b/.gitignore index 82d3306d927b4d72ee4f1d4a0ea72ecd3bac08f7..0c088d0d1420a7b9bf16d38c8f1b4ca0b1e9ff85 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,7 @@ cover/ !.coveragerc .tox nosetests.xml -.testrepository +.stestr/ .venv # Translations diff --git a/.gitreview b/.gitreview index 7d5bc85bf4133ddf83dd46f1693bd236bc7c8243..8f8aaccf57d621cd55b6bdb265db15d91d078207 100644 --- a/.gitreview +++ b/.gitreview @@ -1,5 +1,5 @@ [gerrit] -host=review.openstack.org +host=review.opendev.org port=29418 project=openstack/sushy.git -defaultbranch=stable/pike +defaultbranch=stable/wallaby diff --git a/.stestr.conf b/.stestr.conf new file mode 100644 index 0000000000000000000000000000000000000000..beace5567c8485c0029ac5874f2a5a47b822b0ac --- /dev/null +++ b/.stestr.conf @@ -0,0 +1,3 @@ +[DEFAULT] +test_path=./sushy/tests +top_dir=. diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 6d83b3c4ec569285143d929e8239c99cdffa8eb4..0000000000000000000000000000000000000000 --- a/.testr.conf +++ /dev/null @@ -1,7 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ - OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ - OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \ - ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 3765c1b3633b2ff9e60335a47843b516bb9c326f..2190b3854e344b69e486aa45c8d2b08c95c99d7d 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -12,6 +12,6 @@ submitted for review via the Gerrit tool: Pull requests submitted through GitHub will be ignored. 
-Bugs should be filed on Launchpad, not GitHub: +Bugs should be filed in StoryBoard, not GitHub: - https://bugs.launchpad.net/sushy + https://storyboard.openstack.org/#!/project/960 diff --git a/HACKING.rst b/HACKING.rst index c001f187a657efa906728a91327dce2b6e626715..9660e992694d95ce10298b5621839099e06447e1 100644 --- a/HACKING.rst +++ b/HACKING.rst @@ -1,4 +1,4 @@ Sushy Style Commandments ======================== -Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ +Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ diff --git a/README.rst b/README.rst index c4046804b55655d7997db8d1c104ff8673b6fdf7..7ade1e11a51c1459331cca4f3f91dda2c21843f4 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -About Sushy -=========== +Overview +======== Sushy is a Python library to communicate with `Redfish`_ based systems. @@ -10,12 +10,15 @@ by issuing just enough requests to it (BMCs are very flaky). Therefore, the scope of the library has been limited to what is supported by the `OpenStack Ironic `_ project. As the project grows and more features from `Redfish`_ are -needed we can expand Sushy to fullfil those requirements. +needed we can expand Sushy to fulfill those requirements. * Free software: Apache license +* Includes Redfish registry files licensed under + Creative Commons Attribution 4.0 License: + https://creativecommons.org/licenses/by/4.0/ * Documentation: https://docs.openstack.org/sushy/latest/ * Usage: https://docs.openstack.org/sushy/latest/reference/usage.html -* Source: https://git.openstack.org/cgit/openstack/sushy -* Bugs: https://bugs.launchpad.net/sushy +* Source: https://opendev.org/openstack/sushy +* Bugs: https://storyboard.openstack.org/#!/project/960 .. 
_Redfish: http://www.dmtf.org/standards/redfish diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index 15cd6cb76b93453343e70650a70db58cff98195b..0000000000000000000000000000000000000000 --- a/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/bindep.txt b/bindep.txt new file mode 100644 index 0000000000000000000000000000000000000000..273e463630cffb97a78113e4b96a4418d806f963 --- /dev/null +++ b/bindep.txt @@ -0,0 +1,2 @@ +# fonts-freefont-otf is needed for pdf docs builds with the 'xelatex' engine +fonts-freefont-otf [doc] diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000000000000000000000000000000000000..8dcfbccc53d6c73b05d32d1c4ddf6a325b7fda19 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,144 @@ +python-sushy (3.7.2-3) UNRELEASED; urgency=medium + + * Apply multi-arch hints. + + python-sushy-doc: Add Multi-Arch: foreign. + + -- Debian Janitor Sun, 22 Aug 2021 02:46:12 -0000 + +python-sushy (3.7.2-2) unstable; urgency=medium + + * Upload to unstable. + + -- Thomas Goirand Mon, 16 Aug 2021 09:43:15 +0200 + +python-sushy (3.7.2-1) experimental; urgency=medium + + * New upstream version + * d/copyright: Add me to copyright file + * d/control: Add me to uploaders field + + -- Michal Arbet Sat, 26 Jun 2021 12:59:00 +0200 + +python-sushy (3.7.0-1) experimental; urgency=medium + + * New upstream release. + + -- Thomas Goirand Sun, 14 Mar 2021 16:34:25 +0100 + +python-sushy (3.4.1-2) unstable; urgency=medium + + * Fixed debian/watch. + * Uploading to unstable. + * Add a debian/salsa-ci.yml. + + -- Thomas Goirand Wed, 14 Oct 2020 14:13:39 +0200 + +python-sushy (3.4.1-1) experimental; urgency=medium + + * New upstream release. + * Removed taskservice/__init__.py hack since after my report, this was fixed + upstream. 
+ + -- Thomas Goirand Fri, 25 Sep 2020 08:30:40 +0200 + +python-sushy (3.4.0-2) experimental; urgency=medium + + * Add missing sushy/resources/taskservice/__init__.py so that the package + also contains the missing files in Python 3.7. + + -- Thomas Goirand Thu, 24 Sep 2020 11:28:09 +0200 + +python-sushy (3.4.0-1) experimental; urgency=medium + + * New upstream release. + * Fixed (build-)depends for this release. + + -- Thomas Goirand Tue, 08 Sep 2020 08:40:48 +0200 + +python-sushy (3.2.0-2) unstable; urgency=medium + + * Uploading to unstable. + + -- Thomas Goirand Fri, 08 May 2020 11:47:55 +0200 + +python-sushy (3.2.0-1) experimental; urgency=medium + + * New upstream release. + * Removed -six from build-depends. + + -- Thomas Goirand Mon, 06 Apr 2020 22:57:07 +0200 + +python-sushy (2.0.0-2) unstable; urgency=medium + + [ Ondřej Nový ] + * Bump Standards-Version to 4.4.1. + + [ Thomas Goirand ] + * Uploading to unstable. + + -- Thomas Goirand Mon, 21 Oct 2019 10:25:40 +0200 + +python-sushy (2.0.0-1) experimental; urgency=medium + + [ Ondřej Nový ] + * Use debhelper-compat instead of debian/compat. + * Bump Standards-Version to 4.4.0. + + [ Thomas Goirand ] + * New upstream release. + + -- Thomas Goirand Thu, 26 Sep 2019 15:00:16 +0200 + +python-sushy (1.8.1-2) unstable; urgency=medium + + * Uploading to unstable. + + -- Thomas Goirand Wed, 17 Jul 2019 00:39:40 +0200 + +python-sushy (1.8.1-1) experimental; urgency=medium + + [ Ondřej Nový ] + * Running wrap-and-sort -bast. + + [ Thomas Goirand ] + * New upstream release. + * Fixed (build-)depends for this release. + * Standards-Version: 4.3.0 (no change). + * Running unit tests with installed Python module. + + -- Thomas Goirand Wed, 27 Mar 2019 13:36:13 +0100 + +python-sushy (1.3.1-3) unstable; urgency=medium + + [ Ondřej Nový ] + * d/control: Use team+openstack@tracker.debian.org as maintainer + + [ Thomas Goirand ] + * Removed Python 2 support, not needed anymore in Debian. 
+ + -- Thomas Goirand Mon, 01 Oct 2018 11:19:49 +0200 + +python-sushy (1.3.1-2) unstable; urgency=medium + + * Uploading to unstable. + + -- Thomas Goirand Sun, 25 Feb 2018 22:56:36 +0000 + +python-sushy (1.3.1-1) experimental; urgency=medium + + [ Ondřej Nový ] + * d/control: Set Vcs-* to salsa.debian.org + * d/copyright: Use https in Format + + [ Thomas Goirand ] + * New upstream release. + * Fixed (build-)depends for this release. + * Standards-Version is now 4.1.3. + + -- Thomas Goirand Sun, 18 Feb 2018 20:52:04 +0000 + +python-sushy (1.1.0-1) unstable; urgency=medium + + * Initial release. (Closes: #879968) + + -- Thomas Goirand Fri, 27 Oct 2017 20:11:38 +0200 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000000000000000000000000000000000000..197a80cec7baa3c9844b9efcadee886a4bbbe894 --- /dev/null +++ b/debian/control @@ -0,0 +1,70 @@ +Source: python-sushy +Section: python +Priority: optional +Maintainer: Debian OpenStack +Uploaders: + Thomas Goirand , + Michal Arbet , +Build-Depends: + debhelper-compat (= 10), + dh-python, + openstack-pkg-tools, + python3-all, + python3-pbr, + python3-setuptools, + python3-sphinx, +Build-Depends-Indep: + python3-coverage, + python3-dateutil, + python3-hacking, + python3-openstackdocstheme, + python3-oslotest, + python3-requests, + python3-stestr, + python3-stevedore, + python3-sphinxcontrib.apidoc, + subunit, +Standards-Version: 4.4.1 +Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-sushy +Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-sushy.git +Homepage: https://docs.openstack.org/sushy + +Package: python-sushy-doc +Section: doc +Architecture: all +Depends: + ${misc:Depends}, + ${sphinxdoc:Depends}, +Multi-Arch: foreign +Description: small library to communicate with Redfish based systems - doc + Sushy is a Python library to communicate with Redfish based systems. 
The goal + of the library is to be extremely simple, small, have as few dependencies as + possible and be very conservative when dealing with BMCs by issuing just + enough requests to it (BMCs are very flaky). + . + Therefore, the scope of the library has been limited to what is supported by + the OpenStack Ironic project. As the project grows and more features from + Redfish are needed Sushy will expand to fulfil those requirements. + . + This package contains the documentation. + +Package: python3-sushy +Architecture: all +Depends: + python3-pbr (>= 2.0.0), + python3-requests (>= 2.14.2), + ${misc:Depends}, + ${python3:Depends}, +Suggests: + python-sushy-doc, +Description: small library to communicate with Redfish based systems - Python 3.x + Sushy is a Python library to communicate with Redfish based systems. The goal + of the library is to be extremely simple, small, have as few dependencies as + possible and be very conservative when dealing with BMCs by issuing just + enough requests to it (BMCs are very flaky). + . + Therefore, the scope of the library has been limited to what is supported by + the OpenStack Ironic project. As the project grows and more features from + Redfish are needed Sushy will expand to fulfil those requirements. + . + This package contains the Python 3.x module. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000000000000000000000000000000000000..a0d8b66cfb846731847053963fc123b60295b529 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,31 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: sushy +Source: https://docs.openstack.org/sushy + +Files: * +Copyright: (c) 2014-2016, Distributed Management Task Force, Inc. (DMTF). + (c) 2010-2016, OpenStack Foundation + (c) 2017, Red Hat, Inc. + (c) 2013 Hewlett-Packard Development Company, L.P. 
+License: Apache-2 + +Files: debian/* +Copyright: (c) 2017, Thomas Goirand + (c) 2021, Michal Arbet +License: Apache-2 + +License: Apache-2 + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + . + http://www.apache.org/licenses/LICENSE-2.0 + . + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + . + On Debian-based systems the full text of the Apache version 2.0 license + can be found in /usr/share/common-licenses/Apache-2.0. diff --git a/debian/python-sushy-doc.doc-base b/debian/python-sushy-doc.doc-base new file mode 100644 index 0000000000000000000000000000000000000000..8d58cc136921af48a6aa74f9f610de6fd45370f5 --- /dev/null +++ b/debian/python-sushy-doc.doc-base @@ -0,0 +1,9 @@ +Document: python-sushy-doc +Title: sushy Documentation +Author: N/A +Abstract: Sphinx documentation for sushy +Section: Programming/Python + +Format: HTML +Index: /usr/share/doc/python-sushy-doc/html/index.html +Files: /usr/share/doc/python-sushy-doc/html/* diff --git a/debian/python3-sushy.install b/debian/python3-sushy.install new file mode 100644 index 0000000000000000000000000000000000000000..74e4e23b0176541176f4e5f591bcbe3c6b5e4cde --- /dev/null +++ b/debian/python3-sushy.install @@ -0,0 +1 @@ +/usr diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000000000000000000000000000000000000..71309edc090427bfa1cc6a6d200f2c70ab4e9fc8 --- /dev/null +++ b/debian/rules @@ -0,0 +1,34 @@ +#!/usr/bin/make -f + +UPSTREAM_GIT := https://github.com/openstack/sushy.git +include /usr/share/openstack-pkg-tools/pkgos.make + +%: + dh $@ --buildsystem=python_distutils --with python3,sphinxdoc + 
+override_dh_auto_clean: + echo "Do nothing..." + +override_dh_auto_build: + echo "Do nothing..." + +override_dh_auto_install: + for i in $(PYTHON3S) ; do \ + python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp ; \ + done +ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS))) + PYTHONPATH=$(CURDIR)/debian/tmp/usr/lib/python3/dist-packages pkgos-dh_auto_test --no-py2 +endif + +override_dh_auto_test: + echo "Do nothing..." + +override_dh_sphinxdoc: +ifeq (,$(findstring nodocs, $(DEB_BUILD_OPTIONS))) + PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source debian/python-sushy-doc/usr/share/doc/python-sushy-doc/html + dh_sphinxdoc -O--buildsystem=python_distutils +endif + +override_dh_clean: + dh_clean -O--buildsystem=python_distutils + rm -rf build diff --git a/debian/salsa-ci.yml b/debian/salsa-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..0c22dc4373420a05999d6f9bc2235f73fc0ffa14 --- /dev/null +++ b/debian/salsa-ci.yml @@ -0,0 +1,3 @@ +include: + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/salsa-ci.yml + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/pipeline-jobs.yml diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 0000000000000000000000000000000000000000..163aaf8d82b6c54f23c45f32895dbdfdcc27b047 --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 0000000000000000000000000000000000000000..cb61fa5267b6ad8b3bbc2a612754b79dae466292 --- /dev/null +++ b/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/debian/watch b/debian/watch new file mode 100644 index 0000000000000000000000000000000000000000..e08351ee55e3f619b3137bbf98e64fc7ab496daf --- /dev/null +++ b/debian/watch @@ -0,0 +1,3 @@ +version=3 +opts="uversionmangle=s/\.0rc/~rc/;s/\.0b1/~b1/;s/\.0b2/~b2/;s/\.0b3/~b3/" \ 
+https://github.com/openstack/sushy/tags .*/(\d[brc\d\.]+)\.tar\.gz diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5e3228c497b79c604d80b87334d09045303b8fe --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,4 @@ +reno>=3.1.0 # Apache-2.0 +sphinx>=2.0.0,!=2.1.0 # BSD +openstackdocstheme>=2.2.1 # Apache-2.0 +sphinxcontrib-apidoc>=0.2.0 # BSD diff --git a/doc/source/conf.py b/doc/source/conf.py index f0f25aafe08e3b53bbecd4361ba370af870b7922..d226d48d20b9eb846bb7dd0c68d95b9b0d913c8e 100755 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -20,7 +20,7 @@ sys.path.insert(0, os.path.abspath('../..')) # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ - 'sphinx.ext.autodoc', + 'sphinxcontrib.apidoc', #'sphinx.ext.intersphinx', 'openstackdocstheme' ] @@ -36,7 +36,6 @@ source_suffix = '.rst' master_doc = 'index' # General information about the project. -project = u'sushy' copyright = u'2016, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. @@ -47,7 +46,12 @@ add_function_parentheses = True add_module_names = True # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = 'native' + +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/sushy' +openstackdocs_use_storyboard = True +openstackdocs_pdf_link = True # -- Options for HTML output -------------------------------------------------- @@ -58,17 +62,33 @@ html_theme = 'openstackdocs' # html_static_path = ['static'] # Output file base name for HTML help builder. -htmlhelp_basename = '%sdoc' % project +htmlhelp_basename = 'sushydoc' + +# The openstackdocstheme 2.1.0 extension stopped always overriding latex_engine +# to 'xelatex'. 
We need the 'xelatex' engine in order to handle some Unicode +# characters we use in our feature classification matrix, like the "X" mark, so +# we specify it here. +latex_engine = 'xelatex' + +latex_use_xindy = False # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', - '%s.tex' % project, - u'%s Documentation' % project, + 'doc-sushy.tex', + u'Sushy Documentation', u'OpenStack Foundation', 'manual'), ] # Example configuration for intersphinx: refer to the Python standard library. #intersphinx_mapping = {'http://docs.python.org/': None} + +# -- sphinxcontrib.apidoc configuration -------------------------------------- + +apidoc_module_dir = '../../sushy' +apidoc_output_dir = 'reference/api' +apidoc_excluded_paths = [ + 'tests', +] diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst index bb0c76e1fa9536ad1ab63269dd04dcf2c05d3a76..94fb0105d6bbfd6bc972de2fd2c07c31651d9263 100644 --- a/doc/source/contributor/index.rst +++ b/doc/source/contributor/index.rst @@ -45,7 +45,7 @@ After the download, extract the files somewhere in the file-system:: unzip DSP2043_1.0.0.zip -d -Now run ``sushy-static`` pointing to those files. For example to serve +Now run ``sushy-static`` pointing to those files. For example to serve the ``DSP2043-server`` mockup files, run:: sushy-static --mockup-files /DSP2043-server @@ -82,7 +82,7 @@ use following command:: openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 Start the mockup server passing the ``--ssl-certificate`` and -``--ssl-key`` parameters to it to it, for example:: +``--ssl-key`` parameters to it, for example:: sushy-emulator --ssl-key key.pem --ssl-certificate cert.pem @@ -97,4 +97,4 @@ pointing to the certificate file when instantiating Sushy, for example: s = sushy.Sushy('https://localhost:8000', verify='cert.pem', username='foo', password='bar') .. 
_SSL: https://en.wikipedia.org/wiki/Secure_Sockets_Layer -.. _sushy-tools: https://git.openstack.org/cgit/openstack/sushy-tools +.. _sushy-tools: https://opendev.org/openstack/sushy-tools diff --git a/doc/source/index.rst b/doc/source/index.rst index 86810b032d18038e207cda6f9aa00d0da3ebd72e..e3bcc482f8ef811d49da328733c7589880275a95 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,5 +1,5 @@ .. sushy documentation master file, created by - sphinx-quickstart on Tue Jul 9 22:26:36 2013. + sphinx-quickstart on Tue Jul 9 22:26:36 2013. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. @@ -9,6 +9,24 @@ Welcome to Sushy's documentation! .. include:: ../../README.rst +Features +======== + +* Abstraction around the SystemCollection and System resources (Basic + server identification and asset information) +* RAID in Redfish based Systems +* Redfish Ethernet Interface +* System mappings +* System processor +* Storage management +* Systems power management (Both soft and hard; Including NMI injection) +* Changing systems boot device, frequency (Once or permanently) and mode + (UEFI or BIOS) +* Chassis management +* OEM extention +* Virtual media management +* Session Management + Documentation ============= diff --git a/doc/source/reference/index.rst b/doc/source/reference/index.rst index a0259edfe1ca67bc6f583e94336055eae2c9bf29..ba410c631ce4a8beaecf255843ffb8486861ca5a 100644 --- a/doc/source/reference/index.rst +++ b/doc/source/reference/index.rst @@ -2,36 +2,21 @@ Sushy Library Reference ======================= -Features -======== - -* Abstraction around the SystemCollection and System resources (Basic - server identification and asset information) -* Systems power management (Both soft and hard; Including NMI injection) -* Changing systems boot device, frequency (Once or permanently) and mode - (UEFI or BIOS) +Usage +===== .. 
toctree:: :maxdepth: 2 usage -Missing Features -================ - -These are some features that sushy is presently missing. - -* Collect sensor data (Health state, temperature, fans etc...) -* System inspection (Number of CPUs, memory and disk size) -* Serial console - Sushy Python API Reference ========================== * :ref:`modindex` -.. # api/autoindex is hidden since it's in the modindex link above. +.. # api/modules is hidden since it's in the modindex link above. .. toctree:: :hidden: - api/autoindex + api/modules diff --git a/doc/source/reference/usage.rst b/doc/source/reference/usage.rst index 827ce9da1c8368190323c7aea727059b4d5fee10..64e50be9868c66ec9d8f7a983e7f7ca2109e976a 100644 --- a/doc/source/reference/usage.rst +++ b/doc/source/reference/usage.rst @@ -5,6 +5,53 @@ Using Sushy To use sushy in a project: +----------------------------------------- +Specifying an authentication type +----------------------------------------- + +There are three authentication objects. By default we use SessionOrBasicAuth. +Authentication Modes: +auth.SessionOrBasicAuth: Use session based authentication. If we are unable +to create a session we will fallback to basic authentication. +auth.BasicAuth: Use basic authentication only. +auth.SessionAuth: Use session based authentication only. + +.. 
code-block:: python + + import logging + + import sushy + from sushy import auth + + # Enable logging at DEBUG level + LOG = logging.getLogger('sushy') + LOG.setLevel(logging.DEBUG) + LOG.addHandler(logging.StreamHandler()) + + basic_auth = auth.BasicAuth(username='foo', password='bar') + session_auth = auth.SessionAuth(username='foo', password='bar') + session_or_basic_auth = auth.SessionOrBasicAuth(username='foo', + password='bar') + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=basic_auth) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=session_auth) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + auth=session_or_basic_auth) + + # It is important to note that you can + # call sushy without supplying an + # authentication object. In that case we + # will use the SessionOrBasicAuth authentication + # object in an attempt to connect to all different + # types of redfish servers. + s = sushy.Sushy('http://localhost:8000/redfish/v1', + username='foo', + password='bar') + ---------------------------------------- Creating and using a sushy system object ---------------------------------------- @@ -47,6 +94,8 @@ Creating and using a sushy system object sys_inst = sys_col.get_member(sys_col.members_identities[0]) # Refresh the system collection object + # + # See below for more options on how to refresh resources. sys_col.refresh() @@ -59,9 +108,18 @@ Creating and using a sushy system object # Get a list of allowed reset values print(sys_inst.get_allowed_reset_system_values()) - # Refresh the system object + # Refresh the system object (with all its sub-resources) sys_inst.refresh() + # Alternatively, you can only refresh the resource if it is stale by passing + # force=False: + sys_inst.refresh(force=False) + + # A resource can be marked stale by calling invalidate. 
Note that its + # subresources won't be marked as stale, and thus they won't be refreshed by + # a call to refresh(force=False) + sys_inst.invalidate() + # Get the current power state print(sys_inst.power_state) @@ -126,6 +184,7 @@ Creating and using a sushy manager object mgr_inst = mgr_col.get_member(mgr_col.members_identities[0]) # Refresh the manager collection object + mgr_col.invalidate() mgr_col.refresh() @@ -147,10 +206,152 @@ Creating and using a sushy manager object # Reset the manager mgr_inst.reset_manager(sushy.RESET_MANAGER_FORCE_RESTART) - # Refresh the manager object - mgr_inst.refresh() + # Refresh the manager object (with all its sub-resources) + mgr_inst.refresh(force=True) + + + # Using Virtual Media + + # Instantiate a VirtualMediaCollection object + virtmedia_col = mgr_inst.virtual_media + + # Print the ID of the VirtualMedia available in the collection + print(virtmedia_col.members_identities) + + # Get a list of VirtualMedia objects available in the collection + virtmedia_insts = virtmedia_col.get_members() + + # Instantiate a VirtualMedia object + virtmedia_inst = virtmedia_col.get_member( + virtmedia_col.members_identities[0]) + + + # Print out some of the VirtualMedia properties + print(virtmedia_inst.name, + virtmedia_inst.media_types) + + # Insert virtual media (invalidates virtmedia_inst contents) + virtmedia_inst.insert_media('https://www.dmtf.org/freeImages/Sardine.img') + + # Refresh the resource to load actual contents + virtmedia_inst.refresh() + + # Print out some of the VirtualMedia properties + print(virtmedia_inst.image, + virtmedia_inst.image_path, + virtmedia_inst.inserted, + virtmedia_inst.write_protected) + + # ... Boot the system off the virtual media... + + # Eject virtual media (invalidates virtmedia_inst contents) + virtmedia_inst.eject_media() + + +------------------------------------------------- +Creating and using a sushy session service object +------------------------------------------------- + +.. 
code-block:: python + + import logging + + import sushy + + # Enable logging at DEBUG level + LOG = logging.getLogger('sushy') + LOG.setLevel(logging.DEBUG) + LOG.addHandler(logging.StreamHandler()) + + s = sushy.Sushy('http://localhost:8000/redfish/v1', + username='foo', password='bar') + + # Instantiate a SessionService object + sess_serv = s.get_session_service() + + # Get SessionCollection + sess_col = sess_serv.sessions + + # Print the ID of the sessions available in the collection + print(sess_col.members_identities) + + # Get a list of systems objects available in the collection + sess_col_insts = sess_col.get_members() + + # Instantiate a session object, same as getting it directly + sess_inst = sess_col.get_member(sess_col.members_identities[0]) + # Getting it directly + sess_inst = s.get_session(sess_col.members_identities[0]) + + # Delete the session + sess_inst.delete() + + # Create a new session + session_key, session_id = sess_serv.create_session( + username='foo', password='bar') + + # Delete a session + sess_serv.close_session(sess_col.members_identities[0]) + + +-------------------- +Using OEM extensions +-------------------- + +Before running this example, please make sure you have a Redfish BMC that +includes the OEM piece for a specific vendor, as well as the Sushy OEM +extension package installed in the system for the same vendor. + +You can check the presence of the OEM extension within each Redfish +resource by specifying the vendor ID and search for them. + +In the following example, we are looking up "Acme" vendor extension to Redfish +Manager resource. + +.. 
code-block:: python + + import sushy + + root = sushy.Sushy('http://localhost:8000/redfish/v1') + + # Instantiate a system object + system = root.get_system('/redfish/v1/Systems/437XR1138R2') + + print('Working on system resource %s' % system.identity) + + for manager in system.managers: + + print('Using System manager %s' % manager.identity) + + # Get a list of OEM extension names for the system manager + oem_vendors = manager.oem_vendors + + print('Listing OEM extension name(s) for the System ' + 'manager %s' % manager.identity ) + + print(*oem_vendors, sep="\n") + + try: + manager_oem = manager.get_oem_extension('Acme') + + except sushy.exceptions.OEMExtensionNotFoundError: + print('ERROR: Acme OEM extension not found in ' + 'Manager %s' % manager.identity) + continue + + print('%s is an OEM extension of Manager %s' + % (manager_oem.get_extension(), manager.identity)) + + # set boot device to a virtual media device image + manager_oem.set_virtual_boot_device(sushy.VIRTUAL_MEDIA_CD, + manager=manager) If you do not have any real baremetal machine that supports the Redfish protocol you can look at the :ref:`contributing` page to learn how to run a Redfish emulator. + +For the OEM extension example, presently, both of the emulators +(static/dynamic) do not expose any OEM; as a result, users may need to add +manually some OEM resources to emulators' templates. It may be easier to +start with a static emulator. 
diff --git a/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d49dfef69c69857ce8a92daae209f2a95dccbc32 --- /dev/null +++ b/releasenotes/notes/add-apply-time-support-to-bios-315ebad429dcab3d.yaml @@ -0,0 +1,12 @@ +--- +features: + - | + Adds support for ``bios`` resource to allow specifying BIOS attribute + update time and maintenance window when updating BIOS attributes using + ``set_attribute`` or ``set_attributes``. + + The update is backward compatible and when new parameters not passed, they + default to ``None``. + + Also adds ``maintenance_window`` for ``bios`` resource to expose default + maintenance window set by the system if any. diff --git a/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml b/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d82ef150d8c87d0fcb0aafb07cef474e345b613a --- /dev/null +++ b/releasenotes/notes/add-bios-bf69ac56c4ae8f50.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds support for the BIOS resource to the library. diff --git a/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml b/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml new file mode 100644 index 0000000000000000000000000000000000000000..72f7f79cd86aa829cb892ee4255535e40550d529 --- /dev/null +++ b/releasenotes/notes/add-bios-update-status-cc59816c374b78e4.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + ``Bios`` resource introduces ``update_status`` property that exposes + the status and any errors of last BIOS attribute update. 
diff --git a/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml b/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..19084e9e8d85727359b5ed065840c931862afb38 --- /dev/null +++ b/releasenotes/notes/add-chassis-linkage-d8e567f9c791169d.yaml @@ -0,0 +1,7 @@ +--- +features: + - | + Establishes linkage between Chassis and ComputerSystem/Managers + resources as references at sushy data abstraction level. That + makes it possible to look up Chassis by Manager/ComputerSystem or + any other way around. diff --git a/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml b/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7852e7c16b8ee7afc4255b485e7c6ad5edb0c531 --- /dev/null +++ b/releasenotes/notes/add-chassis-support-5b97daffe1c61a2b.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for the Chassis resource to the library. + diff --git a/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml b/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml new file mode 100644 index 0000000000000000000000000000000000000000..881de7c7c8ca71504f9aaa3720b9e2c5dbb063f3 --- /dev/null +++ b/releasenotes/notes/add-custom-connector-support-0a49c6649d5f7eaf.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the ability to specify user-defined connector object on creation + of a root Sushy instance. 
diff --git a/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml b/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6aa2a8b9cd21c38e157993cf329985b8cc85df53 --- /dev/null +++ b/releasenotes/notes/add-default-identity-10c5dd23bed0e915.yaml @@ -0,0 +1,10 @@ +--- +features: + - | + The ``get_system``, ``get_manager`` and ``get_chassis`` methods modified + not to require the ``identity`` parameter referring to a particular + resource instance. If ``identity`` is omitted, sushy will default to the + only available resource for as long as it's single and therefore + deterministic. + The intent is to simplify user API by not requiring the consumer to + discover available resources prior to requesting one. diff --git a/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml b/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml new file mode 100644 index 0000000000000000000000000000000000000000..af31939f9fe0ac37f15e1ab4e1886abf672c33cb --- /dev/null +++ b/releasenotes/notes/add-drive-led-97b687013fec88c9.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds the ``IndicatorLED`` property to the ``Drive`` resource. The state of + the LED can be read and can be changed via the ``.set_indicator_led()`` + method of the ``Drive`` sushy class. diff --git a/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml b/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fa752fcb0ef5fec3bab99a5ae168cd0321105854 --- /dev/null +++ b/releasenotes/notes/add-endpoint-subresource-to-fabric-b03e5fd99ece1bf4.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Exposes the ``endpoint`` sub-resource from the ``fabric`` resource. + ``endpoint`` represents the properties of an entity that sends or receives + protocol defined messages over a transport. 
diff --git a/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml b/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f9f129829c7d47ceb162f725cbf6809f49b77099 --- /dev/null +++ b/releasenotes/notes/add-fabric-support-1520f7fcb0e12539.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for the Fabric resource to the library. + diff --git a/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml b/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fbeb59105bd392cb1327634e49dbc8b68f7cec4c --- /dev/null +++ b/releasenotes/notes/add-initial-redfish-oem-extension-support-50c9849bb7b6b25c.yaml @@ -0,0 +1,13 @@ +--- +features: + - | + Adds foundation for supporting resource extensibility proposed as + OEM extensibility in Redfish specification [1] to the library. + + * Provides an attribute 'oem_vendors' in Resource classes to + discover the available OEM extensions. + * Provides a method 'get_oem_extension()' in Resource classes + to get the vendor defined resource OEM extension object, if + discovered. + + [1] http://redfish.dmtf.org/schemas/DSP0266_1.1.html#resource-extensibility diff --git a/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml b/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2006fad1b540e78d48c8b39914568b9b9799e609 --- /dev/null +++ b/releasenotes/notes/add-mapped-list-field-04c671f7a73d83f6.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds a new field called ``MappedListField`` which supports a list of + mapped instances. 
diff --git a/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml b/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml new file mode 100644 index 0000000000000000000000000000000000000000..12d81380a1d8c5cd63d25a919af1c693726aa05e --- /dev/null +++ b/releasenotes/notes/add-odata-version-header-96dc8179c0e2e9bd.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Improve interoperability by including the recommended OData-Version + header in outgoing Redfish requests. diff --git a/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml b/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b7e8227b123a7c5597f05175671b74b5bfa3a878 --- /dev/null +++ b/releasenotes/notes/add-partial-key-match-27bed73d577b1187.yaml @@ -0,0 +1,9 @@ +--- +features: + - | + Adds the ability to conditionally match sushy fields against received JSON + object. The conditional matching is performed by a user-supplied callable + which gets the key to consider (along with the value and potentially other + details) and should indicate to the caller if the match occurred. + The motivation behind this change is to accommodate malformed Redfish + resource properties as observed in the OEM wilderness. diff --git a/releasenotes/notes/add-power-resource-e141ddf298673305.yaml b/releasenotes/notes/add-power-resource-e141ddf298673305.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ef9070afd5556ee5acac252023be691d1508b5c4 --- /dev/null +++ b/releasenotes/notes/add-power-resource-e141ddf298673305.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the Power resource to the Library. 
\ No newline at end of file diff --git a/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml b/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml new file mode 100644 index 0000000000000000000000000000000000000000..76cdb0eccdaa7af387e2a964551c2eb1d28b1cb9 --- /dev/null +++ b/releasenotes/notes/add-processor-id-and-status-b81d4c6e6c14c25f.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the processor status and id fields to the ``Processor`` class. diff --git a/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml b/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7ebf2ee57f16461059df7ddd6cfa05dcc8545f87 --- /dev/null +++ b/releasenotes/notes/add-raid-type-properties-2090da5bea37c660.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Add RAIDType property to the Volume resource and SupportedRAIDTypes + property to the Storage resource. diff --git a/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml b/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b1d80d1405a930eae931184fd715b7e7b471cc7a --- /dev/null +++ b/releasenotes/notes/add-response-cb-65d448ee2690d0b2.yaml @@ -0,0 +1,7 @@ +--- +features: + - | + Adds optional ``response_callback`` parameter to ``Connector`` class + that can be used by the application to receive vanilla HTTP messages + in the course of running Redfish call. The intention is to facilitate + Redfish exchange debugging. diff --git a/releasenotes/notes/add-simple-storage-915464811737bb05.yaml b/releasenotes/notes/add-simple-storage-915464811737bb05.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b98823bc241ab682ca28a9a6a3ad8fdb95ccfc2c --- /dev/null +++ b/releasenotes/notes/add-simple-storage-915464811737bb05.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the "SimpleStorage" to the library. 
It also provides the max size + available (in bytes) among all its directly attached devices. diff --git a/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml b/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4af67c99152e457958f3d02848c1f7c9462e8328 --- /dev/null +++ b/releasenotes/notes/add-storage-and-simple-storage-attributes-to-system-16e81f9b15b1897d.yaml @@ -0,0 +1,12 @@ +--- +features: + - | + Exposes the ``simple_storage`` and ``storage`` properties from system + resource in sushy. + + * ``simple_storage`` property indicates a collection of storage + controllers and their directly-attached devices associated with the + system. + * ``storage`` property refers to a collection of storage subsystem + associated with system. Resources such as drives and volumes can be + accessed from that subsystem. diff --git a/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml b/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8bf426f176f40db42350ddac3a08b87866910be2 --- /dev/null +++ b/releasenotes/notes/add-storage-da766d3dbf9fb385.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds the Storage resource to the library. It also provides the + max size available (in bytes) of drives and volumes that can be + accessed from storage. 
diff --git a/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml b/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml new file mode 100644 index 0000000000000000000000000000000000000000..69e0c003dfc51f06ad2b968ac557f104b0012dea --- /dev/null +++ b/releasenotes/notes/add-system-manager-linkage-86be69c9df4cb359.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Establishes ComputerSystem->Managers and Manager->ComputerSystems + references at sushy data abstraction level, which makes it possible to + look up Manager(s) responsible for a ComputerSystem and vice versa. diff --git a/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml b/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c18c7b0079ab3e7f61463275ecbfc8b07d1753e7 --- /dev/null +++ b/releasenotes/notes/add-system-status-field-41b3f2a8c4b85f38.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the system status field to show the system status. diff --git a/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml b/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e16704b2023a1cc95fc7606e73db024e86a1da4d --- /dev/null +++ b/releasenotes/notes/add-system-type-mapping-bf456c5c15a90877.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds mappings and constants for possible values of System Type in System + resource. This represents the type of the computer system. 
diff --git a/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml b/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml new file mode 100644 index 0000000000000000000000000000000000000000..557b5a21d6b60dc0cee924ce4b27fc1c1881cf2d --- /dev/null +++ b/releasenotes/notes/add-task-monitor-support-21f711927ad6ec91.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Add support for a Task Monitor resource to be able to monitor the state + of asynchronous operations. diff --git a/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml b/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml new file mode 100644 index 0000000000000000000000000000000000000000..68a697f108fd5c1c31a846acb1b9dc61b25e5062 --- /dev/null +++ b/releasenotes/notes/add-task-service-c751ce51e0b8dc11.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds initial support for the TaskService resource to the library. + `TaskService` is responsible for managing tasks. \ No newline at end of file diff --git a/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml b/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1f69131fa5742ab3b3b5f776c9c809e97da71ce8 --- /dev/null +++ b/releasenotes/notes/add-thermal-resource-5c965a3c940f9028.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds the Thermal resource to the Library. \ No newline at end of file diff --git a/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml b/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml new file mode 100644 index 0000000000000000000000000000000000000000..88ecfafc8cb5f9800a9bcfe23175ad1bfae61dcd --- /dev/null +++ b/releasenotes/notes/add-virtual-media-support-f522fbec4420341c.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Adds support for the virtual media resource to the library. 
diff --git a/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml b/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c6585cb2fe5e3a692552498093d4ddbb2ba837e6 --- /dev/null +++ b/releasenotes/notes/add_composition_service-84750d8d1d96474a.yaml @@ -0,0 +1,8 @@ +--- +features: + - | + Adds support for the CompositionService resource to the library. + + The `CompositionService` is the top level resource for all things + related to Composability. If a Redfish service supports Composability, + the Service Root resource will contain the `CompositionService` property. diff --git a/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml b/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6eb0328365b9ca950fdf77e1d9c5be7ef5be7ae9 --- /dev/null +++ b/releasenotes/notes/add_ethernet_interface-df308f814f0e4bce.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds the "EthernetInterfaces" to the library. + It also returns the list of connected MACs. diff --git a/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml b/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9b29dfed5dada9b1fd2541e9ce9609357f84a483 --- /dev/null +++ b/releasenotes/notes/add_keyword_argument_for_connector-cea5dc4e6c01b548.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds functionality to pass different requests library session + arguments to sushy connector. 
diff --git a/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml b/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml new file mode 100644 index 0000000000000000000000000000000000000000..01f2c22650cc1e458b7c5bfa00e762325f277ade --- /dev/null +++ b/releasenotes/notes/add_product_and_protocol_features_supported-59de3f89b7382434.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds `Product` and `ProductFeaturesSupported` properties support to + the Redfish `Root Service` diff --git a/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml b/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e9dd6620d981619eb2ea6641ca21343310a31682 --- /dev/null +++ b/releasenotes/notes/add_update_service-b54c9bb0177e3468.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for the UpdateService resource to the library. + `UpdateService` is responsible for managing firmware updates. diff --git a/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml b/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e211d78893571b3e95fd6589a29bbdd38c2de3c8 --- /dev/null +++ b/releasenotes/notes/apply-time-support-for-volume-ops-f2ebc412e3b4290a.yaml @@ -0,0 +1,19 @@ +--- +deprecations: + - | + The ``supported_values`` property in the + ``OperationApplyTimeSupportField`` class is deprecated. Use the + ``mapped_supported_values`` property instead. The + ``mapped_supported_values`` property uses the ``MappedListField`` type + to map the Redfish schema-defined enumeration values to constants exposed + by the Sushy package. 
+features: + - | + Update the ``create_volume`` method in the ``VolumeCollection`` class and + the ``delete_volume`` and ``initialize_volume`` methods in the ``Volume`` + class to take optional ``apply_time`` and ``timeout`` keyword parameters. + This allows the caller of those volume methods to specify a preferred + ``OperationApplyTime`` annotation and a maximum timeout for synchronous + operations. For asynchronous operations, those three methods will now + return a ``TaskMonitor`` instance that the caller can use to monitor the + state of the task. diff --git a/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml b/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e8e6f8a83147e8989123ae8fbf9f5cdd771db332 --- /dev/null +++ b/releasenotes/notes/bug-1754514-ca6ebe16c4e4b3b0.yaml @@ -0,0 +1,5 @@ +--- +critical: + - | + Fixes authentication failure when SessionService attribute is + not present in the root resource. diff --git a/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml b/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7a5a502637976877a1ba88cc1e99f3becc86c4ab --- /dev/null +++ b/releasenotes/notes/decouple-boot-params-c75e80f5951abb12.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Adds a new ``set_system_boot_options`` method to the ``System`` object + superseding the ``set_system_boot_source`` method. The new method has + all boot parameters optional to allow for more atomicity when PATCH'ing + Redfish ``Boot`` object. The new method will only include those items in + the PATCH document, that are explicitly passed by the user. This change + might improve interoperability with BMCs that do not handle certain + attributes of the ``Boot`` object. 
diff --git a/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml b/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1cdef5d178ddd67e2c8bd81c533124823570e861 --- /dev/null +++ b/releasenotes/notes/deprecate-system-leds-f1a72422c53d281e.yaml @@ -0,0 +1,9 @@ +--- +deprecations: + - | + Deprecates system-specific indicator LEDs as redundant. + The ``SYSTEM_INDICATOR_LED_LIT``, ``SYSTEM_INDICATOR_LED_BLINKING``, + ``SYSTEM_INDICATOR_LED_OFF`` and ``SYSTEM_INDICATOR_LED_UNKNOWN`` constants + should not be used. Generic indicator LED constants should be used instead. + Those are ``INDICATOR_LED_LIT``, ``INDICATOR_LED_BLINKING``, + ``INDICATOR_LED_OFF`` and ``INDICATOR_LED_UNKNOWN`` respectively. diff --git a/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml b/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9dadcfd34f01e6c7476dc52bcd08c58bc3063a28 --- /dev/null +++ b/releasenotes/notes/disable-conn-pooling-3456782afe56ac94.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Disable HTTP connection pooling by asking HTTP server to close our + connection right upon use. The rationale is that some BMC observed in + the wild seem to close persistent connections abruptly upon eventual + re-use failing completely unrelated operation. So in ``sushy`` we + just try not to maintain persistent connections with BMC at all. diff --git a/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml b/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml new file mode 100644 index 0000000000000000000000000000000000000000..95bb1cec9dfbf3a803994771d84b40da4ed4a7cc --- /dev/null +++ b/releasenotes/notes/drop-py-2-7-cc931c210ce08e33.yaml @@ -0,0 +1,5 @@ +upgrade: + - | + Python 2.7 support has been dropped. Last release of sushy + to support Python 2.7 is OpenStack Train. 
The minimum version of Python now + supported by sushy is Python 3.6. diff --git a/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml b/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml new file mode 100644 index 0000000000000000000000000000000000000000..39982de12a168cf1f177b5d998e142b2891f6a1a --- /dev/null +++ b/releasenotes/notes/enhance-oem-extension-design-3143717e710b3eaf.yaml @@ -0,0 +1,11 @@ +--- +upgrade: + - | + OEM resource class hierarchy has been redesigned to allow for non-terminal + sub-resources (e.g. Links) to be handled within OEM resource model. As a + consequence, backward compatibility with previously existing OEM extension + framework (anything based on ``OEMExtensionResourceBase`` class) is not + preserved. User OEM code migration would involve switching from + ``OEMExtensionResourceBase`` to ``OEMResourceBase`` (note ``__init__`` + call signature change) and replacing ``OEMField``-based classes with their + generic sushy ``Field`` counterparts. diff --git a/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml b/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d3194cbacfa09ca76a7e2989c8fb89659642a1d2 --- /dev/null +++ b/releasenotes/notes/enhance-storage-volume-drive-support-16314d30f3631fb3.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Update the Storage, Volume, and Drive models to support RAID + configuration management. 
diff --git a/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml b/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml new file mode 100644 index 0000000000000000000000000000000000000000..14ded88cacaead9aef6494cd9ed4fa0db716b546 --- /dev/null +++ b/releasenotes/notes/expand-drive-schema-042901f919be646c.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds ``CapacityBytes``, ``Manufacturer``, ``Model``, ``PartNumber``, + ``SerialNumber`` and ``Status`` properties to the ``Drive`` resource. \ No newline at end of file diff --git a/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml b/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b7df556387f0cac47b1ef113b5f373add9ade0b4 --- /dev/null +++ b/releasenotes/notes/fix-2008198-bios-factory-reset-400-bad-request-3f4a7a2aada0835b.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Fixes an issue in performing action ``#Bios.ResetBios`` when no body in + POST request provided and BMC responds with HTTP 400 Bad request, for + example, Dell R630 having iDRAC 2.75.75.75. See `story 2008198 + `__ for details. diff --git a/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml b/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml new file mode 100644 index 0000000000000000000000000000000000000000..99c207d60924ccab66ae64d7b24439934eee39e5 --- /dev/null +++ b/releasenotes/notes/fix-eject-media-empty-dict-573b4c9e06f52ce7.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Some vendors, such as HPE iLO, require an empty dictionary to be passed + when ejecting virtual media; otherwise an Unsupported media type error + is thrown. 
diff --git a/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml b/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8ee28a4f22c8fb169607142ee1b9d2898e3b9b86 --- /dev/null +++ b/releasenotes/notes/fix-extended-info-error-handling-73fecb6bf5c852ff.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Fixes ``AttributeError: 'str' object has no attribute 'get'`` during error + handling. This occurs when BMC does not return a list of messages inside + ``@Message.ExtendedInfo``, but a single item. This has been observed with + iDRAC. diff --git a/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml b/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c3aa85edb193002e2453a4e3b2dc1f7b6a1418de --- /dev/null +++ b/releasenotes/notes/fix-malformed-boot-mode-1ba1117cad8dcc47.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes malformed value of the ``BootSourceOverrideMode`` element which goes + against the Redfish schema and causes some of the boot mode calls to + fail. diff --git a/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml b/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml new file mode 100644 index 0000000000000000000000000000000000000000..870bd0e5cbfdb2202e8162aa3909c2853360479f --- /dev/null +++ b/releasenotes/notes/fix-manager-action-d71fd415cea29aa6.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Makes ``Manager->Actions`` field optional as Redfish Manager schema + defines it. Otherwise sushy fails hard at parsing response from a + Redfish agent that does not include ``Actions`` field in its document + tree. 
diff --git a/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml b/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8050478cac2f2830f1b1e15c28a11a8f71ca5ade --- /dev/null +++ b/releasenotes/notes/fix-oem-loading-52da045252b6c33e.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes Sushy OEM extension loading when using multiple servers that + caused loaded extensions to point to server for which the extension + was loaded first. diff --git a/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f09eeac4d9c00f8f606ec8df2fa6e1d01ff63b61 --- /dev/null +++ b/releasenotes/notes/fix-refine-resource-refresh-86c21ce230967251.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + New ``force`` argument to the ``refresh`` method on resources can be set to + ``False`` to prevent refreshing of resources that are not stale. Resources + can be marked as stale by calling a new ``invalidate`` method. diff --git a/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml b/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml new file mode 100644 index 0000000000000000000000000000000000000000..591760913971d643404698b0350b44e9e419f917 --- /dev/null +++ b/releasenotes/notes/fix-simple-storage-device-capacity-bytes-null-0672eed36d9da70a.yaml @@ -0,0 +1,11 @@ +--- +fixes: + - | + Fixes bug in ``SimpleStorageCollection.disks_sizes_bytes`` which assumed + the type of a disk's ``CapacityBytes`` property is ``integer``. According + to the Distributed Management Task Force (DMTF) Redfish standard schema + [1], it can be ``null``, which is converted to ``None`` in Python. For + more information, see `story 2006918 + `_. 
+ + [1] https://redfish.dmtf.org/schemas/SimpleStorage.v1_2_3.json \ No newline at end of file diff --git a/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml b/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml new file mode 100644 index 0000000000000000000000000000000000000000..03262c2a2ed01f49802beb29fd4a0868b7d3bd44 --- /dev/null +++ b/releasenotes/notes/fix-simple-update-e88838fab4170920.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Fixes bug in ``UpdateService.simple_update`` method making it operational. diff --git a/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml b/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml new file mode 100644 index 0000000000000000000000000000000000000000..12ff8a108efd5ad3bed33af2f8b0cce4e37c6672 --- /dev/null +++ b/releasenotes/notes/fix-software-firmware-inventory-3e0e79e052aa76d9.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Fixes bugs in the ``UpdateService.software_inventory`` and + ``UpdateService.firmware_inventory`` properties making them operational. diff --git a/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml b/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml new file mode 100644 index 0000000000000000000000000000000000000000..765dcbb47013cc4f146f6f03dab7b2018ba0a69a --- /dev/null +++ b/releasenotes/notes/fix-taskmonitor-init-calls-in-volume-module-0f8a747acd0cfe3f.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Fixes issues in the ``volume`` module where the first parameter passed to + the ``TaskMonitor`` constructor was incorrect. The parameter passed was + the resource object (self), but it should have been the connector object + (self._conn). This affected the ``create_volume()`` and + ``delete_volume()`` methods. 
diff --git a/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml b/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml new file mode 100644 index 0000000000000000000000000000000000000000..df9fbfd569e1e5f6a6de1d96c27fa286da9c77ac --- /dev/null +++ b/releasenotes/notes/fix-to-close-session-on-dealloc-c3687d4dcb1441b8.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Tries to terminate authenticated Redfish session at BMC Session Service on + the event of ``Sushy`` object deallocation. This should reduce the chance + of authenticated sessions pool exhaustion at some BMCs. diff --git a/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml b/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9d1a5ab6dccd318ab0324217ec32e4b882276c47 --- /dev/null +++ b/releasenotes/notes/fix-update-service-constants-b8c3f48ccee6ce1f.yaml @@ -0,0 +1,9 @@ +--- +fixes: + - | + The ``transfer_protocol`` parameter of the ``UpdateService.simple_update`` + method should be given one of the newly exposed constants rather than a + string literal. This is a breaking change. +features: + - | + Exposes ``UpdateService`` constants to ``sushy`` namespace. \ No newline at end of file diff --git a/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml b/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml new file mode 100644 index 0000000000000000000000000000000000000000..caf129c579b61eb5695b1f714052635a0340030a --- /dev/null +++ b/releasenotes/notes/fix-virtual-media-fallback-15a559414a65c014.yaml @@ -0,0 +1,7 @@ +--- +fixes: + - | + Adds a fallback for inserting and ejecting virtual media + using the PATCH HTTP request instead of the explicit action URIs. + The fallback is required for Lenovo ThinkSystem machines (i.e. SD530, ..) + that only implement the PATCH method. 
\ No newline at end of file diff --git a/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml b/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml new file mode 100644 index 0000000000000000000000000000000000000000..077a09b5d6d799f394d973990344596ddb6a31ad --- /dev/null +++ b/releasenotes/notes/fix-volume-actions-not-required-730fd637dd2587ce.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + The ``Actions`` field in the ``Volume`` resource was incorrectly specified + as being required. This fix makes the field optional. diff --git a/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml b/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e12938810f467227c0ac8e3dc1e1b5d8c15054d0 --- /dev/null +++ b/releasenotes/notes/fixes-ilo5-redfish-firmware-update-issue-273862b2a11e3536.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + Adding a new attribute task_uri to monitor Redfish firmware + update since some vendors (iLO) do not provide an appropriate + response with the task_monitor URI. diff --git a/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml b/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6d820a1d0e9feafb4069e90799f6021cde5f1c4b --- /dev/null +++ b/releasenotes/notes/get-retry-9ca311caf8a0b7bb.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Automatically retries internal server errors from GET requests. 
diff --git a/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml b/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f4a4c1bd638c374a4f1a051e6854b6987afffb7b --- /dev/null +++ b/releasenotes/notes/health_literals_change-0e3fc0c439b765e3.yaml @@ -0,0 +1,7 @@ +--- +other: + - | + Changes the values for the constants ``HEALTH_STATE_ENABLED``, + ``HEALTH_STATE_DISABLED``, ``HEALTH_OK``, ``HEALTH_WARNING`` + and ``HEALTH_CRITICAL``. These could be correctly used + with their mapped values in mappings.py. diff --git a/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml b/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e5b6c82876c57d2b5589c7d8c98db384f4eddf05 --- /dev/null +++ b/releasenotes/notes/indicator-led-mappings-e7b34da03f6abb06.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds mappings and constants for possible values of the Indicator LED + value in the System class. diff --git a/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml b/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..06211935f29920fb04302ac5ec5435467b7d33d5 --- /dev/null +++ b/releasenotes/notes/lazily-load-registries-0e9441e435c2471d.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Postpones (potentially very large) Redfish message registries download and + processing up to the first access by the client. The goal is to reduce + the amount of unnecessary traffic and CPU cycles. 
diff --git a/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml b/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml new file mode 100644 index 0000000000000000000000000000000000000000..846f75eb958c108471cc11fd5d4b93838cda5710 --- /dev/null +++ b/releasenotes/notes/make-leds-settable-c82cb513de0171f5.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + The ``IndicatorLED`` property of ``System`` and ``Chassis`` resources + made settable with the introduction of the ``.set_indicator_led()`` + method to the respective sushy classes. diff --git a/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml b/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..40b9e8cd46d9d2725e6aa216be09afcff567953f --- /dev/null +++ b/releasenotes/notes/make-volume-ops-blocking-de5c2ae032041d5d.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Make POST and DELETE operations in Volume and VolumeCollection blocking. diff --git a/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml new file mode 100644 index 0000000000000000000000000000000000000000..408938eeee19f79749980b0e4335b52cf93d1474 --- /dev/null +++ b/releasenotes/notes/message-parsing-resilience-534da532515a15da.yaml @@ -0,0 +1,10 @@ +--- +fixes: + - | + Makes message parsing more resilient by handling the case where the message + ID only contains a message key and no registry name. In this case, fall + back to the ``Messages`` message registry file and then to the + ``BaseMessages`` message registry file. If the message ID cannot be found, + then set the message to ``unknown``. When parsing messages, if not enough + arguments were supplied, then fill in the remaining arguments with + ``unknown``. 
diff --git a/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml b/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0af0eaffad8e65cf45a1bdd5591c7f371ebe0019 --- /dev/null +++ b/releasenotes/notes/monitor_firmware_update-664b0c6c1a0307cf.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Added the ability to monitor the progress of a firmware update by changing + the ``simple_update`` operation to return a task monitor object. diff --git a/releasenotes/notes/no-passwords-295207ac891d27ab.yaml b/releasenotes/notes/no-passwords-295207ac891d27ab.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d53e5a562d7be0c869936bc741771460ecf027a4 --- /dev/null +++ b/releasenotes/notes/no-passwords-295207ac891d27ab.yaml @@ -0,0 +1,5 @@ +--- +security: + - | + No longer logs passwords and auth tokens in DEBUG mode when using + SessionService for authentication. diff --git a/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml b/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ac1bc5e13ac934d17a34bafc982fd7cd196c1988 --- /dev/null +++ b/releasenotes/notes/raise-error-on-async-task-failure-b67c7bc189a4d6ca.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Fixes an issue in the ``Connector`` class where an exception is not raised + when an asynchronous operation fails. diff --git a/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml b/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml new file mode 100644 index 0000000000000000000000000000000000000000..12604f0604b5aa2fe8bfd942031e5f275be5fa47 --- /dev/null +++ b/releasenotes/notes/redfish-response-log-294f3f10b770e356.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Reduce the logging from sushy by logging only attributes and values + set in the redfish response, not the entire json. 
diff --git a/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml b/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml new file mode 100644 index 0000000000000000000000000000000000000000..422c9e4f9c9cb214496bd4298d8c2ef13c5e4943 --- /dev/null +++ b/releasenotes/notes/refactor-taskmonitor-update-volume-ba99380188395852.yaml @@ -0,0 +1,38 @@ +--- +features: + - | + Adds new method ``get_task_monitor`` to retrieve TaskMonitor instance by + task monitor URI. +deprecations: + - | + Existing two ``TaskMonitor``-s are deprecated and replaced with one + ``taskmonitor.TaskMonitor``. + + For ``resources.task_monitor.TaskMonitor`` users changes include: + + * ``in_progress`` is replaced with method ``check_is_processing`` + * ``location_header`` is replaced with method ``task_monitor_uri`` + * there is no replacement for ``set_retry_after``, + ``taskmonitor.TaskMonitor`` sets this internally from Retry-After + header + + For ``resources.taskservice.taskmonitor.TaskMonitor`` users changes + include: + + * ``check_is_processing``, ``sleep_for`` and static ``get_task_monitor`` + added. + * in ``__init__`` parameter ``field_data`` is deprecated, use ``response`` + * in ``__init__`` parameter ``task_monitor`` is renamed to + ``task_monitor_uri`` + * ``task_monitor`` is deprecated, use ``task_monitor_uri`` + * ``retry_after`` is deprecated, use ``sleep_for`` + + Methods ``create_volume``, ``delete_volume``, ``initialize_volume`` in + volume module are deprecated and replaced with ones named ``create``, + ``delete`` and ``initialize``. New methods for asynchronous operations + return ``taskmonitor.TaskMonitor`` instead of + deprecated ``resources.task_monitor.TaskMonitor``. + + Method ``resources.updateservice.UpdateService.get_task_monitor`` is + deprecated, use ``Sushy.get_task_monitor`` instead. 
+ diff --git a/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml b/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2ec653d8dc4e980846ef772ac88c6668c5026513 --- /dev/null +++ b/releasenotes/notes/secure-boot-76c5b80371ea85d1.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for UEFI secure boot: reading the current status, enabling or + disabling secure boot, resetting keys. diff --git a/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml b/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6b60f94a80600de25c57ee6593a37d1aa4f0ab35 --- /dev/null +++ b/releasenotes/notes/secure-boot-database-7fae673722d7cf4f.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + Adds support for fetching and resetting individual UEFI secure boot + databases. diff --git a/releasenotes/notes/sessions.yml b/releasenotes/notes/sessions.yml new file mode 100644 index 0000000000000000000000000000000000000000..294ab47d5164d5abcbe0d26db7a12941009f98ec --- /dev/null +++ b/releasenotes/notes/sessions.yml @@ -0,0 +1,6 @@ +--- +features: + - | + Adds "SessionService" and "Sessions" to the library. + - | + Adds the ability to specify authentication type on creation of root sushy object. 
diff --git a/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml b/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4d3c25fd030226bb0606a0ce3f5ec8abaf69a2e3 --- /dev/null +++ b/releasenotes/notes/standard-registry-license-0ded489afd6cfad1.yaml @@ -0,0 +1,6 @@ +--- +other: + - | + Includes Redfish standard message registry files that are licensed + under Creative Commons Attribution 4.0 License: + https://creativecommons.org/licenses/by/4.0/ diff --git a/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml b/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e306bdff4e5c3b17b0797dd19c9322828ce02423 --- /dev/null +++ b/releasenotes/notes/story-2006246-reset-bios-return-http-error-415-08170df7fe6300f8.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Fixes an issue in performing action ``#Bios.ResetBios`` when BMC expects + the POST request with empty body instead of no body. See `story 2006246 + <https://storyboard.openstack.org/#!/story/2006246>`__ for details. diff --git a/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml b/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml new file mode 100644 index 0000000000000000000000000000000000000000..905fa8a2daee65916da4fe06ad1f41aec806cf26 --- /dev/null +++ b/releasenotes/notes/story-2007216-fix-to-message-registry-cff37659f03ba815.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Handles incomplete messages in MessageRegistry that are missing fields + such as 'Description' and 'Severity'. See story + `2007216 <https://storyboard.openstack.org/#!/story/2007216>`_ for more + information. 
+ diff --git a/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml b/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ec74b8285d5cfec76bb102ee19b4cbe52cf61b20 --- /dev/null +++ b/releasenotes/notes/update-apply-time-support-53c5445b58cd3b42.yaml @@ -0,0 +1,13 @@ +--- +features: + - | + Update sushy models to support the Redfish SettingsApplyTime and + OperationApplyTimeSupport annotations. +deprecations: + - | + The ``operation_apply_time_support`` and ``maintenance_window`` + properties in the ``SettingsField`` class are deprecated. The + ``SettingsField`` class represents the ``@Redfish.Settings`` + annotation and those properties cannot appear within this + annotation. Instead use the ``apply_time_settings`` property + in the target resource (e.g. ``Bios`` resource). diff --git a/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml b/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6ca3eb30055f4fcf63ea5a745506596687fdeb48 --- /dev/null +++ b/releasenotes/notes/update_sushy_models-9b8ea0350eb4d4d0.yaml @@ -0,0 +1,7 @@ +features: + - | + Unifies sushy models by Redfish schema bundle. + + These changes introduce and update currently implemented + sushy models to comply with the most recent schema bundle[1]. 
+ [1]https://www.dmtf.org/documents/redfish-spmf/redfish-schema-bundle-20181 diff --git a/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml b/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml new file mode 100644 index 0000000000000000000000000000000000000000..82b1758b858bb614f16c079a36251748363fb281 --- /dev/null +++ b/releasenotes/notes/use-sessions-url-from-root-8b8eca57dc450705.yaml @@ -0,0 +1,8 @@ +--- +fixes: + - | + Instead of trying to GET /redfish/v1/SessionService, which is usually + reachable via authentication, fail, and then guess + /redfish/v1/SessionService/Sessions as Sessions URL, we try first to use + directly the Sessions URL provided by the root service, if available. + diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py index 03169f473e4f7e4acecc0ffc26aeff9bdbba365c..f7b8b09a4ec861872b856265d45b38aa168d8511 100644 --- a/releasenotes/source/conf.py +++ b/releasenotes/source/conf.py @@ -12,9 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Glance Release Notes documentation build configuration file, created by -# sphinx-quickstart on Tue Nov 3 17:40:50 2015. -# # This file is execfile()d with the current directory set to its # containing dir. # @@ -98,7 +95,7 @@ exclude_patterns = [] # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -106,6 +103,10 @@ pygments_style = 'sphinx' # If true, keep warnings as "system message" paragraphs in the built documents. 
# keep_warnings = False +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/sushy' +openstackdocs_use_storyboard = True + # -- Options for HTML output ---------------------------------------------- diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst index c10c897f7c49e60280d5ab638ca632e320f013e9..d0a327e735a93816bd41684f141a6e736b308494 100644 --- a/releasenotes/source/index.rst +++ b/releasenotes/source/index.rst @@ -6,3 +6,10 @@ :maxdepth: 1 unreleased + victoria + ussuri + train + stein + rocky + queens + pike diff --git a/releasenotes/source/pike.rst b/releasenotes/source/pike.rst new file mode 100644 index 0000000000000000000000000000000000000000..0a4fa1024fa812333629e6fa259a07ff0bab3b4a --- /dev/null +++ b/releasenotes/source/pike.rst @@ -0,0 +1,6 @@ +========================================= +Pike Series (1.0.0 - 1.1.x) Release Notes +========================================= + +.. release-notes:: + :branch: stable/pike diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst new file mode 100644 index 0000000000000000000000000000000000000000..47688af08bfdeb07fe4fe1a94ffc1c81803c2da0 --- /dev/null +++ b/releasenotes/source/queens.rst @@ -0,0 +1,6 @@ +=========================================== +Queens Series (1.2.0 - 1.3.x) Release Notes +=========================================== + +.. release-notes:: + :branch: stable/queens diff --git a/releasenotes/source/rocky.rst b/releasenotes/source/rocky.rst new file mode 100644 index 0000000000000000000000000000000000000000..1aac5a755827027eef20ddec356e01043e190b11 --- /dev/null +++ b/releasenotes/source/rocky.rst @@ -0,0 +1,6 @@ +========================================== +Rocky Series (1.4.0 - 1.6.x) Release Notes +========================================== + +.. 
release-notes:: + :branch: stable/rocky diff --git a/releasenotes/source/stein.rst b/releasenotes/source/stein.rst new file mode 100644 index 0000000000000000000000000000000000000000..f7e7823b5864b9a958ccdc1f7351b9f6d0dc1f05 --- /dev/null +++ b/releasenotes/source/stein.rst @@ -0,0 +1,6 @@ +========================================== +Stein Series (1.7.0 - 1.8.x) Release Notes +========================================== + +.. release-notes:: + :branch: stable/stein diff --git a/releasenotes/source/train.rst b/releasenotes/source/train.rst new file mode 100644 index 0000000000000000000000000000000000000000..e5b02dc0d4cb8062774cac797fcc6514a34bd449 --- /dev/null +++ b/releasenotes/source/train.rst @@ -0,0 +1,6 @@ +=========================================== + Train Series (1.9.0 - 2.0.x) Release Notes +=========================================== + +.. release-notes:: + :branch: stable/train diff --git a/releasenotes/source/ussuri.rst b/releasenotes/source/ussuri.rst new file mode 100644 index 0000000000000000000000000000000000000000..42409c615063a4711ecb0d67afddb3716e9d30c8 --- /dev/null +++ b/releasenotes/source/ussuri.rst @@ -0,0 +1,6 @@ +=========================================== +Ussuri Series (3.0.0 - 3.2.x) Release Notes +=========================================== + +.. release-notes:: + :branch: stable/ussuri diff --git a/releasenotes/source/victoria.rst b/releasenotes/source/victoria.rst new file mode 100644 index 0000000000000000000000000000000000000000..604382eb35c3c574c0b08d17be4b35f51de7f453 --- /dev/null +++ b/releasenotes/source/victoria.rst @@ -0,0 +1,6 @@ +============================================= +Victoria Series (3.3.0 - 3.4.x) Release Notes +============================================= + +.. 
release-notes:: + :branch: stable/victoria diff --git a/requirements.txt b/requirements.txt index 3ef27e5dae9a4223ceebfeca948bb680dbaabc1b..1829f68af4a763d813b7659ae30d6e1d0a4fecd1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,5 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0 requests>=2.14.2 # Apache-2.0 -six>=1.9.0 # MIT +python-dateutil>=2.7.0 # BSD +stevedore>=1.29.0 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg index 7963eb85776044359c379c51bbec87f7194b6cb7..b5c315c9e86ec656b3583021e4cd44050b55dfb8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,8 +4,9 @@ summary = Sushy is a small Python library to communicate with Redfish based syst description-file = README.rst author = OpenStack -author-email = openstack-dev@lists.openstack.org -home-page = https://docs.openstack.org/sushy +author-email = openstack-discuss@lists.openstack.org +home-page = https://docs.openstack.org/sushy/latest/ +python-requires = >=3.6 classifier = Environment :: OpenStack Intended Audience :: Information Technology @@ -13,45 +14,17 @@ classifier = License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 + Programming Language :: Python :: Implementation :: CPython + Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 [files] packages = sushy -[build_sphinx] -source-dir = doc/source -build-dir = doc/build -all_files = 1 -warning-is-error = 1 - -[upload_sphinx] -upload-dir = doc/build/html - -[compile_catalog] -directory = sushy/locale -domain = sushy - -[update_catalog] -domain = sushy -output_dir = sushy/locale -input_file = sushy/locale/sushy.pot - -[extract_messages] -keywords = _ gettext ngettext l_ lazy_gettext -mapping_file = babel.cfg -output_file = 
sushy/locale/sushy.pot - -[build_releasenotes] -all_files = 1 -build-dir = releasenotes/build -source-dir = releasenotes/source - -[pbr] -autodoc_index_modules = True -api_doc_dir = reference/api -autodoc_exclude_modules = - sushy.tests.* +[entry_points] +sushy.resources.system.oems = + contoso = sushy.resources.oem.fake:get_extension diff --git a/setup.py b/setup.py index 566d84432eeda2c367f881c596d327a703beb79a..cd35c3c35bf1561bb136217711b0c9c163f4c796 100644 --- a/setup.py +++ b/setup.py @@ -13,17 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools -# In python < 2.7.4, a lazy loading of package `pbr` will break -# setuptools if some other modules registered functions in `atexit`. -# solution from: http://bugs.python.org/issue15881#msg170215 -try: - import multiprocessing # noqa -except ImportError: - pass - setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) diff --git a/sushy/__init__.py b/sushy/__init__.py index afc6c8ada99c1686f09db2d818f8ef7712e781f6..4f46183ced2e105cde3f7858e67dd157df3ac602 100644 --- a/sushy/__init__.py +++ b/sushy/__init__.py @@ -17,8 +17,14 @@ import logging import pbr.version from sushy.main import Sushy -from sushy.resources.system.constants import * # noqa +from sushy.resources.chassis.constants import * # noqa +from sushy.resources.constants import * # noqa +from sushy.resources.fabric.constants import * # noqa from sushy.resources.manager.constants import * # noqa +from sushy.resources.system.constants import * # noqa +from sushy.resources.system.storage.constants import * # noqa +from sushy.resources.updateservice.constants import * # noqa +from sushy.resources.taskservice.constants import * # noqa __all__ = ('Sushy',) __version__ = pbr.version.VersionInfo( diff --git a/sushy/auth.py b/sushy/auth.py new file mode 100644 index 
0000000000000000000000000000000000000000..479aa2620de2c9d503e10fbb48f8738330147672 --- /dev/null +++ b/sushy/auth.py @@ -0,0 +1,258 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Sushy Redfish Authentication Modes + +import abc +import logging + +from sushy import exceptions + +LOG = logging.getLogger(__name__) + + +class AuthBase(object, metaclass=abc.ABCMeta): + + def __init__(self, username=None, password=None): + """A class representing a base Sushy authentication mechanism + + :param username: User account with admin/server-profile + access privilege. + :param password: User account password. + """ + self._username = username + self._password = password + self._root_resource = None + self._connector = None + + def set_context(self, root_resource, connector): + """Set the context of the authentication object. + + :param root_resource: Root sushy object + :param connector: Connector for http connections + """ + self._root_resource = root_resource + self._connector = connector + self._connector.set_auth(self) + + def authenticate(self): + """Perform authentication. + + :raises: RuntimeError + """ + if self._root_resource is None or self._connector is None: + raise RuntimeError('_root_resource / _connector is missing. ' + 'Forgot to call set_context()?') + self._do_authenticate() + + @abc.abstractmethod + def _do_authenticate(self): + """Method to establish a session to a Redfish controller. 
+ + Needs to be implemented by extending auth class, + because each authentication type will authenticate in its own way. + """ + + @abc.abstractmethod + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + + def close(self): + """Shutdown Redfish authentication object + + Undoes whatever should be undone to cancel authenticated session. + """ + + def __enter__(self): + """Allow object to be called with the 'with' statement.""" + return self + + def __exit__(self, exception_type, exception_value, traceback): + """Allow object to be called with the 'with' statement. + + Allow object to be called with the 'with' statement but + also ensure we call close method on exit. + """ + self.close() + + +class BasicAuth(AuthBase): + """Basic Authentication class. + + This is a class used to encapsulate a basic authentication session. + + :param username: User account with admin/server-profile + access privilege. + :param password: User account password. + """ + + def _do_authenticate(self): + """Attempts to establish a Basic Authentication Session. + + """ + self._connector.set_http_basic_auth(self._username, self._password) + + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + return False + + +class SessionAuth(AuthBase): + """Session Authentication class. + + This is a class used to encapsulate a redfish session. + """ + + def __init__(self, username=None, password=None): + """A class representing a Session Authentication object. + + :param username: User account with admin/server-profile access + privilege. + :param password: User account password. + """ + self._session_key = None + """Our Sessions Key""" + self._session_resource_id = None + """Our Sessions Unique Resource ID or URL""" + + super(SessionAuth, self).__init__(username, + password) + + def get_session_key(self): + """Returns the session key. + + :returns: The session key. 
+ """ + return self._session_key + + def get_session_resource_id(self): + """Returns the session resource id. + + :returns: The session resource id. + """ + return self._session_resource_id + + def _do_authenticate(self): + """Establish a redfish session. + + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + target_uri = None + try: + target_uri = self._root_resource.get_sessions_path() + except exceptions.MissingAttributeError: + LOG.debug('Missing Sessions attribute under Links in Root ' + 'Service, we\'ll try to determine it from Session ' + 'Service') + session_service = self._root_resource.get_session_service() + session_auth_token, session_uri = ( + session_service.create_session(self._username, + self._password, + target_uri=target_uri)) + self._session_key = session_auth_token + self._session_resource_id = session_uri + self._connector.set_http_session_auth(session_auth_token) + + def can_refresh_session(self): + """Method to assert if session based refresh can be done.""" + return (self._session_key is not None + and self._session_resource_id is not None) + + def refresh_session(self): + """Method to refresh a session to a Redfish controller. + + This method is called to create a new session after + a session that has already been established + has timed-out or expired. + + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + self.reset_session_attrs() + self._do_authenticate() + + def close(self): + """Close the Redfish Session. + + Attempts to close an established RedfishSession by + deleting it from the remote Redfish controller. 
+ """ + if self._session_resource_id is not None: + try: + self._connector.delete(self._session_resource_id) + except (exceptions.AccessError, + exceptions.ServerSideError) as exc: + LOG.warning('Received exception "%(exception)s" while ' + 'attempting to delete the active session: ' + '%(session_id)s', + {'exception': exc, + 'session_id': self._session_resource_id}) + self.reset_session_attrs() + + def reset_session_attrs(self): + """Reset active session related attributes.""" + self._session_key = None + self._session_resource_id = None + # Requests session object data is merged with user submitted data + # per https://requests.readthedocs.io/en/master/user/advanced/ + # so we need to clear data explicitly set on the session too. + self._connector._session.auth = None + if 'X-Auth-Token' in self._connector._session.headers: + # Delete the token value that was saved to the session + # as otherwise we would end up with a dictionary containing + # a {'X-Auth-Token': null} being sent across to the remote + # bmc. + del self._connector._session.headers['X-Auth-Token'] + + +class SessionOrBasicAuth(SessionAuth): + + def __init__(self, username=None, password=None): + super(SessionOrBasicAuth, self).__init__(username, password) + self.basic_auth = BasicAuth(username=username, password=password) + + def _do_authenticate(self): + """Establish a RedfishSession. + + We will attempt to establish a redfish session. If we are unable + to establish one, fallback to basic authentication. + """ + try: + # Attempt session based authentication + super(SessionOrBasicAuth, self)._do_authenticate() + except exceptions.SushyError as e: + LOG.debug('Received exception "%(exception)s" while ' + 'attempting to establish a session. 
' + 'Falling back to basic authentication.', + {'exception': e}) + + # Fall back to basic authentication + self.reset_session_attrs() + self.basic_auth.set_context(self._root_resource, self._connector) + self.basic_auth.authenticate() + + def refresh_session(self): + """Method to refresh a session to a Redfish controller. + + This method is called to create a new RedfishSession + if we have previously established a RedfishSession and + the previous session has timed-out or expired. + If we did not previously have an established session, + we simply return our BasicAuthentication requests.Session. + """ + if self.can_refresh_session(): + super(SessionOrBasicAuth, self).refresh_session() diff --git a/sushy/connector.py b/sushy/connector.py index debe8240f5efa94d66439475f6fe4dc0b0b448b9..90b8334343f96394d1070f843baaeee8fa4c2d0c 100644 --- a/sushy/connector.py +++ b/sushy/connector.py @@ -13,63 +13,171 @@ # License for the specific language governing permissions and limitations # under the License. -import json import logging +import time +from urllib import parse as urlparse import requests -from six.moves.urllib import parse from sushy import exceptions +from sushy.taskmonitor import TaskMonitor +from sushy import utils LOG = logging.getLogger(__name__) +_SERVER_SIDE_RETRIES = 5 +_SERVER_SIDE_RETRY_DELAY = 3 + + class Connector(object): - def __init__(self, url, username=None, password=None, verify=True): + def __init__( + self, url, username=None, password=None, verify=True, + response_callback=None): self._url = url + self._verify = verify self._session = requests.Session() - self._session.verify = verify - if username and password: - self._session.auth = (username, password) + self._session.verify = self._verify + self._response_callback = response_callback + + # NOTE(etingof): field studies reveal that some BMCs choke at + # long-running persistent HTTP connections (or TCP connections). 
+ # By default, we ask HTTP server to shut down HTTP connection we've + # just used. + self._session.headers['Connection'] = 'close' + + if username or password: + LOG.warning('Passing username and password to Connector is ' + 'deprecated. Authentication is passed through ' + 'set_auth now, support for these arguments will ' + 'be removed in the future') + self.set_http_basic_auth(username, password) + + def set_auth(self, auth): + """Sets the authentication mechanism for our connector.""" + self._auth = auth + + def set_http_basic_auth(self, username, password): + """Sets the http basic authentication information.""" + self._session.auth = (username, password) + + def set_http_session_auth(self, session_auth_token): + """Sets the session authentication information.""" + self._session.auth = None + self._session.headers.update({'X-Auth-Token': session_auth_token}) def close(self): """Close this connector and the associated HTTP session.""" self._session.close() - def _op(self, method, path='', data=None, headers=None): + def _op(self, method, path='', data=None, headers=None, blocking=False, + timeout=60, server_side_retries=_SERVER_SIDE_RETRIES, + **extra_session_req_kwargs): """Generic RESTful request handler. :param method: The HTTP method to be used, e.g: GET, POST, PUT, PATCH, etc... - :param path: The sub-URI path to the resource. + :param path: The sub-URI or absolute URL path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. 
:raises: ConnectionError :raises: HTTPError """ - if headers is None: - headers = {} - - if data is not None: - data = json.dumps(data) + url = path if urlparse.urlparse(path).netloc else urlparse.urljoin( + self._url, path) + headers = headers or {} + lc_headers = [k.lower() for k in headers] + if data is not None and 'content-type' not in lc_headers: headers['Content-Type'] = 'application/json' - - url = parse.urljoin(self._url, path) + if 'odata-version' not in lc_headers: + headers['OData-Version'] = '4.0' # TODO(lucasagomes): We should mask the data to remove sensitive # information - LOG.debug('HTTP request: %(method)s %(url)s; ' - 'headers: %(headers)s; body: %(data)s', - {'method': method, 'url': url, 'headers': headers, - 'data': data}) + LOG.debug('HTTP request: %(method)s %(url)s; headers: %(headers)s; ' + 'body: %(data)s; blocking: %(blocking)s; timeout: ' + '%(timeout)s; session arguments: %(session)s;', + {'method': method, 'url': url, + 'headers': utils.sanitize(headers), + 'data': utils.sanitize(data), + 'blocking': blocking, 'timeout': timeout, + 'session': extra_session_req_kwargs}) try: - response = self._session.request(method, url, data=data, - headers=headers) + response = self._session.request(method, url, json=data, + headers=headers, + **extra_session_req_kwargs) except requests.ConnectionError as e: raise exceptions.ConnectionError(url=url, error=e) - exceptions.raise_for_response(method, url, response) + if self._response_callback: + self._response_callback(response) + + # If we received an AccessError, and we + # previously established a redfish session + # there is a chance that the session has timed-out. + # Attempt to re-establish a session. 
+ try: + exceptions.raise_for_response(method, url, response) + except exceptions.AccessError as e: + if self._auth.can_refresh_session(): + try: + self._auth.refresh_session() + except exceptions.AccessError as refresh_exc: + LOG.error("A failure occured while attempting to refresh " + "the session. Error: %s", refresh_exc.message) + raise + LOG.debug("Authentication refreshed successfully, " + "retrying the call.") + try: + response = self._session.request( + method, url, json=data, + headers=headers, + **extra_session_req_kwargs) + except exceptions.HTTPError as retry_exc: + LOG.error("Failure occured while attempting to retry " + "request after refreshing the session. Error: " + "%s", retry_exc.message) + raise + else: + if method == 'GET' and url.endswith('SessionService'): + LOG.debug('HTTP GET of SessionService failed %s, ' + 'this is expected prior to authentication', + e.message) + else: + LOG.error("Authentication error detected. Cannot proceed: " + "%s", e.message) + raise + except exceptions.ServerSideError as e: + if method.lower() != 'get' or server_side_retries <= 0: + raise + else: + LOG.warning('Got server side error %s in response to a ' + 'GET request, retrying after %d seconds', + e, _SERVER_SIDE_RETRY_DELAY) + time.sleep(_SERVER_SIDE_RETRY_DELAY) + return self._op(method, path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + server_side_retries=server_side_retries - 1, + **extra_session_req_kwargs) + + if blocking and response.status_code == 202: + if not response.headers.get('Location'): + m = ('HTTP response for %(method)s request to %(url)s ' + 'returned status 202, but no Location header' + % {'method': method, 'url': url}) + raise exceptions.ConnectionError(url=url, error=m) + + mon = TaskMonitor.from_response(self, response, path) + mon.wait(timeout) + response = mon.response + exceptions.raise_for_response(method, url, response) + LOG.debug('HTTP response for %(method)s %(url)s: ' 'status code: %(code)s', {'method': 
method, 'url': url, @@ -77,41 +185,105 @@ class Connector(object): return response - def get(self, path='', data=None, headers=None): + def get(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP GET method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('GET', path, data, headers) + return self._op('GET', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + **extra_session_req_kwargs) - def post(self, path='', data=None, headers=None): + def post(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP POST method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. 
:raises: ConnectionError :raises: HTTPError """ - return self._op('POST', path, data, headers) + return self._op('POST', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + **extra_session_req_kwargs) - def patch(self, path='', data=None, headers=None): + def patch(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): """HTTP PATCH method. :param path: Optional sub-URI path to the resource. :param data: Optional JSON data. :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. + :returns: The response object from the requests library. + :raises: ConnectionError + :raises: HTTPError + """ + return self._op('PATCH', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + **extra_session_req_kwargs) + + def put(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): + """HTTP PUT method. + + :param path: Optional sub-URI path to the resource. + :param data: Optional JSON data. + :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. + :returns: The response object from the requests library. 
+ :raises: ConnectionError + :raises: HTTPError + """ + return self._op('PUT', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + **extra_session_req_kwargs) + + def delete(self, path='', data=None, headers=None, blocking=False, + timeout=60, **extra_session_req_kwargs): + """HTTP DELETE method. + + :param path: Optional sub-URI path to the resource. + :param data: Optional JSON data. + :param headers: Optional dictionary of headers. + :param blocking: Whether to block for asynchronous operations. + :param timeout: Max time in seconds to wait for blocking async call. + :param extra_session_req_kwargs: Optional keyword argument to pass + requests library arguments which would pass on to requests session + object. :returns: The response object from the requests library. :raises: ConnectionError :raises: HTTPError """ - return self._op('PATCH', path, data, headers) + return self._op('DELETE', path, data=data, headers=headers, + blocking=blocking, timeout=timeout, + **extra_session_req_kwargs) def __enter__(self): return self diff --git a/sushy/exceptions.py b/sushy/exceptions.py index db07cac585d59eee75715890b87e056e260b4342..9ddf27e8cd0e723f4f71320688c870100434c32b 100644 --- a/sushy/exceptions.py +++ b/sushy/exceptions.py @@ -13,6 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. 
+from http import client as http_client import logging @@ -24,7 +25,9 @@ class SushyError(Exception): message = None - def __init__(self, **kwargs): + def __init__(self, message=None, **kwargs): + if message is not None: + self.message = message if self.message and kwargs: self.message = self.message % kwargs @@ -55,6 +58,26 @@ class InvalidParameterValueError(SushyError): 'Valid values are: %(valid_values)s') +class ArchiveParsingError(SushyError): + message = 'Failed parsing archive "%(path)s": %(error)s' + + +class UnknownDefaultError(SushyError): + message = 'Failed at determining default for "%(entity)s": %(error)s' + + +class ExtensionError(SushyError): + message = ('Sushy Extension Error: %(error)s') + + +class OEMExtensionNotFoundError(SushyError): + message = 'No %(resource)s OEM extension found by name "%(name)s".' + + +class MissingHeaderError(SushyError): + message = 'Response to %(target_uri)s did not contain a %(header)s header' + + class HTTPError(SushyError): """Basic exception for HTTP errors""" @@ -70,7 +93,8 @@ class HTTPError(SushyError): detail = None """Error message defined in the Redfish specification, if present.""" - message = ('HTTP %(method)s %(url)s returned code %(code)s. %(error)s') + message = ('HTTP %(method)s %(url)s returned code %(code)s. 
%(error)s ' + 'Extended information: %(ext_info)s') def __init__(self, method, url, response): self.status_code = response.status_code @@ -82,19 +106,32 @@ class HTTPError(SushyError): {'method': method, 'url': url, 'code': self.status_code}) error = 'unknown error' + ext_info = 'none' else: - # TODO(dtantsur): parse @Message.ExtendedInfo self.body = body.get('error', {}) self.code = self.body.get('code', 'Base.1.0.GeneralError') self.detail = self.body.get('message') - error = '%s: %s' % (self.code, self.detail or 'unknown error') - + ext_info = self.body.get('@Message.ExtendedInfo', [{}]) + message = self._get_most_severe_msg(ext_info) + self.detail = message or self.detail + error = '%s: %s' % (self.code, self.detail or 'unknown error.') kwargs = {'method': method, 'url': url, 'code': self.status_code, - 'error': error} + 'error': error, 'ext_info': ext_info} LOG.debug('HTTP response for %(method)s %(url)s: ' - 'status code: %(code)s, error: %(error)s', kwargs) + 'status code: %(code)s, error: %(error)s, ' + 'extended: %(ext_info)s', kwargs) super(HTTPError, self).__init__(**kwargs) + @staticmethod + def _get_most_severe_msg(extended_info): + if not isinstance(extended_info, list): + return extended_info.get('Message', None) + if len(extended_info) > 0: + for sev in ['Critical', 'Warning']: + for i, m in enumerate(extended_info): + if m.get('Severity') == sev: + return m.get('Message') + class BadRequestError(HTTPError): pass @@ -113,17 +150,23 @@ class AccessError(HTTPError): pass +class MissingXAuthToken(HTTPError): + message = ('No X-Auth-Token returned from remote host when ' + 'attempting to establish a session. 
Error: %(error)s') + + def raise_for_response(method, url, response): """Raise a correct error class, if needed.""" - if response.status_code < 400: + if response.status_code < http_client.BAD_REQUEST: return - elif response.status_code == 404: + elif response.status_code == http_client.NOT_FOUND: raise ResourceNotFoundError(method, url, response) - elif response.status_code == 400: + elif response.status_code == http_client.BAD_REQUEST: raise BadRequestError(method, url, response) - elif response.status_code in (401, 403): + elif response.status_code in (http_client.UNAUTHORIZED, + http_client.FORBIDDEN): raise AccessError(method, url, response) - elif response.status_code >= 500: + elif response.status_code >= http_client.INTERNAL_SERVER_ERROR: raise ServerSideError(method, url, response) else: raise HTTPError(method, url, response) diff --git a/sushy/main.py b/sushy/main.py index a5e25208aa9b9c5b8ea62318057ec8f590ce344d..ab2286ada7e72c370bf9736d89934f0b36c341cb 100644 --- a/sushy/main.py +++ b/sushy/main.py @@ -12,11 +12,94 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+import collections +import logging -from sushy import connector +import pkg_resources +import requests + +from sushy import auth as sushy_auth +from sushy import connector as sushy_connector +from sushy import exceptions from sushy.resources import base +from sushy.resources.chassis import chassis +from sushy.resources.compositionservice import compositionservice +from sushy.resources.fabric import fabric from sushy.resources.manager import manager +from sushy.resources.registry import message_registry +from sushy.resources.registry import message_registry_file +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +from sushy.resources.taskservice import taskservice +from sushy.resources.updateservice import updateservice +from sushy import taskmonitor +from sushy import utils + +LOG = logging.getLogger(__name__) + +STANDARD_REGISTRY_PATH = 'standard_registries/' + + +class ProtocolFeaturesSupportedField(base.CompositeField): + + excerpt_query = base.Field('ExcerptQuery') + """The excerpt query parameter is supported""" + + expand_query = base.Field('ExpandQuery') + """The expand query parameter is supported""" + + filter_query = base.Field('FilterQuery') + """The filter query parameter is supported""" + + only_member_query = base.Field('OnlyMemberQuery') + """The only query parameter is supported""" + + select_query = base.Field('SelectQuery') + """The select query parameter is supported""" + + +class LazyRegistries(collections.abc.MutableMapping): + """Download registries on demand. + + Redfish message registries can be very large. On top of that, + they are not used frequently. Thus, let's not pull them off + the BMC unless the consumer is actually trying to use them. 
+ + :param service_root: Redfish service root object + :type service_root: sushy.main.Sushy + """ + + def __init__(self, service_root): + self._service_root = service_root + self._registries = None + + def __getitem__(self, key): + registries = self.registries + return registries[key] + + def __setitem__(self, key, value): + registries = self.registries + registries[key] = value + + def __delitem__(self, key): + registries = self.registries + del registries[key] + + def __iter__(self): + registries = self.registries + return iter(registries or ()) + + def __len__(self): + registries = self.registries + return len(registries) + + @property + def registries(self): + if self._registries is None: + self._registries = self._service_root.registries + + return self._registries class Sushy(base.ResourceBase): @@ -30,14 +113,43 @@ class Sushy(base.ResourceBase): uuid = base.Field('UUID') """The Redfish root service UUID""" - _systems_path = base.Field(['Systems', '@odata.id'], required=True) + product = base.Field('Product') + """The product associated with this Redfish service""" + + protocol_features_supported = ProtocolFeaturesSupportedField( + 'ProtocolFeaturesSupported') + """The information about protocol features supported by the service""" + + _composition_service_path = base.Field( + ['CompositionService', '@odata.id']) + """CompositionService path""" + + _systems_path = base.Field(['Systems', '@odata.id']) """SystemCollection path""" - _managers_path = base.Field(['Managers', '@odata.id'], required=True) + _managers_path = base.Field(['Managers', '@odata.id']) """ManagerCollection path""" + _chassis_path = base.Field(['Chassis', '@odata.id']) + """ChassisCollection path""" + + _fabrics_path = base.Field(['Fabrics', '@odata.id']) + """FabricCollection path""" + + _session_service_path = base.Field(['SessionService', '@odata.id']) + """SessionService path""" + + _registries_path = base.Field(['Registries', '@odata.id']) + """Registries path""" + + 
_update_service_path = base.Field(['UpdateService', '@odata.id']) + """UpdateService path""" + def __init__(self, base_url, username=None, password=None, - root_prefix='/redfish/v1/', verify=True): + root_prefix='/redfish/v1/', verify=True, + auth=None, connector=None, + public_connector=None, + language='en'): """A class representing a RootService :param base_url: The base URL to the Redfish controller. It @@ -54,15 +166,52 @@ class Sushy(base.ResourceBase): the driver will ignore verifying the SSL certificate; if it's a path the driver will use the specified certificate or one of the certificates in the directory. Defaults to True. + :param auth: An authentication mechanism to utilize. + :param connector: A user-defined connector object. Defaults to None. + :param public_connector: A user-defined connector to use for requests + on the Internet, e.g., for Message Registries. Defaults to None. + :param language: RFC 5646 language code for Message Registries. + Defaults to 'en'. """ self._root_prefix = root_prefix + if (auth is not None and (password is not None + or username is not None)): + msg = ('Username or Password were provided to Sushy ' + 'when an authentication mechanism was specified.') + raise ValueError(msg) + if auth is None: + auth = sushy_auth.SessionOrBasicAuth(username=username, + password=password) + self._auth = auth + super(Sushy, self).__init__( - connector.Connector(base_url, username, password, verify), + connector or sushy_connector.Connector(base_url, verify=verify), path=self._root_prefix) + self._public_connector = public_connector or requests + self._language = language + self._base_url = base_url + self._auth.set_context(self, self._conn) + self._auth.authenticate() + + def __del__(self): + if self._auth: + try: + self._auth.close() + + except Exception as ex: + LOG.warning('Ignoring error while closing Redfish session ' + 'with %s: %s', self._base_url, ex) + self._auth = None + + def _parse_attributes(self, json_doc): + """Parse the 
 attributes of a resource. + + Parsed JSON fields are set to `self` as declared in the class. - def _parse_attributes(self): - super(Sushy, self)._parse_attributes() - self.redfish_version = self.json.get('RedfishVersion') + :param json_doc: parsed JSON document in form of Python types + """ + super(Sushy, self)._parse_attributes(json_doc) + self.redfish_version = json_doc.get('RedfishVersion') def get_system_collection(self): """Get the SystemCollection object @@ -71,17 +220,100 @@ class Sushy(base.ResourceBase): not found :returns: a SystemCollection object """ - return system.SystemCollection(self._conn, self._systems_path, - redfish_version=self.redfish_version) + if not self._systems_path: + raise exceptions.MissingAttributeError( + attribute='Systems/@odata.id', resource=self._path) + + return system.SystemCollection( + self._conn, self._systems_path, + redfish_version=self.redfish_version, + registries=self.lazy_registries) - def get_system(self, identity): + def get_system(self, identity=None): """Given the identity return a System object - :param identity: The identity of the System resource + :param identity: The identity of the System resource. If not given, + sushy will default to the single available System or fail + if there appear to be more or less than one System listed. + :raises: `UnknownDefaultError` if default system can't be determined. 
 :returns: The System object """ + if identity is None: + systems_collection = self.get_system_collection() + listed_systems = systems_collection.get_members() + if len(listed_systems) != 1: + raise exceptions.UnknownDefaultError( + entity='ComputerSystem', + error='System count is not exactly one') + + identity = listed_systems[0].path + return system.System(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_chassis_collection(self): + """Get the ChassisCollection object + + :raises: MissingAttributeError, if the collection attribute is + not found + :returns: a ChassisCollection object + """ + if not self._chassis_path: + raise exceptions.MissingAttributeError( + attribute='Chassis/@odata.id', resource=self._path) + + return chassis.ChassisCollection(self._conn, self._chassis_path, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_chassis(self, identity=None): + """Given the identity return a Chassis object + + :param identity: The identity of the Chassis resource. If not given, + sushy will default to the single available chassis or fail + if there appear to be more or less than one Chassis listed. + :raises: `UnknownDefaultError` if default system can't be determined. 
+ :returns: The Chassis object + """ + if identity is None: + chassis_collection = self.get_chassis_collection() + listed_chassis = chassis_collection.get_members() + if len(listed_chassis) != 1: + raise exceptions.UnknownDefaultError( + entity='Chassis', + error='Chassis count is not exactly one') + + identity = listed_chassis[0].path + + return chassis.Chassis(self._conn, identity, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_fabric_collection(self): + """Get the FabricCollection object + + :raises: MissingAttributeError, if the collection attribute is + not found + :returns: a FabricCollection object + """ + if not self._fabrics_path: + raise exceptions.MissingAttributeError( + attribute='Fabrics/@odata.id', resource=self._path) + + return fabric.FabricCollection(self._conn, self._fabrics_path, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_fabric(self, identity): + """Given the identity return a Fabric object + + :param identity: The identity of the Fabric resource + :returns: The Fabric object + """ + return fabric.Fabric(self._conn, identity, + redfish_version=self.redfish_version, + registries=self.lazy_registries) def get_manager_collection(self): """Get the ManagerCollection object @@ -90,14 +322,201 @@ class Sushy(base.ResourceBase): not found :returns: a ManagerCollection object """ + if not self._managers_path: + raise exceptions.MissingAttributeError( + attribute='Managers/@odata.id', resource=self._path) + return manager.ManagerCollection(self._conn, self._managers_path, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.lazy_registries) - def get_manager(self, identity): + def get_manager(self, identity=None): """Given the identity return a Manager object - :param identity: The identity of the Manager resource + :param identity: The identity of the Manager resource. 
If not given, + sushy will default to the single available Manager or fail + if there appear to be more or less than one Manager listed. :returns: The Manager object """ + if identity is None: + managers_collection = self.get_manager_collection() + listed_managers = managers_collection.get_members() + if len(listed_managers) != 1: + raise exceptions.UnknownDefaultError( + entity='Manager', + error='Manager count is not exactly one') + + identity = listed_managers[0].path + return manager.Manager(self._conn, identity, - redfish_version=self.redfish_version) + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_session_service(self): + """Get the SessionService object + + :raises: MissingAttributeError, if the collection attribute is + not found + :returns: a SessionService object + """ + if not self._session_service_path: + raise exceptions.MissingAttributeError( + attribute='SessionService/@odata.id', resource=self._path) + + return sessionservice.SessionService( + self._conn, self._session_service_path, + redfish_version=self.redfish_version) + + def get_sessions_path(self): + """Returns the Sessions url""" + + try: + links_url = self.json.get('Links') + return links_url['Sessions']['@odata.id'] + except (TypeError, KeyError): + raise exceptions.MissingAttributeError( + attribute='Links/Sessions/@data.id', resource=self.path) + + def get_session(self, identity): + """Given the identity return a Session object + + :param identity: The identity of the session resource + :returns: The Session object + """ + return session.Session( + self._conn, identity, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_update_service(self): + """Get the UpdateService object + + :returns: The UpdateService object + """ + if not self._update_service_path: + raise exceptions.MissingAttributeError( + attribute='UpdateService/@odata.id', resource=self._path) + + return updateservice.UpdateService( + self._conn, 
self._update_service_path, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def get_task_service(self): + """Get the TaskService object + + :returns: The TaskService object + """ + return taskservice.TaskService( + self._conn, utils.get_sub_resource_path_by(self, 'Tasks'), + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def _get_registry_collection(self): + """Get MessageRegistryFileCollection object + + This resource is optional and can be empty. + + :returns: MessageRegistryFileCollection object + or None if Registries not provided + """ + + if self._registries_path: + return message_registry_file.MessageRegistryFileCollection( + self._conn, + self._registries_path, + redfish_version=self.redfish_version) + + def get_composition_service(self): + """Get the CompositionService object + + :raises: MissingAttributeError, if the composition service + attribute is not found + :returns: The CompositionService object + """ + if not self._composition_service_path: + raise exceptions.MissingAttributeError( + attribute='CompositionService/@odata.id', + resource=self._path) + + return compositionservice.CompositionService( + self._conn, self._composition_service_path, + redfish_version=self.redfish_version, + registries=self.lazy_registries) + + def _get_standard_message_registry_collection(self): + """Load packaged standard message registries + + :returns: list of MessageRegistry + """ + + message_registries = [] + resource_package_name = __name__ + for json_file in pkg_resources.resource_listdir( + resource_package_name, STANDARD_REGISTRY_PATH): + # Not using path.join according to pkg_resources docs + mes_reg = message_registry.MessageRegistry( + None, STANDARD_REGISTRY_PATH + json_file, + reader=base.JsonPackagedFileReader( + resource_package_name)) + message_registries.append(mes_reg) + + return message_registries + + @property + @utils.cache_it + def registries(self): + """Gets and combines all message 
registries together + + Fetches all registries if any provided by Redfish service + and combines together with packaged standard registries. + + :returns: dict of combined message registries keyed by both the + registry name (Registry_name.Major_version.Minor_version) and the + message registry file identity, with the value being the actual + registry itself. + """ + standard = self._get_standard_message_registry_collection() + + registries = {r.registry_prefix + '.' + + r.registry_version.rsplit('.', 1)[0]: r + for r in standard if r.language == self._language} + + registry_col = self._get_registry_collection() + + if registry_col: + provided = registry_col.get_members() + for r in provided: + message_registry = r.get_message_registry( + self._language, + self._public_connector) + registries[r.registry] = message_registry + if r.identity not in registries: + registries[r.identity] = message_registry + + return registries + + @property + def lazy_registries(self): + """Gets and combines all message registries together + + Fetches all registries if any provided by Redfish service + and combines together with packaged standard registries. + + :returns: dict of combined message registries where key is + Registry_name.Major_version.Minor_version and value is registry + itself. + """ + return LazyRegistries(self) + + def get_task_monitor(self, task_monitor_uri): + """Used to retrieve a TaskMonitor by task monitor URI. + + :param task_monitor_uri: Task monitor URI + :returns: A task monitor. 
+ """ + return taskmonitor.TaskMonitor( + self._conn, + task_monitor_uri, + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/resources/base.py b/sushy/resources/base.py index a6083ba483108c5d98cd248d2b6478a0884d493c..b2b40c1e1fd4fd8367d89a0b34867d3274495e0e 100644 --- a/sushy/resources/base.py +++ b/sushy/resources/base.py @@ -16,11 +16,17 @@ import abc import collections import copy +import io +import json import logging +import re +import zipfile -import six +import pkg_resources from sushy import exceptions +from sushy.resources import mappings as res_maps +from sushy.resources import oem from sushy import utils @@ -36,10 +42,9 @@ class Field(object): :param path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested field. - :param required: whether this field is required. Missing required - fields result in MissingAttributeError. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. :param default: the default value to use when the field is missing. - Only has effect when the field is not required. :param adapter: a function to call to transform and/or validate the received value. UnicodeError, ValueError or TypeError from this call are reraised as MalformedAttributeError. 
@@ -47,8 +52,9 @@ class Field(object): if not callable(adapter): raise TypeError("Adapter must be callable") - if isinstance(path, six.string_types): + if not isinstance(path, list): path = [path] + elif not path: raise ValueError('Path cannot be empty') @@ -57,6 +63,17 @@ class Field(object): self._default = default self._adapter = adapter + def _get_item(self, dct, key_or_callable, **context): + if not callable(key_or_callable): + return dct[key_or_callable] + + for candidate_key in dct: + if key_or_callable( + candidate_key, value=dct[candidate_key], **context): + return dct[candidate_key] + + raise KeyError(key_or_callable) + def _load(self, body, resource, nested_in=None): """Load this field from a JSON object. @@ -64,7 +81,8 @@ class Field(object): :param resource: ResourceBase instance for which the field is loaded. :param nested_in: parent resource path (for error reporting only), must be a list of strings or None. - :raises: MissingAttributeError if a required field is missing. + :raises: MissingAttributeError if a required field is missing + and not defaulted. :raises: MalformedAttributeError on invalid field value or type. 
:returns: loaded and verified value """ @@ -72,18 +90,32 @@ class Field(object): for path_item in self._path[:-1]: body = body.get(path_item, {}) - if name not in body: + try: + item = self._get_item(body, name) + + except KeyError: if self._required: path = (nested_in or []) + self._path - raise exceptions.MissingAttributeError( - attribute='/'.join(path), - resource=resource.path) - else: - # Do not run the adapter on the default value - return self._default + + if self._default is None: + raise exceptions.MissingAttributeError( + attribute='/'.join(path), + resource=resource.path) + + logging.warning( + 'Applying default "%s" on required, but missing ' + 'attribute "%s"' % (self._default, path)) + + # Do not run the adapter on the default value + return self._default + + # NOTE(etingof): this is just to account for schema violation + if item is None: + return try: - value = self._adapter(body[name]) + return self._adapter(item) + except (UnicodeError, ValueError, TypeError) as exc: path = (nested_in or []) + self._path raise exceptions.MalformedAttributeError( @@ -91,8 +123,6 @@ class Field(object): resource=resource.path, error=exc) - return value - def _collect_fields(resource): """Collect fields from the JSON. @@ -106,8 +136,7 @@ def _collect_fields(resource): yield (attr, field) -@six.add_metaclass(abc.ABCMeta) -class CompositeField(collections.Mapping, Field): +class CompositeField(collections.abc.Mapping, Field, metaclass=abc.ABCMeta): """Base class for fields consisting of several sub-fields.""" def __init__(self, *args, **kwargs): @@ -138,7 +167,7 @@ class CompositeField(collections.Mapping, Field): return instance # Satisfy the mapping interface, see - # https://docs.python.org/2/library/collections.html#collections.Mapping. 
+ # https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping def __getitem__(self, key): if key in self._subfields: @@ -153,6 +182,84 @@ class CompositeField(collections.Mapping, Field): return iter(self._subfields) +class ListField(Field): + """Base class for fields consisting of a list of several sub-fields.""" + + def __init__(self, *args, **kwargs): + super(ListField, self).__init__(*args, **kwargs) + self._subfields = dict(_collect_fields(self)) + + def _load(self, body, resource, nested_in=None): + """Load the field list. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new list object containing subfields. + """ + nested_in = (nested_in or []) + self._path + values = super(ListField, self)._load(body, resource) + if values is None: + return None + + # Initialize the list that will contain each field instance + instances = [] + for value in values: + instance = copy.copy(self) + for attr, field in self._subfields.items(): + # Hide the Field object behind the real value + setattr(instance, attr, field._load(value, + resource, + nested_in)) + instances.append(instance) + + return instances + + def __getitem__(self, key): + return getattr(self, key) + + +class LinksField(CompositeField): + """Reference to linked resources.""" + oem_vendors = Field('Oem', adapter=list) + + +class DictionaryField(Field): + """Base class for fields consisting of dictionary of several sub-fields.""" + + def __init__(self, *args, **kwargs): + super(DictionaryField, self).__init__(*args, **kwargs) + self._subfields = dict(_collect_fields(self)) + + def _load(self, body, resource, nested_in=None): + """Load the dictionary. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new dictionary object containing subfields. 
+ """ + nested_in = (nested_in or []) + self._path + values = super(DictionaryField, self)._load(body, resource) + if values is None: + return None + + instances = {} + for key, value in values.items(): + instance_value = copy.copy(self) + for attr, field in self._subfields.items(): + # Hide the Field object behind the real value + setattr(instance_value, attr, field._load(value, + resource, + nested_in)) + instances[key] = instance_value + + return instances + + def __getitem__(self, key): + return getattr(self, key) + + class MappedField(Field): """Field taking real value from a mapping.""" @@ -163,13 +270,12 @@ class MappedField(Field): a string or a list of string. In the latter case, the value will be fetched from a nested object. :param mapping: a mapping to take values from. - :param required: whether this field is required. Missing required - fields result in MissingAttributeError. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. :param default: the default value to use when the field is missing. - Only has effect when the field is not required. This value is not - matched against the mapping. + This value is not matched against the mapping. """ - if not isinstance(mapping, collections.Mapping): + if not isinstance(mapping, collections.abc.Mapping): raise TypeError("The mapping argument must be a mapping") super(MappedField, self).__init__( @@ -177,48 +283,403 @@ class MappedField(Field): adapter=mapping.get) -@six.add_metaclass(abc.ABCMeta) -class ResourceBase(object): +class MappedListField(Field): + """Field taking a list of values with a mapping for the values + + Given JSON {'field':['xxx', 'yyy']}, a sushy resource definition and + mapping {'xxx':'a', 'yyy':'b'}, the sushy object to come out will be like + resource.field = ['a', 'b'] + """ + + def __init__(self, field, mapping, required=False, default=None): + """Create a mapped list field definition. 
+ + :param field: JSON field to fetch the list of values from. + :param mapping: a mapping for the list elements. + :param required: whether this field is required. Missing required, + but not defaulted, fields result in MissingAttributeError. + :param default: the default value to use when the field is missing. + """ + if not isinstance(mapping, collections.abc.Mapping): + raise TypeError("The mapping argument must be a mapping") + + self._mapping_adapter = mapping.get + super(MappedListField, self).__init__( + field, required=required, default=default, + adapter=lambda x: x) + + def _load(self, body, resource, nested_in=None): + """Load the mapped list. + + :param body: parent JSON body. + :param resource: parent resource. + :param nested_in: parent resource name (for error reporting only). + :returns: a new list object containing the mapped values. + """ + nested_in = (nested_in or []) + self._path + values = super(MappedListField, self)._load(body, resource) + + if values is None: + return + + instances = [self._mapping_adapter(value) for value in values + if self._mapping_adapter(value) is not None] + + return instances + + +class MessageListField(ListField): + """List of messages with details of settings update status""" + + message_id = Field('MessageId') + """The key for this message which can be used + to look up the message in a message registry + """ + + message = Field('Message') + """Human readable message, if provided""" + + severity = MappedField('Severity', + res_maps.SEVERITY_VALUE_MAP) + """Severity of the error""" + + resolution = Field('Resolution') + """Used to provide suggestions on how to resolve + the situation that caused the error + """ + + _related_properties = Field('RelatedProperties') + """List of properties described by the message""" + + message_args = Field('MessageArgs') + """List of message substitution arguments for the message + referenced by `message_id` from the message registry + """ + + +class FieldData(object): + 
"""Contains data to be used when constructing Fields""" + + def __init__(self, status_code, headers, json_doc): + """Initializes the FieldData instance""" + self._status_code = status_code + self._headers = headers + self._json_doc = json_doc + + @property + def status_code(self): + """The status code""" + return self._status_code + + @property + def headers(self): + """The headers""" + return self._headers + + @property + def json_doc(self): + """The parsed JSON body""" + return self._json_doc + + +class AbstractDataReader(object, metaclass=abc.ABCMeta): + + def set_connection(self, connector, path): + """Sets mandatory connection parameters + + :param connector: A Connector instance + :param path: path of the resource + """ + self._conn = connector + self._path = path + + @abc.abstractmethod + def get_data(self): + """Based on data source get data and parse to JSON""" + + +class JsonDataReader(AbstractDataReader): + """Gets the data from HTTP response given by path""" + + def get_data(self): + """Gets JSON file from URI directly""" + data = self._conn.get(path=self._path) + + json_data = data.json() if data.content else {} + + return FieldData(data.status_code, data.headers, json_data) + + +class JsonPublicFileReader(AbstractDataReader): + """Loads the data from the Internet""" + + def get_data(self): + """Get JSON file from full URI""" + data = self._conn.get(self._path) + + return FieldData(data.status_code, data.headers, data.json()) + + +class JsonArchiveReader(AbstractDataReader): + """Gets the data from JSON file in archive""" + + def __init__(self, archive_file): + """Initializes the reader + + :param archive_file: file name of JSON file in archive + """ + self._archive_file = archive_file + + def get_data(self): + """Gets JSON file from archive. 
Currently supporting ZIP only""" + + data = self._conn.get(path=self._path) + if data.headers.get('content-type') == 'application/zip': + try: + archive = zipfile.ZipFile(io.BytesIO(data.content)) + json_data = json.loads(archive.read(self._archive_file) + .decode(encoding='utf-8')) + return FieldData(data.status_code, data.headers, json_data) + except (zipfile.BadZipfile, ValueError) as e: + raise exceptions.ArchiveParsingError( + path=self._path, error=e) + else: + LOG.error('Support for %(type)s not implemented', + {'type': data.headers['content-type']}) + + return FieldData(data.status_code, data.headers, None) + + +class JsonPackagedFileReader(AbstractDataReader): + """Gets the data from packaged file given by path""" + + def __init__(self, resource_package_name): + """Initializes the reader + + :param resource_package: Python package/module name + """ + self._resource_package_name = resource_package_name + + def get_data(self): + """Gets JSON file from packaged file denoted by path""" + + with pkg_resources.resource_stream(self._resource_package_name, + self._path) as resource: + json_data = json.loads(resource.read().decode(encoding='utf-8')) + return FieldData(None, None, json_data) + + +def get_reader(connector, path, reader=None): + """Create and configure the reader. + + :param connector: A Connector instance + :param path: sub-URI path to the resource. + :param reader: Reader to use to fetch JSON data. 
+ :returns: the reader + """ + if reader is None: + reader = JsonDataReader() + reader.set_connection(connector, path) + + return reader + + +class ResourceBase(object, metaclass=abc.ABCMeta): redfish_version = None """The Redfish version""" - def __init__(self, connector, path='', redfish_version=None): + _oem_vendors = Field('Oem', adapter=list) + """The list of OEM extension names for this resource.""" + + links = LinksField('Links') + + def __init__(self, + connector, + path='', + redfish_version=None, + registries=None, + reader=None, + json_doc=None): """A class representing the base of any Redfish resource Invokes the ``refresh()`` method of resource for the first time from here (constructor). :param connector: A Connector instance :param path: sub-URI path to the resource. - :param redfish_version: The version of RedFish. Used to construct + :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + :param reader: Reader to use to fetch JSON data. + :param json_doc: parsed JSON document in form of Python types. """ self._conn = connector self._path = path self._json = None self.redfish_version = redfish_version - self.refresh() + self._registries = registries + # Note(deray): Indicates if the resource holds stale data or not. + # Starting off with True and eventually gets set to False when + # attribute values are fetched. 
+ self._is_stale = True + + self._reader = get_reader(connector, path, reader) + + self.refresh(json_doc=json_doc) + + def _get_value(self, val): + """Iterate through the input to get values for all attributes + + :param val: Either a value or a resource + :returns: Attribute value, which may be a dictionary + """ + if isinstance(val, dict): + subfields = {} + for key, s_val in val.items(): + subfields[key] = self._get_value(s_val) + return subfields - def _parse_attributes(self): - """Parse the attributes of a resource.""" + elif isinstance(val, list): + return [self._get_value(val[i]) for i in range(len(val))] + + elif (isinstance(val, DictionaryField) + or isinstance(val, CompositeField) + or isinstance(val, ListField)): + subfields = {} + for attr, field in val._subfields.items(): + subfields[attr] = self._get_value(val.__getitem__(attr)) + return subfields + + return val + + def _parse_attributes(self, json_doc): + """Parse the attributes of a resource. + + Parsed JSON fields are set to `self` as declared in the class. + + :param json_doc: parsed JSON document in form of Python types + :returns: dictionary of attribute/values after parsing + """ + settings = {} for attr, field in _collect_fields(self): # Hide the Field object behind the real value - setattr(self, attr, field._load(self.json, self)) + setattr(self, attr, field._load(json_doc, self)) + + # Get the attribute/value pairs that have been parsed + settings[attr] = self._get_value(getattr(self, attr)) + + return settings + + def _get_etag(self): + """Returns the ETag of the HTTP request if any was specified. + + :returns ETag or None + """ + pattern = re.compile(r'^(W\/)?("\w*")$') + match = pattern.match(self._get_headers().get('ETag', '')) + if match: + return match.group(2) + return None + + def _get_headers(self): + """Returns the HTTP headers of the request for the resource. 
- def refresh(self): + :returns: dict of HTTP headers + """ + return self._reader.get_data()._headers + + def _allow_patch(self): + """Returns if the resource supports the PATCH HTTP method. + + If the resource supports the PATCH HTTP method for updates, + it will return it in the Allow HTTP header. + :returns: Boolean flag if PATCH is supported or not + """ + allow_header = self._get_headers().get('Allow', '') + methods = set([h.strip().upper() for h in allow_header.split(',')]) + return "PATCH" in methods + + def refresh(self, force=True, json_doc=None): """Refresh the resource Freshly retrieves/fetches the resource attributes and invokes ``_parse_attributes()`` method on successful retrieval. + It is recommended not to override this method in concrete ResourceBase + classes. Resource classes can place their refresh specific operations + in ``_do_refresh()`` method, if needed. This method represents the + template method in the paradigm of Template design pattern. + + :param force: if set to False, will only refresh if the resource is + marked as stale, otherwise neither it nor its subresources will + be refreshed. + :param json_doc: parsed JSON document in form of Python types. :raises: ResourceNotFoundError :raises: ConnectionError :raises: HTTPError """ - self._json = self._conn.get(path=self._path).json() + # Note(deray): Don't re-fetch / invalidate the sub-resources if the + # resource is "_not_ stale" (i.e. fresh) OR _not_ forced. 
+ if not self._is_stale and not force: + return + + if json_doc: + self._json = json_doc + else: + self._json = self._reader.get_data().json_doc + + attributes = self._parse_attributes(self._json) LOG.debug('Received representation of %(type)s %(path)s: %(json)s', {'type': self.__class__.__name__, - 'path': self._path, 'json': self._json}) - self._parse_attributes() + 'path': self._path, 'json': attributes}) + self._do_refresh(force) + + # Mark it fresh + self._is_stale = False + + def _do_refresh(self, force): + """Primitive method to be overridden by refresh related activities. + + Derived classes are supposed to override this method with the + resource specific refresh operations to be performed. This is a + primitive method in the paradigm of Template design pattern. + + As for the base implementation of this method the approach taken is: + On refresh, all sub-resources are marked as stale. That means + invalidate (or undefine) the exposed attributes for nested resources + for fresh evaluation in subsequent calls to those exposed attributes. + In other words greedy-refresh is not done for them, unless forced by + ``force`` argument. + + :param force: should force refresh the resource and its sub-resources, + if set to True. + :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + utils.cache_clear(self, force_refresh=force) + + def invalidate(self, force_refresh=False): + """Mark the resource as stale, prompting refresh() before getting used. + + If ``force_refresh`` is set to True, then it invokes ``refresh()`` + on the resource. + + :param force_refresh: will invoke refresh on the resource, + if set to True. 
+ :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + self._is_stale = True + if force_refresh: + self.refresh() + + @property + def oem_vendors(self): + return list( + set((self._oem_vendors or []) + (self.links.oem_vendors or [])) + ) @property def json(self): @@ -228,9 +689,35 @@ class ResourceBase(object): def path(self): return self._path + def clone_resource(self, new_resource, path=''): + """Instantiate given resource using existing BMC connection context""" + return new_resource( + self._conn, path or self.path, + redfish_version=self.redfish_version, + reader=self._reader) + + @property + def resource_name(self): + return utils.camelcase_to_underscore_joined(self.__class__.__name__) + + def get_oem_extension(self, vendor): + """Get the OEM extension instance for this resource by OEM vendor + + :param vendor: the OEM vendor string which is the vendor-specific + extensibility identifier. Examples are 'Contoso', 'Hpe'. + Possible value can be got from ``oem_vendors`` attribute. + :returns: the Redfish resource OEM extension instance. + :raises: OEMExtensionNotFoundError + """ + return oem.get_resource_extension_by_vendor( + self.resource_name, vendor, self) + + @property + def registries(self): + return self._registries + -@six.add_metaclass(abc.ABCMeta) -class ResourceCollectionBase(ResourceBase): +class ResourceCollectionBase(ResourceBase, metaclass=abc.ABCMeta): name = Field('Name') """The name of the collection""" @@ -239,18 +726,20 @@ class ResourceCollectionBase(ResourceBase): adapter=utils.get_members_identities) """A tuple with the members identities""" - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing the base of any Redfish resource collection - It gets inherited ``ResourceBase`` and invokes the base class + It gets inherited from ``ResourceBase`` and invokes the base class constructor. 
:param connector: A Connector instance :param path: sub-URI path to the resource collection. - :param redfish_version: The version of RedFish. Used to construct + :param redfish_version: The version of Redfish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. """ - super(ResourceCollectionBase, self).__init__(connector, path, - redfish_version) + super(ResourceCollectionBase, self).__init__( + connector, path, redfish_version, registries) LOG.debug('Received %(count)d member(s) for %(type)s %(path)s', {'count': len(self.members_identities), 'type': self.__class__.__name__, 'path': self._path}) @@ -271,9 +760,10 @@ class ResourceCollectionBase(ResourceBase): :returns: The ``_resource_type`` object :raises: ResourceNotFoundError """ - return self._resource_type(self._conn, identity, - redfish_version=self.redfish_version) + return self._resource_type( + self._conn, identity, self.redfish_version, self.registries) + @utils.cache_it def get_members(self): """Return a list of ``_resource_type`` objects present in collection diff --git a/sushy/resources/chassis/__init__.py b/sushy/resources/chassis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/chassis/chassis.py b/sushy/resources/chassis/chassis.py new file mode 100644 index 0000000000000000000000000000000000000000..0e1eae7f0966000b5e0613bc9a51feb7a9f599a6 --- /dev/null +++ b/sushy/resources/chassis/chassis.py @@ -0,0 +1,309 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Chassis.v1_8_0.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources.chassis import mappings as cha_maps +from sushy.resources.chassis.power import power +from sushy.resources.chassis.thermal import thermal +from sushy.resources import common +from sushy.resources.manager import manager +from sushy.resources import mappings as res_maps +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class ActionsField(base.CompositeField): + reset = common.ResetActionField('#Chassis.Reset') + + +class PhysicalSecurity(base.CompositeField): + intrusion_sensor = base.MappedField('IntrusionSensor', + cha_maps.CHASSIS_INTRUSION_SENSOR_MAP) + """IntrusionSensor + This indicates the known state of the physical security sensor, such as if + it is hardware intrusion detected. + """ + + intrusion_sensor_number = base.Field('IntrusionSensorNumber') + """A numerical identifier to represent the physical security sensor""" + + intrusion_sensor_re_arm = ( + base.MappedField('IntrusionSensorReArm', + cha_maps.CHASSIS_INTRUSION_SENSOR_RE_ARM_MAP)) + """This indicates how the Normal state to be restored""" + + +class Chassis(base.ResourceBase): + """Chassis resource + + The Chassis represents the physical components of a system. This + resource represents the sheet-metal confined spaces and logical zones + such as racks, enclosures, chassis and all other containers. 
+ """ + + chassis_type = base.MappedField('ChassisType', + cha_maps.CHASSIS_TYPE_VALUE_MAP, + required=True) + """The type of physical form factor of the chassis""" + + identity = base.Field('Id', required=True) + """Identifier for the chassis""" + + name = base.Field('Name', required=True) + """The chassis name""" + + asset_tag = base.Field('AssetTag') + """The user assigned asset tag of this chassis""" + + depth_mm = base.Field('DepthMm') + """Depth in millimeters + The depth of the chassis. The value of this property shall represent + the depth (length) of the chassis (in millimeters) as specified by the + manufacturer. + """ + + description = base.Field('Description') + """The chassis description""" + + height_mm = base.Field('HeightMm') + """Height in millimeters + The height of the chassis. The value of this property shall represent + the height of the chassis (in millimeters) as specified by the + manufacturer. + """ + + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """The state of the indicator LED, used to identify the chassis""" + + manufacturer = base.Field('Manufacturer') + """The manufacturer of this chassis""" + + model = base.Field('Model') + """The model number of the chassis""" + + part_number = base.Field('PartNumber') + """The part number of the chassis""" + + physical_security = PhysicalSecurity('PhysicalSecurity') + """PhysicalSecurity + This value of this property shall contain the sensor state of the physical + security. + """ + + power_state = base.MappedField('PowerState', + res_maps.POWER_STATE_VALUE_MAP) + """The current power state of the chassis""" + + serial_number = base.Field('SerialNumber') + """The serial number of the chassis""" + + sku = base.Field('SKU') + """Stock-keeping unit number (SKU) + The value of this property shall be the stock-keeping unit number for + this chassis. 
+ """ + + status = common.StatusField('Status') + """Status and Health + This property describes the status and health of the chassis and its + children. + """ + + uuid = base.Field('UUID') + """The Universal Unique Identifier (UUID) for this Chassis.""" + + weight_kg = base.Field('WeightKg') + """Weight in kilograms + The value of this property shall represent the published mass (commonly + referred to as weight) of the chassis (in kilograms). + """ + + width_mm = base.Field('WidthMm') + """Width in millimeters + The value of this property shall represent the width of the chassis + (in millimeters) as specified by the manufacturer. + """ + + _actions = ActionsField('Actions') + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a Chassis + + :param connector: A Connector instance + :param identity: The identity of the Chassis resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(Chassis, self).__init__( + connector, identity, redfish_version, registries) + + def _get_reset_action_element(self): + reset_action = self._actions.reset + + if not reset_action: + raise exceptions.MissingActionError(action='#Chassis.Reset', + resource=self._path) + return reset_action + + def get_allowed_reset_chassis_values(self): + """Get the allowed values for resetting the chassis. + + :returns: A set of allowed values. + :raises: MissingAttributeError, if Actions/#Chassis.Reset attribute + not present. 
+ """ + reset_action = self._get_reset_action_element() + + if not reset_action.allowed_values: + LOG.warning('Could not figure out the allowed values for the ' + 'reset chassis action for Chassis %s', self.identity) + return set(res_maps.RESET_TYPE_VALUE_MAP_REV) + + return set([res_maps.RESET_TYPE_VALUE_MAP[v] for v in + set(res_maps.RESET_TYPE_VALUE_MAP). + intersection(reset_action.allowed_values)]) + + def reset_chassis(self, value): + """Reset the chassis. + + :param value: The target value. + :raises: InvalidParameterValueError, if the target value is not + allowed. + """ + valid_resets = self.get_allowed_reset_chassis_values() + if value not in valid_resets: + raise exceptions.InvalidParameterValueError( + parameter='value', value=value, valid_values=valid_resets) + + value = res_maps.RESET_TYPE_VALUE_MAP_REV[value] + target_uri = self._get_reset_action_element().target_uri + + LOG.debug('Resetting the Chassis %s ...', self.identity) + self._conn.post(target_uri, data={'ResetType': value}) + LOG.info('The Chassis %s is being reset', self.identity) + + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. + + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. + """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() + + @property + @utils.cache_it + def managers(self): + """A list of managers for this chassis. + + Returns a list of `Manager` objects representing the managers + that manage this chassis. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `Manager` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagedBy"], is_collection=True) + + return [manager.Manager(self._conn, path, + self.redfish_version, self.registries) + for path in paths] + + @property + @utils.cache_it + def systems(self): + """A list of systems residing in this chassis. + + Returns a list of `System` objects representing systems being + mounted in this chassis/cabinet. + + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `System` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ComputerSystems"], is_collection=True) + + from sushy.resources.system import system + return [system.System(self._conn, path, + self.redfish_version, self.registries) + for path in paths] + + @property + @utils.cache_it + def power(self): + """Property to reference `Power` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ + return power.Power( + self._conn, + utils.get_sub_resource_path_by(self, 'Power'), + self.redfish_version, self.registries) + + @property + @utils.cache_it + def thermal(self): + """Property to reference `Thermal` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. 
+ """ + return thermal.Thermal( + self._conn, + utils.get_sub_resource_path_by(self, 'Thermal'), + self.redfish_version, self.registries) + + +class ChassisCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Chassis + + def __init__(self, connector, path, redfish_version=None, registries=None): + """A class representing a ChassisCollection + + :param connector: A Connector instance + :param path: The canonical path to the Chassis collection resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(ChassisCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/chassis/constants.py b/sushy/resources/chassis/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..3235af4b5372643969a4c275399905657b0ed3ba --- /dev/null +++ b/sushy/resources/chassis/constants.py @@ -0,0 +1,162 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Values comes from the Redfish Chassis json-schema 1.8.0: +# http://redfish.dmtf.org/schemas/v1/Chassis.v1_8_0.json#/definitions/Chassis + +# Chassis Types constants + +CHASSIS_TYPE_RACK = 'rack chassis type' +"""An equipment rack, typically a 19-inch wide freestanding unit""" + +CHASSIS_TYPE_BLADE = 'blade chassis type' +"""Blade + +An enclosed or semi-enclosed, typically vertically-oriented, system +chassis which must be plugged into a multi-system chassis to function +normally. +""" + +CHASSIS_TYPE_ENCLOSURE = 'enclosure chassis type' +"""A generic term for a chassis that does not fit any other description""" + +CHASSIS_TYPE_STAND_ALONE = 'stand alone chassis type' +"""StandAlone + +A single, free-standing system, commonly called a tower or desktop +chassis. +""" + +CHASSIS_TYPE_RACK_MOUNT = 'rack mount chassis type' +"""RackMount + +A single system chassis designed specifically for mounting in an +equipment rack. +""" + +CHASSIS_TYPE_CARD = 'card chassis type' +"""Card + +A loose device or circuit board intended to be installed in a system or +other enclosure. +""" + +CHASSIS_TYPE_CARTRIDGE = 'cartridge chassis type' +"""Cartridge + +A small self-contained system intended to be plugged into a multi-system +chassis""" + +CHASSIS_TYPE_ROW = 'row chassis type' +"""A collection of equipment rack""" + +CHASSIS_TYPE_POD = 'pod chassis type' +"""Pod + +A collection of equipment racks in a large, likely transportable, +container""" + +CHASSIS_TYPE_EXPANSION = 'expansion chassis type' +"""A chassis which expands the capabilities or capacity of another chassis""" + +CHASSIS_TYPE_SIDECAR = 'sidecar chassis type' +"""Sidecar + +A chassis that mates mechanically with another chassis to expand its +capabilities or capacity. +""" + +CHASSIS_TYPE_ZONE = 'zone chassis type' +"""Zone + +A logical division or portion of a physical chassis that contains multiple +devices or systems that cannot be physically separated. 
+""" + +CHASSIS_TYPE_SLED = 'sled chassis type' +"""Sled + +An enclosed or semi-enclosed, system chassis which must be plugged into a +multi-system chassis to function normally similar to a blade type chassis. +""" + +CHASSIS_TYPE_SHELF = 'shelf chassis type' +"""Shelf + +An enclosed or semi-enclosed, typically horizontally-oriented, system chassis +which must be plugged into a multi-system chassis to function +normally. +""" + +CHASSIS_TYPE_DRAWER = 'drawer chassis type' +"""Drawer + +An enclosed or semi-enclosed, typically horizontally-oriented, system +chassis which may be slid into a multi-system chassis. +""" + +CHASSIS_TYPE_MODULE = 'module chassis type' +"""Module + +A small, typically removable, chassis or card which contains devices for +a particular subsystem or function. +""" + +CHASSIS_TYPE_COMPONENT = 'component chassis type' +"""Component + +A small chassis, card, or device which contains devices for a particular +subsystem or function. +""" + +CHASSIS_TYPE_IP_BASED_DRIVE = 'IP based drive chassis type' +"""A chassis in a drive form factor with IP-based network connections""" + +CHASSIS_TYPE_RACK_GROUP = 'rack group chassis type' +"""A group of racks which form a single entity or share infrastructure""" + +CHASSIS_TYPE_STORAGE_ENCLOSURE = 'storage enclosure chassis type' +"""A chassis which encloses storage""" + +CHASSIS_TYPE_OTHER = 'other chassis type' +"""A chassis that does not fit any of these definitions""" + +# Chassis IntrusionSensor constants + +CHASSIS_INTRUSION_SENSOR_NORMAL = 'normal chassis intrusion sensor' +"""No abnormal physical security conditions are detected at this time""" + +CHASSIS_INTRUSION_SENSOR_HARDWARE_INTRUSION = 'hardware intrusion chassis ' \ + 'intrusion sensor' +"""HardwareIntrusion + +A door, lock, or other mechanism protecting the internal system hardware from +being accessed is detected as being in an insecure state. 
+""" + +CHASSIS_INTRUSION_SENSOR_TAMPERING_DETECTED = 'tampering detected chassis ' \ + 'intrusion sensor' +"""Physical tampering of the monitored entity is detected""" + +# Chassis IntrusionSensorReArm constants + +CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL = 'manual re arm chassis intrusion ' \ + 'sensor' +"""This sensor would be restored to the Normal state by a manual re-arm""" + +CHASSIS_INTRUSION_SENSOR_RE_ARM_AUTOMATIC = 'automatic re arm chassis ' \ + 'intrusion sensor' +"""Automatic + +This sensor would be restored to the Normal state automatically as no abnormal +physical security conditions are detected. +""" diff --git a/sushy/resources/chassis/mappings.py b/sushy/resources/chassis/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..eaa8c16a9976242d105ebac5045771a73a6c8437 --- /dev/null +++ b/sushy/resources/chassis/mappings.py @@ -0,0 +1,48 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.chassis import constants as cha_cons + +CHASSIS_TYPE_VALUE_MAP = { + 'Rack': cha_cons.CHASSIS_TYPE_RACK, + 'Blade': cha_cons.CHASSIS_TYPE_BLADE, + 'Enclosure': cha_cons.CHASSIS_TYPE_ENCLOSURE, + 'StandAlone': cha_cons.CHASSIS_TYPE_STAND_ALONE, + 'RackMount': cha_cons.CHASSIS_TYPE_RACK_MOUNT, + 'Card': cha_cons.CHASSIS_TYPE_CARD, + 'Cartridge': cha_cons.CHASSIS_TYPE_CARTRIDGE, + 'Row': cha_cons.CHASSIS_TYPE_ROW, + 'Pod': cha_cons.CHASSIS_TYPE_POD, + 'Expansion': cha_cons.CHASSIS_TYPE_EXPANSION, + 'Sidecar': cha_cons.CHASSIS_TYPE_SIDECAR, + 'Zone': cha_cons.CHASSIS_TYPE_ZONE, + 'Sled': cha_cons.CHASSIS_TYPE_SLED, + 'Shelf': cha_cons.CHASSIS_TYPE_SHELF, + 'Drawer': cha_cons.CHASSIS_TYPE_DRAWER, + 'Module': cha_cons.CHASSIS_TYPE_MODULE, + 'Component': cha_cons.CHASSIS_TYPE_COMPONENT, + 'IPBasedDrive': cha_cons.CHASSIS_TYPE_IP_BASED_DRIVE, + 'RackGroup': cha_cons.CHASSIS_TYPE_RACK_GROUP, + 'StorageEnclosure': cha_cons.CHASSIS_TYPE_STORAGE_ENCLOSURE, + 'Other': cha_cons.CHASSIS_TYPE_OTHER, +} + +CHASSIS_INTRUSION_SENSOR_MAP = { + 'Normal': cha_cons.CHASSIS_INTRUSION_SENSOR_NORMAL, + 'HardwareIntrusion': cha_cons.CHASSIS_INTRUSION_SENSOR_HARDWARE_INTRUSION, + 'TamperingDetected': cha_cons.CHASSIS_INTRUSION_SENSOR_TAMPERING_DETECTED, +} + +CHASSIS_INTRUSION_SENSOR_RE_ARM_MAP = { + 'Manual': cha_cons.CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL, + 'Automatic': cha_cons.CHASSIS_INTRUSION_SENSOR_RE_ARM_AUTOMATIC, +} diff --git a/sushy/resources/chassis/power/__init__.py b/sushy/resources/chassis/power/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/chassis/power/constants.py b/sushy/resources/chassis/power/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..1425680f289825f78532a0cc162cbdd9c047deeb --- /dev/null +++ b/sushy/resources/chassis/power/constants.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 
(the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+# Power Supply Types
+POWER_SUPPLY_TYPE_UNKNOWN = 'unknown'
+"""The power supply type cannot be determined."""
+
+POWER_SUPPLY_TYPE_AC = 'ac'
+"""Alternating Current (AC) power supply."""
+
+POWER_SUPPLY_TYPE_DC = 'dc'
+"""Direct Current (DC) power supply."""
+
+POWER_SUPPLY_TYPE_ACDC = 'acdc'
+"""Power Supply supports both DC or AC."""
+
+# Line Input Voltage Types
+LINE_INPUT_VOLTAGE_TYPE_UNKNOWN = 'unknown'
+"""The power supply line input voltage type cannot be determined."""
+
+LINE_INPUT_VOLTAGE_TYPE_ACLOW = 'aclowline'
+"""100-127V AC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_ACMID = 'acmidline'
+"""200-240V AC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_ACHIGH = 'achighline'
+"""277V AC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_DCNEG48 = 'dcneg48v'
+"""-48V DC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_DC380 = 'dc380v'
+"""High Voltage DC input (380V)."""
+
+LINE_INPUT_VOLTAGE_TYPE_AC120 = 'ac120v'
+"""AC 120V nominal input."""
+
+LINE_INPUT_VOLTAGE_TYPE_AC240 = 'ac240v'
+"""AC 240V nominal input."""
+
+LINE_INPUT_VOLTAGE_TYPE_AC277 = 'ac277v'
+"""AC 277V nominal input."""
+
+LINE_INPUT_VOLTAGE_TYPE_ACDCWIDE = 'acdcwiderange'
+"""Wide range AC or DC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_ACWIDE = 'acwiderange'
+"""Wide range AC input."""
+
+LINE_INPUT_VOLTAGE_TYPE_DC240 = 'dc240v'
+"""DC 240V nominal input."""
+
+# Input Types
+INPUT_TYPE_AC = 'ac'
+"""Alternating Current (AC) input range."""
+
+INPUT_TYPE_DC = 'dc'
+"""Direct Current (DC) input range."""
diff --git 
a/sushy/resources/chassis/power/mappings.py b/sushy/resources/chassis/power/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..12d8a1d519341dc3845629783d156e2706e03d61 --- /dev/null +++ b/sushy/resources/chassis/power/mappings.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.chassis.power import constants as pow_cons + +POWER_SUPPLY_TYPE_MAP = { + 'Unknown': pow_cons.POWER_SUPPLY_TYPE_UNKNOWN, + 'AC': pow_cons.POWER_SUPPLY_TYPE_AC, + 'DC': pow_cons.POWER_SUPPLY_TYPE_DC, + 'ACorDC': pow_cons.POWER_SUPPLY_TYPE_ACDC, +} + +POWER_SUPPLY_INPUT_TYPE_MAP = { + 'AC': pow_cons.INPUT_TYPE_AC, + 'DC': pow_cons.INPUT_TYPE_DC, +} + +LINE_INPUT_VOLTAGE_TYPE_MAP = { + 'Unknown': pow_cons.LINE_INPUT_VOLTAGE_TYPE_UNKNOWN, + 'ACLowLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACLOW, + 'ACMidLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACMID, + 'ACHighLine': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACHIGH, + 'DCNeg48V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DCNEG48, + 'DC380V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DC380, + 'AC120V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC120, + 'AC240V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC240, + 'AC277V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_AC277, + 'ACandDCWideRange': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACDCWIDE, + 'ACWideRange': pow_cons.LINE_INPUT_VOLTAGE_TYPE_ACWIDE, + 'DC240V': pow_cons.LINE_INPUT_VOLTAGE_TYPE_DC240, +} diff --git a/sushy/resources/chassis/power/power.py b/sushy/resources/chassis/power/power.py new 
file mode 100644 index 0000000000000000000000000000000000000000..c48e1d4f3a308a3119009ba4f471dbb67ba66be1 --- /dev/null +++ b/sushy/resources/chassis/power/power.py @@ -0,0 +1,122 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Power.v1_3_0.json + +from sushy.resources import base +from sushy.resources.chassis.power import mappings as pow_maps +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy import utils + + +class InputRangeListField(base.ListField): + """This type describes an input range for a power supply""" + + input_type = base.MappedField('InputType', + pow_maps.POWER_SUPPLY_INPUT_TYPE_MAP) + """The Input type (AC or DC)""" + + maximum_frequency_hz = base.Field('MaximumFrequencyHz', + adapter=utils.int_or_none) + """The maximum line input frequency at which this power supply input range + is effective""" + + maximum_voltage = base.Field('MaximumVoltage', adapter=utils.int_or_none) + """The maximum line input voltage at which this power supply input range + is effective""" + + minimum_frequency_hz = base.Field('MinimumFrequencyHz', + adapter=utils.int_or_none) + """The minimum line input frequency at which this power supply input range + is effective""" + + minimum_voltage = base.Field('MinimumVoltage', adapter=utils.int_or_none) + """The minimum line input voltage at which this power supply input range + is effective""" + + 
output_wattage = base.Field('OutputWattage', adapter=utils.int_or_none)
+    """The maximum capacity of this Power Supply when operating in this input
+    range"""
+
+
+class PowerSupplyListField(base.ListField):
+    """The power supplies associated with this Power resource"""
+
+    firmware_version = base.Field('FirmwareVersion')
+    """The firmware version for this Power Supply"""
+
+    identity = base.Field('MemberId')
+    """Identifier of the Power Supply"""
+
+    indicator_led = base.MappedField('IndicatorLED',
+                                     res_maps.INDICATOR_LED_VALUE_MAP)
+    """The state of the indicator LED, used to identify the power supply"""
+
+    input_ranges = InputRangeListField('InputRanges', default=[])
+    """This is the input ranges that the power supply can use"""
+
+    last_power_output_watts = base.Field('LastPowerOutputWatts',
+                                         adapter=utils.int_or_none)
+    """The average power output of this Power Supply"""
+
+    line_input_voltage = base.Field('LineInputVoltage',
+                                    adapter=utils.int_or_none)
+    """The line input voltage at which the Power Supply is operating"""
+
+    line_input_voltage_type = base.MappedField(
+        'LineInputVoltageType',
+        pow_maps.LINE_INPUT_VOLTAGE_TYPE_MAP)
+    """The line voltage type supported as an input to this Power Supply"""
+
+    manufacturer = base.Field('Manufacturer')
+    """This is the manufacturer of this power supply"""
+
+    model = base.Field('Model')
+    """The model number for this Power Supply"""
+
+    name = base.Field('Name')
+    """Name of the Power Supply"""
+
+    part_number = base.Field('PartNumber')
+    """The part number for this Power Supply"""
+
+    power_capacity_watts = base.Field('PowerCapacityWatts',
+                                      adapter=utils.int_or_none)
+    """The maximum capacity of this Power Supply"""
+
+    power_supply_type = base.MappedField('PowerSupplyType',
+                                         pow_maps.POWER_SUPPLY_TYPE_MAP)
+    """The Power Supply type (AC or DC)"""
+
+    serial_number = base.Field('SerialNumber')
+    """The serial number for this Power Supply"""
+
+    spare_part_number = base.Field('SparePartNumber')
+    """The 
spare part number for this Power Supply""" + + status = common.StatusField('Status') + """Status of the sensor""" + + +class Power(base.ResourceBase): + """This class represents a Power resource.""" + + identity = base.Field('Id', required=True) + """Identifier of the resource""" + + name = base.Field('Name', required=True) + """The name of the resource""" + + power_supplies = PowerSupplyListField('PowerSupplies', default=[]) + """Details of a power supplies associated with this system or device""" diff --git a/sushy/resources/chassis/thermal/__init__.py b/sushy/resources/chassis/thermal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/chassis/thermal/constants.py b/sushy/resources/chassis/thermal/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..fbf8b463bb919980776a59080777b144e5d84e7e --- /dev/null +++ b/sushy/resources/chassis/thermal/constants.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +FAN_READING_UNIT_PERCENTAGE = 'Percentage' +"""Indicates that the fan reading and thresholds are measured in percentage""" + +FAN_READING_UNIT_RPM = 'RPM' +"""Indicates that the fan reading and thresholds +are measured in rotations per minute.""" diff --git a/sushy/resources/chassis/thermal/mappings.py b/sushy/resources/chassis/thermal/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..e67f63fc326d1188935ff8042f3fdaf4eb4bd342 --- /dev/null +++ b/sushy/resources/chassis/thermal/mappings.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.chassis.thermal import constants as the_cons + +FAN_READING_UNITS_MAP = { + 'Percentage': the_cons.FAN_READING_UNIT_PERCENTAGE, + 'RPM': the_cons.FAN_READING_UNIT_RPM, +} diff --git a/sushy/resources/chassis/thermal/thermal.py b/sushy/resources/chassis/thermal/thermal.py new file mode 100644 index 0000000000000000000000000000000000000000..b058b818a88e5963366d9ff85feaf370205df874 --- /dev/null +++ b/sushy/resources/chassis/thermal/thermal.py @@ -0,0 +1,138 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Thermal.v1_3_0.json + +from sushy.resources import base +from sushy.resources.chassis.thermal import mappings as the_maps +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy import utils + + +class Sensor(base.ListField): + """The sensor device/s associated with Thermal.""" + + identity = base.Field('MemberId', required=True) + """Identifier of the Sensor""" + + lower_threshold_critical = base.Field('LowerThresholdCritical', + adapter=utils.int_or_none) + """Below normal range but not yet fatal""" + + lower_threshold_fatal = base.Field('LowerThresholdFatal', + adapter=utils.int_or_none) + """Below normal range and is fatal""" + + lower_threshold_non_critical = base.Field('LowerThresholdNonCritical', + adapter=utils.int_or_none) + """Below normal range""" + + name = base.Field('Name') + """The name of this sensor""" + + physical_context = base.Field('PhysicalContext') + """Area or device associated with this sensor""" + + status = common.StatusField('Status') + """Status of the sensor""" + + upper_threshold_critical = base.Field('UpperThresholdCritical', + adapter=utils.int_or_none) + """Above normal range but not yet fatal""" + + upper_threshold_fatal = base.Field('UpperThresholdFatal', + adapter=utils.int_or_none) + """Above normal range and is fatal""" + + upper_threshold_non_critical = base.Field('UpperThresholdNonCritical', + adapter=utils.int_or_none) + """Above normal range""" + + +class FansListField(Sensor): + 
"""The Fan device/s associated with Thermal.""" + + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """The state of the indicator LED, used to identify the fan""" + + manufacturer = base.Field('Manufacturer') + """This is the manufacturer of this Fan""" + + max_reading_range = base.Field('MaxReadingRange', + adapter=utils.int_or_none) + """Maximum value for Reading""" + + min_reading_range = base.Field('MinReadingRange', + adapter=utils.int_or_none) + """Minimum value for Reading""" + + model = base.Field('Model') + """The model of this Fan""" + + part_number = base.Field('PartNumber') + """Part number of this Fan""" + + reading = base.Field('Reading', adapter=utils.int_or_none) + """Current Fan Speed""" + + reading_units = base.MappedField('ReadingUnits', + the_maps.FAN_READING_UNITS_MAP) + """Units in which the reading and thresholds are measured""" + + serial_number = base.Field('SerialNumber') + """Serial number of this Fan""" + + +class TemperaturesListField(Sensor): + """The Temperature device/s associated with Thermal.""" + + max_allowable_operating_value = base.Field('MaxAllowableOperatingValue', + adapter=utils.int_or_none) + """Maximum allowable operating temperature for this equipment""" + + min_allowable_operating_value = base.Field('MinAllowableOperatingValue', + adapter=utils.int_or_none) + """Minimum allowable operating temperature for this equipment""" + + max_reading_range_temp = base.Field('MaxReadingRangeTemp') + """Maximum value for ReadingCelsius""" + + min_reading_range_temp = base.Field('MinReadingRangeTemp') + """Minimum value for ReadingCelsius""" + + reading_celsius = base.Field('ReadingCelsius') + """Temperature""" + + sensor_number = base.Field('SensorNumber', adapter=utils.int_or_none) + """A numerical identifier to represent the temperature sensor""" + + +class Thermal(base.ResourceBase): + """This class represents a Thermal resource.""" + + identity = base.Field('Id') + """Identifier of the 
resource""" + + name = base.Field('Name') + """The name of the resource""" + + status = common.StatusField('Status') + """Status of the resource""" + + fans = FansListField('Fans', default=[]) + """A tuple of Fan identities""" + + temperatures = TemperaturesListField('Temperatures', default=[]) + """A tuple of Temperature identities""" diff --git a/sushy/resources/common.py b/sushy/resources/common.py index 24322cf63e48d4e1be2aac00d3edd3a389827ab6..8d1e470378ddb198e2e6673542c92023f8e8368d 100644 --- a/sushy/resources/common.py +++ b/sushy/resources/common.py @@ -10,11 +10,88 @@ # License for the specific language governing permissions and limitations # under the License. +from dateutil import parser + from sushy.resources import base +from sushy.resources import mappings as res_maps + + +class IdRefField(base.CompositeField): + """Reference to the resource odata identity field.""" + + resource_uri = base.Field('@odata.id') + """The unique identifier for a resource""" + + +class OperationApplyTimeSupportField(base.CompositeField): + def __init__(self): + super(OperationApplyTimeSupportField, self).__init__( + path="@Redfish.OperationApplyTimeSupport") + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', adapter=int) + """The expiry time of maintenance window in seconds""" + _maintenance_window_resource = IdRefField('MaintenanceWindowResource') + """The location of the maintenance window settings""" + + maintenance_window_start_time = base.Field( + 'MaintenanceWindowStartTime', + adapter=parser.parse) + """The start time of a maintenance window""" + + supported_values = base.Field('SupportedValues', required=True, + adapter=list) + """The types of apply times that the client is allowed request when + performing a create, delete, or action operation returned as an unmapped + list + + Deprecated: Use `mapped_supported_values`. 
+ """ + + mapped_supported_values = base.MappedListField( + 'SupportedValues', res_maps.APPLY_TIME_VALUE_MAP, required=True) + """The types of apply times that the client is allowed request when + performing a create, delete, or action operation returned as a mapped + list""" + + +class ActionField(base.CompositeField): + target_uri = base.Field('target', required=True) + operation_apply_time_support = OperationApplyTimeSupportField() -class ResetActionField(base.CompositeField): + +class ResetActionField(ActionField): allowed_values = base.Field('ResetType@Redfish.AllowableValues', adapter=list) - target_uri = base.Field('target', required=True) + +class InitializeActionField(ActionField): + allowed_values = base.Field('InitializeType@Redfish.AllowableValues', + adapter=list) + + +class StatusField(base.CompositeField): + """This Field describes the status of a resource and its children. + + This field shall contain any state or health properties of a resource. + """ + health = base.MappedField('Health', res_maps.HEALTH_VALUE_MAP) + """Represents health of resource w/o considering its dependent resources""" + + health_rollup = base.MappedField('HealthRollup', res_maps.HEALTH_VALUE_MAP) + """Represents health state of resource and its dependent resources""" + + state = base.MappedField('State', res_maps.STATE_VALUE_MAP) + """Indicates the known state of the resource, such as if it is enabled.""" + + +class IdentifiersListField(base.ListField): + """This type describes any additional identifiers for a resource.""" + + durable_name = base.Field('DurableName') + """This indicates the world wide, persistent name of the resource.""" + + durable_name_format = base.MappedField('DurableNameFormat', + res_maps.DUR_NAME_FORMAT_VALUE_MAP) + """This represents the format of the DurableName property.""" diff --git a/sushy/resources/compositionservice/__init__.py b/sushy/resources/compositionservice/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/compositionservice/compositionservice.py b/sushy/resources/compositionservice/compositionservice.py new file mode 100644 index 0000000000000000000000000000000000000000..b7331650bcf4ccc2fc6caa885351a8f4aa4b3021 --- /dev/null +++ b/sushy/resources/compositionservice/compositionservice.py @@ -0,0 +1,97 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/CompositionService.v1_1_0.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources.compositionservice import resourceblock +from sushy.resources.compositionservice import resourcezone +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class CompositionService(base.ResourceBase): + + allow_overprovisioning = base.Field('AllowOverprovisioning') + """This indicates whether this service is allowed to overprovision""" + + allow_zone_affinity = base.Field('AllowZoneAffinity') + """This indicates whether a client is allowed to request that given + composition request""" + + description = base.Field('Description') + """The composition service description""" + + identity = base.Field('Id', required=True) + """The composition service identity string""" + + name = base.Field('Name', required=True) + """The composition service name""" + + status = common.StatusField('Status') + """The status of composition service""" + + service_enabled = base.Field('ServiceEnabled') + """The status of composition service is enabled""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a CompositionService + + :param connector: A connector instance + :param identity: The identity of the CompositionService resource + :param redfish_version: The version of RedFish. 
Used to construct
+        the object according to schema of given version
+        :param registries: Dict of Redfish Message Registry objects to be
+            used in any resource that needs registries to parse messages
+        """
+        super(CompositionService, self).__init__(
+            connector, identity, redfish_version, registries)
+
+    def _get_resource_blocks_collection_path(self):
+        """Helper function to find the ResourceBlockCollections path"""
+        res_block_col = self.json.get('ResourceBlocks')
+        if not res_block_col:
+            raise exceptions.MissingAttributeError(
+                attribute='ResourceBlocks', resource=self._path)
+        return res_block_col.get('@odata.id')
+
+    def _get_resource_zones_collection_path(self):
+        """Helper function to find the ResourceZoneCollections path"""
+        res_zone_col = self.json.get('ResourceZones')
+        if not res_zone_col:
+            raise exceptions.MissingAttributeError(
+                attribute='ResourceZones', resource=self._path)
+        return res_zone_col.get('@odata.id')
+
+    @property
+    @utils.cache_it
+    def resource_blocks(self):
+        """Property to reference `ResourceBlockCollection` instance"""
+        return resourceblock.ResourceBlockCollection(
+            self._conn, self._get_resource_blocks_collection_path(),
+            self.redfish_version, self.registries)
+
+    @property
+    @utils.cache_it
+    def resource_zones(self):
+        """Property to reference `ResourceZoneCollection` instance"""
+        return resourcezone.ResourceZoneCollection(
+            self._conn, self._get_resource_zones_collection_path(),
+            self.redfish_version, self.registries)
diff --git a/sushy/resources/compositionservice/constants.py b/sushy/resources/compositionservice/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..ad128592c62c416d94d341e1fff978aea2cb286c --- /dev/null +++ b/sushy/resources/compositionservice/constants.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish ResourceBlock json-schema. +# https://redfish.dmtf.org/schemas/ResourceBlock.v1_3_0.json + +# Composition state related constants +COMPOSITION_STATE_COMPOSING = 'Composing' +COMPOSITION_STATE_COMPOSED_AND_AVAILABLE = 'ComposedAndAvailable' +COMPOSITION_STATE_COMPOSED = 'Composed' +COMPOSITION_STATE_UNUSED = 'Unused' +COMPOSITION_STATE_FAILED = 'Failed' +COMPOSITION_STATE_UNAVAILABLE = 'Unavailable' + +# Resource Block type related constants +RESOURCE_BLOCK_TYPE_COMPUTE = 'Compute' +RESOURCE_BLOCK_TYPE_PROCESSOR = 'Processor' +RESOURCE_BLOCK_TYPE_MEMORY = 'Memory' +RESOURCE_BLOCK_TYPE_NETWORK = 'Network' +RESOURCE_BLOCK_TYPE_STORAGE = 'Storage' +RESOURCE_BLOCK_TYPE_COMPUTERSYSTEM = 'ComputerSystem' +RESOURCE_BLOCK_TYPE_EXPANSION = 'Expansion' diff --git a/sushy/resources/compositionservice/mappings.py b/sushy/resources/compositionservice/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..5e09f91b05ca8f62acf5a6b40863815e7bbc232b --- /dev/null +++ b/sushy/resources/compositionservice/mappings.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.compositionservice import constants as comp_cons +from sushy import utils + + +COMPOSITION_STATE_VALUE_MAP = { + 'Composing': comp_cons.COMPOSITION_STATE_COMPOSING, + 'ComposedAndAvailable': comp_cons.COMPOSITION_STATE_COMPOSED_AND_AVAILABLE, + 'Composed': comp_cons.COMPOSITION_STATE_COMPOSED, + 'Unused': comp_cons.COMPOSITION_STATE_UNUSED, + 'Failed': comp_cons.COMPOSITION_STATE_FAILED, + 'Unavailable': comp_cons.COMPOSITION_STATE_UNAVAILABLE +} + +COMPOSITION_STATE_VALUE_MAP_REV = ( + utils.revert_dictionary(COMPOSITION_STATE_VALUE_MAP)) + +RESOURCE_BLOCK_TYPE_VALUE_MAP = { + 'Compute': comp_cons.RESOURCE_BLOCK_TYPE_COMPUTE, + 'Processor': comp_cons.RESOURCE_BLOCK_TYPE_PROCESSOR, + 'Memory': comp_cons.RESOURCE_BLOCK_TYPE_MEMORY, + 'Network': comp_cons.RESOURCE_BLOCK_TYPE_NETWORK, + 'Storage': comp_cons.RESOURCE_BLOCK_TYPE_STORAGE, + 'ComputerSystem': comp_cons.RESOURCE_BLOCK_TYPE_COMPUTERSYSTEM, + 'Expansion': comp_cons.RESOURCE_BLOCK_TYPE_EXPANSION +} + +RESOURCE_BLOCK_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(RESOURCE_BLOCK_TYPE_VALUE_MAP)) diff --git a/sushy/resources/compositionservice/resourceblock.py b/sushy/resources/compositionservice/resourceblock.py new file mode 100644 index 0000000000000000000000000000000000000000..c9b7740c3b8b5a5e3945c609e827fb4c17dfeb51 --- /dev/null +++ b/sushy/resources/compositionservice/resourceblock.py @@ -0,0 +1,116 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/ResourceBlock.v1_1_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.compositionservice import mappings as res_maps + +LOG = logging.getLogger(__name__) + + +class CompositionStatusField(base.CompositeField): + + composition_state = base.MappedField( + 'CompositionState', + res_maps.COMPOSITION_STATE_VALUE_MAP, + required=True) + """Inform the client, state of the resource block""" + + max_compositions = base.Field('MaxCompositions') + """The maximum number of compositions""" + + number_of_compositions = base.Field('NumberOfCompositions') + """The number of compositions""" + + reserved_state = base.Field('Reserved') + """Inform the resource block has been identified by a client""" + + sharing_capable = base.Field('SharingCapable') + """Indicates if this Resource Block is capable of participating in + multiple compositions simultaneously""" + + sharing_enabled = base.Field('SharingEnabled') + """Indicates if this Resource Block is allowed to participate in + multiple compositions simultaneously""" + + +class ResourceBlock(base.ResourceBase): + + composition_status = CompositionStatusField( + 'CompositionStatus', + required=True) + """The composition state of resource block""" + + description = base.Field('Description') + """The resource block description""" + + identity = base.Field('Id', required=True) + """The resource block identity string""" + + name = base.Field('Name', required=True) + """The resource block name""" + + resource_block_type = base.MappedField( + 'ResourceBlockType', + res_maps.RESOURCE_BLOCK_TYPE_VALUE_MAP, + required=True) + """The type of resource block""" + + status = common.StatusField('Status') + """The status of resource block""" + + def __init__(self, connector, identity, 
redfish_version=None, + registries=None): + """A class representing a ResourceBlock + + :param connector: A Connector instance + :param identity: The identity of the ResourceBlock resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(ResourceBlock, self).__init__( + connector, identity, redfish_version, registries) + + +class ResourceBlockCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The resource block collection name""" + + description = base.Field('Description') + """The resource block collection description""" + + @property + def _resource_type(self): + return ResourceBlock + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a ResourceBlockCollection + + :param connector: A Connector instance + :param identity: A identity of the ResourceBlock resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(ResourceBlockCollection, self).__init__( + connector, identity, redfish_version, registries) diff --git a/sushy/resources/compositionservice/resourcezone.py b/sushy/resources/compositionservice/resourcezone.py new file mode 100644 index 0000000000000000000000000000000000000000..1ad8560b5a3e4e8640d84acd277bd29dcde3b8d1 --- /dev/null +++ b/sushy/resources/compositionservice/resourcezone.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/Zone.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common + +LOG = logging.getLogger(__name__) + + +class LinksField(base.CompositeField): + + endpoints = base.Field('Endpoints') + """The references to the endpoints that are contained in this zone""" + + involved_switches = base.Field('InvolvedSwitches') + """The references to the switches in this zone""" + + resource_blocks = base.Field('ResourceBlocks') + """The references to the Resource Blocks that are used in this zone""" + + +class ResourceZone(base.ResourceBase): + + # Note(dnuka): This patch doesn't contain 100% of the ResourceZone + + description = base.Field('Description') + """The resources zone description""" + + identity = base.Field('Id', required=True) + """The resource zone identity string""" + + links = LinksField('Links') + """The references to other resources that are related to this + resource""" + + name = base.Field('Name', required=True) + """The resource zone name""" + + status = common.StatusField('Status') + """The resource zone status""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a ResourceZone + + :param connector: A Connector instance + :param identity: The identity of the ResourceZone resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(ResourceZone, self).__init__( + connector, identity, redfish_version, registries) + + +class ResourceZoneCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The resource zone collection name""" + + description = base.Field('Description') + """The resource zone collection description""" + + @property + def _resource_type(self): + return ResourceZone + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a ResourceZoneCollection + + :param connector: A Connector instance + :param identity: The identity of the ResourceZone resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(ResourceZoneCollection, self).__init__( + connector, identity, redfish_version, registries) diff --git a/sushy/resources/constants.py b/sushy/resources/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..497d2184734f004546c2e43eb0a670f81c008e50 --- /dev/null +++ b/sushy/resources/constants.py @@ -0,0 +1,167 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Values comes from the Redfish System json-schema 1.0.0: +# http://redfish.dmtf.org/schemas/v1/Resource.json or +# https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json + +# Health related constants. +HEALTH_OK = 'ok' +HEALTH_WARNING = 'warning' +HEALTH_CRITICAL = 'critical' + +# State related constants. +STATE_ENABLED = 'enabled' +STATE_DISABLED = 'disabled' +STATE_STANDBYOFFLINE = 'standby offline' +STATE_STANDBYSPARE = 'standby spare' +STATE_INTEST = 'in test' +STATE_STARTING = 'starting' +STATE_ABSENT = 'absent' +STATE_UNAVAILABLEOFFLINE = 'unavailable offline' +STATE_DEFERRING = 'deferring' +STATE_QUIESCED = 'quiesced' +STATE_UPDATING = 'updating' + +# Task state related constants +TASK_STATE_NEW = 'new' +TASK_STATE_STARTING = 'starting' +TASK_STATE_RUNNING = 'running' +TASK_STATE_SUSPENDED = 'suspended' +TASK_STATE_INTERRUPTED = 'interrupted' +TASK_STATE_PENDING = 'pending' +TASK_STATE_STOPPING = 'stopping' +TASK_STATE_COMPLETED = 'completed' +TASK_STATE_KILLED = 'killed' +TASK_STATE_EXCEPTION = 'exception' +TASK_STATE_SERVICE = 'service' +TASK_STATE_CANCELLING = 'cancelling' +TASK_STATE_CANCELLED = 'cancelled' + +# Message Registry message parameter type related constants. 
+PARAMTYPE_STRING = 'string' +PARAMTYPE_NUMBER = 'number' + +# Severity related constants +SEVERITY_OK = 'ok' +SEVERITY_WARNING = 'warning' +SEVERITY_CRITICAL = 'critical' + +# Indicator LED Constants + +INDICATOR_LED_LIT = 'indicator led lit' +"""The Indicator LED is lit""" + +INDICATOR_LED_BLINKING = 'indicator led blinking' +"""The Indicator LED is blinking""" + +INDICATOR_LED_OFF = 'indicator led off' +"""The Indicator LED is off""" + +INDICATOR_LED_UNKNOWN = 'indicator led unknown' +"""The state of the Indicator LED cannot be determine""" + +# System' PowerState constants + +POWER_STATE_ON = 'on' +"""The resource is powered on""" + +POWER_STATE_OFF = 'off' +"""The resource is powered off, although some components may continue to + have AUX power such as management controller""" + +POWER_STATE_POWERING_ON = 'powering on' +"""A temporary state between Off and On. This temporary state can + be very short""" + +POWER_STATE_POWERING_OFF = 'powering off' +"""A temporary state between On and Off. 
The power off action can take + time while the OS is in the shutdown process""" + +# Reset action constants + +RESET_TYPE_ON = 'on' +"""Turn the unit on""" + +RESET_TYPE_FORCE_ON = 'force on' +"""Turn the unit on immediately""" + +RESET_TYPE_FORCE_OFF = 'force off' +"""Turn the unit off immediately (non-graceful shutdown)""" + +RESET_TYPE_GRACEFUL_SHUTDOWN = 'graceful shutdown' +"""Perform a graceful shutdown and power off""" + +RESET_TYPE_GRACEFUL_RESTART = 'graceful restart' +"""Perform a graceful shutdown followed by a restart of the system""" + +RESET_TYPE_FORCE_RESTART = 'force restart' +"""Perform an immediate (non-graceful) shutdown, followed by a restart""" + +RESET_TYPE_NMI = 'nmi' +"""Generate a Diagnostic Interrupt (usually an NMI on x86 systems) to cease +normal operations, perform diagnostic actions and typically halt the system""" + +RESET_TYPE_PUSH_POWER_BUTTON = 'push power button' +"""Simulate the pressing of the physical power button on this unit""" + +RESET_TYPE_POWER_CYCLE = 'power cycle' +"""Perform a power cycle of the unit""" + +# Protocol type constants + +PROTOCOL_TYPE_AHCI = 'Advanced Host Controller Interface' +PROTOCOL_TYPE_CIFS = 'Common Internet File System Protocol' +PROTOCOL_TYPE_FC = 'Fibre Channel' +PROTOCOL_TYPE_FCP = 'Fibre Channel Protocol for SCSI' +PROTOCOL_TYPE_FCoE = 'Fibre Channel over Ethernet' +PROTOCOL_TYPE_FICON = 'FIbre CONnection (FICON)' +PROTOCOL_TYPE_FTP = 'File Transfer Protocol' +PROTOCOL_TYPE_HTTP = 'Hypertext Transport Protocol' +PROTOCOL_TYPE_HTTPS = 'Secure Hypertext Transport Protocol' +PROTOCOL_TYPE_I2C = 'Inter-Integrated Circuit Bus' +PROTOCOL_TYPE_NFS = 'Network File System Protocol' +PROTOCOL_TYPE_NFSv3 = 'Network File System version 3' +PROTOCOL_TYPE_NFSv4 = 'Network File System version 4' +PROTOCOL_TYPE_NVMe = 'Non-Volatile Memory Express' +PROTOCOL_TYPE_NVMeOverFabrics = 'NVMe over Fabrics' +PROTOCOL_TYPE_OEM = 'OEM specific' +PROTOCOL_TYPE_PCIe = 'PCI Express' +PROTOCOL_TYPE_RoCE = 'RDMA over 
Converged Ethernet Protocol' +PROTOCOL_TYPE_RoCEv2 = 'RDMA over Converged Ethernet Protocol Version 2' +PROTOCOL_TYPE_SAS = 'Serial Attached SCSI' +PROTOCOL_TYPE_SATA = 'Serial AT Attachment' +PROTOCOL_TYPE_SCP = 'Secure File Copy Protocol' +PROTOCOL_TYPE_SFTP = 'Secure File Transfer Protocol' +PROTOCOL_TYPE_SMB = 'Server Message Block (CIFS Common Internet File System)' +PROTOCOL_TYPE_TFTP = 'Trivial File Transfer Protocol' +PROTOCOL_TYPE_UHCI = 'Universal Host Controller Interface' +PROTOCOL_TYPE_USB = 'Universal Serial Bus' +PROTOCOL_TYPE_iSCSI = 'Internet SCSI' +PROTOCOL_TYPE_iWARP = 'Internet Wide Area Remote Direct Memory Access Protocol' + +# Durable name format constants + +DURABLE_NAME_FORMAT_EUI = 'IEEE-defined 64-bit Extended Unique Identifier' +DURABLE_NAME_FORMAT_FC_WWN = 'Fibre Channel World Wide Name' +DURABLE_NAME_FORMAT_NAA = 'Name Address Authority Format' +DURABLE_NAME_FORMAT_NQN = 'NVMe Qualified Name' +DURABLE_NAME_FORMAT_NSID = 'NVM Namespace Identifier' +DURABLE_NAME_FORMAT_UUID = 'Universally Unique Identifier' +DURABLE_NAME_FORMAT_iQN = 'iSCSI Qualified Name' + +# Apply time constants + +APPLY_TIME_IMMEDIATE = 'immediate' +APPLY_TIME_ON_RESET = 'on reset' +APPLY_TIME_MAINT_START = 'at maintenance window start' +APPLY_TIME_MAINT_RESET = 'in maintenance window on reset' diff --git a/sushy/resources/fabric/__init__.py b/sushy/resources/fabric/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/fabric/constants.py b/sushy/resources/fabric/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..7026a99a1049c733d1101927e207cfef2affe7ac --- /dev/null +++ b/sushy/resources/fabric/constants.py @@ -0,0 +1,61 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish Fabric json-schema 1.0.4: +# http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json#/definitions/Fabric + +# Address origin IPv4 constants + +ADDRESS_ORIGIN_IPv4_BOOTP = 'Address is provided by a BOOTP service' +ADDRESS_ORIGIN_IPv4_DHCP = 'Address is provided by a DHCPv4 service' +ADDRESS_ORIGIN_IPv4_IPv4LINKLOCAL = 'Address valid only for this segment' +ADDRESS_ORIGIN_IPv4_STATIC = 'A static address as configured by the user' + +# Address origin IPv6 constants + +ADDRESS_ORIGIN_IPv6_DHCPv6 = 'Address is provided by a DHCPv6 service' +ADDRESS_ORIGIN_IPv6_LINKLOCAL = 'Address valid only for this network segment' +ADDRESS_ORIGIN_IPv6_SLAAC = 'Stateless Address Auto Configuration service' +ADDRESS_ORIGIN_IPv6_STATIC = 'A static address as configured by the user' + +# Address state constants + +ADDRESS_STATE_DEPRECATED = 'Deprecated' +"""This address is currently within it's valid lifetime, but is now outside of +it's preferred lifetime as defined in RFC 4862.""" +ADDRESS_STATE_FAILED = 'Failed' +"""This address has failed Duplicate Address Detection testing as defined in +RFC 4862 section 5.4 and is not currently in use.""" +ADDRESS_STATE_PREFERRED = 'Preferred' +"""This address is currently within both it's valid and preferred lifetimes as +defined in RFC 4862.""" +ADDRESS_STATE_TENTATIVE = 'Tentative' +"""This address is currently undergoing Duplicate Address Detection testing as +defined in RFC 4862 section 5.4.""" + +# Entity role constants + +ENTITY_ROLE_BOTH = 'The entity is acting as both an initiator and a target' 
+ENTITY_ROLE_INITIATOR = 'The entity is acting as an initiator' +ENTITY_ROLE_TARGET = 'The entity is acting as a target' + +# Entity type constants + +ENTITY_TYPE_PCI_BRIDGE = 'PCI(e) Bridge' +ENTITY_TYPE_DISPLAY_CONTROLLER = 'Display Controller' +ENTITY_TYPE_DRIVE = 'Disk Drive' +ENTITY_TYPE_NETWORK_CONTROLLER = 'Network Controller' +ENTITY_TYPE_PROCESSOR = 'Processor Device' +ENTITY_TYPE_ROOT_COMPLEX = 'Root Complex' +ENTITY_TYPE_STORAGE_EXPANDER = 'Storage Expander' +ENTITY_TYPE_STORAGE_INITIATOR = 'Storage Initiator' +ENTITY_TYPE_VOLUME = 'Volume' diff --git a/sushy/resources/fabric/endpoint.py b/sushy/resources/fabric/endpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..180d3df9421cbecfb381f3f567c28008acdf6125 --- /dev/null +++ b/sushy/resources/fabric/endpoint.py @@ -0,0 +1,169 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Endpoint.v1_3_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.fabric import mappings as fab_maps +from sushy.resources import mappings as res_maps +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class IPv4AddressField(base.CompositeField): + + address = base.Field('Address') + """This is the IPv4 Address.""" + + gateway = base.Field('Gateway') + """This is the IPv4 gateway for this address.""" + + subnet_mask = base.Field('SubnetMask') + """This is the IPv4 Subnet mask.""" + + address_origin = base.MappedField('AddressOrigin', + fab_maps.ADDRESS_ORIGIN_IPv4_VALUE_MAP) + """This indicates how the address was determined.""" + + +class IPv6AddressField(base.CompositeField): + + address = base.Field('Address') + """This is the IPv6 Address.""" + + prefix_length = base.Field('PrefixLength', adapter=utils.int_or_none) + """This is the IPv6 Address Prefix Length.""" + + address_origin = base.MappedField('AddressOrigin', + fab_maps.ADDRESS_ORIGIN_IPv6_VALUE_MAP) + """This indicates how the address was determined.""" + + address_state = base.MappedField('AddressState', + fab_maps.ADDRESS_STATE_VALUE_MAP) + """The current state of this address as defined in RFC 4862.""" + + +class IPTransportDetailsListField(base.ListField): + """IP transport details + + This array contains details for each IP transport supported by this + endpoint. The array structure can be used to model multiple IP addresses + for this endpoint. 
+ """ + + port = base.Field('Port', adapter=utils.int_or_none) + """The UDP or TCP port number used by the Endpoint.""" + + transport_protocol = base.MappedField('TransportProtocol', + res_maps.PROTOCOL_TYPE_VALUE_MAP) + """The protocol used by the connection entity.""" + + ipv4_address = IPv4AddressField('IPv4Address') + """The IPv4 address object.""" + + ipv6_address = IPv6AddressField('IPv6Address') + """The IPv6 address object.""" + + +class PciIdField(base.CompositeField): + + device_id = base.Field('DeviceId') + """The Device ID of this PCIe function.""" + + subsystem_id = base.Field('SubsystemId') + """The Subsystem ID of this PCIefunction.""" + + subsystem_vendor_id = base.Field('SubsystemVendorId') + """The Subsystem Vendor ID of thisPCIe function.""" + + vendor_id = base.Field('VendorId') + """The Vendor ID of this PCIe function.""" + + +class ConnectedEntitiesListField(base.ListField): + """All the entities connected to this endpoint.""" + + pci_class_code = base.Field('PciClassCode') + """The Class Code, Subclass code, and Programming Interface code of + this PCIe function.""" + + pci_function_number = base.Field('PciFunctionNumber', + adapter=utils.int_or_none) + """The PCI ID of the connected entity.""" + + entity_pci_id = PciIdField('EntityPciId') + """The PCI ID of the connected entity.""" + + identifiers = common.IdentifiersListField('Identifiers', default=[]) + """Identifiers for the remote entity.""" + + entity_role = base.MappedField('EntityRole', + fab_maps.ENTITY_ROLE_VALUE_MAP) + """The role of the connected entity.""" + + entity_type = base.MappedField('EntityType', + fab_maps.ENTITY_TYPE_VALUE_MAP) + """The type of the connected entity.""" + + +class Endpoint(base.ResourceBase): + """This class represents a fabric endpoint. + + It represents the properties of an entity that sends or receives protocol + defined messages over a transport. 
+ """ + + identity = base.Field('Id', required=True) + """Identifier for the endpoint""" + + name = base.Field('Name', required=True) + """The endpoint name""" + + description = base.Field('Description') + """The endpoint description""" + + status = common.StatusField('Status') + """The endpoint status""" + + host_reservation_memory_bytes = base.Field('HostReservationMemoryBytes', + adapter=utils.int_or_none) + """The amount of memory in Bytes that the Host should allocate to connect + to this endpoint. + """ + + endpoint_protocol = base.MappedField('EndpointProtocol', + res_maps.PROTOCOL_TYPE_VALUE_MAP) + """The protocol supported by this endpoint.""" + + pci_id = PciIdField('PciId') + """The PCI ID of the endpoint.""" + + IP_transport_details = IPTransportDetailsListField('IPTransportDetails') + """This array contains details for each IP transport supported by this + endpoint. The array structure can be used to model multiple IP addresses + for this endpoint.""" + + connected_entities = ConnectedEntitiesListField('ConnectedEntities') + """All entities connected to this endpoint.""" + + +class EndpointCollection(base.ResourceCollectionBase): + """Represents a collection of endpoints associated with the fabric.""" + + @property + def _resource_type(self): + return Endpoint diff --git a/sushy/resources/fabric/fabric.py b/sushy/resources/fabric/fabric.py new file mode 100644 index 0000000000000000000000000000000000000000..cb91b1df0e91482ef0e18d0781ec444bebae111a --- /dev/null +++ b/sushy/resources/fabric/fabric.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# http://redfish.dmtf.org/schemas/v1/Fabric.v1_0_4.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.fabric import endpoint as fab_endpoint +from sushy.resources import mappings as res_maps +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class Fabric(base.ResourceBase): + """Fabric resource + + The Fabric represents a simple fabric consisting of one or more + switches, zero or more endpoints, and zero or more zones. + """ + + identity = base.Field('Id', required=True) + """Identifier for the fabric""" + + name = base.Field('Name', required=True) + """The fabric name""" + + description = base.Field('Description') + """The fabric description""" + + max_zones = base.Field('MaxZones', adapter=utils.int_or_none) + """The maximum number of zones the switch can currently configure""" + + status = common.StatusField('Status') + """The fabric status""" + + fabric_type = base.MappedField('FabricType', + res_maps.PROTOCOL_TYPE_VALUE_MAP) + """The protocol being sent over this fabric""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a Fabric + + :param connector: A Connector instance + :param identity: The identity of the Fabric resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(Fabric, self).__init__( + connector, identity, redfish_version, registries) + + @property + @utils.cache_it + def endpoints(self): + return fab_endpoint.EndpointCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Endpoints'), + self.redfish_version, self.registries) + + +class FabricCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Fabric + + def __init__(self, connector, path, redfish_version=None, registries=None): + """A class representing a FabricCollection + + :param connector: A Connector instance + :param path: The canonical path to the Fabric collection resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(FabricCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/fabric/mappings.py b/sushy/resources/fabric/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..c1a34dde1db766a38e977e1745412c744750ffff --- /dev/null +++ b/sushy/resources/fabric/mappings.py @@ -0,0 +1,64 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.fabric import constants as fab_cons +from sushy import utils + +ADDRESS_ORIGIN_IPv4_VALUE_MAP = { + 'BOOTP': fab_cons.ADDRESS_ORIGIN_IPv4_BOOTP, + 'DHCP': fab_cons.ADDRESS_ORIGIN_IPv4_DHCP, + 'IPv4LinkLocal': fab_cons.ADDRESS_ORIGIN_IPv4_IPv4LINKLOCAL, + 'Static': fab_cons.ADDRESS_ORIGIN_IPv4_STATIC, +} + + +ADDRESS_ORIGIN_IPv6_VALUE_MAP = { + 'DHCPv6': fab_cons.ADDRESS_ORIGIN_IPv6_DHCPv6, + 'LinkLocal': fab_cons.ADDRESS_ORIGIN_IPv6_LINKLOCAL, + 'SLAAC': fab_cons.ADDRESS_ORIGIN_IPv6_SLAAC, + 'Static': fab_cons.ADDRESS_ORIGIN_IPv6_STATIC, +} + + +ADDRESS_STATE_VALUE_MAP = { + 'Deprecated': fab_cons.ADDRESS_STATE_DEPRECATED, + 'Failed': fab_cons.ADDRESS_STATE_FAILED, + 'Preferred': fab_cons.ADDRESS_STATE_PREFERRED, + 'Tentative': fab_cons.ADDRESS_STATE_TENTATIVE, +} + + +ENTITY_ROLE_VALUE_MAP = { + 'Both': fab_cons.ENTITY_ROLE_BOTH, + 'Initiator': fab_cons.ENTITY_ROLE_INITIATOR, + 'Target': fab_cons.ENTITY_ROLE_TARGET, +} + +ENTITY_ROLE_VALUE_MAP_REV = utils.revert_dictionary(ENTITY_ROLE_VALUE_MAP) + + +ENTITY_TYPE_VALUE_MAP = { + 'Bridge': fab_cons.ENTITY_TYPE_PCI_BRIDGE, + 'DisplayController': fab_cons.ENTITY_TYPE_DISPLAY_CONTROLLER, + 'Drive': fab_cons.ENTITY_TYPE_DRIVE, + 'NetworkController': fab_cons.ENTITY_TYPE_NETWORK_CONTROLLER, + 'Processor': fab_cons.ENTITY_TYPE_PROCESSOR, + 'RootComplex': fab_cons.ENTITY_TYPE_ROOT_COMPLEX, + 'StorageExpander': fab_cons.ENTITY_TYPE_STORAGE_EXPANDER, + 'StorageInitiator': fab_cons.ENTITY_TYPE_STORAGE_INITIATOR, + 'Volume': fab_cons.ENTITY_TYPE_VOLUME, +} + +ENTITY_TYPE_VALUE_MAP_REV = utils.revert_dictionary(ENTITY_TYPE_VALUE_MAP) diff --git a/sushy/resources/manager/constants.py b/sushy/resources/manager/constants.py index a1b3a9f67ad27196b58900a45e7b050ccfb8b083..4459c65d02840b4ee5e6c8adcfdc0449dbb12e59 100644 --- a/sushy/resources/manager/constants.py +++ b/sushy/resources/manager/constants.py 
@@ -13,10 +13,15 @@ # Values comes from the Redfish System json-schema 1.0.0: # http://redfish.dmtf.org/schemas/v1/Manager.v1_0_0.json#/definitions/Manager # noqa +from sushy.resources import constants as res_cons + # Manager Reset action constants -RESET_MANAGER_GRACEFUL_RESTART = 'graceful restart' -RESET_MANAGER_FORCE_RESTART = 'force restart' +RESET_MANAGER_GRACEFUL_RESTART = res_cons.RESET_TYPE_GRACEFUL_RESTART +"""Perform a graceful shutdown followed by a restart of the system""" + +RESET_MANAGER_FORCE_RESTART = res_cons.RESET_TYPE_FORCE_RESTART +"""Perform an immediate (non-graceful) shutdown, followed by a restart""" # Manager Type constants @@ -76,3 +81,17 @@ COMMAND_SHELL_IPMI = 'command shell ipmi' COMMAND_SHELL_OEM = 'command shell oem' """Command Shell connection using an OEM-specific protocol""" + +# Supported Virtual Media Type constants + +VIRTUAL_MEDIA_CD = 'cd' +VIRTUAL_MEDIA_DVD = 'dvd' +VIRTUAL_MEDIA_FLOPPY = 'floppy' +VIRTUAL_MEDIA_USBSTICK = 'usb' + +# Connected Via constants + +CONNECTED_VIA_APPLET = 'applet' +CONNECTED_VIA_NOT_CONNECTED = 'not_connected' +CONNECTED_VIA_OEM = 'oem' +CONNECTED_VIA_URI = 'uri' diff --git a/sushy/resources/manager/manager.py b/sushy/resources/manager/manager.py index d027947ea014b3918ad2bf4ddae2428610b60bbd..e943591918837088a1e1d7f4d56cabdc3e61f573 100644 --- a/sushy/resources/manager/manager.py +++ b/sushy/resources/manager/manager.py @@ -10,12 +10,18 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Manager.v1_4_0.json + import logging from sushy import exceptions from sushy.resources import base from sushy.resources import common from sushy.resources.manager import mappings as mgr_maps +from sushy.resources.manager import virtual_media +from sushy import utils + LOG = logging.getLogger(__name__) @@ -35,6 +41,10 @@ class RemoteAccessField(base.CompositeField): class Manager(base.ResourceBase): + auto_dst_enabled = base.Field('AutoDSTEnabled') + """Indicates whether the manager is configured for automatic DST + adjustment""" + firmware_version = base.Field('FirmwareVersion') """The manager firmware version""" @@ -72,17 +82,21 @@ class Manager(base.ResourceBase): uuid = base.Field('UUID') """The manager UUID""" - _actions = ActionsField('Actions', required=True) + _actions = ActionsField('Actions') - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Manager :param connector: A Connector instance :param identity: The identity of the Manager resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(Manager, self).__init__(connector, identity, redfish_version) + super(Manager, self).__init__( + connector, identity, redfish_version, registries) def get_supported_graphical_console_types(self): """Get the supported values for Graphical Console connection types. 
@@ -178,6 +192,51 @@ class Manager(base.ResourceBase): self._conn.post(target_uri, data={'ResetType': value}) LOG.info('The Manager %s is being reset', self.identity) + @property + @utils.cache_it + def virtual_media(self): + return virtual_media.VirtualMediaCollection( + self._conn, utils.get_sub_resource_path_by(self, 'VirtualMedia'), + self.redfish_version, self.registries) + + @property + @utils.cache_it + def systems(self): + """A list of systems managed by this manager. + + Returns a list of `System` objects representing systems being + managed by this manager. + + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `System` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagerForServers"], is_collection=True) + + from sushy.resources.system import system + return [system.System(self._conn, path, + self.redfish_version, self.registries) + for path in paths] + + @property + @utils.cache_it + def chassis(self): + """A list of chassis managed by this manager. + + Returns a list of `Chassis` objects representing the chassis + or cabinets managed by this manager. + + :raises: MissingAttributeError if '@odata.id' field is missing. 
+ :returns: A list of `Chassis` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagerForChassis"], is_collection=True) + + from sushy.resources.chassis import chassis + return [chassis.Chassis(self._conn, path, + self.redfish_version, self.registries) + for path in paths] + class ManagerCollection(base.ResourceCollectionBase): @@ -185,13 +244,15 @@ class ManagerCollection(base.ResourceCollectionBase): def _resource_type(self): return Manager - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ManagerCollection :param connector: A Connector instance :param path: The canonical path to the Manager collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(ManagerCollection, self).__init__(connector, path, - redfish_version) + super(ManagerCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/manager/mappings.py b/sushy/resources/manager/mappings.py index 451dc6e198f87cca4bfd801b40a1a96c68e19688..a544115d58a50ae88b6151b9841aa26a7e9e8dc7 100644 --- a/sushy/resources/manager/mappings.py +++ b/sushy/resources/manager/mappings.py @@ -59,3 +59,17 @@ COMMAND_SHELL_VALUE_MAP = { COMMAND_SHELL_VALUE_MAP_REV = ( utils.revert_dictionary(COMMAND_SHELL_VALUE_MAP)) + +MEDIA_TYPE_VALUE_MAP = { + 'CD': mgr_cons.VIRTUAL_MEDIA_CD, + 'DVD': mgr_cons.VIRTUAL_MEDIA_DVD, + 'Floppy': mgr_cons.VIRTUAL_MEDIA_FLOPPY, + 'USBStick': mgr_cons.VIRTUAL_MEDIA_USBSTICK +} + +CONNECTED_VIA_VALUE_MAP = { + "Applet": mgr_cons.CONNECTED_VIA_APPLET, + "NotConnected": mgr_cons.CONNECTED_VIA_NOT_CONNECTED, + "Oem": mgr_cons.CONNECTED_VIA_OEM, + "URI": mgr_cons.CONNECTED_VIA_URI +} diff --git 
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# This is referred from Redfish standard schema.
# https://redfish.dmtf.org/schemas/VirtualMedia.v1_2_0.json

from http import client as http_client

from sushy import exceptions
from sushy.resources import base
from sushy.resources import common
from sushy.resources.manager import mappings as mgr_maps


class ActionsField(base.CompositeField):
    """Actions advertised by a VirtualMedia resource."""

    insert_media = common.ActionField("#VirtualMedia.InsertMedia")
    eject_media = common.ActionField("#VirtualMedia.EjectMedia")


class VirtualMedia(base.ResourceBase):
    """A virtual media device exposed by a Manager."""

    identity = base.Field('Id', required=True)
    """Virtual Media resource identity string"""

    name = base.Field('Name', required=True)
    """The name of resource"""

    image = base.Field('Image')
    """A URI providing the location of the selected image"""

    image_name = base.Field('ImageName')
    """The image name"""

    inserted = base.Field('Inserted')
    """Indicates if virtual media is inserted in the virtual device"""

    write_protected = base.Field('WriteProtected')
    """Indicates the media is write protected"""

    # Unknown media types reported by the BMC are silently filtered out so
    # new schema values do not break parsing.
    # NOTE(review): default=[] is a shared mutable default — assumes
    # base.Field never mutates it in place; confirm against base.Field.
    media_types = base.Field(
        'MediaTypes', adapter=(
            lambda x: [mgr_maps.MEDIA_TYPE_VALUE_MAP[v] for v in x
                       if v in mgr_maps.MEDIA_TYPE_VALUE_MAP]),
        default=[])
    """List of supported media types as virtual media"""

    connected_via = base.MappedField('ConnectedVia',
                                     mgr_maps.CONNECTED_VIA_VALUE_MAP)
    """Current virtual media connection methods

    Applet: Connected to a client application
    NotConnected: No current connection
    Oem: Connected via an OEM-defined method
    URI: Connected to a URI location
    """

    _actions = ActionsField('Actions')
    """Insert/eject action for virtual media"""

    def _get_insert_media_uri(self):
        """Resolve URI and HTTP method for inserting media.

        Prefers the explicit #VirtualMedia.InsertMedia action; when it is
        absent, falls back to PATCHing the resource itself if the service
        allows PATCH on it.

        :returns: tuple of (target URI, bool whether PATCH must be used)
        :raises: MissingActionError if neither the action nor PATCH is
            available
        """
        insert_media = self._actions.insert_media if self._actions else None
        use_patch = False
        if not insert_media:
            insert_uri = self.path
            use_patch = self._allow_patch()
            if not use_patch:
                raise exceptions.MissingActionError(
                    action='#VirtualMedia.InsertMedia', resource=self._path)
        else:
            insert_uri = insert_media.target_uri
        return insert_uri, use_patch

    def _get_eject_media_uri(self):
        """Resolve URI and HTTP method for ejecting media.

        Mirror image of :meth:`_get_insert_media_uri` for the
        #VirtualMedia.EjectMedia action.

        :returns: tuple of (target URI, bool whether PATCH must be used)
        :raises: MissingActionError if neither the action nor PATCH is
            available
        """
        eject_media = self._actions.eject_media if self._actions else None
        use_patch = False
        if not eject_media:
            eject_uri = self.path
            use_patch = self._allow_patch()
            if not use_patch:
                raise exceptions.MissingActionError(
                    action='#VirtualMedia.EjectMedia', resource=self._path)
        else:
            eject_uri = eject_media.target_uri
        return eject_uri, use_patch

    def insert_media(self, image, inserted=True, write_protected=False):
        """Attach remote media to virtual media

        :param image: a URI providing the location of the selected image
        :param inserted: specify if the image is to be treated as inserted upon
            completion of the action.
        :param write_protected: indicates the media is write protected
        """
        target_uri, use_patch = self._get_insert_media_uri()
        payload = {"Image": image, "Inserted": inserted,
                   "WriteProtected": write_protected}
        if use_patch:
            headers = None
            etag = self._get_etag()
            if etag is not None:
                # Guard against concurrent modification of the resource.
                headers = {"If-Match": etag}
            self._conn.patch(target_uri, data=payload, headers=headers)
        else:
            self._conn.post(target_uri, data=payload)
        # Cached attributes (inserted, image_name, ...) are now stale.
        self.invalidate()

    def eject_media(self):
        """Detach remote media from virtual media

        After ejecting media inserted will be False and image_name will be
        empty.
        """
        try:
            target_uri, use_patch = self._get_eject_media_uri()
            if use_patch:
                payload = {
                    "Image": None,
                    "Inserted": False
                }
                headers = None
                etag = self._get_etag()
                if etag is not None:
                    headers = {"If-Match": etag}
                self._conn.patch(target_uri, data=payload, headers=headers)
            else:
                self._conn.post(target_uri)
        except exceptions.HTTPError as response:
            # Some vendors like HPE iLO has this kind of implementation.
            # It needs to pass an empty dict.
            if response.status_code in (
                    http_client.UNSUPPORTED_MEDIA_TYPE,
                    http_client.BAD_REQUEST):
                self._conn.post(target_uri, data={})
            else:
                # Fix: previously any other HTTP error (e.g. 500) was
                # silently swallowed here, hiding real eject failures from
                # the caller. Re-raise everything we do not retry.
                raise
        self.invalidate()


class VirtualMediaCollection(base.ResourceCollectionBase):
    """A collection of virtual media attached to a Manager"""

    @property
    def _resource_type(self):
        return VirtualMedia
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from sushy.resources import constants as res_cons
from sushy import utils


# Redfish 'Status.State' wire values -> sushy state constants
STATE_VALUE_MAP = {
    'Enabled': res_cons.STATE_ENABLED,
    'Disabled': res_cons.STATE_DISABLED,
    'Absent': res_cons.STATE_ABSENT,
}

STATE_VALUE_MAP_REV = (
    utils.revert_dictionary(STATE_VALUE_MAP))

# Redfish 'Status.Health' wire values -> sushy health constants
HEALTH_VALUE_MAP = {
    'OK': res_cons.HEALTH_OK,
    'Warning': res_cons.HEALTH_WARNING,
    'Critical': res_cons.HEALTH_CRITICAL
}

HEALTH_VALUE_MAP_REV = (
    utils.revert_dictionary(HEALTH_VALUE_MAP))

# Message registry 'ParamTypes' values (lower-cased before lookup)
PARAMTYPE_VALUE_MAP = {
    'string': res_cons.PARAMTYPE_STRING,
    'number': res_cons.PARAMTYPE_NUMBER
}

# Message 'Severity' wire values -> sushy severity constants
SEVERITY_VALUE_MAP = {
    'OK': res_cons.SEVERITY_OK,
    'Warning': res_cons.SEVERITY_WARNING,
    'Critical': res_cons.SEVERITY_CRITICAL
}

# 'IndicatorLED' wire values -> sushy LED constants
INDICATOR_LED_VALUE_MAP = {
    'Lit': res_cons.INDICATOR_LED_LIT,
    'Blinking': res_cons.INDICATOR_LED_BLINKING,
    'Off': res_cons.INDICATOR_LED_OFF,
    'Unknown': res_cons.INDICATOR_LED_UNKNOWN,
}

INDICATOR_LED_VALUE_MAP_REV = utils.revert_dictionary(INDICATOR_LED_VALUE_MAP)

# 'PowerState' wire values -> sushy power state constants
POWER_STATE_VALUE_MAP = {
    'On': res_cons.POWER_STATE_ON,
    'Off': res_cons.POWER_STATE_OFF,
    'PoweringOn': res_cons.POWER_STATE_POWERING_ON,
    'PoweringOff': res_cons.POWER_STATE_POWERING_OFF,
}

POWER_STATE_MAP_REV = utils.revert_dictionary(POWER_STATE_VALUE_MAP)

# 'ResetType' action parameter values -> sushy reset constants
RESET_TYPE_VALUE_MAP = {
    'On': res_cons.RESET_TYPE_ON,
    'ForceOff': res_cons.RESET_TYPE_FORCE_OFF,
    'GracefulShutdown': res_cons.RESET_TYPE_GRACEFUL_SHUTDOWN,
    'GracefulRestart': res_cons.RESET_TYPE_GRACEFUL_RESTART,
    'ForceRestart': res_cons.RESET_TYPE_FORCE_RESTART,
    'Nmi': res_cons.RESET_TYPE_NMI,
    'ForceOn': res_cons.RESET_TYPE_FORCE_ON,
    'PushPowerButton': res_cons.RESET_TYPE_PUSH_POWER_BUTTON,
    'PowerCycle': res_cons.RESET_TYPE_POWER_CYCLE,
}

RESET_TYPE_VALUE_MAP_REV = utils.revert_dictionary(RESET_TYPE_VALUE_MAP)

# 'Protocol' wire values (storage/network) -> sushy protocol constants
PROTOCOL_TYPE_VALUE_MAP = {
    'AHCI': res_cons.PROTOCOL_TYPE_AHCI,
    'FC': res_cons.PROTOCOL_TYPE_FC,
    'FCP': res_cons.PROTOCOL_TYPE_FCP,
    'FCoE': res_cons.PROTOCOL_TYPE_FCoE,
    'FICON': res_cons.PROTOCOL_TYPE_FICON,
    'FTP': res_cons.PROTOCOL_TYPE_FTP,
    'HTTP': res_cons.PROTOCOL_TYPE_HTTP,
    'HTTPS': res_cons.PROTOCOL_TYPE_HTTPS,
    'I2C': res_cons.PROTOCOL_TYPE_I2C,
    'NFSv3': res_cons.PROTOCOL_TYPE_NFSv3,
    'NFSv4': res_cons.PROTOCOL_TYPE_NFSv4,
    'NVMe': res_cons.PROTOCOL_TYPE_NVMe,
    'NVMeOverFabrics': res_cons.PROTOCOL_TYPE_NVMeOverFabrics,
    'OEM': res_cons.PROTOCOL_TYPE_OEM,
    'PCIe': res_cons.PROTOCOL_TYPE_PCIe,
    'RoCE': res_cons.PROTOCOL_TYPE_RoCE,
    'RoCEv2': res_cons.PROTOCOL_TYPE_RoCEv2,
    'SAS': res_cons.PROTOCOL_TYPE_SAS,
    'SATA': res_cons.PROTOCOL_TYPE_SATA,
    'SFTP': res_cons.PROTOCOL_TYPE_SFTP,
    'SMB': res_cons.PROTOCOL_TYPE_SMB,
    'UHCI': res_cons.PROTOCOL_TYPE_UHCI,
    'USB': res_cons.PROTOCOL_TYPE_USB,
    'iSCSI': res_cons.PROTOCOL_TYPE_iSCSI,
    'iWARP': res_cons.PROTOCOL_TYPE_iWARP,
}

# Identifier 'DurableNameFormat' wire values -> sushy constants
DUR_NAME_FORMAT_VALUE_MAP = {
    'EUI': res_cons.DURABLE_NAME_FORMAT_EUI,
    'FC_WWN': res_cons.DURABLE_NAME_FORMAT_FC_WWN,
    'NAA': res_cons.DURABLE_NAME_FORMAT_NAA,
    'NQN': res_cons.DURABLE_NAME_FORMAT_NQN,
    'NSID': res_cons.DURABLE_NAME_FORMAT_NSID,
    'UUID': res_cons.DURABLE_NAME_FORMAT_UUID,
    'iQN': res_cons.DURABLE_NAME_FORMAT_iQN,
}

# Settings 'ApplyTime' wire values -> sushy constants
APPLY_TIME_VALUE_MAP = {
    'Immediate': res_cons.APPLY_TIME_IMMEDIATE,
    'OnReset': res_cons.APPLY_TIME_ON_RESET,
    'AtMaintenanceWindowStart':
        res_cons.APPLY_TIME_MAINT_START,
    'InMaintenanceWindowOnReset':
        res_cons.APPLY_TIME_MAINT_RESET,
}

APPLY_TIME_VALUE_MAP_REV = utils.revert_dictionary(APPLY_TIME_VALUE_MAP)
a/sushy/resources/oem/__init__.py b/sushy/resources/oem/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b852678dfa6c2f5363d6c17f5f3b742efa957523 --- /dev/null +++ b/sushy/resources/oem/__init__.py @@ -0,0 +1,15 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from sushy.resources.oem.common import get_resource_extension_by_vendor + +__all__ = ('get_resource_extension_by_vendor',) diff --git a/sushy/resources/oem/base.py b/sushy/resources/oem/base.py new file mode 100644 index 0000000000000000000000000000000000000000..93032fe78dee9f8adfc71b57d759b1aac37a09d3 --- /dev/null +++ b/sushy/resources/oem/base.py @@ -0,0 +1,73 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import logging

from sushy.resources import base


LOG = logging.getLogger(__name__)


class OEMResourceBase(base.ResourceBase):
    """Base class for OEM vendor extensions of a standard resource.

    An OEM extension shares the JSON document of its parent resource but
    parses only the vendor-specific 'Oem' subtree (plus OEM actions).
    """

    def __init__(self,
                 connector,
                 path='',
                 redfish_version=None,
                 registries=None,
                 reader=None):
        """Class representing an OEM vendor extension

        :param connector: A Connector instance
        :param path: sub-URI path to the resource.
        :param redfish_version: The version of Redfish. Used to construct
            the object according to schema of the given version.
        :param registries: Dict of Redfish Message Registry objects to be
            used in any resource that needs registries to parse messages
        """
        # Both are populated later by set_parent_resource(); until then the
        # extension cannot locate its Oem subtree.
        self._parent_resource = None
        self._vendor_id = None

        super(OEMResourceBase, self).__init__(
            connector, path, redfish_version, registries, reader)

    def set_parent_resource(self, parent_resource, vendor_id):
        # Bind this extension to the resource it extends and the vendor key
        # under which its data lives in the 'Oem' object.
        self._parent_resource = parent_resource
        self._vendor_id = vendor_id
        # NOTE(etingof): this is required to pull OEM subtree
        self.invalidate(force_refresh=True)
        return self

    def _parse_attributes(self, json_doc):
        """Parse the attributes of a resource.

        Parsed JSON fields are set to `self` as declared in the class.

        :param json_doc: parsed JSON document in form of Python types
        """
        # Extract only this vendor's portion of the Oem subtree; missing
        # keys yield an empty dict rather than an error.
        oem_json = json_doc.get(
            'Oem', {}).get(self._vendor_id, {})

        # NOTE(etingof): temporary copy Actions into Oem subtree for parsing
        # all fields at once

        # Copy first so the shared parent document is never mutated.
        oem_json = oem_json.copy()

        oem_actions_json = {
            'Actions': json_doc.get(
                'Actions', {}).get('Oem', {})
        }

        oem_json.update(oem_actions_json)

        super(OEMResourceBase, self)._parse_attributes(oem_json)
+ """ + # namespace format is: + # ``sushy.resources..oems`` + resource_name = namespace.split('.')[-2] + + extension_manager = ( + stevedore.ExtensionManager(namespace=namespace, + propagate_map_exceptions=True, + on_load_failure_callback=_raise)) + + LOG.debug('Resource OEM extensions for "%(resource)s" under namespace ' + '"%(namespace)s":', + {'resource': resource_name, 'namespace': namespace}) + for extension in extension_manager: + LOG.debug('Found vendor: %(name)s target: %(target)s', + {'name': extension.name, + 'target': extension.entry_point_target}) + + if not extension_manager.names(): + m = (('No extensions found for "%(resource)s" under namespace ' + '"%(namespace)s"') % + {'resource': resource_name, + 'namespace': namespace}) + LOG.error(m) + raise exceptions.ExtensionError(error=m) + + return extension_manager + + +@utils.synchronized +def _get_extension_manager_of_resource(resource_name): + """Get the resource specific ExtensionManager instance. + + :param resource_name: The name of the resource e.g. + 'system' / 'ethernet_interface' / 'update_service' + :returns: the ExtensionManager instance + :raises ExtensionError: on resource OEM extension load error. + """ + global _global_extn_mgrs_by_resource + + if resource_name not in _global_extn_mgrs_by_resource: + resource_namespace = 'sushy.resources.' + resource_name + '.oems' + _global_extn_mgrs_by_resource[resource_name] = ( + _create_extension_manager(resource_namespace) + ) + return _global_extn_mgrs_by_resource[resource_name] + + +def get_resource_extension_by_vendor( + resource_name, vendor, resource): + """Helper method to get Resource specific OEM extension object for vendor + + :param resource_name: The underscore joined name of the resource e.g. + 'system' / 'ethernet_interface' / 'update_service' + :param vendor: This is the OEM vendor string which is the vendor-specific + extensibility identifier. Examples are: 'Contoso', 'Hpe'. 
As a matter + of fact the lowercase of this string will be the plugin entry point + name. + :param resource: The Sushy resource instance + :returns: The object returned by ``plugin(*args, **kwds)`` of extension. + :raises OEMExtensionNotFoundError: if no valid resource OEM extension + found. + """ + if resource_name in _global_extn_mgrs_by_resource: + resource_extn_mgr = _global_extn_mgrs_by_resource[resource_name] + else: + resource_extn_mgr = _get_extension_manager_of_resource(resource_name) + + try: + resource_vendor_extn = resource_extn_mgr[vendor.lower()] + except KeyError: + raise exceptions.OEMExtensionNotFoundError( + resource=resource_name, name=vendor.lower()) + + oem_resource = resource_vendor_extn.plugin() + return resource.clone_resource( + oem_resource).set_parent_resource(resource, vendor) diff --git a/sushy/resources/oem/fake.py b/sushy/resources/oem/fake.py new file mode 100644 index 0000000000000000000000000000000000000000..60fd610b92a0c30525c10c4a30f6245b3f816aed --- /dev/null +++ b/sushy/resources/oem/fake.py @@ -0,0 +1,42 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import logging

from sushy.resources import base
from sushy.resources import common
from sushy.resources.oem import base as oem_base

LOG = logging.getLogger(__name__)


class ProductionLocationField(base.CompositeField):
    # Composite 'ProductionLocation' object from the Contoso sample payload.
    facility_name = base.Field('FacilityName')
    country = base.Field('Country')


class ContosoActionsField(base.CompositeField):
    # OEM action '#Contoso.Reset' as exposed under Actions/Oem.
    reset = common.ResetActionField('#Contoso.Reset')


class FakeOEMSystemExtension(oem_base.OEMResourceBase):
    """Fake 'Contoso' System OEM extension used for testing the OEM
    plugin machinery."""

    data_type = base.Field('@odata.type')
    production_location = ProductionLocationField('ProductionLocation')
    _actions = ContosoActionsField('Actions')

    def get_reset_system_path(self):
        # Target URI of the OEM-specific reset action.
        return self._actions.reset.target_uri


def get_extension(*args, **kwargs):
    # Entry-point callable for stevedore; returns the extension class
    # (instantiated later by the OEM loader), ignoring all arguments.
    return FakeOEMSystemExtension
# https://redfish.dmtf.org/schemas/v1/MessageRegistry.v1_1_1.json

import logging

from sushy.resources import base
from sushy.resources import constants as res_cons
from sushy.resources import mappings as res_maps

LOG = logging.getLogger(__name__)


class MessageDictionaryField(base.DictionaryField):
    """A single message definition within a message registry."""

    description = base.Field('Description', required=True, default='')
    """Indicates how and when the message is returned by the Redfish service"""

    message = base.Field('Message', required=True)
    """Template text of the message

    Template can include placeholders for message arguments in form
    %<integer> where <integer> denotes a position passed from MessageArgs.
    """

    number_of_args = base.Field('NumberOfArgs', required=True)
    """Number of arguments to be expected to be passed in as MessageArgs
    for this message
    """

    param_types = base.Field('ParamTypes',
                             adapter=lambda x:
                             [res_maps.PARAMTYPE_VALUE_MAP[v.lower()]
                              for v in x])
    """Mapped MessageArg types, in order, for the message"""

    resolution = base.Field('Resolution', required=True)
    """Suggestions on how to resolve the situation that caused the error"""

    severity = base.MappedField('Severity',
                                res_maps.SEVERITY_VALUE_MAP,
                                required=True,
                                default=res_cons.SEVERITY_WARNING)
    """Mapped severity of the message"""


class MessageRegistry(base.ResourceBase):
    """A Redfish message registry resource."""

    identity = base.Field('Id', required=True)
    """The Message registry identity string"""

    name = base.Field('Name', required=True)
    """The name of the message registry"""

    description = base.Field('Description')
    """Human-readable description of the message registry"""

    language = base.Field('Language', required=True)
    """RFC 5646 compliant language code for the registry"""

    owning_entity = base.Field('OwningEntity', required=True)
    """Organization or company that publishes this registry"""

    registry_prefix = base.Field('RegistryPrefix', required=True)
    """Prefix used in messageIDs which uniquely identifies all of
    the messages in this registry as belonging to this registry
    """

    registry_version = base.Field('RegistryVersion', required=True)
    """Message registry version which is used in the middle portion
    of a messageID
    """

    messages = MessageDictionaryField('Messages')
    """List of messages in this registry"""


def parse_message(message_registries, message_field):
    """Using message registries parse the message and substitute parameters

    :param message_registries: dict of Message Registries
    :param message_field: settings.MessageListField to parse

    :returns: parsed settings.MessageListField with missing attributes filled
    """

    reg_msg = None
    if '.' in message_field.message_id:
        # MessageId of form '<RegistryPrefix>.<Major>.<Minor>.<MessageKey>'
        registry, msg_key = message_field.message_id.rsplit('.', 1)

        if (registry in message_registries and msg_key
                in message_registries[registry].messages):
            reg_msg = message_registries[registry].messages[msg_key]
    else:
        # Some firmware only reports the MessageKey and no RegistryName.
        # Fall back to the MessageRegistryFile with Id of Messages next, and
        # BaseMessages as a last resort
        registry = 'unknown'
        msg_key = message_field.message_id

        mrf_ids = ['Messages', 'BaseMessages']
        for mrf_id in mrf_ids:
            if (mrf_id in message_registries and msg_key in
                    message_registries[mrf_id].messages):
                reg_msg = message_registries[mrf_id].messages[msg_key]
                break

    if not reg_msg:
        LOG.warning(
            'Unable to find message for registry %(registry)s, '
            'message ID %(msg_key)s', {
                'registry': registry,
                'msg_key': msg_key})
        if message_field.message is None:
            message_field.message = 'unknown'
        return message_field

    msg = reg_msg.message
    # Fix: guard against a missing MessageArgs field (None) which would
    # crash len() below.
    msg_args = message_field.message_args or []
    # Fix: substitute placeholders in descending order so that '%1' does
    # not clobber the prefix of '%10' when NumberOfArgs >= 10.
    for i in range(reg_msg.number_of_args, 0, -1):
        if i <= len(msg_args):
            msg = msg.replace('%%%i' % i,
                              str(msg_args[i - 1]))
        else:
            # Fewer arguments supplied than the template expects.
            msg = msg.replace('%%%i' % i, 'unknown')

    message_field.message = msg
    # Backfill severity/resolution from the registry only when the event
    # itself did not provide them.
    if not message_field.severity:
        message_field.severity = reg_msg.severity
    if not message_field.resolution:
        message_field.resolution = reg_msg.resolution

    return message_field
# This is referred from Redfish standard schema.
# https://redfish.dmtf.org/schemas/v1/MessageRegistryFileCollection.json
# https://redfish.dmtf.org/schemas/v1/MessageRegistryFile.v1_1_0.json

import logging

from sushy.resources import base
from sushy.resources.registry import message_registry

LOG = logging.getLogger(__name__)


class LocationListField(base.ListField):
    """Location for each registry file of languages supported

    There are 3 options where the file can be hosted:

    * locally as a single file,
    * locally as a part of archive (zip or other),
    * publicly on the Internet.
    """

    language = base.Field('Language')
    """File's RFC5646 language code or the string 'default'"""

    uri = base.Field('Uri')
    """Location URI for co-located registry file with the Redfish service"""

    archive_uri = base.Field('ArchiveUri')
    """Location URI for archive file"""

    archive_file = base.Field('ArchiveFile')
    """File name for registry if using archive_uri"""

    publication_uri = base.Field('PublicationUri')
    """Location URI of publicly available schema"""


class RegistryType(base.ResourceBase):
    # Minimal resource used only to peek at '@odata.type' before deciding
    # whether a location holds a MessageRegistry document.
    _odata_type = base.Field('@odata.type', required=True)


class MessageRegistryFile(base.ResourceBase):

    identity = base.Field('Id', required=True)
    """Identity of Message Registry file resource"""

    description = base.Field('Description')
    """Description of Message Registry file resource"""

    name = base.Field('Name', required=True)
    """Name of Message Registry file resource"""

    languages = base.Field('Languages', required=True)
    """List of RFC 5646 language codes supported by this resource"""

    registry = base.Field('Registry', required=True, default='UNKNOWN.0.0')
    """Prefix for MessageId used for messages from this resource

    This attribute is in form Registry_name.Major_version.Minor_version
    """

    location = LocationListField('Location', required=True)
    """List of locations of Registry files for each supported language"""

    def get_message_registry(self, language, public_connector):
        """Load message registry file depending on its source

        Will try to find `MessageRegistry` based on `odata.type` property and
        provided language. If desired language is not found, will pick a
        registry that has 'default' language.

        :param language: RFC 5646 language code for registry files
        :param public_connector: connector to use when downloading registry
            from the Internet
        :returns: a MessageRegistry instance, or None if none could be
            loaded
        """

        # NOTE (etingof): as per RFC5646, languages are case-insensitive
        language = language.lower()

        # Candidate locations: exact language match first, then any
        # 'default' language entries as a fallback.
        locations = [
            l for l in self.location if l.language.lower() == language]

        locations += [
            l for l in self.location if l.language.lower() == 'default']

        for location in locations:
            # Pick connector and reader depending on where the file lives:
            # co-located file, archive member, or public Internet URI.
            if location.uri:
                args = self._conn,
                kwargs = {
                    'path': location.uri,
                    'reader': None,
                    'redfish_version': self.redfish_version
                }

            elif location.archive_uri:
                args = self._conn,
                kwargs = {
                    'path': location.archive_uri,
                    'reader': base.JsonArchiveReader(location.archive_file),
                    'redfish_version': self.redfish_version
                }

            elif location.publication_uri:
                # Publicly hosted registry: fetched via the unauthenticated
                # public connector, not the BMC session.
                args = public_connector,
                kwargs = {
                    'path': location.publication_uri,
                    'reader': base.JsonPublicFileReader(),
                    'redfish_version': self.redfish_version
                }

            else:
                LOG.warning('Incomplete location for language %(language)s',
                            {'language': language})
                continue

            try:
                # First fetch just enough to inspect '@odata.type'.
                registry = RegistryType(*args, **kwargs)

            except Exception as exc:
                LOG.warning(
                    'Cannot load message registry type from location '
                    '%(location)s: %(error)s', {
                        'location': kwargs['path'],
                        'error': exc})
                continue

            if registry._odata_type.endswith('MessageRegistry'):
                try:
                    return message_registry.MessageRegistry(*args, **kwargs)

                except Exception as exc:
                    LOG.warning(
                        'Cannot load message registry from location '
                        '%(location)s: %(error)s', {
                            'location': kwargs['path'],
                            'error': exc})
                    continue

            # NOTE(review): this `return` gives up on the FIRST location
            # whose @odata.type is not a MessageRegistry instead of trying
            # the remaining candidate locations -- looks intentional for
            # "wrong flavor of registry", but confirm against callers.
            LOG.debug('Ignoring unsupported flavor of registry %(registry)s',
                      {'registry': registry._odata_type})
            return

        LOG.warning('No message registry found for %(language)s or '
                    'default', {'language': language})


class MessageRegistryFileCollection(base.ResourceCollectionBase):
    """Collection of Message Registry Files"""

    @property
    def _resource_type(self):
        return MessageRegistryFile
+# https://redfish.dmtf.org/schemas/Session.v1_1_0.json + +import logging + +from sushy.resources import base + +LOG = logging.getLogger(__name__) + + +class Session(base.ResourceBase): + + description = base.Field('Description') + """The session service description""" + + identity = base.Field('Id', required=True) + """The session service identify string""" + + name = base.Field('Name', required=True) + """The session service name""" + + username = base.Field('UserName') + """The UserName for the account for this session.""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a Session + + :param connector: A Connector instance + :param identity: The identity of the Session resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(Session, self).__init__( + connector, identity, redfish_version, registries) + + def delete(self): + """Method for deleting a Session. + + :raises: ServerSideError + """ + self._conn.delete(self.path) + + +class SessionCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The session collection name""" + + description = base.Field('Description') + """The session collection description""" + + @property + def _resource_type(self): + return Session + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a SessionCollection + + :param connector: A Connector instance + :param identity: The identity of the Session resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(SessionCollection, self).__init__( + connector, identity, redfish_version, registries) diff --git a/sushy/resources/sessionservice/sessionservice.py b/sushy/resources/sessionservice/sessionservice.py new file mode 100644 index 0000000000000000000000000000000000000000..24f52764522298afe37517ef090a4b772740d04b --- /dev/null +++ b/sushy/resources/sessionservice/sessionservice.py @@ -0,0 +1,127 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/SessionService.v1_1_3.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources.sessionservice import session +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class SessionService(base.ResourceBase): + + description = base.Field('Description') + """The session service description""" + + identity = base.Field('Id', required=True) + """The session service identify string""" + + name = base.Field('Name', required=True) + """The session service name""" + + service_enabled = base.Field('ServiceEnabled') + """Tells us if session service is enabled""" + + session_timeout = base.Field('SessionTimeout') + """The session service timeout""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a SessionService + + :param connector: A Connector instance + :param identity: The identity of the SessionService resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + try: + super(SessionService, self).__init__( + connector, identity, redfish_version, registries) + + except exceptions.AccessError as ae: + LOG.debug('Received access error "%s" when trying to refresh the ' + 'SessionService. 
If this happens before ' + 'authentication, we\'ll have to guess the Sessions URL.', + ae) + + def _get_sessions_collection_path(self): + """Helper function to find the SessionCollections path""" + sessions_col = self.json.get('Sessions') + if not sessions_col: + raise exceptions.MissingAttributeError( + attribute='Sessions', resource=self._path) + return sessions_col.get('@odata.id') + + @property + @utils.cache_it + def sessions(self): + """Property to provide reference to the `SessionCollection` instance + + It is calculated once when the first time it is queried. On refresh, + this property gets reset. + """ + return session.SessionCollection( + self._conn, self._get_sessions_collection_path(), + self.redfish_version, self.registries) + + def close_session(self, session_uri): + """This function is for closing a session based on its id. + + :raises: ServerSideError + """ + self._conn.delete(session_uri) + + def create_session(self, username, password, target_uri=None): + """This function will try to create a session. 
+ + :param username: the username of the user requesting a new session + :param password: the password associated to the user requesting + a new session + :param target_uri: the "Sessions" uri, usually in the form: + '/redfish/v1/SessionService/Sessions' + :returns: A session key and uri in the form of a tuple + :raises: MissingXAuthToken + :raises: ConnectionError + :raises: AccessError + :raises: HTTPError + """ + if not target_uri: + try: + target_uri = self._get_sessions_collection_path() + except Exception: + # Defaulting to /Sessions + target_uri = self.path + '/Sessions' + + data = {'UserName': username, 'Password': password} + LOG.debug("Requesting new session from %s.", target_uri) + rsp = self._conn.post(target_uri, data=data) + session_key = rsp.headers.get('X-Auth-Token') + if session_key is None: + raise exceptions.MissingXAuthToken( + method='POST', url=target_uri, response=rsp) + + session_uri = rsp.headers.get('Location') + if session_uri is None: + LOG.warning("Received X-Auth-Token but NO session uri.") + + return session_key, session_uri diff --git a/sushy/resources/settings.py b/sushy/resources/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..3b18f6e170b9dc3605ec5b35a02a954ce7cde4ca --- /dev/null +++ b/sushy/resources/settings.py @@ -0,0 +1,203 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Settings.v1_2_0.json + +import logging + +from dateutil import parser + +from sushy.resources import base +from sushy.resources import common +from sushy.resources import constants as res_cons +from sushy.resources import mappings as res_maps +from sushy.resources.registry import message_registry + +# Settings update statuses + +UPDATE_UNKNOWN = 0 +"""Update status unknown""" + +UPDATE_SUCCESS = 1 +"""Update was successful""" + +UPDATE_FAILURE = 2 +"""Update encountered errors""" + +UPDATE_PENDING = 3 +"""Update waiting for being applied""" + +NO_UPDATES = 4 +"""No updates made""" + + +class SettingsUpdate(object): + """Contains Settings update status and details of the update""" + + def __init__(self, status, messages): + self._status = status + self._messages = messages + + @property + def status(self): + """The status of the update""" + return self._status + + @property + def messages(self): + """List of :class:`.MessageListField` with messages from the update""" + return self._messages + + +LOG = logging.getLogger(__name__) + + +class MaintenanceWindowField(base.CompositeField): + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', + required=True) + """The expiry time of maintenance window in seconds""" + + maintenance_window_start_time = base.Field( + 'MaintenanceWindowStartTime', + required=True, + adapter=parser.parse) + """The start time of a maintenance window""" + + +class SettingsApplyTimeField(base.CompositeField): + def __init__(self): + super(SettingsApplyTimeField, self).__init__( + path="@Redfish.SettingsApplyTime") + + apply_time = base.Field('ApplyTime', adapter=str) + """When the future configuration should be applied""" + + apply_time_allowable_values = base.Field( + 'ApplyTime@Redfish.AllowableValues', adapter=list) + """The list of allowable ApplyTime values""" + + maintenance_window_start_time = base.Field('MaintenanceWindowStartTime', + adapter=parser.parse) + 
"""The start time of a maintenance window""" + + maintenance_window_duration_in_seconds = base.Field( + 'MaintenanceWindowDurationInSeconds', adapter=int) + """The expiry time of maintenance window in seconds""" + + +class SettingsField(base.CompositeField): + """The settings of a resource + + Represents the future state and configuration of the resource. The + field is added to resources that support future state and + configuration. + + This field includes several properties to help clients monitor when + the resource is consumed by the service and determine the results of + applying the values, which may or may not have been successful. + """ + + def __init__(self): + super(SettingsField, self).__init__(path="@Redfish.Settings") + + time = base.Field('Time') + """Indicates the time the settings were applied to the server""" + + _etag = base.Field('ETag') + """The ETag of the resource to which the settings were applied, + after the application + """ + + _settings_object_idref = common.IdRefField("SettingsObject") + """Reference to the resource the client may PUT/PATCH in order + to change this resource + """ + + _supported_apply_times = base.MappedListField( + 'SupportedApplyTimes', + res_maps.APPLY_TIME_VALUE_MAP) + """List of supported apply times""" + + @property + def maintenance_window(self): + """MaintenanceWindow field + + Indicates if a given resource has a maintenance window assignment + for applying settings or operations + """ + LOG.warning('The @Redfish.MaintenanceWindow annotation does not ' + 'appear within @Redfish.Settings. Instead use the ' + 'maintenance_window property in the target resource ' + '(e.g. 
System resource)') + return None + + messages = base.MessageListField("Messages") + """Represents the results of the last time the values of the Settings + resource were applied to the server""" + + @property + def operation_apply_time_support(self): + """OperationApplyTimeSupport field + + Indicates if a client is allowed to request for a specific apply + time of a create, delete, or action operation of a given resource + """ + LOG.warning('Redfish ApplyTime annotations do not appear within ' + '@Redfish.Settings. Instead use the apply_time_settings ' + 'property in the target resource (e.g. Bios resource)') + return None + + def commit(self, connector, value): + """Commits new settings values + + The new values will be applied when the system or a service + restarts. + + :param connector: A Connector instance + :param value: Value representing JSON whose structure is specific + to each resource and the caller must format it correctly + """ + + connector.patch(self.resource_uri, data=value) + + @property + def resource_uri(self): + return self._settings_object_idref.resource_uri + + def get_status(self, registries): + """Determines the status of last update based + + Uses message id-s and severity to determine the status. 
+ + :param registries: registries to use to parse message + :returns: :class:`.SettingsUpdate` object containing status + and any messages + """ + + if not self.time: + return SettingsUpdate(NO_UPDATES, None) + + parsed_msgs = [] + for m in self.messages: + parsed_msgs.append( + message_registry.parse_message(registries, m)) + any_errors = any(m for m in parsed_msgs + if not m.severity == res_cons.SEVERITY_OK) + + if any_errors: + status = UPDATE_FAILURE + else: + status = UPDATE_SUCCESS + return SettingsUpdate(status, parsed_msgs) diff --git a/sushy/resources/system/bios.py b/sushy/resources/system/bios.py new file mode 100644 index 0000000000000000000000000000000000000000..bb265963cb466c02a6be7321c429ab6edd41c24b --- /dev/null +++ b/sushy/resources/system/bios.py @@ -0,0 +1,249 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Bios.v1_0_3.json + +from http import client as http_client +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy.resources import settings +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class ActionsField(base.CompositeField): + change_password = common.ActionField('#Bios.ChangePassword') + reset_bios = common.ActionField('#Bios.ResetBios') + + +class Bios(base.ResourceBase): + + def __init__(self, connector, path, redfish_version=None, registries=None): + """A class representing a Bios + + :param connector: A Connector instance + :param path: Sub-URI path to the Bios resource + :param registries: Dict of message registries to be used when + parsing messages of attribute update status + """ + super(Bios, self).__init__( + connector, path, redfish_version, registries) + + identity = base.Field('Id', required=True) + """The Bios resource identity string""" + + name = base.Field('Name') + """The name of the resource""" + + description = base.Field('Description') + """Human-readable description of the BIOS resource""" + + _attribute_registry = base.Field('AttributeRegistry') + """The Resource ID of the Attribute Registry + for the BIOS Attributes resource + """ + + _settings = settings.SettingsField() + """Results of last BIOS attribute update""" + + attributes = base.Field('Attributes') + """Vendor-specific key-value dict of effective BIOS attributes + + Attributes cannot be updated directly. 
+ To update use :py:func:`~set_attribute` or :py:func:`~set_attributes` + """ + + maintenance_window = settings.MaintenanceWindowField( + '@Redfish.MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" + + _actions = ActionsField('Actions') + + _apply_time_settings = settings.SettingsApplyTimeField() + + @property + @utils.cache_it + def _pending_settings_resource(self): + """Pending BIOS settings resource""" + return Bios( + self._conn, self._settings.resource_uri, + registries=None, + redfish_version=self.redfish_version) + + @property + def pending_attributes(self): + """Pending BIOS attributes + + BIOS attributes that have been committed to the system, + but for them to take effect system restart is necessary + """ + return self._pending_settings_resource.attributes + + @property + def apply_time_settings(self): + return self._pending_settings_resource._apply_time_settings + + def set_attribute(self, key, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): + """Update an attribute + + Attribute update is not immediate but requires system restart. + Committed attributes can be checked at :py:attr:`~pending_attributes` + property + + :param key: Attribute name + :param value: Attribute value + :param apply_time: When to update the attribute. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
+ """ + self.set_attributes({key: value}, apply_time, maint_window_start_time, + maint_window_duration) + + def set_attributes(self, value, apply_time=None, + maint_window_start_time=None, + maint_window_duration=None): + """Update many attributes at once + + Attribute update is not immediate but requires system restart. + Committed attributes can be checked at :py:attr:`~pending_attributes` + property + + :param value: Key-value pairs for attribute name and value + :param apply_time: When to update the attributes. Optional. + APPLY_TIME_IMMEDIATE - Immediate, + APPLY_TIME_ON_RESET - On reset, + APPLY_TIME_MAINT_START - During specified maintenance time + APPLY_TIME_MAINT_RESET - On reset during specified maintenance time + :param maint_window_start_time: The start time of a maintenance window, + datetime. Required when updating during maintenance window and + default maintenance window not set by the system. + :param maint_window_duration: Duration of maintenance time since + maintenance window start time in seconds. Required when updating + during maintenance window and default maintenance window not + set by the system. 
+ """ + payload = {'Attributes': value} + if (not apply_time + and (maint_window_start_time or maint_window_duration)): + raise ValueError('"apply_time" missing when passing maintenance ' + 'window settings') + if apply_time: + prop = '@Redfish.SettingsApplyTime' + payload[prop] = { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time] + } + if maint_window_start_time and not maint_window_duration: + raise ValueError('"maint_window_duration" missing') + if not maint_window_start_time and maint_window_duration: + raise ValueError('"maint_window_start_time" missing') + if maint_window_start_time and maint_window_duration: + payload[prop]['MaintenanceWindowStartTime'] =\ + maint_window_start_time.isoformat() + payload[prop]['MaintenanceWindowDurationInSeconds'] =\ + maint_window_duration + self._settings.commit(self._conn, + payload) + utils.cache_clear(self, force_refresh=False, + only_these=['_pending_settings_resource']) + + def _get_reset_bios_action_element(self): + actions = self._actions + + if not actions: + raise exceptions.MissingAttributeError(attribute="Actions", + resource=self._path) + + reset_bios_action = actions.reset_bios + + if not reset_bios_action: + raise exceptions.MissingActionError(action='#Bios.ResetBios', + resource=self._path) + return reset_bios_action + + def _get_change_password_element(self): + actions = self._actions + + if not actions: + raise exceptions.MissingAttributeError(attribute="Actions", + resource=self._path) + + change_password_action = actions.change_password + + if not change_password_action: + raise exceptions.MissingActionError(action='#Bios.ChangePassword', + resource=self._path) + return change_password_action + + def reset_bios(self): + """Reset the BIOS attributes to default""" + + target_uri = self._get_reset_bios_action_element().target_uri + + LOG.debug('Resetting BIOS attributes %s ...', self.identity) + try: + self._conn.post(target_uri) + except 
exceptions.HTTPError as resp: + # Send empty payload, if BMC expects body + if resp.status_code in [http_client.UNSUPPORTED_MEDIA_TYPE, + http_client.BAD_REQUEST]: + self._conn.post(target_uri, data={}) + else: + raise + + LOG.info('BIOS attributes %s is being reset', self.identity) + + def change_password(self, new_password, old_password, password_name): + """Change BIOS password""" + + target_uri = self._get_change_password_element().target_uri + + LOG.debug('Changing BIOS password %s ...', self.identity) + self._conn.post(target_uri, data={'NewPassword': new_password, + 'OldPassword': old_password, + 'PasswordName': password_name}) + LOG.info('BIOS password %s is being changed', self.identity) + + @property + def update_status(self): + """Status of the last attribute update + + :returns: :class:`sushy.resources.settings.SettingsUpdate` object + containing status and any messages + """ + return self._settings.get_status(self._registries) + + @property + def supported_apply_times(self): + """List of supported BIOS update apply times + + :returns: List of supported update apply time names + """ + return self._settings._supported_apply_times diff --git a/sushy/resources/system/constants.py b/sushy/resources/system/constants.py index f0cb2e157ec6dfc546931dedf6820d6664bb2088..92a8be001ca2422852dbb638b96d0ddab20f0797 100644 --- a/sushy/resources/system/constants.py +++ b/sushy/resources/system/constants.py @@ -13,37 +13,65 @@ # License for the specific language governing permissions and limitations # under the License. 
-# Values comes from the Redfish System json-schema 1.0.0: +# Values come from the Redfish System json-schema 1.0.0: # http://redfish.dmtf.org/schemas/v1/ComputerSystem.v1_0_0.json#/definitions/ComputerSystem # noqa +from sushy.resources import constants as res_cons + # Reset action constants -RESET_ON = 'on' -RESET_FORCE_OFF = 'force off' -RESET_GRACEFUL_SHUTDOWN = 'graceful shutdown' -RESET_GRACEFUL_RESTART = 'graceful restart' -RESET_FORCE_RESTART = 'force restart' -RESET_NMI = 'nmi' -RESET_FORCE_ON = 'force on' -RESET_PUSH_POWER_BUTTON = 'push power button' +RESET_ON = res_cons.RESET_TYPE_ON +RESET_FORCE_OFF = res_cons.RESET_TYPE_FORCE_OFF +RESET_GRACEFUL_SHUTDOWN = res_cons.RESET_TYPE_GRACEFUL_SHUTDOWN +RESET_GRACEFUL_RESTART = res_cons.RESET_TYPE_GRACEFUL_RESTART +RESET_FORCE_RESTART = res_cons.RESET_TYPE_FORCE_RESTART +RESET_NMI = res_cons.RESET_TYPE_NMI +RESET_FORCE_ON = res_cons.RESET_TYPE_FORCE_ON +RESET_PUSH_POWER_BUTTON = res_cons.RESET_TYPE_PUSH_POWER_BUTTON # System' PowerState constants -SYSTEM_POWER_STATE_ON = 'on' +SYSTEM_POWER_STATE_ON = res_cons.POWER_STATE_ON """The system is powered on""" -SYSTEM_POWER_STATE_OFF = 'off' +SYSTEM_POWER_STATE_OFF = res_cons.POWER_STATE_OFF """The system is powered off, although some components may continue to have AUX power such as management controller""" -SYSTEM_POWER_STATE_POWERING_ON = 'powering on' +SYSTEM_POWER_STATE_POWERING_ON = res_cons.POWER_STATE_POWERING_ON """A temporary state between Off and On. This temporary state can be very short""" -SYSTEM_POWER_STATE_POWERING_OFF = 'powering off' +SYSTEM_POWER_STATE_POWERING_OFF = res_cons.POWER_STATE_POWERING_OFF """A temporary state between On and Off. The power off action can take time while the OS is in the shutdown process""" +# Indicator LED Constants + +SYSTEM_INDICATOR_LED_LIT = res_cons.INDICATOR_LED_LIT +"""The Indicator LED is lit + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_LIT`. 
+""" + +SYSTEM_INDICATOR_LED_BLINKING = res_cons.INDICATOR_LED_BLINKING +"""The Indicator LED is blinking + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_BLINKING`. +""" + +SYSTEM_INDICATOR_LED_OFF = res_cons.INDICATOR_LED_OFF +"""The Indicator LED is off + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_OFF`. +""" + +SYSTEM_INDICATOR_LED_UNKNOWN = res_cons.INDICATOR_LED_UNKNOWN +"""The state of the Indicator LED cannot be determine + +Deprecated: Use `sushy.resources.constants.INDICATOR_LED_UNKNOWN`. +""" + # Boot source target constants BOOT_SOURCE_TARGET_NONE = 'none' @@ -98,8 +126,8 @@ BOOT_SOURCE_ENABLED_CONTINUOUS = 'continuous' BOOT_SOURCE_ENABLED_DISABLED = 'disabled' # Processor related constants -# Values comes from the Redfish Processor json-schema 1.0.0: -# http://redfish.dmtf.org/schemas/v1/Processor.v1_0_0.json +# Values comes from the Redfish Processor json-schema 1.3.0: +# http://redfish.dmtf.org/schemas/v1/Processor.v1_3_0.json # Processor Architecture constants @@ -108,3 +136,71 @@ PROCESSOR_ARCH_IA_64 = 'Intel Itanium' PROCESSOR_ARCH_ARM = 'ARM' PROCESSOR_ARCH_MIPS = 'MIPS' PROCESSOR_ARCH_OEM = 'OEM-defined' + +# Processor type constants + +PROCESSOR_TYPE_ACCELERATOR = 'An Accelerator' +PROCESSOR_TYPE_CPU = 'A Central Processing Unit' +PROCESSOR_TYPE_CORE = 'A Core in a Processor' +PROCESSOR_TYPE_DSP = 'A Digital Signal Processor' +PROCESSOR_TYPE_FPGA = 'A Field Programmable Gate Array' +PROCESSOR_TYPE_GPU = 'A Graphics Processing Unit' +PROCESSOR_TYPE_OEM = 'An OEM-defined Processing Unit' +PROCESSOR_TYPE_THREAD = 'A Thread in a Processor' + +# Processor InstructionSet constants + +PROCESSOR_INSTRUCTIONSET_ARM_A32 = 'ARM 32-bit' +PROCESSOR_INSTRUCTIONSET_ARM_A64 = 'ARM 64-bit' +PROCESSOR_INSTRUCTIONSET_IA_64 = 'Intel IA-64' +PROCESSOR_INSTRUCTIONSET_MIPS32 = 'MIPS 32-bit' +PROCESSOR_INSTRUCTIONSET_MIPS64 = 'MIPS 64-bit' +PROCESSOR_INSTRUCTIONSET_OEM = 'OEM-defined' +PROCESSOR_INSTRUCTIONSET_x86 = 'x86 32-bit' 
+PROCESSOR_INSTRUCTIONSET_x86_64 = 'x86 64-bit' + +# System type constants + +SYSTEM_TYPE_PHYSICAL = "Physical" +"""A physical computer system""" +SYSTEM_TYPE_VIRTUAL = "Virtual" +"""A virtual machine instance""" +SYSTEM_TYPE_OS = "OS" +"""An operating system instance""" +SYSTEM_TYPE_PHYSICALLY_PARTITIONED = "PhysicallyPartitioned" +"""A hardware-based partition of a computer system""" +SYSTEM_TYPE_VIRTUALLY_PARTITIONED = "VirtuallyPartitioned" +"""A virtual or software-based partition of a computer system""" +SYSTEM_TYPE_COMPOSED = "Composed" +"""A computer system created by binding resource blocks together""" + +# Secure boot constants + +SECURE_BOOT_ENABLED = "Enabled" +"""UEFI secure boot is enabled.""" + +SECURE_BOOT_DISABLED = "Disabled" +"""UEFI secure boot is disabled.""" + +SECURE_BOOT_MODE_SETUP = "SetupMode" +SECURE_BOOT_MODE_USER = "UserMode" +SECURE_BOOT_MODE_AUDIT = "AuditMode" +SECURE_BOOT_MODE_DEPLOYED = "DeployedMode" + +SECURE_BOOT_RESET_KEYS_TO_DEFAULT = "ResetAllKeysToDefault" +SECURE_BOOT_RESET_KEYS_DELETE_ALL = "DeleteAllKeys" +SECURE_BOOT_RESET_KEYS_DELETE_PK = "DeletePK" + +SECURE_BOOT_PLATFORM_KEY = "PK" +SECURE_BOOT_KEY_EXCHANGE_KEYS = "KEK" +SECURE_BOOT_ALLOWED_KEYS_DATABASE = "db" +SECURE_BOOT_DENIED_KEYS_DATABASE = "dbx" +SECURE_BOOT_RECOVERY_KEYS_DATABASE = "dbr" +SECURE_BOOT_TIMESTAMP_DATABASE = "dbt" + +SECURE_BOOT_DEFAULT_PLATFORM_KEY = "PKDefault" +SECURE_BOOT_DEFAULT_KEY_EXCHANGE_KEYS = "KEKDefault" +SECURE_BOOT_DEFAULT_ALLOWED_KEYS_DATABASE = "dbDefault" +SECURE_BOOT_DEFAULT_DENIED_KEYS_DATABASE = "dbxDefault" +SECURE_BOOT_DEFAULT_RECOVERY_KEYS_DATABASE = "dbrDefault" +SECURE_BOOT_DEFAULT_TIMESTAMP_DATABASE = "dbtDefault" diff --git a/sushy/resources/system/ethernet_interface.py b/sushy/resources/system/ethernet_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..b6eccec81e1fe3b3b2e3aa7587f07958e35cc418 --- /dev/null +++ b/sushy/resources/system/ethernet_interface.py @@ -0,0 +1,75 @@ +# Licensed 
under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/EthernetInterface.v1_4_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources import constants as res_cons +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class EthernetInterface(base.ResourceBase): + """This class adds the EthernetInterface resource""" + + identity = base.Field('Id', required=True) + """The Ethernet Interface identity string""" + + name = base.Field('Name') + """The name of the resource or array element""" + + description = base.Field('Description') + """Description""" + + permanent_mac_address = base.Field('PermanentMACAddress') + """This is the permanent MAC address assigned to this interface (port) """ + + mac_address = base.Field('MACAddress') + """This is the currently configured MAC address of the interface.""" + + speed_mbps = base.Field('SpeedMbps') + """This is the current speed in Mbps of this interface.""" + + status = common.StatusField("Status") + """Describes the status and health of this interface.""" + + +class EthernetInterfaceCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return EthernetInterface + + @property + @utils.cache_it + def summary(self): + """Summary of MAC addresses and interfaces state + + This filters the MACs whose health is OK, + which means the MACs in both 'Enabled' and 
'Disabled' States + are returned. + + :returns: dictionary in the format + {'aa:bb:cc:dd:ee:ff': sushy.STATE_ENABLED, + 'aa:bb:aa:aa:aa:aa': sushy.STATE_DISABLED} + """ + mac_dict = {} + for eth in self.get_members(): + if eth.mac_address is not None and eth.status is not None: + if eth.status.health == res_cons.HEALTH_OK: + mac_dict[eth.mac_address] = eth.status.state + return mac_dict diff --git a/sushy/resources/system/mappings.py b/sushy/resources/system/mappings.py index 6a7949b03d4738bbea04ade54224913084b77150..5e77b2bd99fe87c121c4f5236f9859252100798f 100644 --- a/sushy/resources/system/mappings.py +++ b/sushy/resources/system/mappings.py @@ -30,15 +30,6 @@ RESET_SYSTEM_VALUE_MAP = { RESET_SYSTEM_VALUE_MAP_REV = utils.revert_dictionary(RESET_SYSTEM_VALUE_MAP) -SYSTEM_POWER_STATE_MAP = { - 'On': sys_cons.SYSTEM_POWER_STATE_ON, - 'Off': sys_cons.SYSTEM_POWER_STATE_OFF, - 'PoweringOn': sys_cons.SYSTEM_POWER_STATE_POWERING_ON, - 'PoweringOff': sys_cons.SYSTEM_POWER_STATE_POWERING_OFF, -} - -SYSTEM_POWER_STATE_MAP_REV = utils.revert_dictionary(SYSTEM_POWER_STATE_MAP) - BOOT_SOURCE_TARGET_MAP = { 'None': sys_cons.BOOT_SOURCE_TARGET_NONE, 'Pxe': sys_cons.BOOT_SOURCE_TARGET_PXE, @@ -58,7 +49,7 @@ BOOT_SOURCE_TARGET_MAP = { BOOT_SOURCE_TARGET_MAP_REV = utils.revert_dictionary(BOOT_SOURCE_TARGET_MAP) BOOT_SOURCE_MODE_MAP = { - 'BIOS': sys_cons.BOOT_SOURCE_MODE_BIOS, + 'Legacy': sys_cons.BOOT_SOURCE_MODE_BIOS, 'UEFI': sys_cons.BOOT_SOURCE_MODE_UEFI, } @@ -82,3 +73,93 @@ PROCESSOR_ARCH_VALUE_MAP = { PROCESSOR_ARCH_VALUE_MAP_REV = ( utils.revert_dictionary(PROCESSOR_ARCH_VALUE_MAP)) + +PROCESSOR_TYPE_VALUE_MAP = { + 'Accelerator': sys_cons.PROCESSOR_TYPE_ACCELERATOR, + 'CPU': sys_cons.PROCESSOR_TYPE_CPU, + 'Core': sys_cons.PROCESSOR_TYPE_CORE, + 'DSP': sys_cons.PROCESSOR_TYPE_DSP, + 'FPGA': sys_cons.PROCESSOR_TYPE_FPGA, + 'GPU': sys_cons.PROCESSOR_TYPE_GPU, + 'OEM': sys_cons.PROCESSOR_TYPE_OEM, + 'Thread': sys_cons.PROCESSOR_TYPE_THREAD +} + +PROCESSOR_TYPE_VALUE_MAP_REV 
= ( + utils.revert_dictionary(PROCESSOR_TYPE_VALUE_MAP)) + +PROCESSOR_INSTRUCTIONSET_VALUE_MAP = { + 'ARM-A32': sys_cons.PROCESSOR_INSTRUCTIONSET_ARM_A32, + 'ARM-A64': sys_cons.PROCESSOR_INSTRUCTIONSET_ARM_A64, + 'IA-64': sys_cons.PROCESSOR_INSTRUCTIONSET_IA_64, + 'MIPS32': sys_cons.PROCESSOR_INSTRUCTIONSET_MIPS32, + 'MIPS64': sys_cons.PROCESSOR_INSTRUCTIONSET_MIPS64, + 'OEM': sys_cons.PROCESSOR_INSTRUCTIONSET_OEM, + 'x86': sys_cons.PROCESSOR_INSTRUCTIONSET_x86, + 'x86-64': sys_cons.PROCESSOR_INSTRUCTIONSET_x86_64 +} + +PROCESSOR_INSTRUCTIONSET_VALUE_MAP_REV = ( + utils.revert_dictionary(PROCESSOR_INSTRUCTIONSET_VALUE_MAP)) + +SYSTEM_TYPE_VALUE_MAP = { + 'Physical': sys_cons.SYSTEM_TYPE_PHYSICAL, + 'Virtual': sys_cons.SYSTEM_TYPE_VIRTUAL, + 'OS': sys_cons.SYSTEM_TYPE_OS, + 'PhysicallyPartitioned': sys_cons.SYSTEM_TYPE_PHYSICALLY_PARTITIONED, + 'VirtuallyPartitioned': sys_cons.SYSTEM_TYPE_VIRTUALLY_PARTITIONED, + 'Composed': sys_cons.SYSTEM_TYPE_COMPOSED +} + +SYSTEM_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(SYSTEM_TYPE_VALUE_MAP)) + +SECURE_BOOT_STATE = { + 'Enabled': sys_cons.SECURE_BOOT_ENABLED, + 'Disabled': sys_cons.SECURE_BOOT_DISABLED, +} + +SECURE_BOOT_STATE_REV = utils.revert_dictionary(SECURE_BOOT_STATE) + +SECURE_BOOT_MODE = { + 'SetupMode': sys_cons.SECURE_BOOT_MODE_SETUP, + 'UserMode': sys_cons.SECURE_BOOT_MODE_USER, + 'AuditMode': sys_cons.SECURE_BOOT_MODE_AUDIT, + 'DeployedMode': sys_cons.SECURE_BOOT_MODE_DEPLOYED, +} + +SECURE_BOOT_MODE_REV = utils.revert_dictionary(SECURE_BOOT_MODE) + +SECURE_BOOT_RESET_KEYS = { + 'ResetAllKeysToDefault': sys_cons.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + 'DeleteAllKeys': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + 'DeletePK': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_PK, +} + +SECURE_BOOT_RESET_KEYS_REV = utils.revert_dictionary(SECURE_BOOT_RESET_KEYS) + +SECURE_BOOT_DATABASE_TYPE = { + 'PK': sys_cons.SECURE_BOOT_PLATFORM_KEY, + 'KEK': sys_cons.SECURE_BOOT_KEY_EXCHANGE_KEYS, + 'db': 
sys_cons.SECURE_BOOT_ALLOWED_KEYS_DATABASE, + 'dbx': sys_cons.SECURE_BOOT_DENIED_KEYS_DATABASE, + 'dbr': sys_cons.SECURE_BOOT_RECOVERY_KEYS_DATABASE, + 'dbt': sys_cons.SECURE_BOOT_TIMESTAMP_DATABASE, + 'PKDefault': sys_cons.SECURE_BOOT_DEFAULT_PLATFORM_KEY, + 'KEKDefault': sys_cons.SECURE_BOOT_DEFAULT_KEY_EXCHANGE_KEYS, + 'dbDefault': sys_cons.SECURE_BOOT_DEFAULT_ALLOWED_KEYS_DATABASE, + 'dbxDefault': sys_cons.SECURE_BOOT_DEFAULT_DENIED_KEYS_DATABASE, + 'dbrDefault': sys_cons.SECURE_BOOT_DEFAULT_RECOVERY_KEYS_DATABASE, + 'dbtDefault': sys_cons.SECURE_BOOT_DEFAULT_TIMESTAMP_DATABASE, +} + +SECURE_BOOT_DATABASE_TYPE_REV = utils.revert_dictionary( + SECURE_BOOT_DATABASE_TYPE) + +SECURE_BOOT_DATABASE_RESET_KEYS = { + 'ResetAllKeysToDefault': sys_cons.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + 'DeleteAllKeys': sys_cons.SECURE_BOOT_RESET_KEYS_DELETE_ALL, +} + +SECURE_BOOT_DATABASE_RESET_KEYS_REV = utils.revert_dictionary( + SECURE_BOOT_DATABASE_RESET_KEYS) diff --git a/sushy/resources/system/processor.py b/sushy/resources/system/processor.py index 72fa416c5cb623443fe6b4b57850f25fd586631f..9c21c35cf61aa68e5ff72da845550c6b3d688bbc 100644 --- a/sushy/resources/system/processor.py +++ b/sushy/resources/system/processor.py @@ -12,11 +12,17 @@ # License for the specific language governing permissions and limitations # under the License. +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Processor.v1_3_0.json + import collections import logging +from sushy import exceptions from sushy.resources import base +from sushy.resources import common from sushy.resources.system import mappings as sys_maps +from sushy import utils # Representation of Summary of Processor information ProcessorSummary = collections.namedtuple('ProcessorSummary', @@ -24,6 +30,27 @@ ProcessorSummary = collections.namedtuple('ProcessorSummary', LOG = logging.getLogger(__name__) +class ProcessorIdField(base.CompositeField): + + effective_family = base.Field('EffectiveFamily') + """The processor effective family""" + + effective_model = base.Field('EffectiveModel') + """The processor effective model""" + + identification_registers = base.Field('IdentificationRegisters') + """The processor identification registers""" + + microcode_info = base.Field('MicrocodeInfo') + """The processor microcode info""" + + step = base.Field('Step') + """The processor stepping""" + + vendor_id = base.Field('VendorID') + """The processor vendor id""" + + class Processor(base.ResourceBase): identity = base.Field('Id', required=True) @@ -32,16 +59,16 @@ class Processor(base.ResourceBase): socket = base.Field('Socket') """The socket or location of the processor""" - # TODO(deray): Create mappings for the processor_type - processor_type = base.Field('ProcessorType') + processor_type = base.MappedField( + 'ProcessorType', sys_maps.PROCESSOR_TYPE_VALUE_MAP) """The type of processor""" processor_architecture = base.MappedField( 'ProcessorArchitecture', sys_maps.PROCESSOR_ARCH_VALUE_MAP) """The architecture of the processor""" - # TODO(deray): Create mappings for the instruction_set - instruction_set = base.Field('InstructionSet') + instruction_set = base.MappedField( + 'InstructionSet', sys_maps.PROCESSOR_INSTRUCTIONSET_VALUE_MAP) """The instruction set of the processor""" manufacturer = base.Field('Manufacturer') @@ -50,24 +77,50 @@ class Processor(base.ResourceBase): 
model = base.Field('Model') """The product model number of this device""" - max_speed_mhz = base.Field('MaxSpeedMHz', adapter=int) + max_speed_mhz = base.Field('MaxSpeedMHz', adapter=utils.int_or_none) """The maximum clock speed of the processor in MHz.""" - total_cores = base.Field('TotalCores', adapter=int) + processor_id = ProcessorIdField('ProcessorId') + """The processor id""" + + status = common.StatusField('Status') + """The processor status""" + + total_cores = base.Field('TotalCores', adapter=utils.int_or_none) """The total number of cores contained in this processor""" - total_threads = base.Field('TotalThreads', adapter=int) + total_threads = base.Field('TotalThreads', adapter=utils.int_or_none) """The total number of execution threads supported by this processor""" - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a Processor :param connector: A Connector instance :param identity: The identity of the processor :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+        :param registries: Dict of Redfish Message Registry objects to be
+            used in any resource that needs registries to parse messages
         """
-        super(Processor, self).__init__(connector, identity, redfish_version)
+        super(Processor, self).__init__(
+            connector, identity, redfish_version, registries)
+
+    def _get_processor_collection_path(self):
+        """Helper function to find the ProcessorCollection path"""
+        pro_col = self.json.get('SubProcessors')
+        if not pro_col:
+            raise exceptions.MissingAttributeError(
+                attribute='SubProcessors', resource=self._path)
+        return pro_col.get('@odata.id')
+
+    @property
+    @utils.cache_it
+    def sub_processors(self):
+        """A reference to the collection of Sub-Processors"""
+        return ProcessorCollection(
+            self._conn, self._get_processor_collection_path(),
+            redfish_version=self.redfish_version, registries=self.registries)
 
 
 class ProcessorCollection(base.ResourceCollectionBase):
@@ -76,10 +129,8 @@ class ProcessorCollection(base.ResourceCollectionBase):
     def _resource_type(self):
         return Processor
 
-    _summary = None
-    """The summary of processors of the system in general detail"""
-
     @property
+    @utils.cache_it
     def summary(self):
         """Property to provide ProcessorSummary info
 
@@ -89,39 +140,31 @@ class ProcessorCollection(base.ResourceCollectionBase):
         :returns: A namedtuple containing the ``count`` of processors
             in regards to logical CPUs, and their ``architecture``.
         """
-        if self._summary is None:
-            count, architecture = 0, None
-            for proc in self.get_members():
-                # Note(deray): It attempts to detect the number of CPU cores.
-                # It returns the number of logical CPUs.
-                if proc.total_threads is not None:
-                    count += proc.total_threads
-
-                # Note(deray): Bail out of checking the architecture info
-                # if you have already got hold of any one of the processors'
-                # architecture information.
- if (architecture is None - and proc.processor_architecture is not None): - architecture = proc.processor_architecture - - self._summary = ProcessorSummary(count=count, - architecture=architecture) - - return self._summary - - def __init__(self, connector, path, redfish_version=None): + count, architecture = 0, None + for proc in self.get_members(): + # Note(deray): It attempts to detect the number of CPU cores. + # It returns the number of logical CPUs. + if proc.total_threads is not None: + count += proc.total_threads + + # Note(deray): Bail out of checking the architecture info + # if you have already got hold of any one of the processors' + # architecture information. + if (architecture is None + and proc.processor_architecture is not None): + architecture = proc.processor_architecture + + return ProcessorSummary(count=count, architecture=architecture) + + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ProcessorCollection :param connector: A Connector instance :param path: The canonical path to the Processor collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(ProcessorCollection, self).__init__(connector, path, - redfish_version) - - def refresh(self): - """Refresh the resource""" - super(ProcessorCollection, self).refresh() - # Reset summary attribute - self._summary = None + super(ProcessorCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/system/secure_boot.py b/sushy/resources/system/secure_boot.py new file mode 100644 index 0000000000000000000000000000000000000000..aa6136f76f3cb514ebddeda2a8485168c2078d23 --- /dev/null +++ b/sushy/resources/system/secure_boot.py @@ -0,0 +1,144 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/SecureBoot.v1_1_0.json
+
+import logging
+
+from sushy import exceptions
+from sushy.resources import base
+from sushy.resources import common
+from sushy.resources.system import mappings
+from sushy.resources.system import secure_boot_database
+from sushy import utils
+
+LOG = logging.getLogger(__name__)
+
+
+class ResetKeysActionField(common.ActionField):
+
+    allowed_values = base.Field('ResetKeysType@Redfish.AllowableValues',
+                                adapter=list)
+
+
+class ActionsField(base.CompositeField):
+
+    reset_keys = ResetKeysActionField('#SecureBoot.ResetKeys')
+    """Action that resets the UEFI Secure Boot keys."""
+
+
+class SecureBoot(base.ResourceBase):
+
+    identity = base.Field('Id', required=True)
+    """The SecureBoot resource identity string"""
+
+    name = base.Field('Name')
+    """The name of the resource"""
+
+    description = base.Field('Description')
+    """Human-readable description of the SecureBoot resource"""
+
+    current_boot = base.MappedField('SecureBootCurrentBoot',
+                                    mappings.SECURE_BOOT_STATE)
+    """The UEFI Secure Boot state during the current boot cycle."""
+
+    enabled = base.Field('SecureBootEnable')
+    """Whether the UEFI Secure Boot takes effect on next boot.
+
+    This property can be enabled in UEFI boot mode only.
+    """
+
+    mode = base.MappedField('SecureBootMode', mappings.SECURE_BOOT_MODE)
+    """The current UEFI Secure Boot Mode."""
+
+    # TODO(dtantsur): SecureBootDatabases
+
+    _actions = ActionsField('Actions')
+
+    def __init__(self, connector, path, redfish_version=None, registries=None):
+        """A class representing secure boot settings.
+
+        :param connector: A Connector instance
+        :param path: Sub-URI path to the SecureBoot resource
+        :param registries: Dict of message registries to be used when
+            parsing messages of attribute update status
+        """
+        super().__init__(connector, path, redfish_version, registries)
+
+    @property
+    @utils.cache_it
+    def databases(self):
+        """A collection of secure boot databases.
+
+        It is set once when the first time it is queried. On refresh,
+        this property is marked as stale (greedy-refresh not done).
+        Here the actual refresh of the sub-resource happens, if stale.
+
+        :raises: MissingAttributeError if 'SecureBootDatabases/@odata.id' field
+            is missing.
+        :returns: `SecureBootDatabaseCollection` instance
+        """
+        return secure_boot_database.SecureBootDatabaseCollection(
+            self._conn, utils.get_sub_resource_path_by(
+                self, "SecureBootDatabases"),
+            redfish_version=self.redfish_version,
+            registries=self.registries)
+
+    def _get_reset_action_element(self):
+        reset_action = self._actions.reset_keys
+        if not reset_action:
+            raise exceptions.MissingActionError(action='#SecureBoot.ResetKeys',
+                                                resource=self._path)
+        return reset_action
+
+    def get_allowed_reset_keys_values(self):
+        """Get the allowed values for resetting the keys.
+
+        :returns: A set with the allowed values.
+        """
+        reset_action = self._get_reset_action_element()
+
+        if not reset_action.allowed_values:
+            LOG.warning('Could not figure out the allowed values for the '
+                        'reset keys action for %s', self.identity)
+            return set(mappings.SECURE_BOOT_RESET_KEYS_REV)
+
+        return set([mappings.SECURE_BOOT_RESET_KEYS[v] for v in
+                    set(mappings.SECURE_BOOT_RESET_KEYS).
+                    intersection(reset_action.allowed_values)])
+
+    def reset_keys(self, reset_type):
+        """Reset secure boot keys.
+
+        :param reset_type: Reset type, one of `SECURE_BOOT_RESET_KEYS_*`
+            constants.
+        """
+        valid_resets = self.get_allowed_reset_keys_values()
+        if reset_type not in valid_resets:
+            raise exceptions.InvalidParameterValueError(
+                parameter='reset_type', value=reset_type,
+                valid_values=valid_resets)
+
+        target_uri = self._get_reset_action_element().target_uri
+        self._conn.post(target_uri, data={'ResetKeysType': reset_type})
+
+    def set_enabled(self, enabled):
+        """Enable/disable secure boot.
+
+        :param enabled: True, if secure boot is enabled for next boot.
+ """ + if not isinstance(enabled, bool): + raise exceptions.InvalidParameterValueError( + "Expected a boolean for 'enabled', got %r" % enabled) + + self._conn.patch(self.path, data={'SecureBootEnable': enabled}) diff --git a/sushy/resources/system/secure_boot_database.py b/sushy/resources/system/secure_boot_database.py new file mode 100644 index 0000000000000000000000000000000000000000..aa4e9b11a183b0a377baaad821f4e82a5b0d57e1 --- /dev/null +++ b/sushy/resources/system/secure_boot_database.py @@ -0,0 +1,112 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+
+import logging
+
+from sushy import exceptions
+from sushy.resources import base
+from sushy.resources import common
+from sushy.resources.system import mappings
+
+LOG = logging.getLogger(__name__)
+
+
+class ResetKeysActionField(common.ActionField):
+
+    allowed_values = base.Field('ResetKeysType@Redfish.AllowableValues',
+                                adapter=list)
+
+
+class ActionsField(base.CompositeField):
+
+    reset_keys = ResetKeysActionField('#SecureBootDatabase.ResetKeys')
+    """Action that resets the UEFI Secure Boot keys."""
+
+
+class SecureBootDatabase(base.ResourceBase):
+
+    # TODO(dtantsur): certificates
+
+    database_id = base.MappedField('DatabaseId',
+                                   mappings.SECURE_BOOT_DATABASE_TYPE)
+    """Standard UEFI database type."""
+
+    description = base.Field('Description')
+    """The system description"""
+
+    identity = base.Field('Id', required=True)
+    """The secure boot database identity string"""
+
+    name = base.Field('Name')
+    """The secure boot database name"""
+
+    # TODO(dtantsur): signatures
+
+    _actions = ActionsField('Actions')
+
+    def _get_reset_action_element(self):
+        reset_action = self._actions.reset_keys
+        if not reset_action:
+            raise exceptions.MissingActionError(
+                action='#SecureBootDatabase.ResetKeys', resource=self._path)
+        return reset_action
+
+    def get_allowed_reset_keys_values(self):
+        """Get the allowed values for resetting the keys.
+
+        :returns: A set with the allowed values.
+        """
+        reset_action = self._get_reset_action_element()
+
+        if not reset_action.allowed_values:
+            LOG.warning('Could not figure out the allowed values for the '
+                        'reset keys action for %s', self.identity)
+            return set(mappings.SECURE_BOOT_DATABASE_RESET_KEYS_REV)
+
+        return set([mappings.SECURE_BOOT_DATABASE_RESET_KEYS[v] for v in
+                    set(mappings.SECURE_BOOT_DATABASE_RESET_KEYS).
+                    intersection(reset_action.allowed_values)])
+
+    def reset_keys(self, reset_type):
+        """Reset secure boot keys.
+
+        :param reset_type: Reset type, one of `SECURE_BOOT_RESET_KEYS_*`
+            constants.
+ """ + valid_resets = self.get_allowed_reset_keys_values() + if reset_type not in valid_resets: + raise exceptions.InvalidParameterValueError( + parameter='reset_type', value=reset_type, + valid_values=valid_resets) + + target_uri = self._get_reset_action_element().target_uri + self._conn.post(target_uri, data={'ResetKeysType': reset_type}) + + +class SecureBootDatabaseCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return SecureBootDatabase + + def __init__(self, connector, path, redfish_version=None, registries=None): + """A class representing a ComputerSystemCollection + + :param connector: A Connector instance + :param path: The canonical path to the System collection resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(SecureBootDatabaseCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/system/simple_storage.py b/sushy/resources/system/simple_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..2abbc28d81c70fa61ef43d3eca6e2b11d49e16dd --- /dev/null +++ b/sushy/resources/system/simple_storage.py @@ -0,0 +1,86 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/SimpleStorage.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources import constants as res_cons +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class DeviceListField(base.ListField): + """The storage device/s associated with SimpleStorage.""" + + name = base.Field('Name', required=True) + """The name of the storage device""" + + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size of the storage device.""" + + status = common.StatusField('Status') + """Describes the status and health of a storage device.""" + + +class SimpleStorage(base.ResourceBase): + """This class represents a simple storage. + + It represents the properties of a storage controller and its + directly-attached devices. A storage device can be a disk drive or optical + media device. + """ + + identity = base.Field('Id', required=True) + """The SimpleStorage identity string""" + + name = base.Field('Name') + """The name of the resource""" + + devices = DeviceListField('Devices', default=[]) + """The storage devices associated with this resource.""" + + +class SimpleStorageCollection(base.ResourceCollectionBase): + """Represents a collection of simple storage associated with system.""" + + @property + def _resource_type(self): + return SimpleStorage + + @property + @utils.cache_it + def disks_sizes_bytes(self): + """Sizes of each Disk in bytes in SimpleStorage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. + """ + return sorted(device.capacity_bytes + for simpl_stor in self.get_members() + for device in simpl_stor.devices + if (device.status.state == res_cons.STATE_ENABLED + and device.capacity_bytes is not None)) + + @property + def max_size_bytes(self): + """Max size available (in bytes) among all enabled Disk resources. 
+ + Returns the cached value until it (or its parent resource) is + refreshed. + """ + return utils.max_safe(self.disks_sizes_bytes) diff --git a/sushy/resources/system/storage/__init__.py b/sushy/resources/system/storage/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/system/storage/constants.py b/sushy/resources/system/storage/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..e896a4a3fce9455202b8cee42e5b8dc0a7e07ca2 --- /dev/null +++ b/sushy/resources/system/storage/constants.py @@ -0,0 +1,117 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# Volume Initialization Types +VOLUME_INIT_TYPE_FAST = 'fast' +"""The volume is prepared for use quickly, typically by erasing just the +beginning and end of the space so that partitioning can be performed.""" + +VOLUME_INIT_TYPE_SLOW = 'slow' +"""The volume is prepared for use slowly, typically by completely erasing +the volume.""" + +# VolumeType Types +VOLUME_TYPE_RAW_DEVICE = 'rawdevice' +"""The volume is a raw physical device without any RAID or other +virtualization applied.""" + +VOLUME_TYPE_NON_REDUNDANT = 'nonredundant' +"""The volume is a non-redundant storage device.""" + +VOLUME_TYPE_MIRRORED = 'mirrored' +"""The volume is a mirrored device.""" + +VOLUME_TYPE_STRIPED_WITH_PARITY = 'stripedwithparity' +"""The volume is a device which uses parity to retain redundant information.""" + +VOLUME_TYPE_SPANNED_MIRRORS = 'spannedmirrors' +"""The volume is a spanned set of mirrored devices.""" + +VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY = 'spannedstripeswithparity' +"""The volume is a spanned set of devices which uses parity to retain +redundant information.""" + +# RAIDType Types +RAID_TYPE_RAID0 = 'RAID0' +"""A placement policy where consecutive logical blocks of data are uniformly +distributed across a set of independent storage devices without offering any +form of redundancy.""" + +RAID_TYPE_RAID1 = 'RAID1' +"""A placement policy where each logical block of data is stored on more than +one independent storage device.""" + +RAID_TYPE_RAID3 = 'RAID3' +"""A placement policy using parity-based protection where logical bytes of +data are uniformly distributed across a set of independent storage devices and +where the parity is stored on a dedicated independent storage device.""" + +RAID_TYPE_RAID4 = 'RAID4' +"""A placement policy using parity-based protection where logical blocks of +data are uniformly distributed across a set of independent storage devices and +where the parity is stored on a dedicated independent storage device.""" + +RAID_TYPE_RAID5 = 
'RAID5' +"""A placement policy using parity-based protection for storing stripes of 'n' +logical blocks of data and one logical block of parity across a set of 'n+1' +independent storage devices where the parity and data blocks are interleaved +across the storage devices.""" + +RAID_TYPE_RAID6 = 'RAID6' +"""A placement policy using parity-based protection for storing stripes of 'n' +logical blocks of data and two logical blocks of independent parity across a +set of 'n+2' independent storage devices where the parity and data blocks are +interleaved across the storage devices.""" + +RAID_TYPE_RAID10 = 'RAID10' +"""A placement policy that creates a striped device (RAID 0) over a set of +mirrored devices (RAID 1).""" + +RAID_TYPE_RAID01 = 'RAID01' +"""A data placement policy that creates a mirrored device (RAID 1) over a set +of striped devices (RAID 0).""" + +RAID_TYPE_RAID6TP = 'RAID6TP' +"""A placement policy that uses parity-based protection for storing stripes of +'n' logical blocks of data and three logical blocks of independent parity +across a set of 'n+3' independent storage devices where the parity and data +blocks are interleaved across the storage devices.""" + +RAID_TYPE_RAID1E = 'RAID1E' +"""A placement policy that uses a form of mirroring implemented over a set of +independent storage devices where logical blocks are duplicated on a pair of +independent storage devices so that data is uniformly distributed across the +storage devices.""" + +RAID_TYPE_RAID50 = 'RAID50' +"""A placement policy that uses a RAID 0 stripe set over two or more RAID 5 +sets of independent storage devices.""" + +RAID_TYPE_RAID60 = 'RAID60' +"""A placement policy that uses a RAID 0 stripe set over two or more RAID 6 +sets of independent storage devices.""" + +RAID_TYPE_RAID00 = 'RAID00' +"""A placement policy that creates a RAID 0 stripe set over two or more RAID 0 +sets.""" + +RAID_TYPE_RAID10E = 'RAID10E' +"""A placement policy that uses a RAID 0 stripe set over two or more 
RAID 10 +sets.""" + +RAID_TYPE_RAID1Triple = 'RAID1Triple' +"""A placement policy where each logical block of data is mirrored three times +across a set of three independent storage devices.""" + +RAID_TYPE_RAID10Triple = 'RAID10Triple' +"""A placement policy that uses a striped device (RAID 0) over a set of triple +mirrored devices (RAID 1Triple).""" diff --git a/sushy/resources/system/storage/drive.py b/sushy/resources/system/storage/drive.py new file mode 100644 index 0000000000000000000000000000000000000000..c9df1d278da4f567abfe1259a45ac94186fdb81f --- /dev/null +++ b/sushy/resources/system/storage/drive.py @@ -0,0 +1,88 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# http://redfish.dmtf.org/schemas/v1/Drive.v1_4_0.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources import mappings as res_maps +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class Drive(base.ResourceBase): + """This class represents a disk drive or other physical storage medium.""" + + block_size_bytes = base.Field('BlockSizeBytes', adapter=utils.int_or_none) + """The size of the smallest addressable unit of this drive in bytes""" + + capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none) + """The size in bytes of this Drive""" + + identifiers = common.IdentifiersListField('Identifiers', default=[]) + """The Durable names for the drive""" + + identity = base.Field('Id', required=True) + """The Drive identity string""" + + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) + """Whether the indicator LED is lit or off""" + + manufacturer = base.Field('Manufacturer') + """This is the manufacturer of this drive""" + + media_type = base.Field('MediaType') + """The type of media contained in this drive""" + + model = base.Field('Model') + """This is the model number for the drive""" + + name = base.Field('Name') + """The name of the resource""" + + part_number = base.Field('PartNumber') + """The part number for this drive""" + + protocol = base.MappedField('Protocol', res_maps.PROTOCOL_TYPE_VALUE_MAP) + """Protocol this drive is using to communicate to the storage controller""" + + serial_number = base.Field('SerialNumber') + """The serial number for this drive""" + + status = common.StatusField('Status') + """This type describes the status and health of the drive""" + + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. 
+ + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. + """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() diff --git a/sushy/resources/system/storage/mappings.py b/sushy/resources/system/storage/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..8fec80d12a2add42a5732269af035b6d4c97d50a --- /dev/null +++ b/sushy/resources/system/storage/mappings.py @@ -0,0 +1,52 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.system.storage import constants as store_cons +from sushy import utils + +VOLUME_INIT_TYPE_MAP = { + 'Fast': store_cons.VOLUME_INIT_TYPE_FAST, + 'Slow': store_cons.VOLUME_INIT_TYPE_SLOW +} + +VOLUME_INIT_TYPE_MAP_REV = ( + utils.revert_dictionary(VOLUME_INIT_TYPE_MAP) +) + +VOLUME_TYPE_TYPE_MAP = { + 'RawDevice': store_cons.VOLUME_TYPE_RAW_DEVICE, + 'NonRedundant': store_cons.VOLUME_TYPE_NON_REDUNDANT, + 'Mirrored': store_cons.VOLUME_TYPE_MIRRORED, + 'StripedWithParity': store_cons.VOLUME_TYPE_STRIPED_WITH_PARITY, + 'SpannedMirrors': store_cons.VOLUME_TYPE_SPANNED_MIRRORS, + 'SpannedStripesWithParity': + store_cons.VOLUME_TYPE_SPANNED_STRIPES_WITH_PARITY +} + +RAID_TYPE_TYPE_MAP = { + 'RAID0': store_cons.RAID_TYPE_RAID0, + 'RAID1': store_cons.RAID_TYPE_RAID1, + 'RAID3': store_cons.RAID_TYPE_RAID3, + 'RAID4': store_cons.RAID_TYPE_RAID4, + 'RAID5': store_cons.RAID_TYPE_RAID5, + 'RAID6': store_cons.RAID_TYPE_RAID6, + 'RAID10': store_cons.RAID_TYPE_RAID10, + 'RAID01': store_cons.RAID_TYPE_RAID01, + 'RAID6TP': store_cons.RAID_TYPE_RAID6TP, + 'RAID1E': store_cons.RAID_TYPE_RAID1E, + 'RAID50': store_cons.RAID_TYPE_RAID50, + 'RAID60': store_cons.RAID_TYPE_RAID60, + 'RAID00': store_cons.RAID_TYPE_RAID00, + 'RAID10E': store_cons.RAID_TYPE_RAID10E, + 'RAID1Triple': store_cons.RAID_TYPE_RAID1Triple, + 'RAID10Triple': store_cons.RAID_TYPE_RAID10Triple, +} diff --git a/sushy/resources/system/storage/storage.py b/sushy/resources/system/storage/storage.py new file mode 100644 index 0000000000000000000000000000000000000000..f658437ac745a949e05d6c0781ddbe0b8fcd34d3 --- /dev/null +++ b/sushy/resources/system/storage/storage.py @@ -0,0 +1,186 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# This is referred from Redfish standard schema.
+# http://redfish.dmtf.org/schemas/v1/Storage.v1_4_0.json
+
+import logging
+
+from sushy.resources import base
+from sushy.resources import common
+from sushy.resources import mappings as res_maps
+from sushy.resources.system.storage import drive
+from sushy.resources.system.storage import mappings
+from sushy.resources.system.storage import volume
+from sushy import utils
+
+
+LOG = logging.getLogger(__name__)
+
+
+class StorageControllersListField(base.ListField):
+    """The set of storage controllers represented by this resource."""
+
+    member_id = base.Field('MemberId', required=True)
+    """Uniquely identifies the member within the collection."""
+
+    name = base.Field('Name', required=True)
+    """The name of the storage controller"""
+
+    status = common.StatusField('Status')
+    """Describes the status and health of the resource and its children."""
+
+    identifiers = common.IdentifiersListField('Identifiers', default=[])
+    """The Durable names for the storage controller."""
+
+    speed_gbps = base.Field('SpeedGbps')
+    """The maximum speed of the storage controller's device interface."""
+
+    controller_protocols = base.MappedListField(
+        'SupportedControllerProtocols', res_maps.PROTOCOL_TYPE_VALUE_MAP)
+    """The protocols by which this storage controller can be communicated to"""
+
+    device_protocols = base.MappedListField('SupportedDeviceProtocols',
+                                            res_maps.PROTOCOL_TYPE_VALUE_MAP)
+    """The protocols the controller can use to communicate with devices"""
+
+    raid_types = 
base.MappedListField('SupportedRAIDTypes', + mappings.RAID_TYPE_TYPE_MAP) + """The set of RAID types supported by the storage controller.""" + + +class Storage(base.ResourceBase): + """This class represents the storage subsystem resources. + + A storage subsystem represents a set of storage controllers (physical or + virtual) and the resources such as drives and volumes that can be accessed + from that subsystem. + """ + + identity = base.Field('Id', required=True) + """The Storage identity string""" + + name = base.Field('Name') + """The name of the resource""" + + drives_identities = base.Field('Drives', + adapter=utils.get_members_identities) + """A tuple with the drive identities""" + + status = common.StatusField('Status') + """Describes the status and health of the resource and its children.""" + + def get_drive(self, drive_identity): + """Given the drive identity return a ``Drive`` object + + :param drive_identity: The identity of the ``Drive`` + :returns: The ``Drive`` object + :raises: ResourceNotFoundError + """ + return drive.Drive(self._conn, drive_identity, + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def drives(self): + """Return a list of `Drive` objects present in the storage resource. + + It is set once when the first time it is queried. On subsequent + invocations, it returns a cached list of `Drives` objects until it is + marked stale. + + :returns: A list of `Drive` objects + :raises: ResourceNotFoundError + """ + return [self.get_drive(id_) for id_ in self.drives_identities] + + @property + @utils.cache_it + def drives_sizes_bytes(self): + """Sizes of all Drives in bytes in Storage resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. 
+ """ + return sorted(drv.capacity_bytes for drv in self.drives) + + @property + def drives_max_size_bytes(self): + """Max size available in bytes among all Drives of this collection.""" + return utils.max_safe(self.drives_sizes_bytes) + + @property + @utils.cache_it + def volumes(self): + """Property to reference `VolumeCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done at that + point). Here only the actual refresh of the sub-resource happens, + if resource is stale. + """ + return volume.VolumeCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Volumes'), + redfish_version=self.redfish_version) + + storage_controllers = StorageControllersListField('StorageControllers', + default=[]) + """The storage devices associated with this resource.""" + + +class StorageCollection(base.ResourceCollectionBase): + """This class represents the collection of Storage resources""" + + @property + def _resource_type(self): + return Storage + + @property + @utils.cache_it + def drives_sizes_bytes(self): + """Sizes of each Drive in bytes in Storage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. + """ + return sorted(drive_size for storage_ in self.get_members() + for drive_size in storage_.drives_sizes_bytes) + + @property + def max_drive_size_bytes(self): + """Max size available (in bytes) among all Drive resources. + + Returns the cached value until it (or its parent resource) is + refreshed. + """ + return utils.max_safe(self.drives_sizes_bytes) + + @property + @utils.cache_it + def volumes_sizes_bytes(self): + """Sizes of each Volume in bytes in Storage collection resource. + + Returns the list of cached values until it (or its parent resource) + is refreshed. 
+ """ + return sorted(volume_size for storage_ in self.get_members() + for volume_size in storage_.volumes.volumes_sizes_bytes) + + @property + def max_volume_size_bytes(self): + """Max size available (in bytes) among all Volume resources. + + Returns the cached value until it (or its parent resource) is + refreshed. + """ + return utils.max_safe(self.volumes_sizes_bytes) diff --git a/sushy/resources/system/storage/volume.py b/sushy/resources/system/storage/volume.py new file mode 100644 index 0000000000000000000000000000000000000000..29522b10e76c9e24c3906debea32f5bb67e13bf4 --- /dev/null +++ b/sushy/resources/system/storage/volume.py @@ -0,0 +1,312 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
# http://redfish.dmtf.org/schemas/v1/Volume.v1_0_3.json

import logging

from sushy import exceptions
from sushy.resources import base
from sushy.resources import common
from sushy.resources import constants as res_cons
from sushy.resources import mappings as res_maps
from sushy.resources.system.storage import constants as store_cons
from sushy.resources.system.storage import mappings as store_maps
from sushy.resources.task_monitor import TaskMonitor as TaskMonitorDepr
from sushy.taskmonitor import TaskMonitor
from sushy import utils

LOG = logging.getLogger(__name__)


class ActionsField(base.CompositeField):
    initialize = common.InitializeActionField('#Volume.Initialize')


class Volume(base.ResourceBase):
    """This class adds the Storage Volume resource"""

    identity = base.Field('Id', required=True)
    """The Volume identity string"""

    name = base.Field('Name')
    """The name of the resource"""

    capacity_bytes = base.Field('CapacityBytes', adapter=utils.int_or_none)
    """The size in bytes of this Volume."""

    volume_type = base.MappedField('VolumeType',
                                   store_maps.VOLUME_TYPE_TYPE_MAP)
    """The type of this volume."""

    raid_type = base.MappedField('RAIDType', store_maps.RAID_TYPE_TYPE_MAP)
    """The RAID type of this volume."""

    encrypted = base.Field('Encrypted', adapter=bool)
    """Is this Volume encrypted."""

    identifiers = common.IdentifiersListField('Identifiers', default=[])
    """The Durable names for the volume."""

    block_size_bytes = base.Field('BlockSizeBytes', adapter=int)
    """The size of the smallest addressable unit of this volume in bytes."""

    operation_apply_time_support = common.OperationApplyTimeSupportField()
    """Indicates if a client is allowed to request for a specific apply
    time of a create, delete, or action operation of a given resource"""

    _actions = ActionsField('Actions')

    def _get_initialize_action_element(self):
        """Return the '#Volume.Initialize' action or raise if absent."""
        initialize_action = self._actions.initialize
        if not initialize_action:
            raise exceptions.MissingActionError(action='#Volume.Initialize',
                                                resource=self._path)
        return initialize_action

    def get_allowed_initialize_volume_values(self):
        """Get the allowed values for initializing the volume.

        :returns: A set with the allowed values.
        """
        action = self._get_initialize_action_element()

        if not action.allowed_values:
            # BMC did not advertise allowed values; fall back to every
            # InitializeType sushy knows about.
            LOG.warning('Could not figure out the allowed values for the '
                        'initialize volume action for Volume %s',
                        self.identity)
            return set(store_maps.VOLUME_INIT_TYPE_MAP_REV)

        return set([store_maps.VOLUME_INIT_TYPE_MAP[v] for v in
                    set(store_maps.VOLUME_INIT_TYPE_MAP).
                    intersection(action.allowed_values)])

    def _initialize(self, value=store_cons.VOLUME_INIT_TYPE_FAST,
                    apply_time=None, timeout=500):
        """Issue the Initialize action POST; shared by public wrappers.

        :returns: tuple of (response, action target URI)
        """
        valid_values = self.get_allowed_initialize_volume_values()
        if value not in valid_values:
            raise exceptions.InvalidParameterValueError(
                parameter='value', value=value, valid_values=valid_values)
        value = store_maps.VOLUME_INIT_TYPE_MAP_REV[value]
        payload = {'InitializeType': value}
        blocking = False
        oat_prop = '@Redfish.OperationApplyTime'
        if apply_time:
            payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time]
        # Only an 'Immediate' apply time warrants blocking on the request.
        if (payload and payload.get(oat_prop) == res_maps.
                APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]):
            blocking = True
        target_uri = self._get_initialize_action_element().target_uri
        r = self._conn.post(target_uri, data=payload, blocking=blocking,
                            timeout=timeout)
        return r, target_uri

    def initialize(self, value=store_cons.VOLUME_INIT_TYPE_FAST,
                   apply_time=None, timeout=500):
        """Initialize the volume.

        :param value: The InitializeType value.
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: InvalidParameterValueError, if the target value is not
            allowed.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: TaskMonitor if async task or None if successful init
        """
        r, target_uri = self._initialize(value, apply_time, timeout)
        if r.status_code == 202:
            return TaskMonitor.from_response(
                self._conn, r, target_uri, self.redfish_version,
                self.registries)

    def initialize_volume(self, value=store_cons.VOLUME_INIT_TYPE_FAST,
                          apply_time=None, timeout=500):
        """Initialize the volume.

        Deprecated: Use initialize

        :param value: The InitializeType value.
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: InvalidParameterValueError, if the target value is not
            allowed.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: TaskMonitor if async task or None if successful init
        """
        r, _ = self._initialize(value, apply_time, timeout)
        if r.status_code == 202:
            # Bug fix: the deprecated TaskMonitor takes a connector as its
            # first argument (see delete_volume below); passing `self` handed
            # it the Volume resource instead.
            return (TaskMonitorDepr(self._conn, r.headers.get('location'))
                    .set_retry_after(r.headers.get('retry-after')))

    def _delete(self, payload=None, apply_time=None, timeout=500):
        """Issue the DELETE request; shared by public wrappers."""
        blocking = False
        oat_prop = '@Redfish.OperationApplyTime'
        if apply_time:
            if payload is None:
                payload = {}
            payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time]
        # Only an 'Immediate' apply time warrants blocking on the request.
        if (payload and payload.get(oat_prop) == res_maps.
                APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]):
            blocking = True
        r = self._conn.delete(self._path, data=payload, blocking=blocking,
                              timeout=timeout)
        return r

    def delete(self, payload=None, apply_time=None, timeout=500):
        """Delete the volume.

        :param payload: May contain @Redfish.OperationApplyTime property
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: TaskMonitor if async task or None if successful deletion
        """
        r = self._delete(payload, apply_time, timeout)
        if r.status_code == 202:
            return TaskMonitor.from_response(
                self._conn, r, self._path, self.redfish_version,
                self.registries)

    def delete_volume(self, payload=None, apply_time=None, timeout=500):
        """Delete the volume.

        Deprecated: Use delete

        :param payload: May contain @Redfish.OperationApplyTime property
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: TaskMonitor if async task or None if successful deletion
        """
        r = self._delete(payload, apply_time, timeout)
        if r.status_code == 202:
            return (TaskMonitorDepr(self._conn, r.headers.get('location'))
                    .set_retry_after(r.headers.get('retry-after')))


class VolumeCollection(base.ResourceCollectionBase):
    """This class represents the Storage Volume collection"""

    @property
    def _resource_type(self):
        return Volume

    @property
    @utils.cache_it
    def volumes_sizes_bytes(self):
        """Sizes of all Volumes in bytes in VolumeCollection resource.

        Returns the list of cached values until it (or its parent resource)
        is refreshed.
        """
        return sorted(vol.capacity_bytes for vol in self.get_members())

    @property
    def max_volume_size_bytes(self):
        """Max size available (in bytes) among all Volume resources.

        Returns the cached value until it (or its parent resource) is
        refreshed.
        """
        return utils.max_safe(self.volumes_sizes_bytes)

    # NOTE(etingof): for backward compatibility
    max_size_bytes = max_volume_size_bytes

    operation_apply_time_support = common.OperationApplyTimeSupportField()
    """Indicates if a client is allowed to request for a specific apply
    time of a create, delete, or action operation of a given resource"""

    def _create(self, payload, apply_time=None, timeout=500):
        """Issue the POST creating a volume; shared by public wrappers.

        :returns: tuple of (response, Location header or None)
        """
        blocking = False
        oat_prop = '@Redfish.OperationApplyTime'
        if apply_time:
            if payload is None:
                payload = {}
            payload[oat_prop] = res_maps.APPLY_TIME_VALUE_MAP_REV[apply_time]
        # Only an 'Immediate' apply time warrants blocking on the request.
        if (payload and payload.get(oat_prop) == res_maps.
                APPLY_TIME_VALUE_MAP_REV[res_cons.APPLY_TIME_IMMEDIATE]):
            blocking = True
        r = self._conn.post(self._path, data=payload, blocking=blocking,
                            timeout=timeout)
        location = r.headers.get('Location')
        return r, location

    def create(self, payload, apply_time=None, timeout=500):
        """Create a volume.

        :param payload: The payload representing the new volume to create.
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: Newly created Volume resource or TaskMonitor if async task
        """
        r, location = self._create(payload, apply_time, timeout)
        if r.status_code == 201:
            if location:
                # Refresh so the new member is visible, then return it.
                self.refresh()
                return self.get_member(location)
        elif r.status_code == 202:
            return TaskMonitor.from_response(
                self._conn, r, self._path, self.redfish_version,
                self.registries)

    def create_volume(self, payload, apply_time=None, timeout=500):
        """Create a volume.

        Deprecated: Use create.

        :param payload: The payload representing the new volume to create.
        :param apply_time: When to update the attributes. Optional.
            APPLY_TIME_IMMEDIATE - Immediate,
            APPLY_TIME_ON_RESET - On reset,
            APPLY_TIME_MAINT_START - During specified maintenance time
            APPLY_TIME_MAINT_RESET - On reset during specified maintenance time
        :param timeout: Max time in seconds to wait for blocking async call.
        :raises: ConnectionError
        :raises: HTTPError
        :returns: Newly created Volume resource or TaskMonitor if async task
        """
        r, location = self._create(payload, apply_time, timeout)
        if r.status_code == 201:
            if location:
                self.refresh()
                return self.get_member(location)
        elif r.status_code == 202:
            return (TaskMonitorDepr(self._conn, location)
                    .set_retry_after(r.headers.get('retry-after')))
""" - size_gib = base.Field('TotalSystemMemoryGiB', adapter=int) + size_gib = base.Field('TotalSystemMemoryGiB', adapter=utils.int_or_none) """The size of memory of the system in GiB. This signifies the total installed, operating system-accessible memory @@ -69,7 +83,7 @@ class System(base.ResourceBase): """The system BIOS version""" boot = BootField('Boot', required=True) - """A dictionary containg the current boot device, frequency and mode""" + """A dictionary containing the current boot device, frequency and mode""" description = base.Field('Description') """The system description""" @@ -80,8 +94,8 @@ class System(base.ResourceBase): identity = base.Field('Id', required=True) """The system identity string""" - # TODO(lucasagomes): Create mappings for the indicator_led - indicator_led = base.Field('IndicatorLED') + indicator_led = base.MappedField('IndicatorLED', + res_maps.INDICATOR_LED_VALUE_MAP) """Whether the indicator LED is lit or off""" manufacturer = base.Field('Manufacturer') @@ -94,7 +108,7 @@ class System(base.ResourceBase): """The system part number""" power_state = base.MappedField('PowerState', - sys_maps.SYSTEM_POWER_STATE_MAP) + res_maps.POWER_STATE_VALUE_MAP) """The system power state""" serial_number = base.Field('SerialNumber') @@ -103,8 +117,11 @@ class System(base.ResourceBase): sku = base.Field('SKU') """The system stock-keeping unit""" - # TODO(lucasagomes): Create mappings for the system_type - system_type = base.Field('SystemType') + status = common.StatusField('Status') + """The system status""" + + system_type = base.MappedField('SystemType', + sys_maps.SYSTEM_TYPE_VALUE_MAP) """The system type""" uuid = base.Field('UUID') @@ -113,19 +130,26 @@ class System(base.ResourceBase): memory_summary = MemorySummaryField('MemorySummary') """The summary info of memory of the system in general detail""" - _processors = None # ref to ProcessorCollection instance + maintenance_window = settings.MaintenanceWindowField( + 
'@Redfish.MaintenanceWindow') + """Indicates if a given resource has a maintenance window assignment + for applying settings or operations""" _actions = ActionsField('Actions', required=True) - def __init__(self, connector, identity, redfish_version=None): + def __init__(self, connector, identity, redfish_version=None, + registries=None): """A class representing a ComputerSystem :param connector: A Connector instance :param identity: The identity of the System resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of registries to be used in any resource + that needs registries to parse messages. """ - super(System, self).__init__(connector, identity, redfish_version) + super(System, self).__init__( + connector, identity, redfish_version, registries) def _get_reset_action_element(self): reset_action = self._actions.reset @@ -185,41 +209,44 @@ class System(base.ResourceBase): set(sys_maps.BOOT_SOURCE_TARGET_MAP). intersection(self.boot.allowed_values)]) - def set_system_boot_source(self, target, - enabled=sys_cons.BOOT_SOURCE_ENABLED_ONCE, - mode=None): - """Set the boot source. + def set_system_boot_options(self, target=None, enabled=None, mode=None): + """Set boot source and/or boot frequency and/or boot mode. - Set the boot source to use on next reboot of the System. + Set the boot source and/or boot frequency and/or boot mode to use + on next reboot of the System. - :param target: The target boot source. + :param target: The target boot source, optional. :param enabled: The frequency, whether to set it for the next reboot only (BOOT_SOURCE_ENABLED_ONCE) or persistent to all future reboots (BOOT_SOURCE_ENABLED_CONTINUOUS) or disabled - (BOOT_SOURCE_ENABLED_DISABLED). - :param mode: The boot mode, UEFI (BOOT_SOURCE_MODE_UEFI) or - BIOS (BOOT_SOURCE_MODE_BIOS). + (BOOT_SOURCE_ENABLED_DISABLED), optional. 
+ :param mode: The boot mode (UEFI: BOOT_SOURCE_MODE_UEFI or + BIOS: BOOT_SOURCE_MODE_BIOS), optional. :raises: InvalidParameterValueError, if any information passed is invalid. """ - valid_targets = self.get_allowed_system_boot_source_values() - if target not in valid_targets: - raise exceptions.InvalidParameterValueError( - parameter='target', value=target, valid_values=valid_targets) + data = collections.defaultdict(dict) - if enabled not in sys_maps.BOOT_SOURCE_ENABLED_MAP_REV: - raise exceptions.InvalidParameterValueError( - parameter='enabled', value=enabled, - valid_values=list(sys_maps.BOOT_SOURCE_TARGET_MAP_REV)) + if target is not None: + valid_targets = self.get_allowed_system_boot_source_values() + if target not in valid_targets: + raise exceptions.InvalidParameterValueError( + parameter='target', value=target, + valid_values=valid_targets) - data = { - 'Boot': { - 'BootSourceOverrideTarget': - sys_maps.BOOT_SOURCE_TARGET_MAP_REV[target], - 'BootSourceOverrideEnabled': - sys_maps.BOOT_SOURCE_ENABLED_MAP_REV[enabled] - } - } + fishy_target = sys_maps.BOOT_SOURCE_TARGET_MAP_REV[target] + + data['Boot']['BootSourceOverrideTarget'] = fishy_target + + if enabled is not None: + if enabled not in sys_maps.BOOT_SOURCE_ENABLED_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='enabled', value=enabled, + valid_values=list(sys_maps.BOOT_SOURCE_ENABLED_MAP_REV)) + + fishy_freq = sys_maps.BOOT_SOURCE_ENABLED_MAP_REV[enabled] + + data['Boot']['BootSourceOverrideEnabled'] = fishy_freq if mode is not None: if mode not in sys_maps.BOOT_SOURCE_MODE_MAP_REV: @@ -227,42 +254,203 @@ class System(base.ResourceBase): parameter='mode', value=mode, valid_values=list(sys_maps.BOOT_SOURCE_MODE_MAP_REV)) - data['Boot']['BootSourceOverrideMode'] = ( - sys_maps.BOOT_SOURCE_MODE_MAP_REV[mode]) + fishy_mode = sys_maps.BOOT_SOURCE_MODE_MAP_REV[mode] + + data['Boot']['BootSourceOverrideMode'] = fishy_mode # TODO(lucasagomes): Check the return code and response body ? 
# Probably we should call refresh() as well. self._conn.patch(self.path, data=data) - # TODO(lucasagomes): All system have a Manager and Chassis object, - # include a get_manager() and get_chassis() once we have an abstraction - # for those resources. + # TODO(etingof): we should remove this method, eventually + def set_system_boot_source( + self, target, enabled=sys_cons.BOOT_SOURCE_ENABLED_ONCE, + mode=None): + """Set boot source and/or boot frequency and/or boot mode. + + Set the boot source and/or boot frequency and/or boot mode to use + on next reboot of the System. + + This method is obsoleted by `set_system_boot_options`. + + :param target: The target boot source. + :param enabled: The frequency, whether to set it for the next + reboot only (BOOT_SOURCE_ENABLED_ONCE) or persistent to all + future reboots (BOOT_SOURCE_ENABLED_CONTINUOUS) or disabled + (BOOT_SOURCE_ENABLED_DISABLED). + Default is `BOOT_SOURCE_ENABLED_ONCE`. + :param mode: The boot mode (UEFI: BOOT_SOURCE_MODE_UEFI or + BIOS: BOOT_SOURCE_MODE_BIOS), optional. + :raises: InvalidParameterValueError, if any information passed is + invalid. + """ + self.set_system_boot_options(target, enabled, mode) + + def set_indicator_led(self, state): + """Set IndicatorLED to the given state. + + :param state: Desired LED state, lit (INDICATOR_LED_LIT), blinking + (INDICATOR_LED_BLINKING), off (INDICATOR_LED_OFF) + :raises: InvalidParameterValueError, if any information passed is + invalid. 
+ """ + if state not in res_maps.INDICATOR_LED_VALUE_MAP_REV: + raise exceptions.InvalidParameterValueError( + parameter='state', value=state, + valid_values=list(res_maps.INDICATOR_LED_VALUE_MAP_REV)) + + data = { + 'IndicatorLED': res_maps.INDICATOR_LED_VALUE_MAP_REV[state] + } + + self._conn.patch(self.path, data=data) + self.invalidate() def _get_processor_collection_path(self): """Helper function to find the ProcessorCollection path""" - processor_col = self.json.get('Processors') - if not processor_col: - raise exceptions.MissingAttributeError(attribute='Processors', - resource=self._path) - return processor_col.get('@odata.id') + return utils.get_sub_resource_path_by(self, 'Processors') @property + @utils.cache_it def processors(self): - """Property to provide reference to `ProcessorCollection` instance + """Property to reference `ProcessorCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ + return processor.ProcessorCollection( + self._conn, self._get_processor_collection_path(), + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def ethernet_interfaces(self): + """Property to reference `EthernetInterfaceCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + """ + return ethernet_interface.EthernetInterfaceCollection( + self._conn, + utils.get_sub_resource_path_by(self, "EthernetInterfaces"), + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def bios(self): + """Property to reference `Bios` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). 
+ Here the actual refresh of the sub-resource happens, if stale. + """ + return bios.Bios( + self._conn, + utils.get_sub_resource_path_by(self, 'Bios'), + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def simple_storage(self): + """A collection of simple storage associated with system. + + This returns a reference to `SimpleStorageCollection` instance. + SimpleStorage represents the properties of a storage controller and its + directly-attached devices. + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. - It is calculated once when the first time it is queried. On refresh, - this property gets reset. + :raises: MissingAttributeError if 'SimpleStorage/@odata.id' field + is missing. + :returns: `SimpleStorageCollection` instance """ - if self._processors is None: - self._processors = processor.ProcessorCollection( - self._conn, self._get_processor_collection_path(), - redfish_version=self.redfish_version) + return sys_simple_storage.SimpleStorageCollection( + self._conn, utils.get_sub_resource_path_by(self, "SimpleStorage"), + redfish_version=self.redfish_version, + registries=self.registries) - return self._processors + @property + @utils.cache_it + def storage(self): + """A collection of storage subsystems associated with system. + + This returns a reference to `StorageCollection` instance. + A storage subsystem represents a set of storage controllers (physical + or virtual) and the resources such as drives and volumes that can be + accessed from that subsystem. + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. + + :raises: MissingAttributeError if 'Storage/@odata.id' field + is missing. 
+ :returns: `StorageCollection` instance + """ + return sys_storage.StorageCollection( + self._conn, utils.get_sub_resource_path_by(self, "Storage"), + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def managers(self): + """A list of managers for this system. + + Returns a list of `Manager` objects representing the managers + that manage this system. - def refresh(self): - super(System, self).refresh() - self._processors = None + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `Manager` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "ManagedBy"], is_collection=True) + + return [manager.Manager(self._conn, path, + redfish_version=self.redfish_version, + registries=self.registries) + for path in paths] + + @property + @utils.cache_it + def chassis(self): + """A list of chassis where this system resides. + + Returns a list of `Chassis` objects representing the chassis + or cabinets where this system is mounted. + + :raises: MissingAttributeError if '@odata.id' field is missing. + :returns: A list of `Chassis` instances + """ + paths = utils.get_sub_resource_path_by( + self, ["Links", "Chassis"], is_collection=True) + + return [chassis.Chassis(self._conn, path, + redfish_version=self.redfish_version, + registries=self.registries) + for path in paths] + + @property + @utils.cache_it + def secure_boot(self): + """Property to reference `SecureBoot` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. 
+ """ + return secure_boot.SecureBoot( + self._conn, + utils.get_sub_resource_path_by(self, 'SecureBoot'), + redfish_version=self.redfish_version, + registries=self.registries) class SystemCollection(base.ResourceCollectionBase): @@ -271,13 +459,15 @@ class SystemCollection(base.ResourceCollectionBase): def _resource_type(self): return System - def __init__(self, connector, path, redfish_version=None): + def __init__(self, connector, path, redfish_version=None, registries=None): """A class representing a ComputerSystemCollection :param connector: A Connector instance :param path: The canonical path to the System collection resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages """ - super(SystemCollection, self).__init__(connector, path, - redfish_version) + super(SystemCollection, self).__init__( + connector, path, redfish_version, registries) diff --git a/sushy/resources/task_monitor.py b/sushy/resources/task_monitor.py new file mode 100644 index 0000000000000000000000000000000000000000..f34b9db2def80fa2a56fec947df66961032e6d5a --- /dev/null +++ b/sushy/resources/task_monitor.py @@ -0,0 +1,117 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +# This is described in Redfish specification section "Asynchronous operations" +# www.dmtf.org/sites/default/files/standards/documents/DSP0266_1.7.0.pdf + + +from datetime import datetime +from datetime import timedelta +import logging + +from dateutil import parser + +from sushy.resources import base + +LOG = logging.getLogger(__name__) + + +class TaskMonitor(base.ResourceBase): + """Deprecated: Use sushy.taskmonitor.TaskMonitor""" + + def __init__(self, + connector, + path='', + redfish_version=None): + """A class representing a Redfish Task Monitor + + :param connector: A Connector instance + :param path: sub-URI path to the resource. + :param redfish_version: The version of Redfish. Used to construct + the object according to schema of the given version. + """ + LOG.warning('sushy.resources.task_monitor.TaskMonitor is deprecated. ' + 'Use sushy.taskmonitor.TaskMonitor') + super(TaskMonitor, self).__init__(connector, path, redfish_version) + self._retry_after = None + self._location_header = None + self._in_progress = True + self._response = None + + @staticmethod + def _to_datetime(retry_after): + if isinstance(retry_after, int) or retry_after.isdigit(): + # Retry-After: 120 + return datetime.now() + timedelta(seconds=int(retry_after)) + else: + # Retry-After: Fri, 31 Dec 1999 23:59:59 GMT + return parser.parse(retry_after) + + def set_retry_after(self, value): + """Set the time the client should wait before querying the task status + + :param value: The value of the Retry-After header, which can be the + number of seconds to wait or an `HTTP-date` string as + defined by RFC 7231 + :return: The TaskMonitor object + """ + self._retry_after = self._to_datetime(value or 1) + return self + + @property + def retry_after(self): + """Time the client should wait before querying the task status + + :return: The Retry-After time in `datetime` format + """ + return self._retry_after + + @property + def sleep_for(self): + """Seconds the client should wait before 
querying the operation status + + :return: The number of seconds to wait + """ + return max(0, (self._retry_after - datetime.now()).total_seconds()) + + @property + def location_header(self): + """The Location header returned from the GET on the Task Monitor + + :return: The Location header (an absolute URL) + """ + return self._location_header + + @property + def in_progress(self): + """Checks the status of the async task + + :return: True if the async task is still in progress, False otherwise + """ + if not self._in_progress: + return False + r = self._conn.get(self._path) + self._response = r + self._location_header = r.headers.get('location') + if r.status_code == 202: + self.set_retry_after(r.headers.get('retry-after')) + else: + self._in_progress = False + return self._in_progress + + @property + def response(self): + """The response from the last TaskMonitor in_progress check + + :return: The `requests` response object or None + """ + return self._response diff --git a/sushy/resources/taskservice/__init__.py b/sushy/resources/taskservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/taskservice/constants.py b/sushy/resources/taskservice/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..a185c57dbc3338f7caba222d5d92765b46818cc6 --- /dev/null +++ b/sushy/resources/taskservice/constants.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 See the +# License for the specific language governing permissions and +# limitations under the License. + +# Values come from the Redfish TaskService json-schema. +# https://redfish.dmtf.org/schemas/v1/TaskService.v1_1_5.json#/definitions/OverWritePolicy + +# Overwrite Policy constants + +OVERWRITE_POLICY_OLDEST = 'oldest completed' +OVERWRITE_POLICY_MANUAL = 'manual only' diff --git a/sushy/resources/taskservice/mappings.py b/sushy/resources/taskservice/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..138422b7b0a6c33ce9cc03826797e0bc6fbe64f9 --- /dev/null +++ b/sushy/resources/taskservice/mappings.py @@ -0,0 +1,43 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from sushy.resources import constants as res_cons +from sushy.resources.taskservice import constants as ts_cons +from sushy import utils + + +TASK_STATE_VALUE_MAP = { + 'New': res_cons.TASK_STATE_NEW, + 'Starting': res_cons.TASK_STATE_STARTING, + 'Running': res_cons.TASK_STATE_RUNNING, + 'Suspended': res_cons.TASK_STATE_SUSPENDED, + 'Interrupted': res_cons.TASK_STATE_INTERRUPTED, + 'Pending': res_cons.TASK_STATE_PENDING, + 'Stopping': res_cons.TASK_STATE_STOPPING, + 'Completed': res_cons.TASK_STATE_COMPLETED, + 'Killed': res_cons.TASK_STATE_KILLED, + 'Exception': res_cons.TASK_STATE_EXCEPTION, + 'Service': res_cons.TASK_STATE_SERVICE, + 'Cancelling': res_cons.TASK_STATE_CANCELLING, + 'Cancelled': res_cons.TASK_STATE_CANCELLED +} + +OVERWRITE_POLICY_VALUE_MAP = { + 'Oldest': ts_cons.OVERWRITE_POLICY_OLDEST, + 'Manual': ts_cons.OVERWRITE_POLICY_MANUAL, +} + +OVERWRITE_POLICY_VALUE_MAP_REV = ( + utils.revert_dictionary(OVERWRITE_POLICY_VALUE_MAP)) diff --git a/sushy/resources/taskservice/task.py b/sushy/resources/taskservice/task.py new file mode 100644 index 0000000000000000000000000000000000000000..445c1199e789355ef5d1288214d6632c1977610a --- /dev/null +++ b/sushy/resources/taskservice/task.py @@ -0,0 +1,110 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/Task.v1_4_3.json + +from http import client as http_client +import logging + +from sushy.resources import base +from sushy.resources import mappings as res_maps +from sushy.resources.registry import message_registry +from sushy.resources.taskservice import mappings as task_maps +from sushy import utils + + +LOG = logging.getLogger(__name__) + + +class Task(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The Task identity""" + + name = base.Field('Name', required=True) + """The Task name""" + + description = base.Field('Description') + """The Task description""" + + task_monitor = base.Field('TaskMonitor') + """An opaque URL that the client can use to monitor an asynchronous + operation""" + + start_time = base.Field('StartTime') + """Start time of the Task""" + + end_time = base.Field('EndTime') + """End time of the Task""" + + percent_complete = base.Field('PercentComplete', adapter=utils.int_or_none) + """Percentage complete of the Task""" + + task_state = base.MappedField('TaskState', task_maps.TASK_STATE_VALUE_MAP) + """The Task state""" + + task_status = base.MappedField('TaskStatus', res_maps.HEALTH_VALUE_MAP) + """The Task status""" + + messages = base.MessageListField("Messages") + """List of :class:`.MessageListField` with messages from the Task""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None, json_doc=None): + """A class representing a Task + + :param connector: A Connector instance + :param identity: The identity of the task + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. 
 + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + :param json_doc: the data to use populating the fields + """ + super(Task, self).__init__( + connector, identity, redfish_version, registries, + json_doc=json_doc) + + @property + def is_processing(self): + """Indicates if the Task is processing""" + return self.status_code == http_client.ACCEPTED + + def parse_messages(self): + """Parses the messages""" + for m in self.messages: + message_registry.parse_message(self._registries, m) + + +class TaskCollection(base.ResourceCollectionBase): + + @property + def _resource_type(self): + return Task + + @property + @utils.cache_it + def summary(self): + """Summary of task ids and corresponding state + + :returns: dictionary in the format + {'jid_123456789': sushy.TASK_STATE_NEW, + 'jid_123454321': sushy.TASK_STATE_RUNNING} + """ + task_dict = {} + for task in self.get_members(): + task_dict[task.identity] = task.task_state + return task_dict diff --git a/sushy/resources/taskservice/taskmonitor.py b/sushy/resources/taskservice/taskmonitor.py new file mode 100644 index 0000000000000000000000000000000000000000..7aabc298225afb268343d4a17fb9f0611e4b0fe1 --- /dev/null +++ b/sushy/resources/taskservice/taskmonitor.py @@ -0,0 +1,43 @@ +# Copyright (c) 2020 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from sushy import taskmonitor + +LOG = logging.getLogger(__name__) + + +def TaskMonitor(connector, + task_monitor, + redfish_version=None, + registries=None, + field_data=None): + """A class representing a task monitor + + Deprecated, use sushy.taskmonitor.TaskMonitor. + + :param connector: A Connector instance + :param task_monitor: The task monitor URI + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. + :param field_data: the data to use populating the fields. + """ + LOG.warning('sushy.resources.taskservice.taskmonitor.TaskMonitor ' + 'is deprecated. Use sushy.taskmonitor.TaskMonitor.') + return taskmonitor.TaskMonitor(connector, task_monitor, redfish_version, + registries, field_data) diff --git a/sushy/resources/taskservice/taskservice.py b/sushy/resources/taskservice/taskservice.py new file mode 100644 index 0000000000000000000000000000000000000000..3d7f1d31f3d8578f7aa0c5b9349d3e19fe8805eb --- /dev/null +++ b/sushy/resources/taskservice/taskservice.py @@ -0,0 +1,76 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/v1/TaskService.v1_1_5.json + +import logging + +from sushy.resources import base +from sushy.resources import common +from sushy.resources.taskservice import mappings as ts_maps +from sushy.resources.taskservice import task +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class TaskService(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The task service identity""" + + name = base.Field('Name', required=True) + """The task service name""" + + service_enabled = base.Field('ServiceEnabled') + """The status of whether this service is enabled""" + + status = common.StatusField('Status') + """The status of the task service""" + + overwrite_policy = base.MappedField( + 'CompletedTaskOverWritePolicy', ts_maps.OVERWRITE_POLICY_VALUE_MAP) + """The overwrite policy for completed tasks""" + + event_on_task_state_change = base.Field( + 'LifeCycleEventOnTaskStateChange', adapter=bool) + """Whether a task state change sends an event""" + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing a TaskService + + :param connector: A Connector instance + :param identity: The identity of the TaskService resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(TaskService, self).__init__( + connector, identity, redfish_version, registries) + + @property + @utils.cache_it + def tasks(self): + """Property to reference `TaskCollection` instance + + It is set once when the first time it is queried. On refresh, + this property is marked as stale (greedy-refresh not done). + Here the actual refresh of the sub-resource happens, if stale. 
+ """ + return task.TaskCollection( + self._conn, utils.get_sub_resource_path_by(self, 'Tasks'), + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/resources/updateservice/__init__.py b/sushy/resources/updateservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/resources/updateservice/constants.py b/sushy/resources/updateservice/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..a0bc2efc67a630763c7884eb00ebbfa3fdcf298d --- /dev/null +++ b/sushy/resources/updateservice/constants.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Values come from the Redfish UpdateService json-schema. 
+# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json#/definitions/TransferProtocolType + +from sushy.resources import constants as res_cons + +# Transfer Protocol Type constants + +UPDATE_PROTOCOL_CIFS = res_cons.PROTOCOL_TYPE_CIFS +UPDATE_PROTOCOL_FTP = res_cons.PROTOCOL_TYPE_FTP +UPDATE_PROTOCOL_SFTP = res_cons.PROTOCOL_TYPE_SFTP +UPDATE_PROTOCOL_HTTP = res_cons.PROTOCOL_TYPE_HTTP +UPDATE_PROTOCOL_HTTPS = res_cons.PROTOCOL_TYPE_HTTPS +UPDATE_PROTOCOL_SCP = res_cons.PROTOCOL_TYPE_SCP +UPDATE_PROTOCOL_TFTP = res_cons.PROTOCOL_TYPE_TFTP +UPDATE_PROTOCOL_OEM = res_cons.PROTOCOL_TYPE_OEM +UPDATE_PROTOCOL_NFS = res_cons.PROTOCOL_TYPE_NFS diff --git a/sushy/resources/updateservice/mappings.py b/sushy/resources/updateservice/mappings.py new file mode 100644 index 0000000000000000000000000000000000000000..eb2be62ceab946a06b14d49cc870ee07b84bba93 --- /dev/null +++ b/sushy/resources/updateservice/mappings.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from sushy.resources.updateservice import constants as ups_cons +from sushy import utils + + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP = { + 'CIFS': ups_cons.UPDATE_PROTOCOL_CIFS, + 'FTP': ups_cons.UPDATE_PROTOCOL_FTP, + 'SFTP': ups_cons.UPDATE_PROTOCOL_SFTP, + 'HTTP': ups_cons.UPDATE_PROTOCOL_HTTP, + 'HTTPS': ups_cons.UPDATE_PROTOCOL_HTTPS, + 'SCP': ups_cons.UPDATE_PROTOCOL_SCP, + 'TFTP': ups_cons.UPDATE_PROTOCOL_TFTP, + 'OEM': ups_cons.UPDATE_PROTOCOL_OEM, + 'NFS': ups_cons.UPDATE_PROTOCOL_NFS, + 'NSF': ups_cons.UPDATE_PROTOCOL_NFS +} + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV = ( + utils.revert_dictionary(TRANSFER_PROTOCOL_TYPE_VALUE_MAP)) + +TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV[ups_cons.UPDATE_PROTOCOL_NFS] = 'NFS' diff --git a/sushy/resources/updateservice/softwareinventory.py b/sushy/resources/updateservice/softwareinventory.py new file mode 100644 index 0000000000000000000000000000000000000000..726e74341f100fbae6a828ff9d67aecce099b8d7 --- /dev/null +++ b/sushy/resources/updateservice/softwareinventory.py @@ -0,0 +1,100 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# This is referred from Redfish standard schema. 
+# https://redfish.dmtf.org/schemas/SoftwareInventory.v1_2_0.json + +import logging + +from sushy.resources import base +from sushy.resources import common + +LOG = logging.getLogger(__name__) + + +class SoftwareInventory(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The software inventory identity""" + + lowest_supported_version = base.Field('LowestSupportedVersion') + """The lowest supported version of the software""" + + manufacturer = base.Field('Manufacturer') + """The manufacturer of the software""" + + name = base.Field('Name', required=True) + """The software inventory name""" + + release_date = base.Field('ReleaseDate') + """Release date of the software""" + + related_item = base.Field('RelatedItem') + """The ID(s) of the resources associated with the software inventory + item""" + + status = common.StatusField('Status') + """The status of the software inventory""" + + software_id = base.Field('SoftwareId') + """The identity of the software""" + + uefi_device_paths = base.Field('UefiDevicePaths') + """Represents the UEFI Device Path(s)""" + + updateable = base.Field('Updateable') + """Indicates whether this software can be updated by the update + service""" + + version = base.Field('Version') + """The version of the software""" + + def __init__(self, connector, identity, + redfish_version=None, registries=None): + """A class representing a SoftwareInventory + + :param connector: A Connector instance + :param identity: The identity of the SoftwareInventory resources + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(SoftwareInventory, self).__init__( + connector, identity, redfish_version, registries) + + +class SoftwareInventoryCollection(base.ResourceCollectionBase): + + name = base.Field('Name') + """The software inventory collection name""" + + description = base.Field('Description') + """The software inventory collection description""" + + @property + def _resource_type(self): + return SoftwareInventory + + def __init__(self, connector, identity, + redfish_version=None, registries=None): + """A class representing a SoftwareInventoryCollection + + :param connector: A Connector instance + :param identity: The identity of SoftwareInventory resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(SoftwareInventoryCollection, self).__init__( + connector, identity, redfish_version, registries) diff --git a/sushy/resources/updateservice/updateservice.py b/sushy/resources/updateservice/updateservice.py new file mode 100644 index 0000000000000000000000000000000000000000..fbdc491637a43384338e281f04aaefd51731499f --- /dev/null +++ b/sushy/resources/updateservice/updateservice.py @@ -0,0 +1,196 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# This is referred from Redfish standard schema. +# https://redfish.dmtf.org/schemas/UpdateService.v1_2_2.json + +import logging + +from sushy import exceptions +from sushy.resources import base +from sushy.resources import common +from sushy.resources.updateservice import constants as up_cons +from sushy.resources.updateservice import mappings as up_maps +from sushy.resources.updateservice import softwareinventory +from sushy import taskmonitor +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class ActionsField(base.CompositeField): + + simple_update = common.ActionField('#UpdateService.SimpleUpdate') + + +class UpdateService(base.ResourceBase): + + identity = base.Field('Id', required=True) + """The update service identity""" + + http_push_uri = base.Field('HttpPushUri') + """The URI used to perform an HTTP or HTTPS push update to the Update + Service""" + + http_push_uri_targets = base.Field('HttpPushUriTargets') + """The array of URIs indicating the target for applying the """ + \ + """update image""" + + http_push_uri_targets_busy = base.Field('HttpPushUriTargetsBusy') + """This represents if the HttpPushUriTargets property is reserved """ + \ + """by any client""" + + name = base.Field('Name', required=True) + """The update service name""" + + service_enabled = base.Field('ServiceEnabled') + """The status of whether this service is enabled""" + + status = common.StatusField('Status') + """The status of the update service""" + + _actions = ActionsField('Actions', required=True) + + _firmware_inventory_path = base.Field(['FirmwareInventory', '@odata.id']) + + _software_inventory_path = base.Field(['SoftwareInventory', '@odata.id']) + + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """A class representing an UpdateService + + :param connector: A Connector instance + :param identity: The identity of the 
UpdateService resource + :param redfish_version: The version of RedFish. Used to construct + the object according to schema of given version + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages + """ + super(UpdateService, self).__init__( + connector, identity, redfish_version, registries) + + def _get_simple_update_element(self): + simple_update_action = self._actions.simple_update + if not simple_update_action: + raise exceptions.MissingAttributeError( + action='#UpdateService.SimpleUpdate', + resource=self._path) + return simple_update_action + + def _get_legacy_transfer_protocols(self): + """Get the backward-compatible values for transfer protocol. + + :returns: A set of allowed values. + """ + LOG.warning( + 'Could not figure out the allowed values for the simple ' + 'update action for UpdateService %s', self.identity) + return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP) + + def get_allowed_transfer_protocols(self): + """Get the allowed values for transfer protocol. + + :returns: A set of allowed values. + :raises: MissingAttributeError, if Actions/#UpdateService.SimpleUpdate + attribute not present. + """ + simple_update_action = self._get_simple_update_element() + + if not getattr(simple_update_action, 'transfer_protocol', None): + LOG.debug( + 'Server does not constrain allowed transfer protocols for ' + 'simple update action of UpdateService %s', self.identity) + return set(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV) + + return {simple_update_action.transfer_protocol} + + def simple_update(self, image_uri, targets=None, + transfer_protocol=up_cons.UPDATE_PROTOCOL_HTTP): + """Simple Update is used to update software components. + + :returns: A task monitor. 
+ """ + valid_transfer_protocols = self.get_allowed_transfer_protocols() + + if transfer_protocol in valid_transfer_protocols: + transfer_protocol = up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV[ + transfer_protocol] + + else: + legacy_transfer_protocols = self._get_legacy_transfer_protocols() + + if transfer_protocol not in legacy_transfer_protocols: + raise exceptions.InvalidParameterValueError( + parameter='transfer_protocol', value=transfer_protocol, + valid_values=valid_transfer_protocols) + + LOG.warning( + 'Legacy transfer protocol constant %s is being used. ' + 'Consider migrating to any of: %s', transfer_protocol, + ', '.join(up_maps.TRANSFER_PROTOCOL_TYPE_VALUE_MAP_REV)) + + target_uri = self._get_simple_update_element().target_uri + + LOG.debug( + 'Updating software component %s via ' + '%s ...', image_uri, target_uri) + + data = {'ImageURI': image_uri, 'TransferProtocol': transfer_protocol} + if targets: + data['Targets'] = targets + rsp = self._conn.post(target_uri, data=data) + + return taskmonitor.TaskMonitor.from_response( + self._conn, rsp, target_uri, self.redfish_version, self.registries) + + def get_task_monitor(self, task_monitor): + """Used to retrieve a TaskMonitor. + + Deprecated: Use sushy.Sushy.get_task_monitor + :returns: A task monitor. 
+ """ + return taskmonitor.TaskMonitor( + self._conn, + task_monitor, + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def software_inventory(self): + """Property to reference SoftwareInventory collection instance""" + if not self._software_inventory_path: + raise exceptions.MissingAttributeError( + attribute='SoftwareInventory/@odata.id', + resource=self._software_inventory_path) + + return softwareinventory.SoftwareInventoryCollection( + self._conn, + self._software_inventory_path, + redfish_version=self.redfish_version, + registries=self.registries) + + @property + @utils.cache_it + def firmware_inventory(self): + """Property to reference FirmwareInventory collection instance""" + if not self._firmware_inventory_path: + raise exceptions.MissingAttributeError( + attribute='FirmwareInventory/@odata.id', + resource=self._firmware_inventory_path) + + return softwareinventory.SoftwareInventoryCollection( + self._conn, + self._firmware_inventory_path, + redfish_version=self.redfish_version, + registries=self.registries) diff --git a/sushy/standard_registries/Base.1.0.0.json b/sushy/standard_registries/Base.1.0.0.json new file mode 100644 index 0000000000000000000000000000000000000000..ce3c24f2c1be3a9575017bc1545f31121260c1d6 --- /dev/null +++ b/sushy/standard_registries/Base.1.0.0.json @@ -0,0 +1,466 @@ +{ + "@Redfish.Copyright": "Copyright © 2014-2015 Distributed Management Task Force, Inc. (DMTF). All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. 
For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.1.0.0.MessageRegistry", + "Id": "Base.1.0.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.0.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." + }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." 
+ }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. 
This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." 
+ }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." + }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource already exists.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not repeat the create operation as the resource has already been created." 
+ }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." + }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." 
+ }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modifed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." + }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. 
Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at thr URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." + }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." 
+ }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service was denied access.", + "Message": "While attempting to establish a connection to %1, the service was denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." + }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. 
Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + } + } +} diff --git a/sushy/standard_registries/Base.1.2.0.json b/sushy/standard_registries/Base.1.2.0.json new file mode 100644 index 0000000000000000000000000000000000000000..f250bc8969def28251806c7d8fdb1eddddf6d622 --- /dev/null +++ b/sushy/standard_registries/Base.1.2.0.json @@ -0,0 +1,517 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2015, 2017 Distributed Management Task Force, Inc. (DMTF). All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.2.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.2.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." 
+ }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." + }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." + }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. 
Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." + }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." 
+ }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. 
This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." 
+ }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." + }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." 
+ }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." + }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." 
+ }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." 
+ }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." + }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." 
+ }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." + }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." 
+ }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." + }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. 
Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + } + } +} diff --git a/sushy/standard_registries/Base.1.3.0.json b/sushy/standard_registries/Base.1.3.0.json new file mode 100644 index 0000000000000000000000000000000000000000..980c9c2e939b280e813b7dd59f327dfa48ef3aee --- /dev/null +++ b/sushy/standard_registries/Base.1.3.0.json @@ -0,0 +1,535 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2015, 2017-2018 DMTF. 
All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.3.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.3.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred.", + "Message": "A general error has occurred. See ExtendedInfo for more information.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "See ExtendedInfo for more information." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." 
+ }, + "PropertyValueOutOfRange": { + "Description": "Indicates that a property was given the correct value type but the value of that property is outside the supported range.", + "Message": "The value %1 for the property %2 is not in the supported range of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." + }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." 
+ }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." + }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." 
+ }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." 
+ }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." 
+ }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." 
+ }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." + }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." 
+ }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." + }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." 
+ }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." + }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. 
Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." + }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." 
+ }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." + }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. 
Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ } + } +} diff --git a/sushy/standard_registries/Base.1.3.1.json b/sushy/standard_registries/Base.1.3.1.json new file mode 100644 index 0000000000000000000000000000000000000000..a6bb996f77bbfb945240aa83ed9b45d8b05e983b --- /dev/null +++ b/sushy/standard_registries/Base.1.3.1.json @@ -0,0 +1,535 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2018 DMTF. All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.3.1", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.3.1", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred. Use in ExtendedInfo is discouraged. When used in ExtendedInfo, implementations are expected to include a Resolution property with this error to indicate how to resolve the problem.", + "Message": "A general error has occurred. See Resolution for information on how to resolve the error.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "None." 
+ }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." + }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyValueOutOfRange": { + "Description": "Indicates that a property was given the correct value type but the value of that property is outside the supported range.", + "Message": "The value %1 for the property %2 is not in the supported range of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + } + } +} diff --git a/sushy/standard_registries/Base.1.4.0.json b/sushy/standard_registries/Base.1.4.0.json new file mode 100644 index 0000000000000000000000000000000000000000..343f8bc3fff4a8059dc97e427369c50b5cd74fd2 --- /dev/null +++ b/sushy/standard_registries/Base.1.4.0.json @@ -0,0 +1,542 @@ +{ + "@Redfish.Copyright": "Copyright 2014-2018 DMTF. All rights reserved.", + "@Redfish.License": "Creative Commons Attribution 4.0 License. For full text see link: https://creativecommons.org/licenses/by/4.0/", + "@odata.type": "#MessageRegistry.v1_0_0.MessageRegistry", + "Id": "Base.1.4.0", + "Name": "Base Message Registry", + "Language": "en", + "Description": "This registry defines the base messages for Redfish", + "RegistryPrefix": "Base", + "RegistryVersion": "1.4.0", + "OwningEntity": "DMTF", + "Messages": { + "Success": { + "Description": "Indicates that all conditions of a successful operation have been met.", + "Message": "Successfully Completed Request", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "GeneralError": { + "Description": "Indicates that a general error has occurred. Use in ExtendedInfo is discouraged. When used in ExtendedInfo, implementations are expected to include a Resolution property with this error to indicate how to resolve the problem.", + "Message": "A general error has occurred. 
See Resolution for information on how to resolve the error.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "None." + }, + "Created": { + "Description": "Indicates that all conditions of a successful creation operation have been met.", + "Message": "The resource has been created successfully", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "NoOperation": { + "Description": "Indicates that the requested operation will not perform any changes on the service.", + "Message": "The request body submitted contain no data to act upon and no changes to the resource took place.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "PropertyDuplicate": { + "Description": "Indicates that a duplicate property was included in the request body.", + "Message": "The property %1 was duplicated in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the duplicate property from the request body and resubmit the request if the operation failed." + }, + "PropertyUnknown": { + "Description": "Indicates that an unknown property was included in the request body.", + "Message": "The property %1 is not in the list of valid properties for the resource.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the unknown property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyValueTypeError": { + "Description": "Indicates that a property was given the wrong value type, such as when a number is supplied for a property that requires a string.", + "Message": "The value %1 for the property %2 is of a different type than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueFormatError": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported.", + "Message": "The value %1 for the property %2 is of a different format than the property can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the property in the request body and resubmit the request if the operation failed." + }, + "PropertyValueNotInList": { + "Description": "Indicates that a property was given the correct value type but the value of that property was not supported. This values not in an enumeration", + "Message": "The value %1 for the property %2 is not in the list of acceptable values.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Choose a value from the enumeration list that the implementation can support and resubmit the request if the operation failed." + }, + "PropertyNotWritable": { + "Description": "Indicates that a property was given a value in the request body, but the property is a readonly property.", + "Message": "The property %1 is a read only property and cannot be assigned a value.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Remove the property from the request body and resubmit the request if the operation failed." 
+ }, + "PropertyMissing": { + "Description": "Indicates that a required property was not supplied as part of the request.", + "Message": "The property %1 is a required property and must be included in the request.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the property is in the request body and has a valid value and resubmit the request if the operation failed." + }, + "MalformedJSON": { + "Description": "Indicates that the request body was malformed JSON. Could be duplicate, syntax error,etc.", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." + }, + "EmptyJSON": { + "Description": "Indicates that the request body contained an empty JSON object when one or more properties are expected in the body.", + "Message": "The request body submitted contained an empty JSON object and the service is unable to process it.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Add properties in the JSON object and resubmit the request." + }, + "ActionNotSupported": { + "Description": "Indicates that the action supplied with the POST operation is not supported by the resource.", + "Message": "The action %1 is not supported by the resource.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "The action supplied cannot be resubmitted to the implementation. Perhaps the action was invalid, the wrong resource was the target or the implementation documentation may be of assistance." 
+ }, + "ActionParameterMissing": { + "Description": "Indicates that the action requested was missing a parameter that is required to process the action.", + "Message": "The action %1 requires the parameter %2 to be present in the request body.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Supply the action with the required parameter in the request body when the request is resubmitted." + }, + "ActionParameterDuplicate": { + "Description": "Indicates that the action was supplied with a duplicated parameter in the request body.", + "Message": "The action %1 was submitted with more than one value for the parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the action with only one instance of the parameter in the request body if the operation failed." + }, + "ActionParameterUnknown": { + "Description": "Indicates that an action was submitted but a parameter supplied did not match any of the known parameters.", + "Message": "The action %1 was submitted with the invalid parameter %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the invalid parameter and resubmit the request if the operation failed." + }, + "ActionParameterValueTypeError": { + "Description": "Indicates that a parameter was given the wrong value type, such as when a number is supplied for a parameter that requires a string.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." 
+ }, + "ActionParameterValueFormatError": { + "Description": "Indicates that a parameter was given the correct value type but the value of that parameter was not supported. This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 in the action %3 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Correct the value for the parameter in the request body and resubmit the request if the operation failed." + }, + "ActionParameterNotSupported": { + "Description": "Indicates that the parameter supplied for the action is not supported on the resource.", + "Message": "The parameter %1 for the action %2 is not supported on the target resource.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Remove the parameter supplied and resubmit the request if the operation failed." + }, + "QueryParameterValueTypeError": { + "Description": "Indicates that a query parameter was given the wrong value type, such as when a number is supplied for a query parameter that requires a string.", + "Message": "The value %1 for the query parameter %2 is of a different type than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterValueFormatError": { + "Description": "Indicates that a query parameter was given the correct value type but the value of that parameter was not supported. 
This includes value size/length exceeded.", + "Message": "The value %1 for the parameter %2 is of a different format than the parameter can accept.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Correct the value for the query parameter in the request and resubmit the request if the operation failed." + }, + "QueryParameterOutOfRange": { + "Description": "Indicates that a query parameter was supplied that is out of range for the given resource. This can happen with values that are too low or beyond that possible for the supplied resource, such as when a page is requested that is beyond the last page.", + "Message": "The value %1 for the query parameter %2 is out of range %3.", + "Severity": "Warning", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Reduce the value for the query parameter to a value that is within range, such as a start or count value that is within bounds of the number of resources in a collection or a page that is within the range of valid pages." + }, + "QueryNotSupportedOnResource": { + "Description": "Indicates that query is not supported on the given resource, such as when a start/count query is attempted on a resource that is not a collection.", + "Message": "Querying is not supported on the requested resource.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." + }, + "QueryNotSupported": { + "Description": "Indicates that query is not supported on the implementation.", + "Message": "Querying is not supported by the implementation.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the query parameters and resubmit the request if the operation failed." 
+ }, + "SessionLimitExceeded": { + "Description": "Indicates that a session establishment has been requested but the operation failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Message": "The session establishment failed due to the number of simultaneous sessions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other sessions before trying to establish the session or increase the limit of simultaneous sessions (if supported)." + }, + "EventSubscriptionLimitExceeded": { + "Description": "Indicates that a event subscription establishment has been requested but the operation failed due to the number of simultaneous connection exceeding the limit of the implementation.", + "Message": "The event subscription failed due to the number of simultaneous subscriptions exceeding the limit of the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Reduce the number of other subscriptions before trying to establish the event subscription or increase the limit of simultaneous subscriptions (if supported)." + }, + "ResourceCannotBeDeleted": { + "Description": "Indicates that a delete operation was attempted on a resource that cannot be deleted.", + "Message": "The delete request failed because the resource requested cannot be deleted.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Do not attempt to delete a non-deletable resource." + }, + "ResourceInUse": { + "Description": "Indicates that a change was requested to a resource but the change was rejected due to the resource being in use or transition.", + "Message": "The change to the requested resource failed because the resource is in use or in transition.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Remove the condition and resubmit the request if the operation failed." 
+ }, + "ResourceAlreadyExists": { + "Description": "Indicates that a resource change or creation was attempted but that the operation cannot proceed because the resource already exists.", + "Message": "The requested resource of type %1 with the property %2 with the value %3 already exists.", + "Severity": "Critical", + "NumberOfArgs": 3, + "ParamTypes": [ + "string", + "string", + "string" + ], + "Resolution": "Do not repeat the create operation as the resource has already been created." + }, + "ResourceNotFound": { + "Description": "Indicates that the operation expected a resource identifier that corresponds to an existing resource but one was not found.", + "Message": "The requested resource of type %1 named %2 was not found.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Provide a valid resource identifier and resubmit the request." + }, + "CreateFailedMissingReqProperties": { + "Description": "Indicates that a create was attempted on a resource but that properties that are required for the create operation were missing from the request.", + "Message": "The create operation failed because the required property %1 was missing from the request.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Correct the body to include the required property with a valid value and resubmit the request if the operation failed." + }, + "CreateLimitReachedForResource": { + "Description": "Indicates that no more resources can be created on the resource as it has reached its create limit.", + "Message": "The create operation failed because the resource has reached the limit of possible resources.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either delete resources and resubmit the request if the operation failed or do not resubmit the request." 
+ }, + "ServiceShuttingDown": { + "Description": "Indicates that the operation failed as the service is shutting down, such as when the service reboots.", + "Message": "The operation failed because the service is shutting down and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "When the service becomes available, resubmit the request if the operation failed." + }, + "ServiceInUnknownState": { + "Description": "Indicates that the operation failed because the service is in an unknown state and cannot accept additional requests.", + "Message": "The operation failed because the service is in an unknown state and can no longer take incoming requests.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Restart the service and resubmit the request if the operation failed." + }, + "NoValidSession": { + "Description": "Indicates that the operation failed because a valid session is required in order to access any resources.", + "Message": "There is no valid session established with the implementation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Establish as session before attempting any operations." + }, + "InsufficientPrivilege": { + "Description": "Indicates that the credentials associated with the established session do not have sufficient privileges for the requested operation", + "Message": "There are insufficient privileges for the account or credentials associated with the current session to perform the requested operation.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Either abandon the operation or change the associated access rights and resubmit the request if the operation failed." + }, + "AccountModified": { + "Description": "Indicates that the account was successfully modified.", + "Message": "The account was successfully modified.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." 
+ }, + "AccountNotModified": { + "Description": "Indicates that the modification requested for the account was not successful.", + "Message": "The account modification request failed.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "The modification may have failed due to permission issues or issues with the request body." + }, + "AccountRemoved": { + "Description": "Indicates that the account was successfully removed.", + "Message": "The account was successfully removed.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "AccountForSessionNoLongerExists": { + "Description": "Indicates that the account for the session has been removed, thus the session has been removed as well.", + "Message": "The account for the current session has been removed, thus the current session has been removed as well.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "Attempt to connect with a valid account." + }, + "InvalidObject": { + "Description": "Indicates that the object in question is invalid according to the implementation. Examples include a firmware update malformed URI.", + "Message": "The object at %1 is invalid.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Either the object is malformed or the URI is not correct. Correct the condition and resubmit the request if it failed." + }, + "InternalError": { + "Description": "Indicates that the request failed for an unknown internal error but that the service is still operational.", + "Message": "The request failed due to an internal service error. The service is still operational.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Resubmit the request. If the problem persists, consider resetting the service." 
+ }, + "UnrecognizedRequestBody": { + "Description": "Indicates that the service encountered an unrecognizable request body that could not even be interpreted as malformed JSON.", + "Message": "The service detected a malformed request body that it was unable to interpret.", + "Severity": "Warning", + "NumberOfArgs": 0, + "Resolution": "Correct the request body and resubmit the request if it failed." + }, + "ResourceMissingAtURI": { + "Description": "Indicates that the operation expected an image or other resource at the provided URI but none was found. Examples of this are in requests that require URIs like Firmware Update.", + "Message": "The resource at the URI %1 was not found.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place a valid resource at the URI or correct the URI and resubmit the request." + }, + "ResourceAtUriInUnknownFormat": { + "Description": "Indicates that the URI was valid but the resource or image at that URI was in a format not supported by the service.", + "Message": "The resource at %1 is in a format not recognized by the service.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Place an image or resource or file that is recognized by the service at the URI." + }, + "ResourceAtUriUnauthorized": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unauthorized.", + "Message": "While accessing the resource at %1, the service received an authorization error %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Ensure that the appropriate access is provided for the service in order for it to access the URI." 
+ }, + "CouldNotEstablishConnection": { + "Description": "Indicates that the attempt to access the resource/file/image at the URI was unsuccessful because a session could not be established.", + "Message": "The service failed to establish a connection with the URI %1.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the URI contains a valid and reachable node name, protocol information and other URI components." + }, + "SourceDoesNotSupportProtocol": { + "Description": "Indicates that while attempting to access, connect to or transfer a resource/file/image from another location that the other end of the connection did not support the protocol", + "Message": "The other end of the connection at %1 does not support the specified protocol %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Change protocols or URIs. " + }, + "AccessDenied": { + "Description": "Indicates that while attempting to access, connect to or transfer to/from another resource, the service denied access.", + "Message": "While attempting to establish a connection to %1, the service denied access.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Attempt to ensure that the URI is correct and that the service has the appropriate credentials." + }, + "ServiceTemporarilyUnavailable": { + "Description": "Indicates the service is temporarily unavailable.", + "Message": "The service is temporarily unavailable. Retry in %1 seconds.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Wait for the indicated retry duration and retry the operation." 
+ }, + "InvalidIndex": { + "Description": "The Index is not valid.", + "Message": "The Index %1 is not a valid offset into the array.", + "Severity": "Warning", + "NumberOfArgs": 1, + "ParamTypes": [ + "number" + ], + "Resolution": "Verify the index value provided is within the bounds of the array." + }, + "PropertyValueModified": { + "Description": "Indicates that a property was given the correct value type but the value of that property was modified. Examples are truncated or rounded values.", + "Message": "The property %1 was assigned the value %2 due to modification by the service.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "No resolution is required." + }, + "ResourceInStandby": { + "Description": "Indicates that the request could not be performed because the resource is in standby.", + "Message": "The request could not be performed because the resource is in standby.", + "Severity": "Critical", + "NumberOfArgs": 0, + "Resolution": "Ensure that the resource is in the correct power state and resubmit the request." + }, + "ResourceExhaustion": { + "Description": "Indicates that a resource could not satisfy the request due to some unavailability of resources. An example is that available capacity has been allocated.", + "Message": "The resource %1 was unable to satisfy the request due to unavailability of resources.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Ensure that the resources are available and resubmit the request." + }, + "StringValueTooLong": { + "Description": "Indicates that a string value passed to the given resource exceeded its length limit. 
An example is when a shorter limit is imposed by an implementation than that allowed by the specification.", + "Message": "The string %1 exceeds the length limit %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Resubmit the request with an appropriate string length." + }, + "SessionTerminated": { + "Description": "Indicates that the DELETE operation on the Session resource resulted in the successful termination of the session.", + "Message": "The session was successfully terminated.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "No resolution is required." + }, + "ResourceTypeIncompatible": { + "Description": "Indicates that the resource type of the operation does not match that for the operation destination. Examples of when this can happen include during a POST to a collection using the wrong resource type, an update where the @odata.types do not match or on a major version incompatability.", + "Message": "The @odata.type of the request body %1 is incompatible with the @odata.type of the resource which is %2.", + "Severity": "Critical", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "string" + ], + "Resolution": "Resubmit the request with a payload compatible with the resource's schema." + } + } +} diff --git a/sushy/taskmonitor.py b/sushy/taskmonitor.py new file mode 100644 index 0000000000000000000000000000000000000000..1e05a3e358ebe88d867ba08ac4509db99e242fac --- /dev/null +++ b/sushy/taskmonitor.py @@ -0,0 +1,273 @@ +# Copyright (c) 2021 Dell, Inc. or its subsidiaries +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime +from http import client as http_client +import json +import logging +import time +from urllib.parse import urljoin + +from dateutil import parser +import requests + +from sushy import exceptions +from sushy.resources.taskservice import task +from sushy import utils + +LOG = logging.getLogger(__name__) + + +class TaskMonitor(object): + def __init__(self, + connector, + task_monitor_uri, + redfish_version=None, + registries=None, + field_data=None, + response=None): + """A class representing a task monitor + + :param connector: A Connector instance + :param task_monitor_uri: The task monitor URI + :param redfish_version: The version of Redfish. Used to construct + the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. + :param field_data: the data to use populating the fields. Deprecated + use response. + :param response: Raw response + """ + self._connector = connector + self._task_monitor_uri = task_monitor_uri + self._redfish_version = redfish_version + self._registries = registries + self._field_data = field_data + if self._field_data is not None: + LOG.warning('TaskMonitor field_data is deprecated in TaskMonitor. 
' + 'Use response.') + self._task = None + self._response = response + + # Backward compability for deprecated field_data + if self._field_data and not self._response: + self._response = requests.Response() + self._response.status_code = self._field_data.status_code + self._response.headers = self._field_data.headers + self._response._content = json.dumps( + self._field_data.json_doc).encode('utf-8') + + if (self._response and self._response.content + and self._response.status_code == http_client.ACCEPTED): + self._task = task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._response.json()) + else: + self.refresh() + + def refresh(self): + """Refresh the Task + + Freshly retrieves/fetches the Task. + :raises: ResourceNotFoundError + :raises: ConnectionError + :raises: HTTPError + """ + self._response = self._connector.get(path=self.task_monitor_uri) + + if self._response.status_code == http_client.ACCEPTED: + # A Task should have been returned, but wasn't + if not self._response.content: + self._task = None + return + + # Assume that the body contains a Task since we got a 202 + if not self._task: + self._task = task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries, + json_doc=self._response.json()) + else: + self._task.refresh(json_doc=self._response.json()) + else: + self._task = None + + @property + def task_monitor(self): + """The TaskMonitor URI + + Deprecated: Use task_monitor_uri + + :returns: The TaskMonitor URI. + """ + LOG.warning('task_monitor is deprecated in TaskMonitor. ' + 'Use task_monitor_uri.') + return self._task_monitor_uri + + @property + def task_monitor_uri(self): + """The TaskMonitor URI + + :returns: The TaskMonitor URI. 
+ """ + return self._task_monitor_uri + + @property + def is_processing(self): + """Indicates if the task is still processing + + :returns: A boolean indicating if the task is still processing. + """ + return self._response.status_code == http_client.ACCEPTED + + @property + def check_is_processing(self): + """Refreshes task and check if it is still processing + + :returns: A boolean indicating if the task is still processing. + """ + if not self.is_processing: + return False + + self.refresh() + + return self.is_processing + + @property + def retry_after(self): + """The amount of time to sleep before retrying + + Deprecated: use sleep_for. This is not working with Retry-After header + in date format. + + :returns: The amount of time in seconds to wait before calling + is_processing. + """ + LOG.warning('TaskMonitor retry_after is deprecated, use sleep_for.') + return utils.int_or_none(self._response.headers.get('Retry-After')) + + @property + def sleep_for(self): + """Seconds the client should wait before querying the operation status + + Defaults to 1 second if Retry-After not specified in response. + + :returns: The number of seconds to wait + """ + retry_after = self._response.headers.get('Retry-After') + if retry_after is None: + return 1 + + if isinstance(retry_after, int) or retry_after.isdigit(): + return retry_after + + return max(0, (parser.parse(retry_after) + - datetime.now().astimezone()).total_seconds()) + + @property + def cancellable(self): + """The amount of time to sleep before retrying + + :returns: A Boolean indicating if the Task is cancellable. + """ + allow = self._response.headers.get('Allow') + + cancellable = False + if allow and allow.upper() == 'DELETE': + cancellable = True + + return cancellable + + @property + def task(self): + """The executing task + + :returns: The Task being executed. + """ + + return self._task + + @property + def response(self): + """Unprocessed response. + + Intended to be used internally. 
+ :returns: Unprocessed response. + """ + return self._response + + def get_task(self): + """Construct Task instance from task monitor URI. + + :returns: Task instance. + """ + return task.Task(self._connector, self._task_monitor_uri, + redfish_version=self._redfish_version, + registries=self._registries) + + def wait(self, timeout_sec): + """Waits until task is completed or it times out. + + :param timeout_sec: Timeout to wait + :raises: ConnectionError when times out + """ + timeout_at = time.time() + timeout_sec + + while self.check_is_processing: + + LOG.debug('Waiting for task monitor %(url)s; sleeping for ' + '%(sleep)s seconds', + {'url': self.task_monitor_uri, + 'sleep': self.sleep_for}) + time.sleep(self.sleep_for) + if time.time() >= timeout_at and self.check_is_processing: + m = ('Timeout waiting for task monitor %(url)s ' + '(timeout = %(timeout)s)' + % {'url': self.task_monitor_uri, + 'timeout': timeout_sec}) + raise exceptions.ConnectionError(url=self.task_monitor_uri, + error=m) + + @staticmethod + def from_response(conn, response, target_uri, redfish_version=None, + registries=None): + """Construct TaskMonitor instance from received response. + + :response: Unprocessed response + :target_uri: URI used to initiate async operation + :redfish_version: Redfish version. Optional when used internally. + :registries: Redfish registries. Optional when used internally. 
+ :returns: TaskMonitor instance + :raises: MissingHeaderError if Location is missing in response + """ + json_data = response.json() if response.content else {} + + header = 'Location' + task_monitor_uri = response.headers.get(header) + task_uri_data = json_data.get('@odata.id') + + if task_uri_data: + task_monitor_uri = urljoin(task_monitor_uri, task_uri_data) + + if not task_monitor_uri: + raise exceptions.MissingHeaderError(target_uri=target_uri, + header=header) + + return TaskMonitor(conn, + task_monitor_uri, + redfish_version=redfish_version, + registries=registries, + response=response) diff --git a/sushy/tests/unit/json_samples/TestRegistry.zip b/sushy/tests/unit/json_samples/TestRegistry.zip new file mode 100644 index 0000000000000000000000000000000000000000..565c7c88648262e4bbc7bd9738514bf72d74accc Binary files /dev/null and b/sushy/tests/unit/json_samples/TestRegistry.zip differ diff --git a/sushy/tests/unit/json_samples/bare_minimum_root.json b/sushy/tests/unit/json_samples/bare_minimum_root.json new file mode 100644 index 0000000000000000000000000000000000000000..1f5581415b3a4a226b24d3843653e5a536d1cf78 --- /dev/null +++ b/sushy/tests/unit/json_samples/bare_minimum_root.json @@ -0,0 +1,11 @@ +{ + "@odata.type": "#ServiceRoot.v1_0_2.ServiceRoot", + "Id": "RootService", + "Name": "Root Service", + "RedfishVersion": "1.0.2", + "UUID": "92384634-2938-2342-8820-489239905423", + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#ServiceRoot", + "@odata.id": "/redfish/v1/", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} diff --git a/sushy/tests/unit/json_samples/bios.json b/sushy/tests/unit/json_samples/bios.json new file mode 100644 index 0000000000000000000000000000000000000000..f7a3b7a32c6738a14a1de74a68473abdea5bd369 --- /dev/null +++ b/sushy/tests/unit/json_samples/bios.json @@ -0,0 +1,57 @@ +{ + "@odata.type": "#Bios.v1_0_3.Bios", + "Id": "BIOS", + "Name": "BIOS Configuration Current Settings", + "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", + "Attributes": { + "AdminPhone": "", + "BootMode": "Uefi", + "EmbeddedSata": "Raid", + "NicBoot1": "NetworkBoot", + "NicBoot2": "Disabled", + "PowerProfile": "MaxPerf", + "ProcCoreDisable": 0, + "ProcHyperthreading": "Enabled", + "ProcTurboMode": "Enabled", + "UsbControl": "UsbEnabled" + }, + "@Redfish.Settings": { + "@odata.type": "#Settings.v1_0_0.Settings", + "ETag": "9234ac83b9700123cc32", + "Messages": [ + { + "MessageId": "Test.1.0.Failed", + "RelatedProperties": [ + "#/Attributes/ProcTurboMode" + ], + "MessageArgs": [ + "arg1" + ] + } + ], + "SettingsObject": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" + }, + "Time": "2016-03-07T14:44.30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] + }, + "@Redfish.MaintenanceWindow": { + "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", + "MaintenanceWindowDurationInSeconds": 600, + "MaintenanceWindowStartTime": "2020-09-01T04:30:00-06:00" + }, + "Actions": { + "#Bios.ResetBios": { + "target": "/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios" + }, + "#Bios.ChangePassword": { + "target": "/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ChangePassword" + } + }, + "@odata.etag": "123", + "@odata.context": "/redfish/v1/$metadata#Bios.Bios", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS" +} diff --git a/sushy/tests/unit/json_samples/bios_settings.json b/sushy/tests/unit/json_samples/bios_settings.json new file mode 100644 index 
0000000000000000000000000000000000000000..2751a5287a1903a48181e36f36bff80e33e6bb4f --- /dev/null +++ b/sushy/tests/unit/json_samples/bios_settings.json @@ -0,0 +1,28 @@ +{ + "@odata.type": "#Bios.v1_0_3.Bios", + "Id": "Settings", + "Name": "BIOS Configuration Pending Settings", + "AttributeRegistry": "BiosAttributeRegistryP89.v1_0_0", + "Attributes": { + "AdminPhone": "(404) 555-1212", + "BootMode": "Uefi", + "EmbeddedSata": "Ahci", + "NicBoot1": "NetworkBoot", + "NicBoot2": "NetworkBoot", + "PowerProfile": "MaxPerf", + "ProcCoreDisable": 0, + "ProcHyperthreading": "Enabled", + "ProcTurboMode": "Disabled", + "UsbControl": "UsbEnabled" + }, + "@Redfish.SettingsApplyTime": { + "@odata.type": "#Settings.v1_1_0.PreferredApplyTime", + "ApplyTime": "OnReset", + "ApplyTime@Redfish.AllowableValues": [ "OnReset", "Immediate", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600 + }, + "@odata.context": "/redfish/v1/$metadata#Bios.Bios", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} diff --git a/sushy/tests/unit/json_samples/chassis.json b/sushy/tests/unit/json_samples/chassis.json new file mode 100644 index 0000000000000000000000000000000000000000..45cd1e750582f51a03536c85b49ee81aaa93a09d --- /dev/null +++ b/sushy/tests/unit/json_samples/chassis.json @@ -0,0 +1,98 @@ +{ + "@odata.type": "#Chassis.v1_8_0.Chassis", + "Id": "Blade1", + "Name": "Blade", + "Description": "Test description", + "ChassisType": "Blade", + "AssetTag": "45Z-2381", + "Manufacturer": "Contoso", + "Model": "SX1000", + "SKU": "6914260", + "SerialNumber": "529QB9450R6", + "PartNumber": "166480-S23", + "UUID": "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF", + "PowerState": "On", + "IndicatorLED": "Off", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "HeightMm": 44.45, + "WidthMm": 431.8, + "DepthMm": 711, + "WeightKg": 15.31, + "Location": { + "PartLocation": { + "ServiceLabel": "Blade 1", + "LocationType": "Slot", + "LocationOrdinalValue": 0, + "Reference": "Front", + "Orientation": "LeftToRight" + } + }, + "PhysicalSecurity": { + "IntrusionSensor": "Normal", + "IntrusionSensorNumber": 123, + "IntrusionSensorReArm": "Manual" + }, + "Thermal": { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal" + }, + "Links": { + "ComputerSystems": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + } + ], + "ManagedBy": [ + { + "@odata.id": "/redfish/v1/Managers/Blade1BMC" + } + ], + "ContainedBy": { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + "CooledBy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/1" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/2" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Thermal#/Fans/3" + } + ], + "PoweredBy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1" + } + ] + }, + 
"Actions": { + "#Chassis.Reset": { + "target": "/redfish/v1/Chassis/Blade1/Actions/Chassis.Reset", + "ResetType@Redfish.AllowableValues": [ + "ForceRestart", + "GracefulRestart", + "On", + "ForceOff", + "GracefulShutdown", + "Nmi", + "ForceOn", + "PushPowerButton", + "PowerCycle" + ] + }, + "Oem": {} + }, + "@odata.context": "/redfish/v1/$metadata#Chassis.Chassis", + "@odata.id": "/redfish/v1/Chassis/Blade1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/chassis_collection.json b/sushy/tests/unit/json_samples/chassis_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..0af4cad3da853e1fa3c643b4c027a92fadb70ba8 --- /dev/null +++ b/sushy/tests/unit/json_samples/chassis_collection.json @@ -0,0 +1,25 @@ +{ + "@odata.type": "#ChassisCollection.ChassisCollection", + "Name": "Chassis Collection", + "Members@odata.count": 5, + "Members": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + } + ], + "@odata.context": "/redfish/v1/$metadata#ChassisCollection.ChassisCollection", + "@odata.id": "/redfish/v1/Chassis", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/compositionservice.json b/sushy/tests/unit/json_samples/compositionservice.json new file mode 100644 index 0000000000000000000000000000000000000000..07bd0e2321e7876f291c75a85367c215a391eaf8 --- /dev/null +++ b/sushy/tests/unit/json_samples/compositionservice.json @@ -0,0 +1,21 @@ +{ + "@odata.context": "/redfish/v1/$metadata#CompositionService.CompositionService", + "@odata.type": "#CompositionService.v1_1_0.CompositionService", + "@odata.id": "/redfish/v1/CompositionService", + "AllowOverprovisioning": false, + "AllowZoneAffinity": true, + "Description": "CompositionService1", + "Id": "CompositionService", + "Name": "Composition Service", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "ResourceBlocks": { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks" + }, + "ResourceZones": { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones" + } +} diff --git a/sushy/tests/unit/json_samples/drive.json b/sushy/tests/unit/json_samples/drive.json new file mode 100644 index 0000000000000000000000000000000000000000..9488041aa395e53c107219f83413b2a5588afd6b --- /dev/null +++ b/sushy/tests/unit/json_samples/drive.json @@ -0,0 +1,46 @@ +{ + "@odata.type": "#Drive.v1_4_0.Drive", + "Id": "32ADF365C6C1B7BD", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234570", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "32ADF365C6C1B7BD" + } + ], + "HotspareType": "Global", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 
15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links" + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD/Actions/Drive.SecureErase" + } + }, + "@odata.context": "/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/drive2.json b/sushy/tests/unit/json_samples/drive2.json new file mode 100644 index 0000000000000000000000000000000000000000..4f6fb8a1ef92ecc80d676489b45ef192fbec76fd --- /dev/null +++ b/sushy/tests/unit/json_samples/drive2.json @@ -0,0 +1,51 @@ +{ + "@odata.type": "#Drive.v1_4_0.Drive", + "Id": "35D38F11ACEF7BD3", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234567", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "35D38F11ACEF7BD3" + } + ], + "HotspareType": "None", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links", + "Volumes": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1" + } + ] + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + 
"target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3/Actions/Drive.SecureErase" + } + }, + "@odata.context": "/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/drive3.json b/sushy/tests/unit/json_samples/drive3.json new file mode 100644 index 0000000000000000000000000000000000000000..25c0cb3d08a29712d3795bc802f6e68b2736727d --- /dev/null +++ b/sushy/tests/unit/json_samples/drive3.json @@ -0,0 +1,54 @@ +{ + "@odata.type": "#Drive.v1_4_0.Drive", + "Id": "3D58ECBC375FD9F2", + "Name": "Drive Sample", + "IndicatorLED": "Lit", + "Model": "C123", + "Revision": "100A", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "CapacityBytes": 899527000000, + "FailurePredicted": false, + "Protocol": "SAS", + "MediaType": "HDD", + "Manufacturer": "Contoso", + "SerialNumber": "1234568", + "PartNumber": "C123-1111", + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "32ADF365C6C1B7BD" + } + ], + "HotspareType": "None", + "EncryptionAbility": "SelfEncryptingDrive", + "EncryptionStatus": "Unlocked", + "RotationSpeedRPM": 15000, + "BlockSizeBytes": 512, + "CapableSpeedGbs": 12, + "NegotiatedSpeedGbs": 12, + "Links": { + "@odata.type": "#Drive.v1_2_0.Links", + "Volumes": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3" + } + ] + }, + "Actions": { + "@odata.type": "#Drive.v1_0_0.Actions", + "#Drive.SecureErase": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2/Actions/Drive.SecureErase" + } + }, + "@odata.context": 
"/redfish/v1/$metadata#Drive.Drive", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/endpoint.json b/sushy/tests/unit/json_samples/endpoint.json new file mode 100644 index 0000000000000000000000000000000000000000..6e43ceb4ef6d04357c63f06c2a1ebfd2c287f2f6 --- /dev/null +++ b/sushy/tests/unit/json_samples/endpoint.json @@ -0,0 +1,40 @@ +{ + "@odata.type":"#Endpoint.v1_1_0.Endpoint", + "Id":"Drive1", + "Name":"SAS Drive", + "Description":"The SAS Drive in Enclosure 2 Bay 0", + "EndpointProtocol":"SAS", + "ConnectedEntities":[ + { + "EntityType":"Drive", + "EntityRole":"Target", + "Identifiers":[ + { + "DurableNameFormat":"NAA", + "DurableName":"32ADF365C6C1B7C3" + } + ], + "Oem":{} + } + ], + "Links": + { + "MutuallyExclusiveEndpoints":[ + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Endpoints/Enclosure2" + } + ], + "Ports":[ + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Switches/Switch1/Ports/8" + }, + { + "@odata.id":"/redfish/v1/Fabrics/SAS/Switches/Switch2/Ports/8" + } + ], + "Oem":{} + }, + "Oem":{}, + "@odata.context":"/redfish/v1/$metadata#Endpoint.Endpoint", + "@odata.id":"/redfish/v1/Fabrics/SAS/Endpoints/Drive1" +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/endpoint_collection.json b/sushy/tests/unit/json_samples/endpoint_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..6dd664f7f15a0aa7f00e0ce5ad9d797079efebfe --- /dev/null +++ b/sushy/tests/unit/json_samples/endpoint_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#EndpointCollection.EndpointCollection", + "Name": "Endpoint Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints/Drive1" + } + ], + "Oem": {}, + "@odata.context": 
"/redfish/v1/$metadata#EndpointCollection.EndpointCollection", + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints" +} diff --git a/sushy/tests/unit/json_samples/error.json b/sushy/tests/unit/json_samples/error.json index 89598b18e99a99e798dfb03cba8eca89a72f7c15..155af2a651cdc4016f7f52b45108099dde7d4a5b 100644 --- a/sushy/tests/unit/json_samples/error.json +++ b/sushy/tests/unit/json_samples/error.json @@ -29,6 +29,13 @@ ], "Severity": "Warning", "Resolution": "Remove the property from the request body and resubmit the request if the operation failed" + }, + { + "@odata.type": "/redfish/v1/$metadata#Message.1.0.0.Message", + "MessageId": "Base.1.0.MalformedJSON", + "Message": "The request body submitted was malformed JSON and could not be parsed by the receiving service.", + "Severity": "Critical", + "Resolution": "Ensure that the request body is valid JSON and resubmit the request." } ] } diff --git a/sushy/tests/unit/json_samples/error_single_ext_info.json b/sushy/tests/unit/json_samples/error_single_ext_info.json new file mode 100644 index 0000000000000000000000000000000000000000..d56a0a6e462fd40097c44410236bd29439bab880 --- /dev/null +++ b/sushy/tests/unit/json_samples/error_single_ext_info.json @@ -0,0 +1,13 @@ +{ + "error": { + "code": "Base.1.5.GeneralError", + "message": "A general error has occurred. See ExtendedInfo for more information.", + "@Message.ExtendedInfo": { + "@odata.type": "#Message.v1_0_0.Message", + "MessageId": "Base.1.5.GeneralError", + "Message": "A general error has occurred. See Resolution for information on how to resolve the error.", + "Resolution": "Redfish request contains unsupported media type. 
Correct the request body and resubmit.", + "Severity": "Warning" + } + } +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/ethernet_interfaces.json b/sushy/tests/unit/json_samples/ethernet_interfaces.json new file mode 100644 index 0000000000000000000000000000000000000000..d3015fb55789b5068551ed0817bd1ea144d08f91 --- /dev/null +++ b/sushy/tests/unit/json_samples/ethernet_interfaces.json @@ -0,0 +1,37 @@ +{ + "@odata.type": "#EthernetInterface.v1_4_0.EthernetInterface", + "Id": "1", + "Name": "Ethernet Interface", + "Description": "System NIC 1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PermanentMACAddress": "12:44:6A:3B:04:11", + "MACAddress": "12:44:6A:3B:04:11", + "SpeedMbps": 1000, + "FullDuplex": true, + "HostName": "web483", + "FQDN": "web483.contoso.com", + "IPv6DefaultGateway": "fe80::3ed9:2bff:fe34:600", + "NameServers": [ + "names.contoso.com" + ], + "IPv4Addresses": [{ + "Address": "192.168.0.10", + "SubnetMask": "255.255.252.0", + "AddressOrigin": "Static", + "Gateway": "192.168.0.1" + }], + "IPv6Addresses": [{ + "Address": "fe80::1ec1:deff:fe6f:1e24", + "PrefixLength": 64, + "AddressOrigin": "Static", + "AddressState": "Preferred" + }], + "VLANs": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411/VLANs" + }, + "@odata.context": "/redfish/v1/$metadata#EthernetInterface.EthernetInterface", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411" +} diff --git a/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json b/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..4623de05f9fa0635063eaf467f81ff55c27c1663 --- /dev/null +++ b/sushy/tests/unit/json_samples/ethernet_interfaces_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#EthernetInterfaceCollection.EthernetInterfaceCollection", + "Name": "Ethernet Interface Collection", + "Description": "System NICs on 
Contoso Servers", + "Members@odata.count": 1, + "Members": [{ + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/12446A3B0411" + }], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#EthernetInterfaceCollection.EthernetInterfaceCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/EthernetInterfaces" +} diff --git a/sushy/tests/unit/json_samples/fabric.json b/sushy/tests/unit/json_samples/fabric.json new file mode 100644 index 0000000000000000000000000000000000000000..9876f07665c4a630520aafa1f985a09df56f846d --- /dev/null +++ b/sushy/tests/unit/json_samples/fabric.json @@ -0,0 +1,29 @@ +{ + "@odata.type": "#Fabric.v1_0_3.Fabric", + "Id": "SAS", + "Name": "SAS Fabric", + "FabricType": "SAS", + "Description": "A SAS Fabric with redundant switches.", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Zones": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Zones" + }, + "Endpoints": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Endpoints" + }, + "Switches": { + "@odata.id": "/redfish/v1/Fabrics/SAS/Switches" + }, + "Links": { + "Oem": {} + }, + "Actions": { + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#Fabric.Fabric", + "@odata.id": "/redfish/v1/Fabrics/SAS" +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/fabric_collection.json b/sushy/tests/unit/json_samples/fabric_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..f58a5d6d65522b9fbd94026860d7b409bcd1cfc5 --- /dev/null +++ b/sushy/tests/unit/json_samples/fabric_collection.json @@ -0,0 +1,16 @@ +{ + "@odata.type": "#FabricCollection.FabricCollection", + "Name": "Fabric Collection", + "Members@odata.count": 2, + "Members": [ + { + "@odata.id": "/redfish/v1/Fabrics/SAS1" + }, + { + "@odata.id": "/redfish/v1/Fabrics/SAS2" + } + ], + "@odata.context": "/redfish/v1/$metadata#FabricCollection.FabricCollection", + "@odata.id": "/redfish/v1/Fabrics", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed 
Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/firmwareinventory_collection.json b/sushy/tests/unit/json_samples/firmwareinventory_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..36a7b81413c15388df7403c1a8a84c5bc5000623 --- /dev/null +++ b/sushy/tests/unit/json_samples/firmwareinventory_collection.json @@ -0,0 +1,19 @@ +{ + "@odata.context": "/redfish/v1/$metadata#SoftwareInventoryCollection.SoftwareInventoryCollection", + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory", + "@odata.type": "#SoftwareInventoryCollection.SoftwareInventoryCollection", + "Description": "Collection of Firmware Inventory", + "Members": [ + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Current-101560-25.5.6.0009" + }, + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Installed-101560-25.5.6.0009" + }, + { + "@odata.id": "/redfish/v1/UpdateService/FirmwareInventory/Previous-102302-18.8.9" + } + ], + "Members@odata.count": 3, + "Name": "Firmware Inventory Collection" +} diff --git a/sushy/tests/unit/json_samples/manager.json b/sushy/tests/unit/json_samples/manager.json index e79e386545a0a03d1c3654683ce92d76f4163bf2..f22ec0b89b9a794b0cde5aeb41d588b2887b568f 100644 --- a/sushy/tests/unit/json_samples/manager.json +++ b/sushy/tests/unit/json_samples/manager.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Manager.v1_1_0.Manager", + "@odata.type": "#Manager.v1_4_0.Manager", "Id": "BMC", "Name": "Manager", "ManagerType": "BMC", @@ -9,6 +9,7 @@ "Model": "Joo Janta 200", "DateTime": "2015-03-13T04:14:33+06:00", "DateTimeLocalOffset": "+06:00", + "AutoDSTEnabled": false, "Status": { "State": "Enabled", "Health": "OK" diff --git a/sushy/tests/unit/json_samples/message_registry.json b/sushy/tests/unit/json_samples/message_registry.json new file mode 100644 index 
0000000000000000000000000000000000000000..24687010ff5ef4a2163cf45b6dca450e5d7277d0 --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry.json @@ -0,0 +1,49 @@ +{ + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + "Id": "Test.1.1.1", + "Name": "Test Message Registry", + "Language": "en", + "Description": "This registry defines messages for sushy testing", + "RegistryPrefix": "Test", + "RegistryVersion": "1.1.1", + "OwningEntity": "sushy", + "Messages": { + "Success": { + "Description": "Everything OK", + "Message": "Everything done successfully.", + "Severity": "OK", + "NumberOfArgs": 0, + "Resolution": "None" + }, + "Failed": { + "Description": "Nothing is OK", + "Message": "The property %1 broke everything.", + "Severity": "Critical", + "NumberOfArgs": 1, + "ParamTypes": [ + "string" + ], + "Resolution": "Panic" + }, + "TooBig": { + "Description": "Value too big", + "Message": "Property's %1 value cannot be greater than %2.", + "Severity": "Warning", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Try again" + }, + "MissingThings": { + "Message": "Property's %1 value cannot be less than %2.", + "NumberOfArgs": 2, + "ParamTypes": [ + "string", + "number" + ], + "Resolution": "Try Later" + } + } +} diff --git a/sushy/tests/unit/json_samples/message_registry_file.json b/sushy/tests/unit/json_samples/message_registry_file.json new file mode 100644 index 0000000000000000000000000000000000000000..714105eb69520d82890f059386808c7bfffa8386 --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry_file.json @@ -0,0 +1,18 @@ +{ + "@odata.type": "#MessageRegistryFile.v1_1_1.MessageRegistryFile", + "Id": "Test", + "Name": "Test Message Registry File", + "Description": "Message Registry file for testing", + "Languages": ["en"], + "Registry": "Test.1.0", + "Location": [ + {"Language": "default", + "Uri": "/redfish/v1/Registries/Test/Test.1.0.json", + "ArchiveUri": "/redfish/v1/Registries/Archive.zip", + 
"ArchiveFile": "Test.1.0.json", + "PublicationUri": "https://example.com/Registries/Test.1.0.json" + } + ], + "@odata.context": "/redfish/v1/$metadata#MessageRegistryFile.MessageRegistryFile", + "@odata.id": "/redfish/v1/Registries/Test" +} diff --git a/sushy/tests/unit/json_samples/message_registry_file_collection.json b/sushy/tests/unit/json_samples/message_registry_file_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..87905ab11f7e70382d9069c91ba67562db7a1f72 --- /dev/null +++ b/sushy/tests/unit/json_samples/message_registry_file_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#MessageRegistryFileCollection.MessageRegistryFileCollection", + "Name": "Message Registry Test Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Registries/Test" + } + ], + "@odata.context": "/redfish/v1/$metadata#MessageRegistryFileCollection.MessageRegistryFileCollection", + "@odata.id": "/redfish/v1/Registries" +} diff --git a/sushy/tests/unit/json_samples/power.json b/sushy/tests/unit/json_samples/power.json new file mode 100644 index 0000000000000000000000000000000000000000..59c4ccd75377eba3251f4d9e6970f2782cfefbca --- /dev/null +++ b/sushy/tests/unit/json_samples/power.json @@ -0,0 +1,159 @@ +{ + "@odata.type": "#Power.v1_3_0.Power", + "Id": "Power", + "Name": "Quad Blade Chassis Power", + "PowerSupplies": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0", + "MemberId": "0", + "Name": "Power Supply 0", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PowerSupplyType": "AC", + "LineInputVoltageType": "AC240V", + "LineInputVoltage": 220, + "PowerCapacityWatts": 1450, + "InputRanges": [ + { + "InputType": "AC", + "MinimumVoltage": 185, + "MaximumVoltage": 250, + "MinimumFrequencyHz": 47, + "MaximumFrequencyHz": 63, + "OutputWattage": 1450 + } + ], + "LastPowerOutputWatts": 650, + "Model": "325457-A06", + "Manufacturer": "Cyberdyne", + "FirmwareVersion": "2.20", + 
"SerialNumber": "1S0000523", + "PartNumber": "425-591-654", + "SparePartNumber": "425-591-654", + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0" + } + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id":"/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9451R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9452R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9453R6" + } + ] + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1", + "MemberId": "1", + "Name": "Power Supply 1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "PowerSupplyType": "AC", + "LineInputVoltageType": "AC240V", + "LineInputVoltage": 222, + "PowerCapacityWatts": 1450, + "InputRanges": [ + { + "InputType": "AC", + "MinimumVoltage": 185, + "MaximumVoltage": 250, + "MinimumFrequencyHz": 47, + "MaximumFrequencyHz": 63, + "OutputWattage": 1450 + } + ], + "LastPowerOutputWatts": 635, + "Model": "325457-A06", + "Manufacturer": "Cyberdyne", + "FirmwareVersion": "2.20", + "SerialNumber": "1S0000524", + "PartNumber": "425-591-654", + "SparePartNumber": "425-591-654", + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0" + } + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade1" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade2" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade3" + }, + { + "@odata.id": "/redfish/v1/Chassis/Blade4" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9451R6" + }, + { + "@odata.id": "/redfish/v1/Systems/529QB9452R6" + 
}, + { + "@odata.id": "/redfish/v1/Systems/529QB9453R6" + } + ] + } + ], + "Redundancy": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/Redundancy/0", + "MemberId": "0", + "Name": "Power Supply Redundancy", + "RedundancySet": [ + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/0" + }, + { + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power#/PowerSupplies/1" + } + ], + "Mode": "N+m", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "MinNumNeeded": 1, + "MaxNumSupported": 2 + } + ], + "@odata.context": "/redfish/v1/$metadata#Power.Power", + "@odata.id": "/redfish/v1/Chassis/MultiBladeEncl/Power" +} diff --git a/sushy/tests/unit/json_samples/processor.json b/sushy/tests/unit/json_samples/processor.json index 19ce6423ba4731d081454821d14dc3fc4667eadc..d6e03c73406a8b3bb25c4a0dfbf9d97e588b16a9 100644 --- a/sushy/tests/unit/json_samples/processor.json +++ b/sushy/tests/unit/json_samples/processor.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Processor.v1_0_2.Processor", + "@odata.type": "#Processor.v1_3_0.Processor", "Id": "CPU1", "Socket": "CPU 1", "ProcessorType": "CPU", @@ -7,7 +7,7 @@ "InstructionSet": "x86-64", "Manufacturer": "Intel(R) Corporation", "Model": "Multi-Core Intel(R) Xeon(R) processor 7xxx Series", - "ProcessorID": { + "ProcessorId": { "VendorID": "GenuineIntel", "IdentificationRegisters": "0x34AC34DC8901274A", "EffectiveFamily": "0x42", @@ -20,7 +20,8 @@ "TotalThreads": 16, "Status": { "State": "Enabled", - "Health": "OK" + "Health": "OK", + "HealthRollup": "OK" }, "@odata.context": "/redfish/v1/$metadata#Systems/Members/437XR1138R2/Processors/Members/$entity", "@odata.id": "/redfish/v1/Systems/437XR1138R2/Processors/CPU1", diff --git a/sushy/tests/unit/json_samples/processor2.json b/sushy/tests/unit/json_samples/processor2.json index a0508514cb61d6f641459ed7695486efff7d5943..e1a2e76241a8d443ce56aeec9516603f4fda6678 100644 --- a/sushy/tests/unit/json_samples/processor2.json +++ 
b/sushy/tests/unit/json_samples/processor2.json @@ -1,5 +1,5 @@ { - "@odata.type": "#Processor.v1_0_2.Processor", + "@odata.type": "#Processor.v1_3_0.Processor", "Id": "CPU2", "Socket": "CPU 2", "ProcessorType": "CPU", diff --git a/sushy/tests/unit/json_samples/resourceblock.json b/sushy/tests/unit/json_samples/resourceblock.json new file mode 100644 index 0000000000000000000000000000000000000000..871b88f31b9e04a5e86caf24ecaaedb977ef95ad --- /dev/null +++ b/sushy/tests/unit/json_samples/resourceblock.json @@ -0,0 +1,48 @@ +{ + "@odata.context": "/redfish/v1/$metadata#ResourceBlock.ResourceBlock", + "@odata.type": "#ResourceBlock.v1_3_0.ResourceBlock", + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3", + "Id": "DriveBlock3", + "Name": "Drive Block 3", + "Description": "ResourceBlock1", + "ResourceBlockType": "Storage", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "CompositionStatus": { + "Reserved": false, + "CompositionState": "Composed", + "MaxCompositions": 1, + "NumberOfCompositions": 0, + "SharingCapable": true, + "SharingEnabled": false + }, + "Processors": [], + "Memory": [], + "Storage": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3/Storage/Block3NVMe" + } + ], + "Links": { + "ComputerSystems": [ + { + "@odata.id": "/redfish/v1/Systems/ComposedSystem" + } + ], + "Chassis": [ + { + "@odata.id": "/redfish/v1/Chassis/ComposableModule3" + } + ], + "Zones": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/1" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/2" + } + ] + } +} diff --git a/sushy/tests/unit/json_samples/resourceblock_collection.json b/sushy/tests/unit/json_samples/resourceblock_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..de3373774be64834785fd14859ad9b255ec0620f --- /dev/null +++ b/sushy/tests/unit/json_samples/resourceblock_collection.json @@ -0,0 +1,11 @@ +{ + "@odata.type": 
"#ResourceBlockCollection.ResourceBlockCollection", + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks", + "Name": "Resource Block Collection", + "Members@odata.count": 1, + "Members": [ + { "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1" } + ] +} + + diff --git a/sushy/tests/unit/json_samples/resourcezone.json b/sushy/tests/unit/json_samples/resourcezone.json new file mode 100644 index 0000000000000000000000000000000000000000..31c4ef34d38af7bfd92f9399b7d4ab266b3ecb88 --- /dev/null +++ b/sushy/tests/unit/json_samples/resourcezone.json @@ -0,0 +1,50 @@ +{ + "@odata.context": "/redfish/v1/$metadata#Zone.Zone", + "@odata.type": "#Zone.v1_2_1.Zone", + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/1", + "Id": "1", + "Name": "Resource Zone 1", + "Description": "ResourceZone1", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Links": { + "ResourceBlocks": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock4" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock5" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock6" + }, + { + "@odata.id": "/redfish/v1/CompositionService/ResourceBlocks/DriveBlock7" + } + ] + }, + "@Redfish.CollectionCapabilities": { + "@odata.type": "#CollectionCapabilities.v1_0_0.CollectionCapabilities", + "Capabilities": [ + { + "CapabilitiesObject": { + "@odata.id": "/redfish/v1/Systems/Capabilities" + }, + "UseCase": "ComputerSystemComposition", + "Links": { + "TargetCollection": { + "@odata.id": "/redfish/v1/Systems" + } + } + } + ] + } +} diff --git a/sushy/tests/unit/json_samples/resourcezone_collection.json b/sushy/tests/unit/json_samples/resourcezone_collection.json new file mode 100644 index 
0000000000000000000000000000000000000000..423405a368a22fe38c3f74dc712b00c9e9eb5225 --- /dev/null +++ b/sushy/tests/unit/json_samples/resourcezone_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#ZoneCollection.ZoneCollection", + "@odata.id": "/redfish/v1/CompositionService/ResourceZones", + "Name": "Resource Zone Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/CompositionService/ResourceZones/1" + } + ] +} + + diff --git a/sushy/tests/unit/json_samples/root.json b/sushy/tests/unit/json_samples/root.json index 29709d5f604a176e07cfe777db8a2c175544b3f2..c1705ab85b192d4994643fa41fa38d6715ec34f2 100644 --- a/sushy/tests/unit/json_samples/root.json +++ b/sushy/tests/unit/json_samples/root.json @@ -1,9 +1,17 @@ { - "@odata.type": "#ServiceRoot.v1_0_2.ServiceRoot", + "@odata.type": "#ServiceRoot.v1_3_1.ServiceRoot", "Id": "RootService", "Name": "Root Service", "RedfishVersion": "1.0.2", "UUID": "92384634-2938-2342-8820-489239905423", + "Product": "Product", + "ProtocolFeaturesSupported": { + "ExcerptQuery": true, + "ExpandQuery": false, + "FilterQuery": true, + "OnlyMemberQuery": true, + "SelectQuery": false + }, "Systems": { "@odata.id": "/redfish/v1/Systems" }, @@ -13,15 +21,24 @@ "Managers": { "@odata.id": "/redfish/v1/Managers" }, + "Fabrics": { + "@odata.id": "/redfish/v1/Fabrics" + }, "Tasks": { "@odata.id": "/redfish/v1/TaskService" }, "SessionService": { "@odata.id": "/redfish/v1/SessionService" }, + "UpdateService": { + "@odata.id": "/redfish/v1/UpdateService" + }, "AccountService": { "@odata.id": "/redfish/v1/AccountService" }, + "CompositionService": { + "@odata.id": "/redfish/v1/CompositionService" + }, "EventService": { "@odata.id": "/redfish/v1/EventService" }, @@ -30,6 +47,9 @@ "@odata.id": "/redfish/v1/SessionService/Sessions" } }, + "Registries": { + "@odata.id": "/redfish/v1/Registries" + }, "Oem": {}, "@odata.context": "/redfish/v1/$metadata#ServiceRoot", "@odata.id": "/redfish/v1/", diff --git 
a/sushy/tests/unit/json_samples/secure_boot.json b/sushy/tests/unit/json_samples/secure_boot.json new file mode 100644 index 0000000000000000000000000000000000000000..ac4b49507154394dc4fac8ec592e3e5f33020a68 --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot.json @@ -0,0 +1,22 @@ +{ + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot", + "@odata.type": "#SecureBoot.v1_1_0.SecureBoot", + "Id": "SecureBoot", + "Name": "UEFI Secure Boot", + "Actions": { + "#SecureBoot.ResetKeys": { + "target": "/redfish/v1/Systems/437XR1138R2/SecureBoot/Actions/SecureBoot.ResetKeys", + "ResetKeysType@Redfish.AllowableValues": [ + "ResetAllKeysToDefault", + "DeleteAllKeys", + "DeletePK" + ] + } + } , + "SecureBootEnable": false, + "SecureBootCurrentBoot": "Disabled", + "SecureBootMode": "DeployedMode", + "SecureBootDatabases": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases" + } +} diff --git a/sushy/tests/unit/json_samples/secure_boot_database.json b/sushy/tests/unit/json_samples/secure_boot_database.json new file mode 100644 index 0000000000000000000000000000000000000000..53e7e94f4a48f240ff9b755eb110fdadcd3ffd64 --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot_database.json @@ -0,0 +1,26 @@ +{ + "@odata.type": "#SecureBootDatabase.v1_0_0.SecureBootDatabase", + "Id": "db", + "Name": "db - Authorized Signature Database", + "Description": "UEFI db Secure Boot Database", + "DatabaseId": "db", + "Certificates": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Certificates/" + }, + "Signatures": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Signatures/" + }, + "Actions": { + "#SecureBootDatabase.ResetKeys": { + "target": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db/Actions/SecureBootDatabase.ResetKeys", + "ResetKeysType@Redfish.AllowableValues": [ + "ResetAllKeysToDefault", + "DeleteAllKeys" + ] + }, + "Oem": {} + }, + "Oem": {}, + 
"@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db", + "@Redfish.Copyright": "Copyright 2014-2021 DMTF. For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/secure_boot_database_collection.json b/sushy/tests/unit/json_samples/secure_boot_database_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..ed6b22b2383c5d8cfbb8812c3a012d5e20e2273c --- /dev/null +++ b/sushy/tests/unit/json_samples/secure_boot_database_collection.json @@ -0,0 +1,34 @@ +{ + "@odata.type": "#SecureBootDatabaseCollection.SecureBootDatabaseCollection", + "Name": "UEFI SecureBoot Database Collection", + "Members@odata.count": 8, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/PK" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/KEK" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbx" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/PKDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/KEKDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbDefault" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/dbxDefault" + } + ], + "Oem": {}, + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases", + "@Redfish.Copyright": "Copyright 2014-2021 DMTF. For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/session.json b/sushy/tests/unit/json_samples/session.json new file mode 100644 index 0000000000000000000000000000000000000000..817960249546566a3f547a409803939830482fe5 --- /dev/null +++ b/sushy/tests/unit/json_samples/session.json @@ -0,0 +1,11 @@ +{ + "@odata.type": "#Session.v1_1_0.Session", + "Id": "1234567890ABCDEF", + "Name": "User Session", + "Description": "Manager User Session", + "UserName": "Administrator", + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#Session.Session", + "@odata.id": "/redfish/v1/SessionService/Sessions/1234567890ABCDEF", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/session_collection.json b/sushy/tests/unit/json_samples/session_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..30090e1f2f5bcaa1c4df324160c61fd86835bf71 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_collection.json @@ -0,0 +1,12 @@ +{ + "@odata.type": "#SessionCollection.SessionCollection", + "Name": "Session Collection", + "Members@odata.count": 1, + "@odata.id": "/redfish/v1/SessionService/Sessions", + "@odata.context": "/redfish/v1/$metadata#SessionService/Sessions/$entity", + "Members": [ + { + "@odata.id": "/redfish/v1/SessionService/Sessions/104f9d68f58abb85" + } + ] +} diff --git a/sushy/tests/unit/json_samples/session_creation_headers.json b/sushy/tests/unit/json_samples/session_creation_headers.json new file mode 100644 index 0000000000000000000000000000000000000000..e2d3b38460c04d740de98de3c377bdce19386061 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_creation_headers.json @@ -0,0 +1,18 @@ +{ + "Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline' 'unsafe-eval'; connect-src 'self'; img-src 'self'; frame-src 'self'; 
font-src 'self'; object-src 'self'; style-src 'self' 'unsafe-inline'", + "ETag": "'W/\"7dc5e2b9\"'", + "Cache-Control": "max-age=0, no-cache, no-store, must-revalidate", + "Location": "/redfish/v1/SessionService/Sessions/151edd65d41c0b89", + "Connection": "Keep-Alive", + "X-XSS-Protection": "1; mode=block", + "X-Auth-Token": "adc530e2016a0ea98c76c087f0e4b76f", + "Expires": "0", + "X-Frame-Options": "SAMEORIGIN", + "Content-Length": "392", + "X-Content-Type-Options": "nosniff", + "Content-Type": "application/json;charset=utf-8", + "OData-Version": "4.0", + "Keep-Alive": "timeout=1, max=32", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "Date": "Tue, 06 Jun 2017 17:07:48 GMT" +} diff --git a/sushy/tests/unit/json_samples/session_error.json b/sushy/tests/unit/json_samples/session_error.json new file mode 100644 index 0000000000000000000000000000000000000000..9ac70d911e970df5f8b2079a03e32c1bc822313e --- /dev/null +++ b/sushy/tests/unit/json_samples/session_error.json @@ -0,0 +1,17 @@ +{ + "error": { + "code": "Base.1.0.GeneralError", + "message": "A general error has occurred. See ExtendedInfo for more information.", + "@Message.ExtendedInfo": [ + { + "@odata.type": "/redfish/v1/$metadata#MessageRegistry.1.0.0.MessageRegistry", + "MessageId": "Base.1.0.NoValidSession", + "RelatedProperties": [], + "Message": "There is no valid session established with the implementation.", + "MessageArgs": [], + "Severity": "Critical", + "Resolution": "Establish a session before attempting any operations."
+ } + ] + } +} diff --git a/sushy/tests/unit/json_samples/session_service.json b/sushy/tests/unit/json_samples/session_service.json new file mode 100644 index 0000000000000000000000000000000000000000..2e73020f622384ee0f9baf23ea245fcc20436c73 --- /dev/null +++ b/sushy/tests/unit/json_samples/session_service.json @@ -0,0 +1,18 @@ +{ + "@odata.type": "#SessionService.v1_1_3.SessionService", + "Id": "SessionService", + "Name": "Session Service", + "Description": "Session Service", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "SessionTimeout": 30, + "Sessions": { + "@odata.id": "/redfish/v1/SessionService/Sessions" + }, + "@odata.context": "/redfish/v1/$metadata#SessionService", + "@odata.id": "/redfish/v1/SessionService", + "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/settings.json b/sushy/tests/unit/json_samples/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..69de0c67413c32fc4c5f580cd17f0400ed15e17f --- /dev/null +++ b/sushy/tests/unit/json_samples/settings.json @@ -0,0 +1,26 @@ +{ + "@Redfish.Settings": { + "@odata.type": "#Settings.v1_2_0.Settings", + "ETag": "9234ac83b9700123cc32", + "Messages": [{ + "MessageId": "Test.1.0.Failed", + "Message": "Settings %1 update failed due to invalid value", + "Severity": "Critical", + "Resolution": "Fix the value and try again", + "MessageArgs": [ + "arg1" + ], + "RelatedProperties": [ + "#/Attributes/ProcTurboMode" + ] + }], + "SettingsObject": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS/Settings" + }, + "Time": "2016-03-07T14:44:30-05:00", + "SupportedApplyTimes": [ + "OnReset", + "InMaintenanceWindowOnReset" + ] + } +} diff --git a/sushy/tests/unit/json_samples/simple_storage.json b/sushy/tests/unit/json_samples/simple_storage.json new file mode 100644 index 
0000000000000000000000000000000000000000..3e55a46c8c0374917ff5cb53f05fff9e8d1dd83d --- /dev/null +++ b/sushy/tests/unit/json_samples/simple_storage.json @@ -0,0 +1,59 @@ +{ + "@odata.type": "#SimpleStorage.v1_2_0.SimpleStorage", + "Id": "1", + "Name": "Simple Storage Controller", + "Description": "System SATA", + "UefiDevicePath": "Acpi(PNP0A03,0)/Pci(1F|1)/Ata(Primary,Master)/HD(Part3, Sig00110011)", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK", + "HealthRollup": "Warning" + }, + "Devices": [ + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 1", + "Manufacturer": "Contoso", + "Model": "3000GT8", + "CapacityBytes": 8000000000000, + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 2", + "Manufacturer": "Contoso", + "Model": "3000GT7", + "CapacityBytes": 4000000000000, + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "Critical" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 3", + "CapacityBytes": 9000000000000, + "Status": { + "@odata.type": "#Resource.Status", + "State": "Absent" + } + }, + { + "@odata.type": "#SimpleStorage.v1_1_0.Device", + "Name": "SATA Bay 4", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Absent" + } + } + ], + "@odata.context": "/redfish/v1/$metadata#SimpleStorage.SimpleStorage", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/simple_storage_collection.json b/sushy/tests/unit/json_samples/simple_storage_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..cb352af4f891d0245f9e12c4b9003b6b50c2e7a5 --- /dev/null +++ b/sushy/tests/unit/json_samples/simple_storage_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#SimpleStorageCollection.SimpleStorageCollection", + "Name": "Simple Storage Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage/1" + } + ], + "@odata.context": "/redfish/v1/$metadata#SimpleStorageCollection.SimpleStorageCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SimpleStorage", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/softwareinventory.json b/sushy/tests/unit/json_samples/softwareinventory.json new file mode 100644 index 0000000000000000000000000000000000000000..2e3264fa58f7cd63ffd8de50c4a9dca294802ca0 --- /dev/null +++ b/sushy/tests/unit/json_samples/softwareinventory.json @@ -0,0 +1,29 @@ +{ + "@odata.type": "#SoftwareInventory.v1_2_0.SoftwareInventory", + "Id": "BMC", + "Name": "Contoso BMC Firmware", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Updateable": true, + "Manufacturer": "Contoso", + "ReleaseDate": "2017-08-22T12:00:00", + "Version": "1.45.455b66-rev4", + "SoftwareId": "1624A9DF-5E13-47FC-874A-DF3AFF143089", + "LowestSupportedVersion": "1.30.367a12-rev1", + "UefiDevicePaths": [ + "BMC(0x1,0x0ABCDEF)" + ], + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Managers/1" + } + ], + "Actions": { + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#SoftwareInventory.SoftwareInventory", + "@odata.id": 
"/redfish/v1/UpdateService/FirmwareInventory/BMC" +} diff --git a/sushy/tests/unit/json_samples/storage.json b/sushy/tests/unit/json_samples/storage.json new file mode 100644 index 0000000000000000000000000000000000000000..d3a6322857615a0962044f0e50f2a85d70fe2a78 --- /dev/null +++ b/sushy/tests/unit/json_samples/storage.json @@ -0,0 +1,78 @@ +{ + "@odata.type": "#Storage.v1_4_0.Storage", + "Id": "1", + "Name": "Local Storage Controller", + "Description": "Integrated RAID Controller", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "StorageControllers": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1#/StorageControllers/0", + "@odata.type": "#Storage.v1_3_0.StorageController", + "MemberId": "0", + "Name": "Contoso Integrated RAID", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "NAA", + "DurableName": "345C59DBD970859C" + } + ], + "Manufacturer": "Contoso", + "Model": "12Gbs Integrated RAID", + "SerialNumber": "2M220100SL", + "PartNumber": "CT18754", + "SpeedGbps": 12, + "FirmwareVersion": "1.0.0.7", + "SupportedControllerProtocols": [ + "PCIe" + ], + "SupportedDeviceProtocols": [ + "SAS", + "SATA" + ], + "SupportedRAIDTypes": [ + "RAID0", + "RAID1" + ] + } + ], + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ], + "Volumes": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes" + }, + "Links": { + "@odata.type": "#Storage.v1_0_0.Storage" + }, + "Actions": { + "@odata.type": 
"#Storage.v1_0_0.Actions", + "#Storage.SetEncryptionKey": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Actions/Storage.SetEncryptionKey" + } + }, + "@odata.context": "/redfish/v1/$metadata#Storage.Storage", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/storage_collection.json b/sushy/tests/unit/json_samples/storage_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..16155fd2b572d2b1b9b10fd3ad1e12a45bc44bbd --- /dev/null +++ b/sushy/tests/unit/json_samples/storage_collection.json @@ -0,0 +1,13 @@ +{ + "@odata.type": "#StorageCollection.StorageCollection", + "Name": "Storage Collection", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1" + } + ], + "@odata.context": "/redfish/v1/$metadata#StorageCollection.StorageCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/system.json b/sushy/tests/unit/json_samples/system.json index 5dd7da6d508483e2d55a990e40d053a72cf5c864..588139345c46790dbe3876b5ab34e2ced42b7c90 100644 --- a/sushy/tests/unit/json_samples/system.json +++ b/sushy/tests/unit/json_samples/system.json @@ -1,11 +1,12 @@ { - "@odata.type": "#ComputerSystem.v1_1_0.ComputerSystem", + "@odata.type": "#ComputerSystem.v1_10_0.ComputerSystem", "Id": "437XR1138R2", "Name": "WebFrontEnd483", "SystemType": "Physical", "AssetTag": "Chicago-45Z-2381", "Manufacturer": "Contoso", - "Model": "3500RX", + "Model": "3500", + "SubModel": "RX", "SKU": "8675309", "SerialNumber": "437XR1138R2", "PartNumber": "224071-J23", @@ -17,6 +18,9 @@ "Health": "OK", "HealthRollup": "OK" }, + "HostingRoles": [ + "ApplicationServer" + ], "IndicatorLED": "Off", "PowerState": "On", "Boot": { @@ -49,18 +53,18 @@ ], "Oem": { "Contoso": { - "@odata.type": "http://Contoso.com/Schema#Contoso.ComputerSystem", + "@odata.type": "#Contoso.ComputerSystem", "ProductionLocation": { "FacilityName": "PacWest Production Facility", "Country": "USA" } }, "Chipwise": { - "@odata.type": "http://Chipwise.com/Schema#Chipwise.ComputerSystem", + "@odata.type": "#Chipwise.ComputerSystem", "Style": "Executive" } }, - "BiosVersion": "P79 v1.33 (02/28/2015)", + "BiosVersion": "P79 v1.45 (12/06/2017)", "ProcessorSummary": { "Count": 2, "ProcessorFamily": "Multi-Core Intel(R) Xeon(R) processor 7xxx Series", @@ -72,6 +76,8 @@ }, "MemorySummary": { "TotalSystemMemoryGiB": 96, + "TotalSystemPersistentMemoryGiB": 0, + "MemoryMirroring": "None", "Status": { "State": "Enabled", "Health": "OK", @@ -79,7 +85,10 @@ } }, "Bios": { - "@odata.id": "/redfish/v1/Systems/437XR1138R2/BIOS" + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Bios" + }, + "SecureBoot": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/SecureBoot" }, "Processors": { "@odata.id": "/redfish/v1/Systems/437XR1138R2/Processors" @@ -118,8 +127,18 @@ 
"GracefulRestart", "ForceRestart", "Nmi", - "ForceOn" - ] + "ForceOn", + "PushPowerButton" + ], + "@Redfish.OperationApplyTimeSupport": { + "@odata.type": "#Settings.v1_2_0.OperationApplyTimeSupport", + "SupportedValues": [ "Immediate", "AtMaintenanceWindowStart" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600, + "MaintenanceWindowResource": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2" + } + } }, "Oem": { "#Contoso.Reset": { @@ -127,6 +146,11 @@ } } }, + "@Redfish.MaintenanceWindow": { + "@odata.type": "#Settings.v1_2_0.MaintenanceWindow", + "MaintenanceWindowDurationInSeconds": 1, + "MaintenanceWindowStartTime": "2016-03-07T14:44:30-05:05" + }, "@odata.context": "/redfish/v1/$metadata#ComputerSystem.ComputerSystem", "@odata.id": "/redfish/v1/Systems/437XR1138R2", "@Redfish.Copyright": "Copyright 2014-2016 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
diff --git a/sushy/tests/unit/json_samples/task.json b/sushy/tests/unit/json_samples/task.json new file mode 100644 index 0000000000000000000000000000000000000000..306a2d74e84c273fc4db1c7c230cdf09628eed0d --- /dev/null +++ b/sushy/tests/unit/json_samples/task.json @@ -0,0 +1,26 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"545", + "Name":"Task 545", + "Description": "Task description", + "TaskMonitor":"/taskmon/545", + "TaskState":"Completed", + "StartTime":"2012-03-07T14:44+06:00", + "EndTime":"2012-03-07T14:45+06:00", + "TaskStatus":"OK", + "PercentComplete": 100, + "Messages":[ + { + "MessageId":"Base.1.0.PropertyNotWriteable", + "RelatedProperties":[ + "SKU" + ], + "Message":"Property %1 is read only.", + "MessageArgs":[ + "SKU" + ], + "Severity":"Warning" + } + ], + "@odata.id":"/redfish/v1/TaskService/Tasks/545" + } \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/task2.json b/sushy/tests/unit/json_samples/task2.json new file mode 100644 index 0000000000000000000000000000000000000000..fd4af6f6092cfe7611bd64a297861dd193d22700 --- /dev/null +++ b/sushy/tests/unit/json_samples/task2.json @@ -0,0 +1,11 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"546", + "Name":"Task 546", + "Description": "Task description", + "TaskMonitor":"/taskmon/546", + "TaskState":"Pending", + "TaskStatus":"OK", + "PercentComplete": 55, + "@odata.id":"/redfish/v1/TaskService/Tasks/546" + } \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/task_collection.json b/sushy/tests/unit/json_samples/task_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..6c15a614f62d345f2205eafeae4caf29b9807c88 --- /dev/null +++ b/sushy/tests/unit/json_samples/task_collection.json @@ -0,0 +1,16 @@ +{ + "@odata.context": "/redfish/v1/$metadata#TaskCollection.TaskCollection", + "@odata.id": "/redfish/v1/TaskService/Tasks", + "@odata.type": "#TaskCollection.TaskCollection", + "Description": "Collection of Tasks", + "Members": [ + { 
+ "@odata.id": "/redfish/v1/TaskService/Tasks/545" + }, + { + "@odata.id": "/redfish/v1/TaskService/Tasks/546" + } + ], + "Members@odata.count": 2, + "Name": "Task Collection" +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/task_monitor.json b/sushy/tests/unit/json_samples/task_monitor.json new file mode 100644 index 0000000000000000000000000000000000000000..5ed2a04c362d68048842871080bdfec6b653308e --- /dev/null +++ b/sushy/tests/unit/json_samples/task_monitor.json @@ -0,0 +1,25 @@ +{ + "@odata.type":"#Task.v1_4_3.Task", + "Id":"545", + "Name":"Task 545", + "Description": "Task description", + "TaskMonitor":"/taskmon/545", + "TaskState":"Completed", + "StartTime":"2012-03-07T14:44+06:00", + "EndTime":"2012-03-07T14:45+06:00", + "TaskStatus":"OK", + "PercentComplete": 100, + "Messages":[ + { + "MessageId":"Base.1.0.PropertyNotWriteable", + "RelatedProperties":[ + "SKU" + ], + "Message":"Property %1 is read only.", + "MessageArgs":[ + "SKU" + ], + "Severity":"Warning" + } + ] + } diff --git a/sushy/tests/unit/json_samples/taskservice.json b/sushy/tests/unit/json_samples/taskservice.json new file mode 100644 index 0000000000000000000000000000000000000000..8b89c9576014ceda6322f1d9a1f1638188e39768 --- /dev/null +++ b/sushy/tests/unit/json_samples/taskservice.json @@ -0,0 +1,19 @@ +{ + "@odata.type": "#TaskService.v1_1_2.TaskService", + "Id": "TaskService", + "Name": "Tasks Service", + "DateTime": "2015-03-13T04:14:33+06:00", + "CompletedTaskOverWritePolicy": "Manual", + "LifeCycleEventOnTaskStateChange": true, + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ServiceEnabled": true, + "Tasks": { + "@odata.id": "/redfish/v1/TaskService/Tasks" + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#TaskService.TaskService", + "@odata.id": "/redfish/v1/TaskService" +} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/thermal.json b/sushy/tests/unit/json_samples/thermal.json new file mode 100644 index 
0000000000000000000000000000000000000000..9e7f698a3dfe74c0dd354ad34b7a364bfdd3b67f --- /dev/null +++ b/sushy/tests/unit/json_samples/thermal.json @@ -0,0 +1,52 @@ +{ + "@odata.type": "#Thermal.v1_3_0.Thermal", + "Id": "Thermal", + "Name": "Blade Thermal", + "Temperatures": [ + { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal#/Temperatures/0", + "MemberId": "0", + "Name": "CPU Temp", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "ReadingCelsius": 62, + "UpperThresholdNonCritical": 75, + "UpperThresholdCritical": 90, + "UpperThresholdFatal": 95, + "MinReadingRangeTemp": 0, + "MaxReadingRangeTemp": 120, + "PhysicalContext": "CPU", + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6/Processors/CPU" + } + ] + } + ], + "Fans": [ + { + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal#/Fans/0", + "MemberId": "0", + "Name": "CPU Fan", + "PhysicalContext": "CPU", + "Status": { + "State": "Enabled", + "Health": "OK" + }, + "Reading": 6000, + "ReadingUnits": "RPM", + "LowerThresholdFatal": 2000, + "MinReadingRange": 0, + "MaxReadingRange": 10000, + "RelatedItem": [ + { + "@odata.id": "/redfish/v1/Systems/529QB9450R6/Processors/CPU" + } + ] + } + ], + "@odata.context": "/redfish/v1/$metadata#Thermal.Thermal", + "@odata.id": "/redfish/v1/Chassis/Blade1/Thermal" +} diff --git a/sushy/tests/unit/json_samples/updateservice.json b/sushy/tests/unit/json_samples/updateservice.json new file mode 100644 index 0000000000000000000000000000000000000000..1a67b5e91952c106f2ae1fd6d9536b6fe9c2f274 --- /dev/null +++ b/sushy/tests/unit/json_samples/updateservice.json @@ -0,0 +1,30 @@ +{ + "@odata.type": "#UpdateService.v1_2_1.UpdateService", + "Id": "UpdateService", + "Name": "Update service", + "Status": { + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "ServiceEnabled": true, + "HttpPushUri": "/FWUpdate", + "HttpPushUriTargets": ["/FWUpdate"], + "HttpPushUriTargetsBusy": false, + "FirmwareInventory": { + "@odata.id": 
"/redfish/v1/UpdateService/FirmwareInventory" + }, + "SoftwareInventory": { + "@odata.id": "/redfish/v1/UpdateService/SoftwareInventory" + }, + "Actions": { + "#UpdateService.SimpleUpdate": { + "target": "/redfish/v1/UpdateService/Actions/SimpleUpdate", + "@Redfish.ActionInfo": "/redfish/v1/UpdateService/SimpleUpdateActionInfo" + }, + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#UpdateService.UpdateService", + "@odata.id": "/redfish/v1/UpdateService" +} diff --git a/sushy/tests/unit/json_samples/updateservice_no_inv.json b/sushy/tests/unit/json_samples/updateservice_no_inv.json new file mode 100644 index 0000000000000000000000000000000000000000..c26a79f05dc0e4f038668571aa3392d906db5933 --- /dev/null +++ b/sushy/tests/unit/json_samples/updateservice_no_inv.json @@ -0,0 +1,24 @@ +{ + "@odata.type": "#UpdateService.v1_2_1.UpdateService", + "Id": "UpdateService", + "Name": "Update service", + "Status": { + "State": "Enabled", + "Health": "OK", + "HealthRollup": "OK" + }, + "ServiceEnabled": true, + "HttpPushUri": "/FWUpdate", + "HttpPushUriTargets": ["/FWUpdate"], + "HttpPushUriTargetsBusy": false, + "Actions": { + "#UpdateService.SimpleUpdate": { + "target": "/redfish/v1/UpdateService/Actions/SimpleUpdate", + "@Redfish.ActionInfo": "/redfish/v1/UpdateService/SimpleUpdateActionInfo" + }, + "Oem": {} + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#UpdateService.UpdateService", + "@odata.id": "/redfish/v1/UpdateService" +} diff --git a/sushy/tests/unit/json_samples/virtual_media.json b/sushy/tests/unit/json_samples/virtual_media.json new file mode 100644 index 0000000000000000000000000000000000000000..61af2a501009fb17741afd591859dfff168b1b8a --- /dev/null +++ b/sushy/tests/unit/json_samples/virtual_media.json @@ -0,0 +1,27 @@ +{ + "@odata.type": "#VirtualMedia.v1_2_0.VirtualMedia", + "Id": "Floppy1", + "Name": "Virtual Removable Media", + "MediaTypes": [ + "Floppy", + "USBStick" + ], + "Actions": { + "#VirtualMedia.EjectMedia": { + 
"target": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions/VirtualMedia.EjectMedia", + "title": "Mock Eject Media" + }, + "#VirtualMedia.InsertMedia": { + "target": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions/VirtualMedia.InsertMedia", + "title": "Mock Insert Media" + } + }, + "Image": "https://www.dmtf.org/freeImages/Sardine.img", + "ImageName": "Sardine2.1.43.35.6a", + "ConnectedVia": "URI", + "Inserted": true, + "WriteProtected": false, + "@odata.context": "/redfish/v1/$metadata#VirtualMedia.VirtualMedia", + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." +} diff --git a/sushy/tests/unit/json_samples/virtual_media_collection.json b/sushy/tests/unit/json_samples/virtual_media_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..5052d829c046753ef0310a4ccf6f59b50ed81355 --- /dev/null +++ b/sushy/tests/unit/json_samples/virtual_media_collection.json @@ -0,0 +1,15 @@ +{ + "@odata.type": "#VirtualMediaCollection.VirtualMediaCollection", + "Name": "Virtual Media Services", + "Description": "Redfish-BMC Virtual Media Service Settings", + "Members@odata.count": 1, + "Members": [ + { + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia/Floppy1" + } + ], + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#VirtualMediaCollection.VirtualMediaCollection", + "@odata.id": "/redfish/v1/Managers/BMC/VirtualMedia", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume.json b/sushy/tests/unit/json_samples/volume.json new file mode 100644 index 0000000000000000000000000000000000000000..f19b528b354b319c1963d61a3d4d47cf61dec894 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume.json @@ -0,0 +1,44 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "1", + "Name": "Virtual Disk 1", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "Mirrored", + "CapacityBytes": 899527000000, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "38f1818b-111e-463a-aa19-fa54f792e468" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume2.json b/sushy/tests/unit/json_samples/volume2.json new file mode 100644 index 0000000000000000000000000000000000000000..a1804f14895efc017eaafb617df8805814826e3d --- /dev/null +++ b/sushy/tests/unit/json_samples/volume2.json @@ -0,0 +1,41 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "2", + "Name": "Virtual Disk 2", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "NonRedundant", + "CapacityBytes": 107374182400, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "0324c96c-8031-4f5e-886c-50cd90aca854" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume3.json b/sushy/tests/unit/json_samples/volume3.json new file mode 100644 index 0000000000000000000000000000000000000000..d5b6b86545c96722b3e37cc686d335a46ee55636 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume3.json @@ -0,0 +1,41 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "3", + "Name": "Virtual Disk 3", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "NonRedundant", + "CapacityBytes": 1073741824000, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "eb179a30-6f87-4fdb-8f92-639eb7aaabcb" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/3/Storage/RAIDIntegrated/Volumes/1/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume4.json b/sushy/tests/unit/json_samples/volume4.json new file mode 100644 index 0000000000000000000000000000000000000000..e5c5be405e451fe589cf1a2f3c3462576096c224 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume4.json @@ -0,0 +1,42 @@ +{ + "@odata.type": "#Volume.v1_0_3.Volume", + "Id": "4", + "Name": "My Volume 4", + "Status": { + "@odata.type": "#Resource.Status", + "State": "Enabled", + "Health": "OK" + }, + "Encrypted": false, + "VolumeType": "Mirrored", + "RAIDType": "RAID1", + "CapacityBytes": 107374182400, + "Identifiers": [ + { + "@odata.type": "#Resource.v1_1_0.Identifier", + "DurableNameFormat": "UUID", + "DurableName": "eb179a30-6f87-4fdb-8f92-639eb7aaabcb" + } + ], + "Links": { + "@odata.type": "#Volume.v1_0_0.Links", + "Drives": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2" + } + ] + }, + "Actions": { + "@odata.type": "#Volume.v1_0_0.Actions", + "#Volume.Initialize": { + "target": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4/Actions/Volume.Initialize", + "InitializeType@Redfish.AllowableValues": [ + "Fast", + "Slow" + ] + } + }, + "@odata.context": "/redfish/v1/$metadata#Volume.Volume", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/json_samples/volume_collection.json b/sushy/tests/unit/json_samples/volume_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..71123a84f36de0073473970d611501f06cb1aa24 --- /dev/null +++ b/sushy/tests/unit/json_samples/volume_collection.json @@ -0,0 +1,30 @@ +{ + "@odata.type": "#VolumeCollection.VolumeCollection", + "Name": "Storage Volume Collection", + "Description": "Storage Volume Collection", + "Members@odata.count": 3, + "Members": [ + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2" + }, + { + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3" + } + ], + "@Redfish.OperationApplyTimeSupport": { + "@odata.type": "#Settings.v1_2_0.OperationApplyTimeSupport", + "SupportedValues": [ "Immediate", "OnReset", "AtMaintenanceWindowStart" ], + "MaintenanceWindowStartTime": "2017-05-03T23:12:37-05:00", + "MaintenanceWindowDurationInSeconds": 600, + "MaintenanceWindowResource": { + "@odata.id": "/redfish/v1/Systems/437XR1138R2" + } + }, + "Oem": {}, + "@odata.context": "/redfish/v1/$metadata#VolumeCollection.VolumeCollection", + "@odata.id": "/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes", + "@Redfish.Copyright": "Copyright 2014-2017 Distributed Management Task Force, Inc. (DMTF). For the full DMTF copyright policy, see http://www.dmtf.org/about/policies/copyright." 
+} \ No newline at end of file diff --git a/sushy/tests/unit/resources/chassis/__init__.py b/sushy/tests/unit/resources/chassis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/chassis/test_chassis.py b/sushy/tests/unit/resources/chassis/test_chassis.py new file mode 100644 index 0000000000000000000000000000000000000000..7bdbc134703747f11b0f29dbc16088ccebd92a2e --- /dev/null +++ b/sushy/tests/unit/resources/chassis/test_chassis.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + + +import sushy +from sushy import exceptions +from sushy.resources.chassis import chassis +from sushy.resources.manager import manager +from sushy.resources.system import system +from sushy.tests.unit import base + + +class ChassisTestCase(base.TestCase): + + def setUp(self): + super(ChassisTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/chassis.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.chassis = chassis.Chassis(self.conn, '/redfish/v1/Chassis/Blade1', + redfish_version='1.8.0') + + def test__parse_attributes(self): + # | WHEN | + self.chassis._parse_attributes(self.json_doc) + # | THEN | + self.assertEqual('1.8.0', self.chassis.redfish_version) + self.assertEqual('Blade1', self.chassis.identity) + self.assertEqual('Blade', self.chassis.name) + self.assertEqual('Test description', self.chassis.description) + self.assertEqual('45Z-2381', self.chassis.asset_tag) + self.assertEqual(sushy.CHASSIS_TYPE_BLADE, + self.chassis.chassis_type) + self.assertEqual('Contoso', self.chassis.manufacturer) + self.assertEqual('SX1000', self.chassis.model) + self.assertEqual('529QB9450R6', self.chassis.serial_number) + self.assertEqual('6914260', self.chassis.sku) + self.assertEqual('166480-S23', self.chassis.part_number) + self.assertEqual('FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF', + self.chassis.uuid) + self.assertEqual(sushy.INDICATOR_LED_OFF, + self.chassis.indicator_led) + self.assertEqual(sushy.POWER_STATE_ON, + self.chassis.power_state) + self.assertEqual(sushy.STATE_ENABLED, self.chassis.status.state) + self.assertEqual(44.45, self.chassis.height_mm) + self.assertEqual(431.8, self.chassis.width_mm) + self.assertEqual(711, self.chassis.depth_mm) + self.assertEqual(15.31, self.chassis.weight_kg) + self.assertEqual(sushy.HEALTH_OK, self.chassis.status.health) + self.assertEqual(sushy.CHASSIS_INTRUSION_SENSOR_NORMAL, + 
self.chassis.physical_security.intrusion_sensor) + self.assertEqual(123, + self.chassis.physical_security.intrusion_sensor_number + ) + self.assertEqual(sushy.CHASSIS_INTRUSION_SENSOR_RE_ARM_MANUAL, + self.chassis.physical_security.intrusion_sensor_re_arm + ) + + def test__parse_attributes_return(self): + attributes = self.chassis._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Blade', attributes.get('name')) + self.assertEqual(sushy.INDICATOR_LED_OFF, + attributes.get('indicator_led')) + self.assertEqual(sushy.POWER_STATE_ON, attributes.get('power_state')) + self.assertEqual({'intrusion_sensor': + sushy.CHASSIS_INTRUSION_SENSOR_NORMAL, + 'intrusion_sensor_number': + 123, + 'intrusion_sensor_re_arm': + 'manual re arm chassis intrusion sensor'}, + attributes.get('physical_security')) + + def test_get_allowed_reset_chasis_values(self): + # | GIVEN | + expected = {sushy.RESET_TYPE_POWER_CYCLE, + sushy.RESET_TYPE_PUSH_POWER_BUTTON, + sushy.RESET_TYPE_FORCE_ON, sushy.RESET_TYPE_NMI, + sushy.RESET_TYPE_FORCE_RESTART, + sushy.RESET_TYPE_GRACEFUL_RESTART, sushy.RESET_TYPE_ON, + sushy.RESET_TYPE_FORCE_OFF, + sushy.RESET_TYPE_GRACEFUL_SHUTDOWN} + # | WHEN | + values = self.chassis.get_allowed_reset_chassis_values() + # | THEN | + self.assertEqual(expected, values) + self.assertIsInstance(values, set) + + def test_get_allowed_reset_chassis_values_for_no_values_set(self): + # | GIVEN | + self.chassis._actions.reset.allowed_values = [] + expected = {sushy.RESET_TYPE_POWER_CYCLE, + sushy.RESET_TYPE_PUSH_POWER_BUTTON, + sushy.RESET_TYPE_FORCE_ON, sushy.RESET_TYPE_NMI, + sushy.RESET_TYPE_FORCE_RESTART, + sushy.RESET_TYPE_GRACEFUL_RESTART, sushy.RESET_TYPE_ON, + sushy.RESET_TYPE_FORCE_OFF, + sushy.RESET_TYPE_GRACEFUL_SHUTDOWN} + # | WHEN | + values = self.chassis.get_allowed_reset_chassis_values() + # | THEN | + self.assertEqual(expected, values) + self.assertIsInstance(values, set) + + def 
test_get_allowed_reset_chassis_values_missing_action_reset_attr(self): + # | GIVEN | + self.chassis._actions.reset = None + # | WHEN & THEN | + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #Chassis.Reset') + + def test_reset_chassis(self): + self.chassis.reset_chassis(sushy.RESET_TYPE_GRACEFUL_RESTART) + self.chassis._conn.post.assert_called_once_with( + '/redfish/v1/Chassis/Blade1/Actions/Chassis.Reset', + data={'ResetType': 'GracefulRestart'}) + + def test_reset_chassis_with_invalid_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.chassis.reset_chassis, 'invalid-value') + + def test_set_indicator_led(self): + with mock.patch.object( + self.chassis, 'invalidate', autospec=True) as invalidate_mock: + self.chassis.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + self.chassis._conn.patch.assert_called_once_with( + '/redfish/v1/Chassis/Blade1', + data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.chassis.set_indicator_led, + 'spooky-glowing') + + def test_managers(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'manager.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_managers = self.chassis.managers + self.assertIsInstance(actual_managers[0], manager.Manager) + self.assertEqual( + '/redfish/v1/Managers/Blade1BMC', actual_managers[0].path) + + def test_systems(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_systems = self.chassis.systems + self.assertIsInstance(actual_systems[0], system.System) + self.assertEqual( + '/redfish/v1/Systems/529QB9450R6', actual_systems[0].path) + + +class ChassisCollectionTestCase(base.TestCase): + + def setUp(self): + 
super(ChassisCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'chassis_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.chassis = chassis.ChassisCollection( + self.conn, '/redfish/v1/Chassis', redfish_version='1.5.0') + + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_member(self, chassis_mock): + self.chassis.get_member('/redfish/v1/Chassis/MultiBladeEncl') + chassis_mock.assert_called_once_with( + self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', + self.chassis.redfish_version, None) + + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_members(self, chassis_mock): + members = self.chassis.get_members() + calls = [ + mock.call(self.chassis._conn, '/redfish/v1/Chassis/MultiBladeEncl', + self.chassis.redfish_version, None), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade1', + self.chassis.redfish_version, None), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade2', + self.chassis.redfish_version, None), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade3', + self.chassis.redfish_version, None), + mock.call(self.chassis._conn, '/redfish/v1/Chassis/Blade4', + self.chassis.redfish_version, None) + ] + chassis_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(5, len(members)) diff --git a/sushy/tests/unit/resources/chassis/test_power.py b/sushy/tests/unit/resources/chassis/test_power.py new file mode 100644 index 0000000000000000000000000000000000000000..ffe5727a5ece52a23df165c41b863746dc37d6a0 --- /dev/null +++ b/sushy/tests/unit/resources/chassis/test_power.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +from sushy.resources.chassis.power import power +from sushy.tests.unit import base + + +class PowerTestCase(base.TestCase): + + def setUp(self): + super(PowerTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/power.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.power = power.Power( + self.conn, '/redfish/v1/Chassis/MultiBladeEnc1/Power', + redfish_version='1.5.0') + + def test__parse_attributes(self): + self.power._parse_attributes(self.json_doc) + self.assertEqual('1.5.0', self.power.redfish_version) + self.assertEqual('Power', self.power.identity) + self.assertEqual('Quad Blade Chassis Power', self.power.name) + + self.assertEqual('0', self.power.power_supplies[0].identity) + self.assertEqual('Power Supply 0', self.power.power_supplies[0].name) + self.assertEqual('enabled', self.power.power_supplies[0].status.state) + self.assertEqual('ok', self.power.power_supplies[0].status.health) + self.assertEqual('ac', self.power.power_supplies[0].power_supply_type) + self.assertEqual('ac240v', + self.power.power_supplies[0].line_input_voltage_type) + self.assertEqual(220, self.power.power_supplies[0].line_input_voltage) + self.assertEqual(1450, + self.power.power_supplies[0].power_capacity_watts) + self.assertEqual( + 'ac', + self.power.power_supplies[0].input_ranges[0].input_type + ) + self.assertEqual( + 185, + self.power.power_supplies[0].input_ranges[0].minimum_voltage + ) + self.assertEqual( + 250, + 
self.power.power_supplies[0].input_ranges[0].maximum_voltage + ) + self.assertEqual( + 47, + self.power.power_supplies[0].input_ranges[0].minimum_frequency_hz + ) + self.assertEqual( + 63, + self.power.power_supplies[0].input_ranges[0].maximum_frequency_hz + ) + self.assertEqual( + 1450, + self.power.power_supplies[0].input_ranges[0].output_wattage + ) + self.assertEqual(650, + self.power.power_supplies[0].last_power_output_watts) + self.assertEqual('325457-A06', self.power.power_supplies[0].model) + self.assertEqual('Cyberdyne', + self.power.power_supplies[0].manufacturer) + self.assertEqual('2.20', + self.power.power_supplies[0].firmware_version) + self.assertEqual('1S0000523', + self.power.power_supplies[0].serial_number) + self.assertEqual('425-591-654', + self.power.power_supplies[0].part_number) + self.assertEqual('425-591-654', + self.power.power_supplies[0].spare_part_number) + + self.assertEqual('1', self.power.power_supplies[1].identity) + self.assertEqual('Power Supply 1', self.power.power_supplies[1].name) + self.assertEqual('enabled', self.power.power_supplies[1].status.state) + self.assertEqual('ok', self.power.power_supplies[1].status.health) + self.assertEqual('ac', self.power.power_supplies[1].power_supply_type) + self.assertEqual('ac240v', + self.power.power_supplies[1].line_input_voltage_type) + self.assertEqual(222, self.power.power_supplies[1].line_input_voltage) + self.assertEqual(1450, + self.power.power_supplies[1].power_capacity_watts) + self.assertEqual( + 'ac', + self.power.power_supplies[1].input_ranges[0].input_type + ) + self.assertEqual( + 185, + self.power.power_supplies[1].input_ranges[0].minimum_voltage + ) + self.assertEqual( + 250, + self.power.power_supplies[1].input_ranges[0].maximum_voltage + ) + self.assertEqual( + 47, + self.power.power_supplies[1].input_ranges[0].minimum_frequency_hz + ) + self.assertEqual( + 63, + self.power.power_supplies[1].input_ranges[0].maximum_frequency_hz + ) + self.assertEqual( + 1450, + 
self.power.power_supplies[1].input_ranges[0].output_wattage + ) + self.assertEqual(635, + self.power.power_supplies[1].last_power_output_watts) + self.assertEqual('325457-A06', self.power.power_supplies[1].model) + self.assertEqual('Cyberdyne', + self.power.power_supplies[1].manufacturer) + self.assertEqual('2.20', + self.power.power_supplies[1].firmware_version) + self.assertEqual('1S0000524', + self.power.power_supplies[1].serial_number) + self.assertEqual('425-591-654', + self.power.power_supplies[1].part_number) + self.assertEqual('425-591-654', + self.power.power_supplies[1].spare_part_number) + + def test__parse_attributes_return(self): + attributes = self.power._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Quad Blade Chassis Power', attributes.get('name')) + self.assertEqual([{'firmware_version': '2.20', + 'identity': '0', + 'indicator_led': None, + 'input_ranges': + [{'input_type': 'ac', + 'maximum_frequency_hz': 63, + 'maximum_voltage': 250, + 'minimum_frequency_hz': 47, + 'minimum_voltage': 185, + 'output_wattage': 1450}], + 'last_power_output_watts': 650, + 'line_input_voltage': 220, + 'line_input_voltage_type': 'ac240v', + 'manufacturer': 'Cyberdyne', + 'model': '325457-A06', + 'name': 'Power Supply 0', + 'part_number': '425-591-654', + 'power_capacity_watts': 1450, + 'power_supply_type': 'ac', + 'serial_number': '1S0000523', + 'spare_part_number': '425-591-654', + 'status': {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}}, + {'firmware_version': '2.20', + 'identity': '1', + 'indicator_led': None, + 'input_ranges': + [{'input_type': 'ac', + 'maximum_frequency_hz': 63, + 'maximum_voltage': 250, + 'minimum_frequency_hz': 47, + 'minimum_voltage': 185, + 'output_wattage': 1450}], + 'last_power_output_watts': 635, + 'line_input_voltage': 222, + 'line_input_voltage_type': 'ac240v', + 'manufacturer': 'Cyberdyne', + 'model': '325457-A06', + 'name': 'Power Supply 1', + 'part_number': 
'425-591-654', + 'power_capacity_watts': 1450, + 'power_supply_type': 'ac', + 'serial_number': '1S0000524', + 'spare_part_number': '425-591-654', + 'status': {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}}], + attributes.get('power_supplies')) diff --git a/sushy/tests/unit/resources/chassis/test_thermal.py b/sushy/tests/unit/resources/chassis/test_thermal.py new file mode 100644 index 0000000000000000000000000000000000000000..830e6ccc72d669c97b0f09b6f78fbf06139e8095 --- /dev/null +++ b/sushy/tests/unit/resources/chassis/test_thermal.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + + +from sushy.resources.chassis.thermal import thermal +from sushy.tests.unit import base + + +class ThermalTestCase(base.TestCase): + + def setUp(self): + super(ThermalTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/thermal.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.thermal = thermal.Thermal( + self.conn, '/redfish/v1/Chassis/Blade1/Thermal', + redfish_version='1.5.0') + + def test__parse_attributes(self): + self.thermal._parse_attributes(self.json_doc) + self.assertEqual('1.5.0', self.thermal.redfish_version) + self.assertEqual('Thermal', self.thermal.identity) + self.assertEqual('Blade Thermal', self.thermal.name) + + self.assertEqual('0', self.thermal.fans[0].identity) + self.assertEqual('CPU Fan', self.thermal.fans[0].name) + self.assertEqual('CPU', self.thermal.fans[0].physical_context) + self.assertEqual('enabled', self.thermal.fans[0].status.state) + self.assertEqual('ok', self.thermal.fans[0].status.health) + self.assertEqual(6000, self.thermal.fans[0].reading) + self.assertEqual('RPM', self.thermal.fans[0].reading_units) + self.assertEqual(2000, self.thermal.fans[0].lower_threshold_fatal) + self.assertEqual(0, self.thermal.fans[0].min_reading_range) + self.assertEqual(10000, self.thermal.fans[0].max_reading_range) + + self.assertEqual('0', self.thermal.temperatures[0].identity) + self.assertEqual('CPU Temp', self.thermal.temperatures[0].name) + self.assertEqual('enabled', self.thermal.temperatures[0].status.state) + self.assertEqual('ok', self.thermal.temperatures[0].status.health) + self.assertEqual(62, self.thermal.temperatures[0].reading_celsius) + self.assertEqual( + 75, + self.thermal.temperatures[0].upper_threshold_non_critical + ) + self.assertEqual( + 90, + self.thermal.temperatures[0].upper_threshold_critical + ) + self.assertEqual( + 95, + 
self.thermal.temperatures[0].upper_threshold_fatal + ) + self.assertEqual(0, + self.thermal.temperatures[0].min_reading_range_temp) + self.assertEqual(120, + self.thermal.temperatures[0].max_reading_range_temp) + self.assertEqual('CPU', self.thermal.temperatures[0].physical_context) + + def test__parse_attributes_return(self): + attributes = self.thermal._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual([{'identity': '0', + 'indicator_led': None, + 'lower_threshold_critical': None, + 'lower_threshold_fatal': 2000, + 'lower_threshold_non_critical': None, + 'manufacturer': None, + 'max_reading_range': 10000, + 'min_reading_range': 0, + 'model': None, + 'name': 'CPU Fan', + 'part_number': None, + 'physical_context': 'CPU', + 'reading': 6000, + 'reading_units': 'RPM', + 'serial_number': None, + 'status': + {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}, + 'upper_threshold_critical': None, + 'upper_threshold_fatal': None, + 'upper_threshold_non_critical': None}], + attributes.get('fans')) + self.assertEqual([{'identity': '0', + 'lower_threshold_critical': None, + 'lower_threshold_fatal': None, + 'lower_threshold_non_critical': None, + 'max_allowable_operating_value': None, + 'max_reading_range_temp': 120, + 'min_allowable_operating_value': None, + 'min_reading_range_temp': 0, + 'name': 'CPU Temp', + 'physical_context': 'CPU', + 'reading_celsius': 62, + 'sensor_number': None, + 'status': {'health': 'ok', 'health_rollup': None, + 'state': 'enabled'}, + 'upper_threshold_critical': 90, + 'upper_threshold_fatal': 95, + 'upper_threshold_non_critical': 75}], + attributes.get('temperatures')) diff --git a/sushy/tests/unit/resources/compositionservice/__init__.py b/sushy/tests/unit/resources/compositionservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/sushy/tests/unit/resources/compositionservice/test_compositionservice.py b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py new file mode 100644 index 0000000000000000000000000000000000000000..ad5f3b7ffcd601a28a1582203eb9a9e255af5aef --- /dev/null +++ b/sushy/tests/unit/resources/compositionservice/test_compositionservice.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from unittest import mock + +from sushy.resources.compositionservice import compositionservice +from sushy.resources.compositionservice import resourceblock +from sushy.resources.compositionservice import resourcezone +from sushy.resources import constants as res_cons +from sushy.tests.unit import base + + +class CompositionServiceTestCase(base.TestCase): + + def setUp(self): + super(CompositionServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open( + 'sushy/tests/unit/json_samples/compositionservice.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.comp_ser = compositionservice.CompositionService( + self.conn, + '/redfish/v1/CompositionService', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.comp_ser._parse_attributes(self.json_doc) + self.assertFalse(self.comp_ser.allow_overprovisioning) + self.assertTrue(self.comp_ser.allow_zone_affinity) + self.assertTrue(self.comp_ser.description, 'CompositionService1') + self.assertEqual( + 
'CompositionService', + self.comp_ser.identity) + self.assertEqual( + 'Composition Service', + self.comp_ser.name) + self.assertEqual(res_cons.STATE_ENABLED, self.comp_ser.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.comp_ser.status.health) + self.assertTrue(self.comp_ser.service_enabled) + + @mock.patch.object(resourceblock, 'ResourceBlockCollection', autospec=True) + def test_get_resource_blocks(self, mock_resourceblock_col): + _ = self.comp_ser.resource_blocks + mock_resourceblock_col.assert_called_once_with( + self.comp_ser._conn, + self.comp_ser._get_resource_blocks_collection_path, + self.comp_ser.redfish_version, None) + + @mock.patch.object(resourcezone, 'ResourceZoneCollection', autospec=True) + def test_get_resource_zones(self, mock_resourcezone_col): + _ = self.comp_ser.resource_zones + mock_resourcezone_col.assert_called_once_with( + self.comp_ser._conn, + self.comp_ser._get_resource_zones_collection_path, + self.comp_ser.redfish_version, None) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourceblock.py b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py new file mode 100644 index 0000000000000000000000000000000000000000..ce03bdbc136554342d9ca03d6c2ffee9cf9df3d8 --- /dev/null +++ b/sushy/tests/unit/resources/compositionservice/test_resourceblock.py @@ -0,0 +1,110 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.compositionservice import constants as res_block_cons +from sushy.resources.compositionservice import resourceblock +from sushy.resources import constants as res_cons +from sushy.tests.unit import base + + +class ResourceBlockTestCase(base.TestCase): + + def setUp(self): + super(ResourceBlockTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/resourceblock.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.res_block = resourceblock.ResourceBlock( + self.conn, + '/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.res_block._parse_attributes(self.json_doc) + self.assertEqual( + res_block_cons.COMPOSITION_STATE_COMPOSED, + self.res_block.composition_status.composition_state) + self.assertEqual(1, self.res_block.composition_status.max_compositions) + self.assertEqual( + 0, self.res_block.composition_status.number_of_compositions) + self.assertFalse(self.res_block.composition_status.reserved_state) + self.assertTrue(self.res_block.composition_status.sharing_capable) + self.assertFalse(self.res_block.composition_status.sharing_enabled) + self.assertEqual('ResourceBlock1', self.res_block.description) + self.assertEqual('DriveBlock3', self.res_block.identity) + self.assertEqual('Drive Block 3', self.res_block.name) + self.assertEqual( + res_block_cons.RESOURCE_BLOCK_TYPE_STORAGE, + self.res_block.resource_block_type) + self.assertEqual( + res_cons.STATE_ENABLED, + self.res_block.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.res_block.status.health) + exp_path = '/redfish/v1/CompositionService/ResourceBlocks/DriveBlock3' + self.assertEqual(exp_path, self.res_block.path) + + def test__parse_attributes_missing_identity(self): + self.res_block.json.pop('Id') + self.assertRaisesRegex( + 
exceptions.MissingAttributeError, 'attribute Id', + self.res_block._parse_attributes, self.json_doc) + + +class ResourceBlockCollectionTestCase(base.TestCase): + + def setUp(self): + super(ResourceBlockCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'resourceblock_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.res_block_col = resourceblock.ResourceBlockCollection( + self.conn, '/redfish/v1/CompositionService/ResourceBlocks', + '1.0.2', None) + + def test__parse_attributes(self): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + self.res_block_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.res_block_col.redfish_version) + self.assertEqual( + 'Resource Block Collection', + self.res_block_col.name) + self.assertEqual((path,), self.res_block_col.members_identities) + + @mock.patch.object(resourceblock, 'ResourceBlock', autospec=True) + def test_get_member(self, mock_resourceblock): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + self.res_block_col.get_member(path) + mock_resourceblock.assert_called_once_with( + self.res_block_col._conn, path, + self.res_block_col.redfish_version, None) + + @mock.patch.object(resourceblock, 'ResourceBlock', autospec=True) + def test_get_members(self, mock_resourceblock): + path = '/redfish/v1/CompositionService/ResourceBlocks/ComputeBlock1' + members = self.res_block_col.get_members() + mock_resourceblock.assert_called_once_with( + self.res_block_col._conn, path, + self.res_block_col.redfish_version, None) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/compositionservice/test_resourcezone.py b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py new file mode 100644 index 
0000000000000000000000000000000000000000..0f0b4dae7cff9541a14e1ebb647403a7d09ab392 --- /dev/null +++ b/sushy/tests/unit/resources/compositionservice/test_resourcezone.py @@ -0,0 +1,97 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.compositionservice import resourcezone +from sushy.resources import constants as res_cons +from sushy.tests.unit import base + + +class ResourceZoneTestCase(base.TestCase): + + def setUp(self): + super(ResourceZoneTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/resourcezone.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.res_zone = resourcezone.ResourceZone( + self.conn, + '/redfish/v1/CompositionService/ResourceZones/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.res_zone._parse_attributes(self.json_doc) + self.assertEqual('ResourceZone1', self.res_zone.description) + self.assertEqual('1', self.res_zone.identity) + self.assertEqual('Resource Zone 1', self.res_zone.name) + self.assertEqual( + res_cons.STATE_ENABLED, + self.res_zone.status.state) + self.assertEqual( + res_cons.HEALTH_OK, + self.res_zone.status.health) + exp_path = '/redfish/v1/CompositionService/ResourceZones/1' + self.assertEqual(exp_path, self.res_zone.path) + + def test__parse_attributes_missing_identity(self): + 
self.res_zone.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.res_zone._parse_attributes, self.json_doc) + + +class ResourceZoneCollectionTestCase(base.TestCase): + + def setUp(self): + super(ResourceZoneCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'resourcezone_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.res_zone_col = resourcezone.ResourceZoneCollection( + self.conn, '/redfish/v1/CompositionService/ResourceZones', + '1.0.2', None) + + def test__parse_attributes(self): + path = '/redfish/v1/CompositionService/ResourceZones/1' + self.res_zone_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.res_zone_col.redfish_version) + self.assertEqual('Resource Zone Collection', self.res_zone_col.name) + self.assertEqual((path,), self.res_zone_col.members_identities) + + @mock.patch.object(resourcezone, 'ResourceZone', autospec=True) + def test_get_member(self, mock_resourcezone): + path = '/redfish/v1/CompositionService/ResourceZones/1' + self.res_zone_col.get_member(path) + mock_resourcezone.assert_called_once_with( + self.res_zone_col._conn, path, + self.res_zone_col.redfish_version, None) + + @mock.patch.object(resourcezone, 'ResourceZone', autospec=True) + def test_get_members(self, mock_resourcezone): + path = '/redfish/v1/CompositionService/ResourceZones/1' + members = self.res_zone_col.get_members() + mock_resourcezone.assert_called_once_with( + self.res_zone_col._conn, path, + self.res_zone_col.redfish_version, None) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/fabric/__init__.py b/sushy/tests/unit/resources/fabric/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/sushy/tests/unit/resources/fabric/test_endpoint.py b/sushy/tests/unit/resources/fabric/test_endpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..0b3b1b7fa53f607bfa6820806f9bdec88c43a29d --- /dev/null +++ b/sushy/tests/unit/resources/fabric/test_endpoint.py @@ -0,0 +1,51 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +import sushy +from sushy.resources.fabric import endpoint +from sushy.tests.unit import base + + +class EndpointTestCase(base.TestCase): + + def setUp(self): + super(EndpointTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'endpoint.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.fab_endpoint = endpoint.Endpoint( + self.conn, '/redfish/v1/Fabrics/SAS/Endpoints/Drive1', + redfish_version='1.0.2') + + def test__parse_atrtributes(self): + self.fab_endpoint._parse_attributes(self.json_doc) + self.assertEqual('Drive1', self.fab_endpoint.identity) + self.assertEqual('SAS Drive', self.fab_endpoint.name) + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, + self.fab_endpoint.endpoint_protocol) + self.assertEqual(sushy.ENTITY_TYPE_DRIVE, + self.fab_endpoint.connected_entities[0].entity_type) + self.assertEqual(sushy.ENTITY_ROLE_TARGET, + self.fab_endpoint.connected_entities[0].entity_role) + con_entity = self.fab_endpoint.connected_entities[0] + 
self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + con_entity.identifiers[0].durable_name_format) + self.assertEqual('32ADF365C6C1B7C3', + con_entity.identifiers[0].durable_name) diff --git a/sushy/tests/unit/resources/fabric/test_fabric.py b/sushy/tests/unit/resources/fabric/test_fabric.py new file mode 100644 index 0000000000000000000000000000000000000000..d6155e05ad006afb4495518071720a6b7a56612a --- /dev/null +++ b/sushy/tests/unit/resources/fabric/test_fabric.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + + +import sushy +from sushy.resources.fabric import endpoint +from sushy.resources.fabric import fabric +from sushy.tests.unit import base + + +class FabricTestCase(base.TestCase): + + def setUp(self): + super(FabricTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/fabric.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.fabric = fabric.Fabric(self.conn, '/redfish/v1/Fabrics/SAS', + redfish_version='1.0.3') + + def test__parse_attributes(self): + # | WHEN | + self.fabric._parse_attributes(self.json_doc) + # | THEN | + self.assertEqual('1.0.3', self.fabric.redfish_version) + self.assertEqual('SAS', self.fabric.identity) + self.assertEqual('SAS Fabric', self.fabric.name) + self.assertEqual('A SAS Fabric with redundant switches.', + self.fabric.description) + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, + self.fabric.fabric_type) + self.assertEqual(sushy.STATE_ENABLED, self.fabric.status.state) + self.assertEqual(sushy.HEALTH_OK, self.fabric.status.health) + + def test_endpoints(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + endpoint_collection_return_value = json.load(f) + + with open('sushy/tests/unit/json_samples/' + 'endpoint.json') as f: + endpoint_return_value = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + endpoint_collection_return_value, endpoint_return_value] + + # | WHEN | + actual_endpoints = self.fabric.endpoints + + # | THEN | + self.assertIsInstance(actual_endpoints, + endpoint.EndpointCollection) + self.assertEqual(actual_endpoints.name, 'Endpoint Collection') + + member = actual_endpoints.get_member( + '/redfish/v1/Fabrics/SAS/Endpoints/Drive1') + + self.assertEqual(member.name, "SAS Drive") + self.assertEqual(member.endpoint_protocol, sushy.PROTOCOL_TYPE_SAS) + + def test_endpoints_on_refresh(self): + # | GIVEN | + with 
open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + endpts = self.fabric.endpoints + self.assertIsInstance(endpts, endpoint.EndpointCollection) + + # On refreshing the fabric instance... + with open('sushy/tests/unit/json_samples/fabric.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.fabric.invalidate() + self.fabric.refresh(force=False) + + # | WHEN & THEN | + self.assertTrue(endpts._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'endpoint_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + self.assertIsInstance(self.fabric.endpoints, + endpoint.EndpointCollection) + self.assertFalse(endpts._is_stale) + + +class FabricCollectionTestCase(base.TestCase): + + def setUp(self): + super(FabricCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'fabric_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.fabric = fabric.FabricCollection( + self.conn, '/redfish/v1/Fabrics', '1.0.3', None) + + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_member(self, fabric_mock): + self.fabric.get_member('/redfish/v1/Fabrics/SAS1') + fabric_mock.assert_called_once_with( + self.fabric._conn, '/redfish/v1/Fabrics/SAS1', + self.fabric.redfish_version, None) + + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_members(self, fabric_mock): + members = self.fabric.get_members() + calls = [ + mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS1', + self.fabric.redfish_version, None), + mock.call(self.fabric._conn, '/redfish/v1/Fabrics/SAS2', + self.fabric.redfish_version, None) + ] + fabric_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(2, len(members)) diff --git 
a/sushy/tests/unit/resources/manager/test_manager.py b/sushy/tests/unit/resources/manager/test_manager.py index 1e78be7287771c3917621a163552df249f823751..4de5f3d4a7e8cbfe992fdba11260120fdf4b07c4 100644 --- a/sushy/tests/unit/resources/manager/test_manager.py +++ b/sushy/tests/unit/resources/manager/test_manager.py @@ -11,12 +11,15 @@ # under the License. import json +from unittest import mock -import mock import sushy from sushy import exceptions +from sushy.resources.chassis import chassis from sushy.resources.manager import manager +from sushy.resources.manager import virtual_media +from sushy.resources.system import system from sushy.tests.unit import base @@ -25,18 +28,21 @@ class ManagerTestCase(base.TestCase): def setUp(self): super(ManagerTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/manager.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/manager.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.manager = manager.Manager(self.conn, '/redfish/v1/Managers/BMC', redfish_version='1.0.2') def test__parse_attributes(self): # | WHEN | - self.manager._parse_attributes() + self.manager._parse_attributes(self.json_doc) # | THEN | self.assertEqual('1.0.2', self.manager.redfish_version) self.assertEqual('1.00', self.manager.firmware_version) + self.assertFalse(self.manager.auto_dst_enabled) self.assertEqual(True, self.manager.graphical_console.service_enabled) self.assertEqual( 2, self.manager.graphical_console.max_concurrent_sessions) @@ -206,6 +212,88 @@ class ManagerTestCase(base.TestCase): self.assertRaises(exceptions.InvalidParameterValueError, self.manager.reset_manager, 'invalid-value') + def test_virtual_media(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + virtual_media_collection_return_value = json.load(f) + + 
with open('sushy/tests/unit/json_samples/' + 'virtual_media.json') as f: + virtual_media_return_value = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + virtual_media_collection_return_value, virtual_media_return_value] + + # | WHEN | + actual_virtual_media = self.manager.virtual_media + + # | THEN | + self.assertIsInstance(actual_virtual_media, + virtual_media.VirtualMediaCollection) + self.assertEqual(actual_virtual_media.name, 'Virtual Media Services') + + member = actual_virtual_media.get_member( + '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1') + + self.assertEqual(member.image_name, "Sardine2.1.43.35.6a") + self.assertTrue(member.inserted) + self.assertFalse(member.write_protected) + + def test_virtual_media_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + vrt_media = self.manager.virtual_media + self.assertIsInstance(vrt_media, virtual_media.VirtualMediaCollection) + + # On refreshing the manager instance... 
+ with open('sushy/tests/unit/json_samples/manager.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.manager.invalidate() + self.manager.refresh(force=False) + + # | WHEN & THEN | + self.assertTrue(vrt_media._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'virtual_media_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + self.assertIsInstance(self.manager.virtual_media, + virtual_media.VirtualMediaCollection) + self.assertFalse(vrt_media._is_stale) + + def test_systems(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_systems = self.manager.systems + self.assertIsInstance(actual_systems[0], system.System) + self.assertEqual( + '/redfish/v1/Systems/437XR1138R2', actual_systems[0].path) + + def test_chassis(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'chassis.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_chassis = self.manager.chassis + self.assertIsInstance(actual_chassis[0], chassis.Chassis) + self.assertEqual( + '/redfish/v1/Chassis/1U', actual_chassis[0].path) + class ManagerCollectionTestCase(base.TestCase): @@ -213,8 +301,8 @@ class ManagerCollectionTestCase(base.TestCase): super(ManagerCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'manager_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'manager_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) self.managers = manager.ManagerCollection( self.conn, '/redfish/v1/Managers', redfish_version='1.0.2') @@ -223,13 +311,13 @@ class ManagerCollectionTestCase(base.TestCase): self.managers.get_member('/redfish/v1/Managers/BMC') 
Manager_mock.assert_called_once_with( self.managers._conn, '/redfish/v1/Managers/BMC', - redfish_version=self.managers.redfish_version) + self.managers.redfish_version, None) @mock.patch.object(manager, 'Manager', autospec=True) def test_get_members(self, Manager_mock): members = self.managers.get_members() Manager_mock.assert_called_once_with( self.managers._conn, '/redfish/v1/Managers/BMC', - redfish_version=self.managers.redfish_version) + self.managers.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/manager/test_virtual_media.py b/sushy/tests/unit/resources/manager/test_virtual_media.py new file mode 100644 index 0000000000000000000000000000000000000000..fd52af8cf26cd29232107fc5500141524ccd6120 --- /dev/null +++ b/sushy/tests/unit/resources/manager/test_virtual_media.py @@ -0,0 +1,208 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from http import client as http_client +import json +from unittest import mock + + +import sushy +from sushy import exceptions +from sushy.resources.manager import virtual_media +from sushy.tests.unit import base + + +class VirtualMediaTestCase(base.TestCase): + + def setUp(self): + super(VirtualMediaTestCase, self).setUp() + self.conn = mock.Mock() + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD'} + with open('sushy/tests/unit/json_samples/' + 'virtual_media.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.sys_virtual_media = virtual_media.VirtualMedia( + self.conn, '/redfish/v1/Managers/BMC/VirtualMedia/Floppy1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_virtual_media._parse_attributes(self.json_doc) + self.assertEqual('Virtual Removable Media', + self.sys_virtual_media.name) + self.assertEqual('Floppy1', self.sys_virtual_media.identity) + self.assertEqual('https://www.dmtf.org/freeImages/Sardine.img', + self.sys_virtual_media.image) + self.assertEqual('Sardine2.1.43.35.6a', + self.sys_virtual_media.image_name) + self.assertEqual(sushy.CONNECTED_VIA_URI, + self.sys_virtual_media.connected_via) + self.assertEqual([sushy.VIRTUAL_MEDIA_FLOPPY, + sushy.VIRTUAL_MEDIA_USBSTICK], + self.sys_virtual_media.media_types) + self.assertEqual(True, self.sys_virtual_media.inserted) + self.assertEqual(False, self.sys_virtual_media.write_protected) + + def test__parse_attributes_return(self): + attributes = self.sys_virtual_media._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('https://www.dmtf.org/freeImages/Sardine.img', + attributes.get('image')) + self.assertEqual(sushy.CONNECTED_VIA_URI, + attributes.get('connected_via')) + self.assertEqual([sushy.VIRTUAL_MEDIA_FLOPPY, + sushy.VIRTUAL_MEDIA_USBSTICK], + attributes.get('media_types')) + + def test_insert_media_none(self): + 
self.sys_virtual_media._actions.insert_media = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.InsertMedia', + self.sys_virtual_media.insert_media, + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + + self.sys_virtual_media._actions = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.InsertMedia', + self.sys_virtual_media.insert_media, + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + + def test_insert_media(self): + self.assertFalse(self.sys_virtual_media._is_stale) + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.post.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.InsertMedia"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False} + ) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_insert_media_fallback(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH'} + self.sys_virtual_media._actions.insert_media = None + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers=None) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_insert_media_fallback_with_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': '"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.insert_media = None + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": 
"https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_insert_media_fallback_with_weak_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': 'W/"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.insert_media = None + self.sys_virtual_media.insert_media( + "https://www.dmtf.org/freeImages/Sardine.img", True, False) + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": "https://www.dmtf.org/freeImages/Sardine.img", + "Inserted": True, "WriteProtected": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_none(self): + self.sys_virtual_media._actions.eject_media = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.EjectMedia', + self.sys_virtual_media.eject_media) + + self.sys_virtual_media._actions = None + self.assertRaisesRegex( + exceptions.MissingActionError, 'action #VirtualMedia.EjectMedia', + self.sys_virtual_media.eject_media) + + def test_eject_media(self): + self.assertFalse(self.sys_virtual_media._is_stale) + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.post.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia")) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_fallback(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, headers=None) + self.assertTrue(self.sys_virtual_media._is_stale) + + def 
test_eject_media_fallback_with_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': '"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_fallback_with_weak_etag(self): + self.conn.get.return_value.headers = {'Allow': 'GET,HEAD,PATCH', + 'ETag': 'W/"3d7b8a7360bf2941d"'} + self.sys_virtual_media._actions.eject_media = None + self.sys_virtual_media.eject_media() + self.sys_virtual_media._conn.patch.assert_called_once_with( + ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1"), + data={"Image": None, "Inserted": False}, + headers={"If-Match": '"3d7b8a7360bf2941d"'}) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_pass_empty_dict_415(self): + target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia") + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.UNSUPPORTED_MEDIA_TYPE)), '200'] + self.sys_virtual_media.eject_media() + post_calls = [ + mock.call(target_uri), + mock.call(target_uri, data={})] + self.sys_virtual_media._conn.post.assert_has_calls(post_calls) + self.assertTrue(self.sys_virtual_media._is_stale) + + def test_eject_media_pass_empty_dict_400(self): + target_uri = ("/redfish/v1/Managers/BMC/VirtualMedia/Floppy1/Actions" + "/VirtualMedia.EjectMedia") + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.BAD_REQUEST)), '200'] + self.sys_virtual_media.eject_media() + post_calls = [ + mock.call(target_uri), + mock.call(target_uri, data={})] + 
self.sys_virtual_media._conn.post.assert_has_calls(post_calls) + self.assertTrue(self.sys_virtual_media._is_stale) diff --git a/sushy/tests/unit/resources/oem/__init__.py b/sushy/tests/unit/resources/oem/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/oem/test_common.py b/sushy/tests/unit/resources/oem/test_common.py new file mode 100644 index 0000000000000000000000000000000000000000..79e35db49c64951393c0f521ede00cfcf86fccbe --- /dev/null +++ b/sushy/tests/unit/resources/oem/test_common.py @@ -0,0 +1,196 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from unittest import mock + +import stevedore + +from sushy import exceptions +from sushy.resources import base as res_base +from sushy.resources.oem import base as oem_base +from sushy.resources.oem import common as oem_common +from sushy.tests.unit import base + + +class ContosoResourceOEMExtension(oem_base.OEMResourceBase): + pass + + +class FauxResourceOEMExtension(oem_base.OEMResourceBase): + pass + + +class ResourceOEMCommonMethodsTestCase(base.TestCase): + + def setUp(self): + super(ResourceOEMCommonMethodsTestCase, self).setUp() + # We use ExtensionManager.make_test_instance() and instantiate the + # test instance outside of the test cases in setUp. Inside of the + # test cases we set this as the return value of the mocked + # constructor. 
Also note that this instrumentation has been done + # only for one specific resource namespace which gets passed in the + # constructor of ExtensionManager. Moreover, this setUp also enables + # us to verify that the constructor is called correctly while still + # using a more realistic ExtensionManager. + contoso_ep = mock.Mock() + contoso_ep.module_name = __name__ + contoso_ep.attrs = ['ContosoResourceOEMExtension'] + self.contoso_extn = stevedore.extension.Extension( + 'contoso', contoso_ep, lambda: ContosoResourceOEMExtension, None) + self.contoso_extn_dup = stevedore.extension.Extension( + 'contoso_dup', contoso_ep, + lambda: ContosoResourceOEMExtension, None) + + faux_ep = mock.Mock() + faux_ep.module_name = __name__ + faux_ep.attrs = ['FauxResourceOEMExtension'] + self.faux_extn = stevedore.extension.Extension( + 'faux', faux_ep, lambda: FauxResourceOEMExtension, None) + self.faux_extn_dup = stevedore.extension.Extension( + 'faux_dup', faux_ep, lambda: FauxResourceOEMExtension, None) + + self.fake_ext_mgr = ( + stevedore.extension.ExtensionManager.make_test_instance( + [self.contoso_extn, self.faux_extn])) + self.fake_ext_mgr2 = ( + stevedore.extension.ExtensionManager.make_test_instance( + [self.contoso_extn_dup, self.faux_extn_dup])) + + def tearDown(self): + super(ResourceOEMCommonMethodsTestCase, self).tearDown() + if oem_common._global_extn_mgrs_by_resource: + oem_common._global_extn_mgrs_by_resource = {} + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test__create_extension_manager(self, ExtensionManager_mock): + system_resource_oem_ns = 'sushy.resources.system.oems' + ExtensionManager_mock.return_value = self.fake_ext_mgr + + result = oem_common._create_extension_manager(system_resource_oem_ns) + + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + system_resource_oem_ns, propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + + @mock.patch.object(stevedore, 
'ExtensionManager', autospec=True) + def test__create_extension_manager_no_extns(self, ExtensionManager_mock): + system_resource_oem_ns = 'sushy.resources.system.oems' + ExtensionManager_mock.return_value.names.return_value = [] + + self.assertRaisesRegex( + exceptions.ExtensionError, 'No extensions found', + oem_common._create_extension_manager, + system_resource_oem_ns) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test__get_extension_manager_of_resource(self, ExtensionManager_mock): + ExtensionManager_mock.return_value = self.fake_ext_mgr + + result = oem_common._get_extension_manager_of_resource('system') + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + namespace='sushy.resources.system.oems', + propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result = oem_common._get_extension_manager_of_resource('manager') + self.assertEqual(self.fake_ext_mgr, result) + ExtensionManager_mock.assert_called_once_with( + namespace='sushy.resources.manager.oems', + propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + for name, extension in result.items(): + self.assertTrue(name in ('contoso', 'faux')) + self.assertTrue(extension in (self.contoso_extn, + self.faux_extn)) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor(self, ExtensionManager_mock): + oem_resource_mock = mock.Mock() + oem_resource_mock.set_parent_resource = lambda *x: oem_resource_mock + resource_instance_mock = mock.Mock() + resource_instance_mock.clone_resource = lambda *x: oem_resource_mock + ExtensionManager_mock.side_effect = [self.fake_ext_mgr, + self.fake_ext_mgr2] + + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + ExtensionManager_mock.assert_called_once_with( + 
'sushy.resources.system.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + oem_resource_mock.obj = None + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Contoso', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + self.assertFalse(ExtensionManager_mock.called) + ExtensionManager_mock.reset_mock() + + result = oem_common.get_resource_extension_by_vendor( + 'manager', 'Faux_dup', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + ExtensionManager_mock.assert_called_once_with( + 'sushy.resources.manager.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result = oem_common.get_resource_extension_by_vendor( + 'manager', 'Contoso_dup', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + self.assertFalse(ExtensionManager_mock.called) + ExtensionManager_mock.reset_mock() + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor_fail( + self, ExtensionManager_mock): + resource_instance_mock = mock.Mock(spec=res_base.ResourceBase) + # ``fake_ext_mgr2`` has extension names as ``faux_dup`` + # and ``contoso_dup``. 
+ ExtensionManager_mock.return_value = self.fake_ext_mgr2 + + self.assertRaisesRegex( + exceptions.OEMExtensionNotFoundError, + 'No sushy.resources.system.oems OEM extension found ' + 'by name "faux"', + oem_common.get_resource_extension_by_vendor, + 'sushy.resources.system.oems', 'Faux', resource_instance_mock) + + @mock.patch.object(stevedore, 'ExtensionManager', autospec=True) + def test_get_resource_extension_by_vendor_different_resources( + self, ExtensionManager_mock): + oem_resource_mock = mock.Mock() + oem_resource_mock.set_parent_resource = lambda *x: oem_resource_mock + resource_instance_mock = mock.Mock() + resource_instance_mock.clone_resource = lambda *x: oem_resource_mock + oem_resource_mock2 = mock.Mock() + oem_resource_mock2.set_parent_resource = lambda *x: oem_resource_mock2 + resource_instance_mock2 = mock.Mock() + resource_instance_mock2.clone_resource = lambda *x: oem_resource_mock2 + ExtensionManager_mock.side_effect = [self.fake_ext_mgr] + + result = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock) + self.assertEqual(result, oem_resource_mock) + ExtensionManager_mock.assert_called_once_with( + 'sushy.resources.system.oems', propagate_map_exceptions=True, + on_load_failure_callback=oem_common._raise) + ExtensionManager_mock.reset_mock() + + result2 = oem_common.get_resource_extension_by_vendor( + 'system', 'Faux', resource_instance_mock2) + self.assertEqual(result2, oem_resource_mock2) + ExtensionManager_mock.assert_not_called() + ExtensionManager_mock.reset_mock() diff --git a/sushy/tests/unit/resources/oem/test_fake.py b/sushy/tests/unit/resources/oem/test_fake.py new file mode 100644 index 0000000000000000000000000000000000000000..1d585eb6e87a78dd9f838d6da0b8223b1e4a9f30 --- /dev/null +++ b/sushy/tests/unit/resources/oem/test_fake.py @@ -0,0 +1,56 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +from sushy.resources.oem import fake +from sushy.resources.system import system +from sushy.tests.unit import base + + +class FakeOEMSystemExtensionTestCase(base.TestCase): + + def setUp(self): + super(FakeOEMSystemExtensionTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/system.json', 'r') as f: + self.conn.get.return_value.json.return_value = json.loads(f.read()) + + self.sys_instance = system.System( + self.conn, '/redfish/v1/Systems/437XR1138R2', + redfish_version='1.0.2') + self.fake_sys_oem_extn = fake.FakeOEMSystemExtension( + self.conn, '', + redfish_version='1.0.2') + self.fake_sys_oem_extn = self.fake_sys_oem_extn.set_parent_resource( + self.sys_instance, 'Contoso') + + def test__parse_oem_attributes(self): + self.assertEqual('#Contoso.ComputerSystem', + self.fake_sys_oem_extn.data_type) + self.assertEqual('PacWest Production Facility', ( + self.fake_sys_oem_extn.production_location.facility_name)) + self.assertEqual('USA', ( + self.fake_sys_oem_extn.production_location.country)) + self.assertEqual( + "/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/" + "Contoso.Reset", + self.fake_sys_oem_extn._actions.reset.target_uri) + + def test_get_reset_system_path(self): + value = self.fake_sys_oem_extn.get_reset_system_path() + expected = ( + '/redfish/v1/Systems/437XR1138R2/Oem/Contoso/Actions/Contoso.Reset' + ) + self.assertEqual(expected, value) diff --git a/sushy/tests/unit/resources/registry/__init__.py b/sushy/tests/unit/resources/registry/__init__.py 
new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/registry/test_message_registry.py b/sushy/tests/unit/resources/registry/test_message_registry.py new file mode 100644 index 0000000000000000000000000000000000000000..fea7e7cae072aaad635e703a840827931654ddc7 --- /dev/null +++ b/sushy/tests/unit/resources/registry/test_message_registry.py @@ -0,0 +1,293 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import json +from unittest import mock + + +from sushy.resources import base as sushy_base +from sushy.resources import constants as res_cons +from sushy.resources.registry import message_registry +from sushy.tests.unit import base + + +class MessageRegistryTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.registry = message_registry.MessageRegistry( + self.conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.registry._parse_attributes(self.json_doc) + self.assertEqual('Test.1.1.1', self.registry.identity) + self.assertEqual('Test Message Registry', self.registry.name) + self.assertEqual('en', self.registry.language) + self.assertEqual('This registry defines messages for sushy testing', + self.registry.description) + self.assertEqual('Test', self.registry.registry_prefix) + self.assertEqual('1.1.1', self.registry.registry_version) + self.assertEqual('sushy', self.registry.owning_entity) + self.assertEqual(4, len(self.registry.messages)) + self.assertEqual('Everything OK', + self.registry.messages['Success'].description) + self.assertEqual('Everything done successfully.', + self.registry.messages['Success'].message) + self.assertEqual(res_cons.SEVERITY_OK, + self.registry.messages['Success'].severity) + self.assertEqual(0, self.registry.messages['Success'].number_of_args) + self.assertEqual(2, len(self.registry.messages['TooBig'].param_types)) + self.assertEqual(res_cons.PARAMTYPE_STRING, + self.registry.messages['TooBig'].param_types[0]) + self.assertEqual(res_cons.PARAMTYPE_NUMBER, + self.registry.messages['TooBig'].param_types[1]) + self.assertEqual('Panic', self.registry.messages['Failed'].resolution) + self.assertEqual( + 2, 
len(self.registry.messages['MissingThings'].param_types)) + self.assertEqual(res_cons.SEVERITY_WARNING, + self.registry.messages['MissingThings'].severity) + self.assertEqual( + res_cons.PARAMTYPE_STRING, + self.registry.messages['MissingThings'].param_types[0]) + self.assertEqual( + res_cons.PARAMTYPE_NUMBER, + self.registry.messages['MissingThings'].param_types[1]) + self.assertEqual( + 'Try Later', self.registry.messages['MissingThings'].resolution) + + def test__parse_attributes_return(self): + attributes = self.registry._parse_attributes(self.json_doc) + + self.assertEqual({'Failed': + {'description': 'Nothing is OK', + 'message': 'The property %1 broke everything.', + 'number_of_args': 1, + 'param_types': ['string'], + 'resolution': 'Panic', + 'severity': 'critical'}, + 'MissingThings': + {'description': '', + 'message': + "Property's %1 value cannot be less than %2.", + 'number_of_args': 2, + 'param_types': ['string', 'number'], + 'resolution': 'Try Later', + 'severity': 'warning'}, + 'Success': + {'description': 'Everything OK', + 'message': 'Everything done successfully.', + 'number_of_args': 0, 'param_types': None, + 'resolution': 'None', 'severity': 'ok'}, + 'TooBig': + {'description': 'Value too big', + 'message': + "Property's %1 value cannot be greater than %2.", + 'number_of_args': 2, + 'param_types': ['string', 'number'], + 'resolution': 'Try again', + 'severity': 'warning'}}, + attributes.get('messages')) + + def test__parse_attributes_missing_msg_desc(self): + self.json_doc['Messages']['Success'].pop('Description') + self.registry._parse_attributes(self.json_doc) + self.assertEqual('', self.registry.messages['Success'].description) + + def test__parse_attributes_missing_msg_severity(self): + self.json_doc['Messages']['Success'].pop('Severity') + self.registry._parse_attributes(self.json_doc) + self.assertEqual('warning', self.registry.messages['Success'].severity) + + def test__parse_attribtues_unknown_param_type(self): + 
self.registry.json['Messages']['Failed']['ParamTypes'] = \ + ['unknown_type'] + self.assertRaisesRegex(KeyError, + 'unknown_type', + self.registry._parse_attributes, self.json_doc) + + def test_parse_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.TooBig' + message_field.message_args = ['arg1', 10] + message_field.severity = None + message_field.resolution = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Try again', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_WARNING, parsed_msg.severity) + self.assertEqual('Property\'s arg1 value cannot be greater than 10.', + parsed_msg.message) + + def test_parse_message_with_severity_resolution_no_args(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_bad_registry(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + 
conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadRegistry.TooBig' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field, parsed_msg) + + def test_parse_message_bad_message_key_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = 'Message' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'Message') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_bad_message_key_no_existing_message(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.BadMessageKey' + message_field.message = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_fallback_to_messages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + 
conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Messages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_to_basemessages(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'BaseMessages': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Success' + message_field.severity = res_cons.SEVERITY_OK + message_field.resolution = 'Do nothing' + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Do nothing', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_OK, parsed_msg.severity) + self.assertEqual('Everything done successfully.', + parsed_msg.message) + + def test_parse_message_fallback_failed(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'BadMessageKey' + message_field.message = None + + parsed_msg = 
message_registry.parse_message(registries, message_field) + + self.assertEqual(message_field.message, 'unknown') + self.assertEqual(message_field.message, parsed_msg.message) + + def test_parse_message_not_enough_args(self): + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + registries = {'Test.1.0.0': registry} + message_field = sushy_base.MessageListField('Foo') + message_field.message_id = 'Test.1.0.0.TooBig' + message_field.message_args = ['arg1'] + message_field.severity = None + message_field.resolution = None + + parsed_msg = message_registry.parse_message(registries, message_field) + + self.assertEqual('Try again', parsed_msg.resolution) + self.assertEqual(res_cons.SEVERITY_WARNING, parsed_msg.severity) + self.assertEqual('Property\'s arg1 value cannot be greater than ' + 'unknown.', parsed_msg.message) diff --git a/sushy/tests/unit/resources/registry/test_message_registry_file.py b/sushy/tests/unit/resources/registry/test_message_registry_file.py new file mode 100644 index 0000000000000000000000000000000000000000..79367a2e95e497e816f711cbdfedf532110d260f --- /dev/null +++ b/sushy/tests/unit/resources/registry/test_message_registry_file.py @@ -0,0 +1,297 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import json +from unittest import mock + +from sushy.resources.base import FieldData +from sushy.resources.registry import message_registry_file +from sushy.tests.unit import base + + +class MessageRegistryFileTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryFileTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'message_registry_file.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.reg_file = message_registry_file.MessageRegistryFile( + self.conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.reg_file._parse_attributes(self.json_doc) + self.assertEqual('Test', self.reg_file.identity) + self.assertEqual('Test Message Registry File', self.reg_file.name) + self.assertEqual('Message Registry file for testing', + self.reg_file.description) + self.assertEqual('en', self.reg_file.languages[0]) + self.assertEqual('Test.1.0', self.reg_file.registry) + self.assertEqual('default', self.reg_file.location[0].language) + self.assertEqual('/redfish/v1/Registries/Test/Test.1.0.json', + self.reg_file.location[0].uri) + self.assertEqual('https://example.com/Registries/Test.1.0.json', + self.reg_file.location[0].publication_uri) + self.assertEqual('/redfish/v1/Registries/Archive.zip', + self.reg_file.location[0].archive_uri) + self.assertEqual('Test.1.0.json', + self.reg_file.location[0].archive_file) + + def test__parse_attributes_return(self): + attributes = self.reg_file._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Test Message Registry File', attributes.get('name')) + self.assertEqual('Test', attributes.get('identity')) + self.assertEqual(['en'], attributes.get('languages')) + self.assertEqual('Test.1.0', attributes.get('registry')) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) 
+ @mock.patch('sushy.resources.base.JsonDataReader', autospec=True) + def test_get_message_registry_uri(self, mock_reader, mock_msg_reg): + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_reader_rv.get_data.return_value = FieldData(200, {}, { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + }) + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + reader=None, redfish_version=self.reg_file.redfish_version) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.base.JsonArchiveReader', autospec=True) + def test_get_message_registry_archive(self, mock_reader, mock_msg_reg): + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_msg_reg_rv = mock.Mock() + mock_reader_rv.get_data.return_value = FieldData(200, {}, { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + }) + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + + registry = self.reg_file.get_message_registry('fr', None) + mock_msg_reg.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Archive.zip', + redfish_version=self.reg_file.redfish_version, + reader=mock_reader_rv) + mock_reader.assert_called_once_with('Test.1.0.json') + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.base.JsonPublicFileReader', autospec=True) + def test_get_message_registry_public(self, mock_reader, mock_msg_reg): + public_connector = mock.Mock() + mock_reader_rv = mock.Mock() + mock_reader.return_value = mock_reader_rv + mock_msg_reg_rv = mock.Mock() + 
mock_reader_rv.get_data.return_value = FieldData(200, {}, { + "@odata.type": "#MessageRegistry.v1_1_1.MessageRegistry", + }) + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + self.reg_file.location[0].archive_uri = None + + registry = self.reg_file.get_message_registry('en', public_connector) + mock_msg_reg.assert_called_once_with( + public_connector, + path='https://example.com/Registries/Test.1.0.json', + redfish_version=self.reg_file.redfish_version, + reader=mock_reader_rv) + self.assertEqual(mock_msg_reg_rv, registry) + + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_unknown_type( + self, mock_log, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' + + registry = self.reg_file.get_message_registry('en', None) + self.assertIsNone(registry) + mock_log.debug.assert_called_with( + 'Ignoring unsupported flavor of registry %(registry)s', + {'registry': 'FishingRegistry'}) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_invalid(self, mock_log, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = None + self.reg_file.location[0].archive_uri = None + self.reg_file.location[0].publication_uri = None + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_not_called() + self.assertIsNone(registry) + mock_log.warning.assert_called_with( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + 
@mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_invalid_uri( + self, mock_log, mock_msg_reg_type, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].uri = {'extref': 'http://127.0.0.1/reg'} + mock_msg_reg.side_effect = TypeError('Wrong URL type') + mock_msg_reg_type.return_value._odata_type = mock.MagicMock( + endswith=mock.MagicMock(return_value=True)) + + registry = self.reg_file.get_message_registry('en', None) + + self.assertIsNone(registry) + + mock_msg_reg_type.assert_called_once_with( + mock.ANY, + path={'extref': 'http://127.0.0.1/reg'}, reader=None, + redfish_version='1.0.2') + + mock_msg_reg.assert_called_once_with( + mock.ANY, + path={'extref': 'http://127.0.0.1/reg'}, reader=None, + redfish_version='1.0.2') + + expected_calls = [ + mock.call( + 'Cannot load message registry from location %(location)s: ' + '%(error)s', + {'location': {'extref': 'http://127.0.0.1/reg'}, + 'error': mock.ANY}), + mock.call( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + ] + + mock_log.warning.assert_has_calls(expected_calls) + + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + def test_get_message_registry_non_default_lang(self, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' + self.reg_file.location[0].language = 'en' + registry = self.reg_file.get_message_registry('en', None) + mock_registry_type.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + reader=None, redfish_version=self.reg_file.redfish_version) + self.assertIsNone(registry) + + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + 
@mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + def test_get_message_registry_loading_type_fails( + self, mock_reg_type, mock_log): + mock_reg_type.side_effect = TypeError('Something wrong') + + registry = self.reg_file.get_message_registry('en', None) + self.assertTrue(mock_reg_type.called) + self.assertIsNone(registry) + mock_log.warning.assert_any_call( + 'Cannot load message registry type from location ' + '%(location)s: %(error)s', + {'location': '/redfish/v1/Registries/Test/Test.1.0.json', + 'error': mock.ANY}) + mock_log.warning.assert_called_with( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + + @mock.patch('sushy.resources.registry.message_registry_file.RegistryType', + autospec=True) + def test_get_message_registry_strangely_cased_lang( + self, mock_registry_type): + mock_fishing_registry = mock_registry_type.return_value + mock_fishing_registry._odata_type = 'FishingRegistry' + self.reg_file.location[0].language = 'En' + registry = self.reg_file.get_message_registry('en', None) + mock_registry_type.assert_called_once_with( + self.conn, path='/redfish/v1/Registries/Test/Test.1.0.json', + reader=None, redfish_version=self.reg_file.redfish_version) + self.assertIsNone(registry) + + @mock.patch('sushy.resources.registry.message_registry.MessageRegistry', + autospec=True) + @mock.patch('sushy.resources.registry.message_registry_file.LOG', + autospec=True) + def test_get_message_registry_missing_lang(self, mock_log, mock_msg_reg): + mock_msg_reg_rv = mock.Mock() + mock_msg_reg.return_value = mock_msg_reg_rv + self.reg_file.location[0].language = 'cz' + + registry = self.reg_file.get_message_registry('en', None) + mock_msg_reg.assert_not_called() + self.assertIsNone(registry) + mock_log.warning.assert_called_with( + 'No message registry found for %(language)s or default', + {'language': 'en'}) + + @mock.patch('sushy.resources.base.logging.warning', + autospec=True) + def 
test__parse_attributes_missing_registry(self, mock_log): + self.json_doc.pop('Registry') + self.reg_file._parse_attributes(self.json_doc) + self.assertEqual('UNKNOWN.0.0', self.reg_file.registry) + mock_log.assert_called_with( + 'Applying default "UNKNOWN.0.0" on required, but missing ' + 'attribute "[\'Registry\']"') + + +class MessageRegistryFileCollectionTestCase(base.TestCase): + + def setUp(self): + super(MessageRegistryFileCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'message_registry_file_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.reg_file_col =\ + message_registry_file.MessageRegistryFileCollection( + self.conn, '/redfish/v1/Registries', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.reg_file_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.reg_file_col.redfish_version) + self.assertEqual('Message Registry Test Collection', + self.reg_file_col.name) + self.assertEqual(('/redfish/v1/Registries/Test',), + self.reg_file_col.members_identities) diff --git a/sushy/tests/unit/resources/sessionservice/__init__.py b/sushy/tests/unit/resources/sessionservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/sessionservice/test_session.py b/sushy/tests/unit/resources/sessionservice/test_session.py new file mode 100644 index 0000000000000000000000000000000000000000..34492b5e9613d3d1107b048dff7928f2b55a4a62 --- /dev/null +++ b/sushy/tests/unit/resources/sessionservice/test_session.py @@ -0,0 +1,102 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.sessionservice import session +from sushy.tests.unit import base + + +class SessionTestCase(base.TestCase): + + def setUp(self): + super(SessionTestCase, self).setUp() + self.conn = mock.Mock() + self.auth = mock.Mock() + with open('sushy/tests/unit/json_samples/session.json') as f: + self.json_doc = json.load(f) + self.conn.get.return_value.json.return_value = self.json_doc + self.auth._session_key = 'fake_x_auth_token' + self.auth._session_uri = self.json_doc['@odata.id'] + self.conn._auth = self.auth + + self.sess_inst = session.Session( + self.conn, '/redfish/v1/SessionService/Sessions/1234567890ABCDEF', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sess_inst._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.sess_inst.redfish_version) + self.assertEqual('1234567890ABCDEF', self.sess_inst.identity) + self.assertEqual('User Session', self.sess_inst.name) + exp_path = '/redfish/v1/SessionService/Sessions/1234567890ABCDEF' + self.assertEqual(exp_path, self.sess_inst.path) + + def test__parse_attributes_missing_identity(self): + self.sess_inst.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Id', + self.sess_inst._parse_attributes, self.json_doc) + + def test_session_close(self): + session_key = self.sess_inst._conn._auth._session_key + session_uri = self.sess_inst._conn._auth._session_uri + self.assertEqual(session_key, 'fake_x_auth_token') + self.assertEqual(session_uri, 
self.sess_inst.path) + self.sess_inst.delete() + self.sess_inst._conn.delete.assert_called_with(session_uri) + + +class SessionCollectionTestCase(base.TestCase): + + def setUp(self): + super(SessionCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'session_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.sess_col = session.SessionCollection( + self.conn, '/redfish/v1/SessionService/Sessions', + redfish_version='1.0.2') + + def test__parse_attributes(self): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + self.sess_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.sess_col.redfish_version) + self.assertEqual('Session Collection', self.sess_col.name) + self.assertEqual((path,), self.sess_col.members_identities) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_member(self, mock_session): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + self.sess_col.get_member(path) + mock_session.assert_called_once_with( + self.sess_col._conn, path, + self.sess_col.redfish_version, None) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_members(self, mock_session): + path = '/redfish/v1/SessionService/Sessions/104f9d68f58abb85' + members = self.sess_col.get_members() + mock_session.assert_called_once_with( + self.sess_col._conn, path, + self.sess_col.redfish_version, None) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/sessionservice/test_sessionservice.py b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py new file mode 100644 index 0000000000000000000000000000000000000000..61a102f848ce38f9b85629bdf4fd7f142aae294a --- /dev/null +++ b/sushy/tests/unit/resources/sessionservice/test_sessionservice.py @@ -0,0 +1,164 @@ +# Copyright 2017 Red Hat, Inc. 
+# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +from sushy import exceptions +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice +from sushy.tests.unit import base + + +class SessionServiceTestCase(base.TestCase): + + def setUp(self): + super(SessionServiceTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/session_service.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.sess_serv_inst = sessionservice.SessionService( + self.conn, '/redfish/v1/SessionService', + redfish_version='1.0.2') + + @mock.patch.object(sessionservice, 'LOG', autospec=True) + def test__init_throws_exception(self, mock_LOG): + self.conn.get.return_value.json.reset_mock() + self.conn.get.return_value.json.side_effect = ( + exceptions.AccessError( + 'GET', 'any_url', mock.MagicMock())) + sessionservice.SessionService( + self.conn, '/redfish/v1/SessionService', redfish_version='1.0.2') + self.assertTrue(mock_LOG.debug.called) + + def test__parse_attributes(self): + self.sess_serv_inst._parse_attributes(self.json_doc) + exp_path = '/redfish/v1/SessionService' + self.assertEqual('1.0.2', self.sess_serv_inst.redfish_version) + self.assertEqual('SessionService', self.sess_serv_inst.identity) + self.assertEqual('Session Service', self.sess_serv_inst.name) + 
self.assertEqual(30, self.sess_serv_inst.session_timeout) + self.assertEqual(exp_path, self.sess_serv_inst.path) + + def test__get_sessions_collection_path(self): + self.sess_serv_inst.json.pop('Sessions') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Sessions', + self.sess_serv_inst._get_sessions_collection_path) + + @mock.patch.object(session, 'SessionCollection', autospec=True) + def test_session_collection(self, mock_sess_col): + self.sess_serv_inst.sessions + mock_sess_col.assert_called_once_with( + self.sess_serv_inst._conn, + '/redfish/v1/SessionService/Sessions', + self.sess_serv_inst.redfish_version, None) + + def test_create_session(self): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) + + session_key, session_uri = ( + self.sess_serv_inst.create_session('foo', 'secret')) + self.assertEqual('adc530e2016a0ea98c76c087f0e4b76f', session_key) + self.assertEqual( + '/redfish/v1/SessionService/Sessions/151edd65d41c0b89', + session_uri) + + def test_create_session_unknown_path(self): + del self.sess_serv_inst.json['Sessions'] + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) + + session_key, session_uri = ( + self.sess_serv_inst.create_session('foo', 'secret')) + self.assertEqual('adc530e2016a0ea98c76c087f0e4b76f', session_key) + self.assertEqual( + '/redfish/v1/SessionService/Sessions/151edd65d41c0b89', + session_uri) + uri = self.sess_serv_inst.path + '/Sessions' + data = {'UserName': 'foo', 'Password': 'secret'} + self.conn.post.assert_called_once_with(uri, + data=data) + + def test_create_session_missing_x_auth_token(self): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) + + self.conn.post.return_value.headers.pop('X-Auth-Token') + self.assertRaisesRegex( + 
exceptions.MissingXAuthToken, 'No X-Auth-Token returned', + self.sess_serv_inst.create_session, 'foo', 'bar') + + @mock.patch.object(sessionservice, 'LOG', autospec=True) + def test_create_session_missing_location(self, mock_LOG): + with open('sushy/tests/unit/json_samples/' + 'session_creation_headers.json') as f: + self.conn.post.return_value.headers = json.load(f) + + self.conn.post.return_value.headers.pop('Location') + self.sess_serv_inst.create_session('foo', 'bar') + self.assertTrue(mock_LOG.warning.called) + + def _setUp_sessions(self): + self.conn.get.return_value.json.reset_mock() + successive_return_values = [] + with open('sushy/tests/unit/json_samples/session.json') as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + def test_sessions(self): + # | GIVEN | + self._setUp_sessions() + # | WHEN | + actual_sessions = self.sess_serv_inst.sessions + # | THEN | + self.assertIsInstance(actual_sessions, session.SessionCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_sessions, self.sess_serv_inst.sessions) + self.conn.get.return_value.json.assert_not_called() + + def test_sessions_on_refresh(self): + # | GIVEN | + self._setUp_sessions() + # | WHEN & THEN | + self.assertIsInstance(self.sess_serv_inst.sessions, + session.SessionCollection) + + self.conn.get.return_value.json.side_effect = None + # On refreshing the sess_serv_inst instance... 
+ with open('sushy/tests/unit/json_samples/session.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.sess_serv_inst.refresh(force=True) + + # | WHEN & THEN | + self.assertFalse(self.sess_serv_inst.sessions._is_stale) + + def test_close_session(self): + self.sess_serv_inst.close_session('session/identity') + self.conn.delete.assert_called_once_with('session/identity') diff --git a/sushy/tests/unit/resources/system/storage/__init__.py b/sushy/tests/unit/resources/system/storage/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/system/storage/test_drive.py b/sushy/tests/unit/resources/system/storage/test_drive.py new file mode 100644 index 0000000000000000000000000000000000000000..a3978bcc1a6f392168901e5f07a0f33d8eeba342 --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_drive.py @@ -0,0 +1,75 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + + +import sushy +from sushy import exceptions +from sushy.resources.system.storage import drive +from sushy.tests.unit import base + + +class DriveTestCase(base.TestCase): + + def setUp(self): + super(DriveTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/drive.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.stor_drive = drive.Drive( + self.conn, + '/redfish/v1/Systems/437XR1138/Storage/1/Drives/32ADF365C6C1B7BD', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_drive._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.stor_drive.redfish_version) + self.assertEqual('32ADF365C6C1B7BD', self.stor_drive.identity) + self.assertEqual('Drive Sample', self.stor_drive.name) + self.assertEqual(512, self.stor_drive.block_size_bytes) + self.assertEqual(899527000000, self.stor_drive.capacity_bytes) + identifiers = self.stor_drive.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = identifiers[0] + self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + identifier.durable_name_format) + self.assertEqual('32ADF365C6C1B7BD', identifier.durable_name) + self.assertEqual('Contoso', self.stor_drive.manufacturer) + self.assertEqual('HDD', self.stor_drive.media_type) + self.assertEqual('C123', self.stor_drive.model) + self.assertEqual('C123-1111', self.stor_drive.part_number) + self.assertEqual(sushy.PROTOCOL_TYPE_SAS, self.stor_drive.protocol) + self.assertEqual('1234570', self.stor_drive.serial_number) + self.assertEqual(sushy.STATE_ENABLED, self.stor_drive.status.state) + self.assertEqual(sushy.HEALTH_OK, self.stor_drive.status.health) + + def test_set_indicator_led(self): + with mock.patch.object( + self.stor_drive, 'invalidate', + autospec=True) as invalidate_mock: + self.stor_drive.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + 
self.stor_drive._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138/Storage/1/Drives/' + '32ADF365C6C1B7BD', data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.stor_drive.set_indicator_led, + 'spooky-glowing') diff --git a/sushy/tests/unit/resources/system/storage/test_storage.py b/sushy/tests/unit/resources/system/storage/test_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..365dc80cc3cd4a1967a4d2b382b974eb2db1bc7b --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_storage.py @@ -0,0 +1,369 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + + +import sushy +from sushy.resources.system.storage import drive +from sushy.resources.system.storage import storage +from sushy.resources.system.storage import volume +from sushy.tests.unit import base + + +STORAGE_DRIVE_FILE_NAMES = [ + 'sushy/tests/unit/json_samples/drive.json', + 'sushy/tests/unit/json_samples/drive2.json', + 'sushy/tests/unit/json_samples/drive3.json' +] + +STORAGE_VOLUME_FILE_NAMES = [ + 'sushy/tests/unit/json_samples/volume_collection.json', + 'sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json' +] + + +class StorageTestCase(base.TestCase): + + def setUp(self): + super(StorageTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/storage.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.storage = storage.Storage( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.storage._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.storage.redfish_version) + self.assertEqual('1', self.storage.identity) + self.assertEqual('Local Storage Controller', self.storage.name) + self.assertEqual('ok', self.storage.status.health) + self.assertEqual('ok', self.storage.status.health_rollup) + self.assertEqual('enabled', self.storage.status.state) + self.assertEqual( + ('/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa + ), self.storage.drives_identities) + + def test_get_drive(self): + # | WHEN | + actual_drive = self.storage.get_drive( + 
'/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/' + '35D38F11ACEF7BD3') + # | THEN | + self.assertIsInstance(actual_drive, drive.Drive) + self.assertTrue(self.conn.get.return_value.json.called) + + @mock.patch.object(drive, 'Drive', autospec=True) + def test_drives(self, Drive_mock): + # | WHEN | + all_drives = self.storage.drives + # | THEN | + calls = [ + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/35D38F11ACEF7BD3', # noqa + self.storage.redfish_version, None), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3F5A8C54207B7233', # noqa + self.storage.redfish_version, None), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/32ADF365C6C1B7BD', # noqa + self.storage.redfish_version, None), + mock.call(self.storage._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Drives/3D58ECBC375FD9F2', # noqa + self.storage.redfish_version, None) + ] + Drive_mock.assert_has_calls(calls) + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + self.assertIsInstance(all_drives[0], drive.Drive.__class__) + + # returning cached value + Drive_mock.reset_mock() + # | WHEN | + all_drives = self.storage.drives + # | THEN | + self.assertFalse(Drive_mock.called) + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + self.assertIsInstance(all_drives[0], drive.Drive.__class__) + + def test_storage_controllers(self): + controllers = self.storage.storage_controllers + self.assertIsInstance(controllers, list) + self.assertEqual(1, len(controllers)) + controller = controllers[0] + self.assertEqual('0', controller.member_id) + self.assertEqual('Contoso Integrated RAID', controller.name) + self.assertEqual('ok', controller.status.health) + self.assertEqual('enabled', controller.status.state) + identifiers = controller.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = 
identifiers[0] + self.assertEqual(sushy.DURABLE_NAME_FORMAT_NAA, + identifier.durable_name_format) + self.assertEqual('345C59DBD970859C', identifier.durable_name) + self.assertEqual(12, controller.speed_gbps) + self.assertEqual([sushy.PROTOCOL_TYPE_PCIe], + controller.controller_protocols) + self.assertEqual([sushy.PROTOCOL_TYPE_SAS, sushy.PROTOCOL_TYPE_SATA], + controller.device_protocols) + self.assertEqual([sushy.RAID_TYPE_RAID0, sushy.RAID_TYPE_RAID1], + controller.raid_types) + + def test_drives_after_refresh(self): + self.storage.refresh() + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + all_drives = self.storage.drives + self.assertIsInstance(all_drives, list) + self.assertEqual(4, len(all_drives)) + for drv in all_drives: + self.assertIsInstance(drv, drive.Drive) + + def test_drives_max_size_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_drives_max_size_bytes_after_refresh(self): + self.storage.refresh() + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.storage.drives_max_size_bytes) + + def test_volumes(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_volumes = self.storage.volumes + # | THEN | + self.assertIsInstance(actual_volumes, + volume.VolumeCollection) + self.conn.get.return_value.json.assert_called_once_with() + + def test_volumes_cached(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # invoke it once + actual_volumes = self.storage.volumes + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_volumes, + self.storage.volumes) + self.conn.get.return_value.json.assert_not_called() + + def 
test_volumes_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + vols = self.storage.volumes + self.assertIsInstance(vols, volume.VolumeCollection) + + # On refreshing the system instance... + with open('sushy/tests/unit/json_samples/storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.storage.invalidate() + self.storage.refresh(force=False) + + # | WHEN & THEN | + self.assertTrue(vols._is_stale) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/volume_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.storage.volumes, + volume.VolumeCollection) + + +class StorageCollectionTestCase(base.TestCase): + + def setUp(self): + super(StorageCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.stor_col = storage.StorageCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_col._parse_attributes(self.json_doc) + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/Storage/1',), + self.stor_col.members_identities) + + @mock.patch.object(storage, 'Storage', autospec=True) + def test_get_member(self, Storage_mock): + self.stor_col.get_member( + '/redfish/v1/Systems/437XR1138R2/Storage/1') + Storage_mock.assert_called_once_with( + self.stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1', + self.stor_col.redfish_version, None) + + @mock.patch.object(storage, 'Storage', autospec=True) + def test_get_members(self, Storage_mock): + members = self.stor_col.get_members() + Storage_mock.assert_called_once_with( + self.stor_col._conn, 
+ '/redfish/v1/Systems/437XR1138R2/Storage/1', + self.stor_col.redfish_version, None) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) + + def test_drives_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual([899527000000, 899527000000, 899527000000, + 899527000000], self.stor_col.drives_sizes_bytes) + + def test_max_drive_size_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_drive_size_bytes_after_refresh(self): + self.stor_col.refresh(force=False) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes) + + def test_volumes_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual([107374182400, 899527000000, 1073741824000], + self.stor_col.volumes_sizes_bytes) + + def test_max_volume_size_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. 
+ for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_volume_size_bytes_after_refresh(self): + self.stor_col.refresh(force=False) + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + with open('sushy/tests/unit/json_samples/storage.json') as f: + successive_return_values.append(json.load(f)) + # repeating the 3rd one to provide mock data for 4th iteration. + for fname in STORAGE_VOLUME_FILE_NAMES: + with open(fname) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_col.max_volume_size_bytes) diff --git a/sushy/tests/unit/resources/system/storage/test_volume.py b/sushy/tests/unit/resources/system/storage/test_volume.py new file mode 100644 index 0000000000000000000000000000000000000000..44170f08631d7c02f39d330663bbd5365559706e --- /dev/null +++ b/sushy/tests/unit/resources/system/storage/test_volume.py @@ -0,0 +1,382 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + +from dateutil import parser + +import sushy +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.system.storage import constants as store_cons +from sushy.resources.system.storage import volume +from sushy import taskmonitor +from sushy.tests.unit import base + + +class VolumeTestCase(base.TestCase): + + def setUp(self): + super(VolumeTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/volume.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.stor_volume = volume.Volume( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.stor_volume._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.stor_volume.redfish_version) + self.assertEqual('1', self.stor_volume.identity) + self.assertEqual('Virtual Disk 1', self.stor_volume.name) + self.assertEqual(899527000000, self.stor_volume.capacity_bytes) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, + self.stor_volume.volume_type) + self.assertFalse(self.stor_volume.encrypted) + identifiers = self.stor_volume.identifiers + self.assertIsInstance(identifiers, list) + self.assertEqual(1, len(identifiers)) + identifier = identifiers[0] + self.assertEqual(sushy.DURABLE_NAME_FORMAT_UUID, + identifier.durable_name_format) + self.assertEqual('38f1818b-111e-463a-aa19-fa54f792e468', + identifier.durable_name) + self.assertIsNone(self.stor_volume.block_size_bytes) + + def test_initialize_volume_immediate(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize_volume( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + 
'@Redfish.OperationApplyTime': 'Immediate'}, + blocking=True, timeout=500) + + def test_initialize_volume_on_reset(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize_volume( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'OnReset'}, + blocking=False, timeout=500) + + def test_initialize_volume_bad_value(self): + self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + 'The parameter.*lazy.*invalid', + self.stor_volume.initialize_volume, 'lazy') + + def test_initialize_immediate(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'Immediate'}, + blocking=True, timeout=500) + + def test_initialize_on_reset(self): + target_uri = '/redfish/v1/Systems/3/Storage/RAIDIntegrated/' \ + 'Volumes/1/Actions/Volume.Initialize' + self.stor_volume.initialize( + store_cons.VOLUME_INIT_TYPE_FAST, + apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_volume._conn.post.assert_called_once_with( + target_uri, data={'InitializeType': 'Fast', + '@Redfish.OperationApplyTime': 'OnReset'}, + blocking=False, timeout=500) + + def test_delete_volume(self): + self.stor_volume.delete_volume() + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=None, blocking=False, timeout=500) + + def test_delete_volume_with_payload(self): + payload = {'@Redfish.OperationApplyTime': 'Immediate'} + self.stor_volume.delete_volume(payload=payload) + self.stor_volume._conn.delete.assert_called_once_with( + 
self.stor_volume._path, data=payload, blocking=True, timeout=500) + + def test_delete_volume_immediate(self): + payload = {} + self.conn.delete.return_value.status_code = 200 + resource = self.stor_volume.delete_volume( + payload=payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=True, timeout=500) + self.assertIsNone(resource) + + def test_delete_volume_on_reset(self): + payload = {} + self.conn.delete.return_value.status_code = 202 + self.conn.delete.return_value.headers = { + 'location': '/redfish/v1/taskmon/4608f7e6', + 'retry-after': '120' + } + task_mon = self.stor_volume.delete_volume( + payload=payload, apply_time=res_cons.APPLY_TIME_ON_RESET, + timeout=250) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=False, timeout=250) + self.assertIsNotNone(task_mon) + self.assertEqual(task_mon.resource_name, 'task_monitor') + self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') + + def test_delete_immediate(self): + payload = {} + self.conn.delete.return_value.status_code = 200 + resource = self.stor_volume.delete( + payload=payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=True, timeout=500) + self.assertIsNone(resource) + + def test_delete_on_reset(self): + payload = {} + self.conn.delete.return_value.status_code = 202 + self.conn.delete.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'Retry-After': '120' + } + self.conn.delete.return_value.json.return_value = {'Id': 3, + 'Name': 'Test'} + task_mon = self.stor_volume.delete( + payload=payload, apply_time=res_cons.APPLY_TIME_ON_RESET, + timeout=250) + self.stor_volume._conn.delete.assert_called_once_with( + self.stor_volume._path, data=payload, blocking=False, timeout=250) + self.assertIsNotNone(task_mon) + 
self.assertIsInstance(task_mon, taskmonitor.TaskMonitor) + self.assertEqual(task_mon.task_monitor_uri, + '/redfish/v1/taskmon/4608f7e6') + + +class VolumeCollectionTestCase(base.TestCase): + + def setUp(self): + super(VolumeCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'volume_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.stor_vol_col = volume.VolumeCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + redfish_version='1.0.2') + self.stor_vol_col.refresh = mock.Mock() + + def test__parse_attributes(self): + self.stor_vol_col._parse_attributes(self.json_doc) + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3'), + self.stor_vol_col.members_identities) + + def test_operation_apply_time_support(self): + support = self.stor_vol_col.operation_apply_time_support + self.assertIsNotNone(support) + self.assertEqual(600, support.maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + support.maintenance_window_start_time) + self.assertEqual('/redfish/v1/Systems/437XR1138R2', + support._maintenance_window_resource.resource_uri) + self.assertEqual(['Immediate', 'OnReset', 'AtMaintenanceWindowStart'], + support.supported_values) + self.assertEqual([res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_START], + support.mapped_supported_values) + + @mock.patch.object(volume, 'Volume', autospec=True) + def test_get_member(self, Volume_mock): + self.stor_vol_col.get_member( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1') + Volume_mock.assert_called_once_with( + self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + self.stor_vol_col.redfish_version, None) + 
+ @mock.patch.object(volume, 'Volume', autospec=True) + def test_get_members(self, Volume_mock): + members = self.stor_vol_col.get_members() + calls = [ + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/1', + self.stor_vol_col.redfish_version, None), + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/2', + self.stor_vol_col.redfish_version, None), + mock.call(self.stor_vol_col._conn, + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/3', + self.stor_vol_col.redfish_version, None), + ] + Volume_mock.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(3, len(members)) + + def test_max_size_bytes(self): + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + file_names = ['sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(1073741824000, self.stor_vol_col.max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_size_bytes_after_refresh(self): + self.stor_vol_col.refresh() + self.conn.get.return_value.json.reset_mock() + + successive_return_values = [] + file_names = ['sushy/tests/unit/json_samples/volume.json', + 'sushy/tests/unit/json_samples/volume2.json', + 'sushy/tests/unit/json_samples/volume3.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) + self.conn.get.return_value.json.side_effect = successive_return_values + + self.assertEqual(1073741824000, 
self.stor_vol_col.max_size_bytes) + + def test_create_volume_immediate(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'Immediate' + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 201 + self.conn.post.return_value.headers.return_value = { + 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + } + new_vol = self.stor_vol_col.create_volume( + payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=True, timeout=500) + self.stor_vol_col.refresh.assert_called_once() + self.assertIsNotNone(new_vol) + self.assertEqual('4', new_vol.identity) + self.assertEqual('My Volume 4', new_vol.name) + self.assertEqual(107374182400, new_vol.capacity_bytes) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) + + def test_create_volume_on_reset(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'OnReset' + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'retry-after': '120' + } + task_mon = self.stor_vol_col.create_volume( + payload, apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + 
data=expected_payload, blocking=False, timeout=500) + self.assertIsNotNone(task_mon) + self.assertEqual(task_mon.resource_name, 'task_monitor') + self.assertEqual(task_mon.path, '/redfish/v1/taskmon/4608f7e6') + + def test_create_immediate(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'Immediate' + with open('sushy/tests/unit/json_samples/volume4.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.conn.post.return_value.status_code = 201 + self.conn.post.return_value.headers.return_value = { + 'Location': '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes/4' + } + new_vol = self.stor_vol_col.create( + payload, apply_time=res_cons.APPLY_TIME_IMMEDIATE) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=True, timeout=500) + self.stor_vol_col.refresh.assert_called_once() + self.assertIsNotNone(new_vol) + self.assertEqual('4', new_vol.identity) + self.assertEqual('My Volume 4', new_vol.name) + self.assertEqual(107374182400, new_vol.capacity_bytes) + self.assertEqual(sushy.VOLUME_TYPE_MIRRORED, new_vol.volume_type) + self.assertEqual(sushy.RAID_TYPE_RAID1, new_vol.raid_type) + + def test_create_on_reset(self): + payload = { + 'Name': 'My Volume 4', + 'VolumeType': 'Mirrored', + 'RAIDType': 'RAID1', + 'CapacityBytes': 107374182400 + } + expected_payload = dict(payload) + expected_payload['@Redfish.OperationApplyTime'] = 'OnReset' + with open('sushy/tests/unit/json_samples/task.json') as f: + self.conn.post.return_value.json.return_value = json.load(f) + + self.conn.post.return_value.content.return_value = "Something" + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = { + 'Location': '/redfish/v1/taskmon/4608f7e6', + 'Retry-After': '120' + } + 
task_mon = self.stor_vol_col.create( + payload, apply_time=res_cons.APPLY_TIME_ON_RESET) + self.stor_vol_col._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/Storage/1/Volumes', + data=expected_payload, blocking=False, timeout=500) + self.assertIsNotNone(task_mon) + self.assertIsInstance(task_mon, taskmonitor.TaskMonitor) + self.assertEqual(task_mon.task_monitor_uri, + '/redfish/v1/TaskService/Tasks/545') diff --git a/sushy/tests/unit/resources/system/test_bios.py b/sushy/tests/unit/resources/system/test_bios.py new file mode 100644 index 0000000000000000000000000000000000000000..7356a569ba2b38ae9c6ac425975d2850b622c634 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_bios.py @@ -0,0 +1,335 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import datetime +from http import client as http_client +import json +from unittest import mock + +from dateutil import parser + +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.registry import message_registry +from sushy.resources import settings +from sushy.resources.system import bios +from sushy.tests.unit import base + + +class BiosTestCase(base.TestCase): + + def setUp(self): + super(BiosTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/bios.json') as f: + self.bios_json = json.load(f) + with open('sushy/tests/unit/json_samples/bios_settings.json') as f: + self.bios_settings_json = json.load(f) + + self.conn.get.return_value.json.side_effect = [ + self.bios_json, + self.bios_settings_json, + self.bios_settings_json] + + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + + self.sys_bios = bios.Bios( + self.conn, '/redfish/v1/Systems/437XR1138R2/BIOS', + registries={'Test.1.0': registry}, + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_bios._parse_attributes(self.bios_json) + self.assertEqual('1.0.2', self.sys_bios.redfish_version) + self.assertEqual('BIOS', self.sys_bios.identity) + self.assertEqual('BIOS Configuration Current Settings', + self.sys_bios.name) + self.assertIsNone(self.sys_bios.description) + self.assertEqual('BiosAttributeRegistryP89.v1_0_0', + self.sys_bios._attribute_registry) + self.assertEqual('', self.sys_bios.attributes['AdminPhone']) + self.assertEqual('Uefi', self.sys_bios.attributes['BootMode']) + self.assertEqual(0, self.sys_bios.attributes['ProcCoreDisable']) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + self.sys_bios.supported_apply_times) + 
self.assertEqual(600, self.sys_bios.maintenance_window + .maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2020-09-01T04:30:00-06:00'), + self.sys_bios.maintenance_window + .maintenance_window_start_time) + # testing here if settings subfield parsed by checking ETag, + # other settings fields tested in specific settings test + self.assertEqual('9234ac83b9700123cc32', + self.sys_bios._settings._etag) + self.assertEqual('(404) 555-1212', + self.sys_bios.pending_attributes['AdminPhone']) + self.assertEqual(settings.UPDATE_FAILURE, + self.sys_bios.update_status.status) + + def test__parse_attributes_return(self): + attributes = self.sys_bios._parse_attributes(self.bios_json) + + # Test that various types are returned correctly + self.assertEqual('BIOS Configuration Current Settings', + attributes.get('name')) + self.assertEqual({'AdminPhone': '', + 'BootMode': 'Uefi', + 'EmbeddedSata': 'Raid', + 'NicBoot1': 'NetworkBoot', + 'NicBoot2': 'Disabled', + 'PowerProfile': 'MaxPerf', + 'ProcCoreDisable': 0, + 'ProcHyperthreading': 'Enabled', + 'ProcTurboMode': 'Enabled', + 'UsbControl': 'UsbEnabled'}, + attributes.get('attributes')) + self.assertEqual({'maintenance_window_duration_in_seconds': 600, + 'maintenance_window_start_time': + parser.parse('2020-09-01T04:30:00-06:00')}, + attributes.get('maintenance_window')) + + def test_set_attribute(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}}) + + def test_set_attribute_apply_time(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_ON_RESET) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 
'OnReset'}}) + + def test_set_attribute_apply_time_with_maintenance_window(self): + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled', + res_cons.APPLY_TIME_MAINT_RESET, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'InMaintenanceWindowOnReset', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + + def test_set_attribute_on_refresh(self): + self.conn.get.reset_mock() + # make it to instantiate pending attributes + self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + self.conn.get.reset_mock() + + self.sys_bios.pending_attributes + self.assertFalse(self.conn.get.called) + + self.sys_bios.set_attribute('ProcTurboMode', 'Disabled') + # make it to refresh pending attributes on next retrieval + self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + def test_set_attributes(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}}) + + def test_set_attributes_apply_time(self): + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_IMMEDIATE) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'Immediate'}}) + + def test_set_attributes_apply_time_with_maintenance_window(self): + 
self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime(2020, 9, 1, 4, 30, 0), + 600) + self.sys_bios._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + '@Redfish.SettingsApplyTime': { + '@odata.type': '#Settings.v1_0_0.PreferredApplyTime', + 'ApplyTime': 'AtMaintenanceWindowStart', + 'MaintenanceWindowStartTime': '2020-09-01T04:30:00', + 'MaintenanceWindowDurationInSeconds': 600}}) + + def test_set_attributes_apply_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + maint_window_start_time=datetime.datetime.now(), + maint_window_duration=600) + + def test_set_attributes_maint_window_start_time_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + maint_window_duration=600) + + def test_set_attributes_maint_window_duration_missing(self): + self.assertRaises(ValueError, + self.sys_bios.set_attributes, + {'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}, + res_cons.APPLY_TIME_MAINT_START, + datetime.datetime.now()) + + def test_set_attributes_on_refresh(self): + self.conn.get.reset_mock() + # make it to instantiate pending attributes + self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + self.conn.get.reset_mock() + + self.sys_bios.pending_attributes + self.assertFalse(self.conn.get.called) + + self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled', + 'UsbControl': 'UsbDisabled'}) + # make it to refresh pending attributes on next retrieval + self.sys_bios.pending_attributes + self.assertTrue(self.conn.get.called) + + def test_apply_time_settings(self): + self.conn.get.reset_mock() + 
apply_time_settings = self.sys_bios.apply_time_settings + self.assertIsNotNone(apply_time_settings) + self.assertEqual('OnReset', apply_time_settings.apply_time) + self.assertEqual(['OnReset', 'Immediate', 'AtMaintenanceWindowStart', + 'InMaintenanceWindowOnReset'], + apply_time_settings.apply_time_allowable_values) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + apply_time_settings.maintenance_window_start_time) + self.assertEqual(600, apply_time_settings. + maintenance_window_duration_in_seconds) + + def test__get_reset_bios_action_element(self): + value = self.sys_bios._get_reset_bios_action_element() + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Actions/' + 'Bios.ResetBios', + value.target_uri) + + def test_reset_bios_missing_action(self): + self.sys_bios._actions.reset_bios = None + self.assertRaisesRegex( + exceptions.MissingActionError, '#Bios.ResetBios', + self.sys_bios.reset_bios) + + def test__parse_attributes_missing_reset_bios_target(self): + self.sys_bios.json['Actions']['#Bios.ResetBios'].pop( + 'target') + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'attribute Actions/#Bios.ResetBios/target', + self.sys_bios._parse_attributes, self.bios_json) + + def test_reset_bios(self): + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + + def test_reset_bios_handle_http_error_415(self): + + target_uri = ( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.UNSUPPORTED_MEDIA_TYPE)), '200'] + post_calls = [ + mock.call(target_uri), mock.call(target_uri, data={})] + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_has_calls(post_calls) + + def test_reset_bios_handle_http_error_400(self): + + target_uri = ( + 
'/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = [exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.BAD_REQUEST)), '200'] + post_calls = [ + mock.call(target_uri), mock.call(target_uri, data={})] + self.sys_bios.reset_bios() + self.sys_bios._conn.post.assert_has_calls(post_calls) + + def test_reset_bios_handle_http_error_405(self): + + target_uri = ( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ResetBios') + self.conn.post.side_effect = exceptions.HTTPError( + method='POST', url=target_uri, response=mock.MagicMock( + status_code=http_client.METHOD_NOT_ALLOWED)) + self.assertRaises( + exceptions.HTTPError, + self.sys_bios.reset_bios) + self.sys_bios._conn.post.assert_called_once_with(target_uri) + + def test__get_change_password_element(self): + value = self.sys_bios._get_change_password_element() + self.assertEqual("/redfish/v1/Systems/437XR1138R2/BIOS/Actions/" + "Bios.ChangePassword", + value.target_uri) + + def test_change_password_missing_action(self): + self.sys_bios._actions.change_password = None + self.assertRaisesRegex( + exceptions.MissingActionError, '#Bios.ChangePassword', + self.sys_bios.change_password, 'newpassword', + 'oldpassword', + 'adminpassword') + + def test__parse_attributes_missing_change_password_target(self): + self.sys_bios.json['Actions']['#Bios.ChangePassword'].pop( + 'target') + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'attribute Actions/#Bios.ChangePassword/target', + self.sys_bios._parse_attributes, self.bios_json) + + def test_change_password(self): + self.sys_bios.change_password('newpassword', + 'oldpassword', + 'adminpassword') + self.sys_bios._conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Actions/Bios.ChangePassword', + data={'OldPassword': 'oldpassword', + 'NewPassword': 'newpassword', + 'PasswordName': 'adminpassword'}) diff --git 
a/sushy/tests/unit/resources/system/test_ethernet_interfaces.py b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py new file mode 100644 index 0000000000000000000000000000000000000000..ffcde96f72b517ed070240f907fb445d01953843 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_ethernet_interfaces.py @@ -0,0 +1,107 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +from sushy.resources import constants as res_cons +from sushy.resources.system import ethernet_interface +from sushy.tests.unit import base + + +class EthernetInterfaceTestCase(base.TestCase): + + def setUp(self): + super(EthernetInterfaceTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" + "12446A3B0411") + self.sys_eth = ethernet_interface.EthernetInterface( + self.conn, eth_path, redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_eth._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.sys_eth.redfish_version) + self.assertEqual('1', self.sys_eth.identity) + self.assertEqual('Ethernet Interface', self.sys_eth.name) + self.assertEqual('System NIC 1', self.sys_eth.description) + self.assertEqual( + '12:44:6A:3B:04:11', self.sys_eth.permanent_mac_address) + 
self.assertEqual('12:44:6A:3B:04:11', self.sys_eth.mac_address) + self.assertEqual(res_cons.STATE_ENABLED, self.sys_eth.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_eth.status.health) + self.assertEqual(1000, self.sys_eth.speed_mbps) + + +class EthernetInterfaceCollectionTestCase(base.TestCase): + + def setUp(self): + super(EthernetInterfaceCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.sys_eth_col = ethernet_interface.EthernetInterfaceCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.sys_eth_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.sys_eth_col.redfish_version) + self.assertEqual('Ethernet Interface Collection', + self.sys_eth_col.name) + eth_path = ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411',) + self.assertEqual(eth_path, self.sys_eth_col.members_identities) + + @mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True) + def test_get_member(self, mock_eth): + self.sys_eth_col.get_member( + '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411') + mock_eth.assert_called_once_with( + self.sys_eth_col._conn, + ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/' + '12446A3B0411'), + self.sys_eth_col.redfish_version, None) + + @mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True) + def test_get_members(self, mock_eth): + members = self.sys_eth_col.get_members() + eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/" + "12446A3B0411") + calls = [ + mock.call(self.sys_eth_col._conn, eth_path, + self.sys_eth_col.redfish_version, None), + ] + mock_eth.assert_has_calls(calls) + self.assertIsInstance(members, list) + 
self.assertEqual(1, len(members)) + + def test_summary(self): + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + expected_summary = {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED} + actual_summary = self.sys_eth_col.summary + self.assertEqual(expected_summary, actual_summary) diff --git a/sushy/tests/unit/resources/system/test_processor.py b/sushy/tests/unit/resources/system/test_processor.py index 849c6e3c2c74142345c41605026c1a282dc38c52..202e76311e82430628a2b86ac213d868a43d31b7 100644 --- a/sushy/tests/unit/resources/system/test_processor.py +++ b/sushy/tests/unit/resources/system/test_processor.py @@ -13,10 +13,11 @@ # under the License. import json +from unittest import mock -import mock import sushy +from sushy.resources import constants as res_cons from sushy.resources.system import processor from sushy.tests.unit import base @@ -26,29 +27,54 @@ class ProcessorTestCase(base.TestCase): def setUp(self): super(ProcessorTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/processor.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sys_processor = processor.Processor( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_processor._parse_attributes() + self.sys_processor._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_processor.redfish_version) self.assertEqual('CPU1', self.sys_processor.identity) self.assertEqual('CPU 1', self.sys_processor.socket) - self.assertEqual('CPU', self.sys_processor.processor_type) + self.assertEqual( + sushy.PROCESSOR_TYPE_CPU, + self.sys_processor.processor_type) 
self.assertEqual(sushy.PROCESSOR_ARCH_x86, self.sys_processor.processor_architecture) - self.assertEqual('x86-64', self.sys_processor.instruction_set) + self.assertEqual( + sushy.PROCESSOR_INSTRUCTIONSET_x86_64, + self.sys_processor.instruction_set) self.assertEqual('Intel(R) Corporation', self.sys_processor.manufacturer) self.assertEqual('Multi-Core Intel(R) Xeon(R) processor 7xxx Series', self.sys_processor.model) + self.assertEqual('0x42', + self.sys_processor.processor_id.effective_family) + self.assertEqual('0x61', + self.sys_processor.processor_id.effective_model) + self.assertEqual('0x34AC34DC8901274A', + self.sys_processor.processor_id. + identification_registers) + self.assertEqual('0x429943', + self.sys_processor.processor_id.microcode_info) + self.assertEqual('0x1', + self.sys_processor.processor_id.step) + self.assertEqual('GenuineIntel', + self.sys_processor.processor_id.vendor_id) + self.assertEqual(3700, self.sys_processor.max_speed_mhz) self.assertEqual(8, self.sys_processor.total_cores) self.assertEqual(16, self.sys_processor.total_threads) + self.assertEqual(res_cons.STATE_ENABLED, + self.sys_processor.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_processor.status.health) + self.assertEqual(res_cons.HEALTH_OK, + self.sys_processor.status.health_rollup) class ProcessorCollectionTestCase(base.TestCase): @@ -57,14 +83,17 @@ class ProcessorCollectionTestCase(base.TestCase): super(ProcessorCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'processor_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'processor_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_processor_col = processor.ProcessorCollection( self.conn, '/redfish/v1/Systems/437XR1138R2/Processors', redfish_version='1.0.2') def test__parse_attributes(self): - 
self.sys_processor_col._parse_attributes() + self.sys_processor_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_processor_col.redfish_version) self.assertEqual('Processors Collection', self.sys_processor_col.name) self.assertEqual(('/redfish/v1/Systems/437XR1138R2/Processors/CPU1', @@ -78,7 +107,7 @@ class ProcessorCollectionTestCase(base.TestCase): mock_processor.assert_called_once_with( self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', - redfish_version=self.sys_processor_col.redfish_version) + self.sys_processor_col.redfish_version, None) @mock.patch.object(processor, 'Processor', autospec=True) def test_get_members(self, mock_processor): @@ -86,10 +115,10 @@ class ProcessorCollectionTestCase(base.TestCase): calls = [ mock.call(self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU1', - redfish_version=self.sys_processor_col.redfish_version), + self.sys_processor_col.redfish_version, None), mock.call(self.sys_processor_col._conn, '/redfish/v1/Systems/437XR1138R2/Processors/CPU2', - redfish_version=self.sys_processor_col.redfish_version) + self.sys_processor_col.redfish_version, None) ] mock_processor.assert_has_calls(calls) self.assertIsInstance(members, list) @@ -98,16 +127,15 @@ class ProcessorCollectionTestCase(base.TestCase): def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() successive_return_values = [] - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/processor2.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/processor.json', + 'sushy/tests/unit/json_samples/processor2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values def 
test_summary(self): - # check for the underneath variable value - self.assertIsNone(self.sys_processor_col._summary) # | GIVEN | self._setUp_processor_summary() # | WHEN | @@ -138,12 +166,10 @@ class ProcessorCollectionTestCase(base.TestCase): self.conn.get.return_value.json.side_effect = None # On refreshing the sys_processor_col instance... with open('sushy/tests/unit/json_samples/' - 'processor_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) - self.sys_processor_col.refresh() - - # | WHEN & THEN | - self.assertIsNone(self.sys_processor_col._summary) + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.sys_processor_col.invalidate() + self.sys_processor_col.refresh(force=False) # | GIVEN | self._setUp_processor_summary() diff --git a/sushy/tests/unit/resources/system/test_secure_boot.py b/sushy/tests/unit/resources/system/test_secure_boot.py new file mode 100644 index 0000000000000000000000000000000000000000..b189e72c933116bb80d6f18519a94078119c05fc --- /dev/null +++ b/sushy/tests/unit/resources/system/test_secure_boot.py @@ -0,0 +1,112 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.system import constants +from sushy.resources.system import secure_boot +from sushy.resources.system import secure_boot_database +from sushy.tests.unit import base + + +class SecureBootTestCase(base.TestCase): + + def setUp(self): + super(SecureBootTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/secure_boot.json') as f: + self.secure_boot_json = json.load(f) + + self.conn.get.return_value.json.return_value = self.secure_boot_json + self.secure_boot = secure_boot.SecureBoot( + self.conn, '/redfish/v1/Systems/437XR1138R2/SecureBoot', + registries={}, redfish_version='1.1.0') + + def test__parse_attributes(self): + self.secure_boot._parse_attributes(self.secure_boot_json) + self.assertEqual('1.1.0', self.secure_boot.redfish_version) + self.assertEqual('SecureBoot', self.secure_boot.identity) + self.assertEqual('UEFI Secure Boot', self.secure_boot.name) + self.assertIsNone(self.secure_boot.description) + self.assertIs(False, self.secure_boot.enabled) + self.assertEqual(constants.SECURE_BOOT_DISABLED, + self.secure_boot.current_boot) + self.assertEqual(constants.SECURE_BOOT_MODE_DEPLOYED, + self.secure_boot.mode) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values(self, mock_log): + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + constants.SECURE_BOOT_RESET_KEYS_DELETE_PK}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_no_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = None + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL, + constants.SECURE_BOOT_RESET_KEYS_DELETE_PK}, + 
self.secure_boot.get_allowed_reset_keys_values()) + self.assertTrue(mock_log.called) + + @mock.patch.object(secure_boot.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_custom_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = [ + 'ResetAllKeysToDefault', + 'IamNotRedfishCompatible', + ] + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + def test_set_enabled(self): + self.secure_boot.set_enabled(True) + self.conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot', + data={'SecureBootEnable': True}) + + def test_set_enabled_wrong_type(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.set_enabled, 'banana') + + def test_reset_keys(self): + self.secure_boot.reset_keys( + constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT) + self.conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/Actions/SecureBoot.ResetKeys', + data={'ResetKeysType': 'ResetAllKeysToDefault'}) + + def test_reset_keys_wrong_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.reset_keys, 'DeleteEverything') + + def test_databases(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + result = self.secure_boot.databases + self.assertIsInstance( + result, secure_boot_database.SecureBootDatabaseCollection) + self.conn.get.return_value.json.assert_called_once_with() + + self.conn.get.return_value.json.reset_mock() + + self.assertIs(result, self.secure_boot.databases) + self.conn.get.return_value.json.assert_not_called() diff --git a/sushy/tests/unit/resources/system/test_secure_boot_database.py b/sushy/tests/unit/resources/system/test_secure_boot_database.py new file 
mode 100644 index 0000000000000000000000000000000000000000..714c8efff400be931a7decde812833f2f12eb8c6 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_secure_boot_database.py @@ -0,0 +1,138 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources.system import constants +from sushy.resources.system import secure_boot_database +from sushy.tests.unit import base + + +class SecureBootDatabaseTestCase(base.TestCase): + + def setUp(self): + super(SecureBootDatabaseTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database.json') as f: + self.secure_boot_json = json.load(f) + + self.conn.get.return_value.json.return_value = self.secure_boot_json + self.secure_boot = secure_boot_database.SecureBootDatabase( + self.conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db', + registries={}, redfish_version='1.0.0') + + def test__parse_attributes(self): + self.secure_boot._parse_attributes(self.secure_boot_json) + self.assertEqual('1.0.0', self.secure_boot.redfish_version) + self.assertEqual('db', self.secure_boot.identity) + self.assertEqual('db - Authorized Signature Database', + self.secure_boot.name) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values(self, mock_log): + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + 
constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_no_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = None + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT, + constants.SECURE_BOOT_RESET_KEYS_DELETE_ALL}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertTrue(mock_log.called) + + @mock.patch.object(secure_boot_database.LOG, 'warning', autospec=True) + def test_get_allowed_reset_keys_values_custom_values(self, mock_log): + self.secure_boot._actions.reset_keys.allowed_values = [ + 'ResetAllKeysToDefault', + 'IamNotRedfishCompatible', + ] + self.assertEqual({constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT}, + self.secure_boot.get_allowed_reset_keys_values()) + self.assertFalse(mock_log.called) + + def test_reset_keys(self): + self.secure_boot.reset_keys( + constants.SECURE_BOOT_RESET_KEYS_TO_DEFAULT) + self.conn.post.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/db' + '/Actions/SecureBootDatabase.ResetKeys', + data={'ResetKeysType': 'ResetAllKeysToDefault'}) + + def test_reset_keys_wrong_value(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.secure_boot.reset_keys, 'DeleteEverything') + + +class SecureBootDatabaseCollectionTestCase(base.TestCase): + + def setUp(self): + super(SecureBootDatabaseCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'secure_boot_database_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.collection = secure_boot_database.SecureBootDatabaseCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/SecureBootDatabases', + redfish_version='1.0.0') + + def test__parse_attributes(self): + 
self.collection._parse_attributes(self.json_doc) + self.assertEqual('1.0.0', self.collection.redfish_version) + self.assertEqual('UEFI SecureBoot Database Collection', + self.collection.name) + self.assertEqual(tuple( + '/redfish/v1/Systems/437XR1138R2/SecureBoot/SecureBootDatabases/' + + member + for member in ('PK', 'KEK', 'db', 'dbx', + 'PKDefault', 'KEKDefault', + 'dbDefault', 'dbxDefault') + ), self.collection.members_identities) + + @mock.patch.object(secure_boot_database, 'SecureBootDatabase', + autospec=True) + def test_get_member(self, mock_secure_boot_database): + self.collection.get_member( + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db') + mock_secure_boot_database.assert_called_once_with( + self.collection._conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/db', + self.collection.redfish_version, None) + + @mock.patch.object(secure_boot_database, 'SecureBootDatabase', + autospec=True) + def test_get_members(self, mock_secure_boot_database): + members = self.collection.get_members() + calls = [ + mock.call(self.collection._conn, + '/redfish/v1/Systems/437XR1138R2/SecureBoot' + '/SecureBootDatabases/%s' % member, + self.collection.redfish_version, None) + for member in ('PK', 'KEK', 'db', 'dbx', + 'PKDefault', 'KEKDefault', + 'dbDefault', 'dbxDefault') + ] + mock_secure_boot_database.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(8, len(members)) diff --git a/sushy/tests/unit/resources/system/test_simple_storage.py b/sushy/tests/unit/resources/system/test_simple_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..f4e5c1ab6b3ff6b28caf759d020a030081a23ff0 --- /dev/null +++ b/sushy/tests/unit/resources/system/test_simple_storage.py @@ -0,0 +1,140 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + + +from sushy.resources import constants as res_cons +from sushy.resources.system import simple_storage +from sushy.tests.unit import base + + +class SimpleStorageTestCase(base.TestCase): + + def setUp(self): + super(SimpleStorageTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.simpl_stor = simple_storage.SimpleStorage( + self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.simpl_stor._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.simpl_stor.redfish_version) + self.assertEqual('1', self.simpl_stor.identity) + self.assertEqual('Simple Storage Controller', self.simpl_stor.name) + self.assertEqual(8000000000000, + self.simpl_stor.devices[0].capacity_bytes) + self.assertEqual(4000000000000, + self.simpl_stor.devices[1].capacity_bytes) + self.assertEqual(res_cons.STATE_ENABLED, + self.simpl_stor.devices[0].status.state) + self.assertEqual(res_cons.STATE_ABSENT, + self.simpl_stor.devices[2].status.state) + self.assertEqual(res_cons.HEALTH_OK, + self.simpl_stor.devices[0].status.health) + + +class SimpleStorageCollectionTestCase(base.TestCase): + + def setUp(self): + super(SimpleStorageCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + + 
self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.simpl_stor_col = simple_storage.SimpleStorageCollection( + self.conn, '/redfish/v1/Systems/437XR1138R2/SimpleStorage', + redfish_version='1.0.2') + + def test__parse_attributes(self): + self.simpl_stor_col._parse_attributes(self.json_doc) + self.assertEqual(( + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1',), + self.simpl_stor_col.members_identities) + + @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) + def test_get_member(self, SimpleStorage_mock): + self.simpl_stor_col.get_member( + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1') + SimpleStorage_mock.assert_called_once_with( + self.simpl_stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + self.simpl_stor_col.redfish_version, None) + + @mock.patch.object(simple_storage, 'SimpleStorage', autospec=True) + def test_get_members(self, SimpleStorage_mock): + members = self.simpl_stor_col.get_members() + SimpleStorage_mock.assert_called_once_with( + self.simpl_stor_col._conn, + '/redfish/v1/Systems/437XR1138R2/SimpleStorage/1', + self.simpl_stor_col.redfish_version, None) + self.assertIsInstance(members, list) + self.assertEqual(1, len(members)) + + def test_disks_sizes_bytes(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.assertEqual([4000000000000, 8000000000000], + self.simpl_stor_col.disks_sizes_bytes) + + def test_disks_sizes_bytes_capacity_bytes_none(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + json_doc = json.load(f) + + json_doc['Devices'][0]['CapacityBytes'] = None + self.conn.get.return_value.json.return_value = json_doc + + self.assertEqual([4000000000000], + self.simpl_stor_col.disks_sizes_bytes) + + def 
test_max_size_bytes(self): + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) + + # for any subsequent fetching it gets it from the cached value + self.conn.get.return_value.json.reset_mock() + self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) + self.conn.get.return_value.json.assert_not_called() + + def test_max_size_bytes_after_refresh(self): + self.simpl_stor_col.refresh() + self.conn.get.return_value.json.reset_mock() + + with open('sushy/tests/unit/json_samples/' + 'simple_storage.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.assertEqual(8000000000000, self.simpl_stor_col.max_size_bytes) diff --git a/sushy/tests/unit/resources/system/test_system.py b/sushy/tests/unit/resources/system/test_system.py index 2bf879e0501cb1d90027160b76c706584ae09120..f9db8b4d5fa17a0348c341d59a0fab159e7a8dfd 100644 --- a/sushy/tests/unit/resources/system/test_system.py +++ b/sushy/tests/unit/resources/system/test_system.py @@ -14,12 +14,21 @@ # under the License. 
import json +from unittest import mock -import mock +from dateutil import parser import sushy from sushy import exceptions +from sushy.resources.chassis import chassis +from sushy.resources import constants as res_cons +from sushy.resources.manager import manager +from sushy.resources.oem import fake +from sushy.resources.system import bios +from sushy.resources.system import mappings as sys_map from sushy.resources.system import processor +from sushy.resources.system import secure_boot +from sushy.resources.system import simple_storage from sushy.resources.system import system from sushy.tests.unit import base @@ -29,47 +38,98 @@ class SystemTestCase(base.TestCase): def setUp(self): super(SystemTestCase, self).setUp() self.conn = mock.Mock() - with open('sushy/tests/unit/json_samples/system.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/system.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc self.sys_inst = system.System( self.conn, '/redfish/v1/Systems/437XR1138R2', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_inst.redfish_version) self.assertEqual('Chicago-45Z-2381', self.sys_inst.asset_tag) - self.assertEqual('P79 v1.33 (02/28/2015)', self.sys_inst.bios_version) + self.assertEqual('P79 v1.45 (12/06/2017)', self.sys_inst.bios_version) self.assertEqual('Web Front End node', self.sys_inst.description) self.assertEqual('web483', self.sys_inst.hostname) self.assertEqual('437XR1138R2', self.sys_inst.identity) - self.assertEqual('Off', self.sys_inst.indicator_led) + self.assertEqual(sushy.INDICATOR_LED_OFF, + self.sys_inst.indicator_led) self.assertEqual('Contoso', self.sys_inst.manufacturer) self.assertEqual('WebFrontEnd483', self.sys_inst.name) self.assertEqual('224071-J23', 
self.sys_inst.part_number) self.assertEqual('437XR1138R2', self.sys_inst.serial_number) self.assertEqual('8675309', self.sys_inst.sku) - self.assertEqual('Physical', self.sys_inst.system_type) + self.assertEqual(sushy.SYSTEM_TYPE_PHYSICAL, + self.sys_inst.system_type) self.assertEqual('38947555-7742-3448-3784-823347823834', self.sys_inst.uuid) + self.assertEqual(res_cons.STATE_ENABLED, self.sys_inst.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.sys_inst.status.health) + self.assertEqual(res_cons.HEALTH_OK, + self.sys_inst.status.health_rollup) self.assertEqual(sushy.SYSTEM_POWER_STATE_ON, self.sys_inst.power_state) self.assertEqual(96, self.sys_inst.memory_summary.size_gib) self.assertEqual("OK", self.sys_inst.memory_summary.health) - self.assertIsNone(self.sys_inst._processors) + self.assertIsNotNone(self.sys_inst.maintenance_window) + self.assertEqual(1, self.sys_inst.maintenance_window + .maintenance_window_duration_in_seconds) + self.assertEqual(parser.parse('2016-03-07T14:44:30-05:05'), + self.sys_inst.maintenance_window + .maintenance_window_start_time) + for oem_vendor in self.sys_inst.oem_vendors: + self.assertIn(oem_vendor, ('Contoso', 'Chipwise')) + + def test__parse_attributes_return(self): + attributes = self.sys_inst._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('Chicago-45Z-2381', attributes.get('asset_tag')) + self.assertEqual(sushy.INDICATOR_LED_OFF, + attributes.get('indicator_led')) + self.assertEqual({'health': res_cons.HEALTH_OK, + 'health_rollup': res_cons.HEALTH_OK, + 'state': res_cons.STATE_ENABLED}, + attributes.get('status')) + self.assertEqual({'maintenance_window_duration_in_seconds': 1, + 'maintenance_window_start_time': + parser.parse('2016-03-07T14:44:30-05:05')}, + attributes.get('maintenance_window')) + self.assertEqual({'reset': {'allowed_values': + ['On', 'ForceOff', 'GracefulShutdown', + 'GracefulRestart', 'ForceRestart', 'Nmi', + 'ForceOn', 'PushPowerButton'], 
+ 'operation_apply_time_support': + {'_maintenance_window_resource': + {'resource_uri': + '/redfish/v1/Systems/437XR1138R2'}, + 'maintenance_window_duration_in_seconds': 600, + 'maintenance_window_start_time': + parser.parse('2017-05-03T23:12:37-05:00'), + 'supported_values': + ['Immediate', 'AtMaintenanceWindowStart'], + 'mapped_supported_values': + [res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_MAINT_START]}, + 'target_uri': + '/redfish/v1/Systems/437XR1138R2/Actions/' + 'ComputerSystem.Reset'}}, + attributes.get('_actions')) def test__parse_attributes_missing_actions(self): self.sys_inst.json.pop('Actions') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test__parse_attributes_missing_boot(self): self.sys_inst.json.pop('Boot') self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Boot', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) def test__parse_attributes_missing_reset_target(self): self.sys_inst.json['Actions']['#ComputerSystem.Reset'].pop( @@ -77,7 +137,20 @@ class SystemTestCase(base.TestCase): self.assertRaisesRegex( exceptions.MissingAttributeError, 'attribute Actions/#ComputerSystem.Reset/target', - self.sys_inst._parse_attributes) + self.sys_inst._parse_attributes, self.json_doc) + + def test__parse_attributes_null_memory_capacity(self): + self.sys_inst.json['MemorySummary']['TotalSystemMemoryGiB'] = None + self.sys_inst._parse_attributes(self.json_doc) + self.assertIsNone(self.sys_inst.memory_summary.size_gib) + + def test__parse_attributes_bad_maintenance_window_time(self): + self.sys_inst.json['@Redfish.MaintenanceWindow'][ + 'MaintenanceWindowStartTime'] = 'bad date' + self.assertRaisesRegex( + exceptions.MalformedAttributeError, + '@Redfish.MaintenanceWindow/MaintenanceWindowStartTime', + self.sys_inst._parse_attributes, self.json_doc) def 
test_get__reset_action_element(self): value = self.sys_inst._get_reset_action_element() @@ -90,7 +163,8 @@ class SystemTestCase(base.TestCase): "GracefulRestart", "ForceRestart", "Nmi", - "ForceOn" + "ForceOn", + "PushPowerButton" ], value.allowed_values) @@ -108,7 +182,8 @@ class SystemTestCase(base.TestCase): sushy.RESET_FORCE_OFF, sushy.RESET_FORCE_ON, sushy.RESET_ON, - sushy.RESET_NMI]) + sushy.RESET_NMI, + sushy.RESET_PUSH_POWER_BUTTON]) self.assertEqual(expected, values) self.assertIsInstance(values, set) @@ -130,6 +205,20 @@ class SystemTestCase(base.TestCase): self.assertIsInstance(values, set) self.assertEqual(1, mock_log.call_count) + def test_reset_action_operation_apply_time_support(self): + support = self.sys_inst._actions.reset.operation_apply_time_support + self.assertIsNotNone(support) + self.assertEqual(['Immediate', 'AtMaintenanceWindowStart'], + support.supported_values) + self.assertEqual([res_cons.APPLY_TIME_IMMEDIATE, + res_cons.APPLY_TIME_MAINT_START], + support.mapped_supported_values) + self.assertEqual(parser.parse('2017-05-03T23:12:37-05:00'), + support.maintenance_window_start_time) + self.assertEqual(600, support.maintenance_window_duration_in_seconds) + self.assertEqual('/redfish/v1/Systems/437XR1138R2', + support._maintenance_window_resource.resource_uri) + def test_reset_system(self): self.sys_inst.reset_system(sushy.RESET_FORCE_OFF) self.sys_inst._conn.post.assert_called_once_with( @@ -178,6 +267,64 @@ class SystemTestCase(base.TestCase): self.assertIsInstance(values, set) self.assertEqual(1, mock_log.call_count) + def test_set_system_boot_options(self): + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_PXE, + enabled=sushy.BOOT_SOURCE_ENABLED_CONTINUOUS, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Continuous', + 'BootSourceOverrideTarget': 'Pxe', + 'BootSourceOverrideMode': 'UEFI'}}) + + 
def test_set_system_boot_options_no_mode_specified(self): + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_HDD, + enabled=sushy.BOOT_SOURCE_ENABLED_ONCE) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Once', + 'BootSourceOverrideTarget': 'Hdd'}}) + + def test_set_system_boot_options_no_target_specified(self): + self.sys_inst.set_system_boot_options( + enabled=sushy.BOOT_SOURCE_ENABLED_CONTINUOUS, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideEnabled': 'Continuous', + 'BootSourceOverrideMode': 'UEFI'}}) + + def test_set_system_boot_options_no_freq_specified(self): + self.sys_inst.set_system_boot_options( + target=sushy.BOOT_SOURCE_TARGET_PXE, + mode=sushy.BOOT_SOURCE_MODE_UEFI) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'Boot': {'BootSourceOverrideTarget': 'Pxe', + 'BootSourceOverrideMode': 'UEFI'}}) + + def test_set_system_boot_options_nothing_specified(self): + self.sys_inst.set_system_boot_options() + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', data={}) + + def test_set_system_boot_options_invalid_target(self): + self.assertRaises(exceptions.InvalidParameterValueError, + self.sys_inst.set_system_boot_source, + 'invalid-target') + + def test_set_system_boot_options_invalid_enabled(self): + with self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + '"enabled" value.*{0}'.format( + list(sys_map.BOOT_SOURCE_ENABLED_MAP_REV))): + + self.sys_inst.set_system_boot_options( + sushy.BOOT_SOURCE_TARGET_HDD, + enabled='invalid-enabled') + def test_set_system_boot_source(self): self.sys_inst.set_system_boot_source( sushy.BOOT_SOURCE_TARGET_PXE, @@ -204,10 +351,29 @@ class SystemTestCase(base.TestCase): 'invalid-target') def 
test_set_system_boot_source_invalid_enabled(self): + with self.assertRaisesRegex( + exceptions.InvalidParameterValueError, + '"enabled" value.*{0}'.format( + list(sys_map.BOOT_SOURCE_ENABLED_MAP_REV))): + + self.sys_inst.set_system_boot_source( + sushy.BOOT_SOURCE_TARGET_HDD, + enabled='invalid-enabled') + + def test_set_indicator_led(self): + with mock.patch.object( + self.sys_inst, 'invalidate', autospec=True) as invalidate_mock: + self.sys_inst.set_indicator_led(sushy.INDICATOR_LED_BLINKING) + self.sys_inst._conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2', + data={'IndicatorLED': 'Blinking'}) + + invalidate_mock.assert_called_once_with() + + def test_set_indicator_led_invalid_state(self): self.assertRaises(exceptions.InvalidParameterValueError, - self.sys_inst.set_system_boot_source, - sushy.BOOT_SOURCE_TARGET_HDD, - enabled='invalid-enabled') + self.sys_inst.set_indicator_led, + 'spooky-glowing') def test__get_processor_collection_path_missing_processors_attr(self): self.sys_inst._json.pop('Processors') @@ -219,42 +385,40 @@ class SystemTestCase(base.TestCase): # | GIVEN | self.sys_inst._json['MemorySummary']['Status'].pop('HealthRollup') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json['MemorySummary'].pop('Status') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | self.assertEqual(96, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json['MemorySummary'].pop('TotalSystemMemoryGiB') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | 
THEN | - self.assertEqual(None, self.sys_inst.memory_summary.size_gib) - self.assertEqual(None, self.sys_inst.memory_summary.health) + self.assertIsNone(self.sys_inst.memory_summary.size_gib) + self.assertIsNone(self.sys_inst.memory_summary.health) # | GIVEN | self.sys_inst._json.pop('MemorySummary') # | WHEN | - self.sys_inst._parse_attributes() + self.sys_inst._parse_attributes(self.json_doc) # | THEN | - self.assertEqual(None, self.sys_inst.memory_summary) + self.assertIsNone(self.sys_inst.memory_summary) def test_processors(self): - # check for the underneath variable value - self.assertIsNone(self.sys_inst._processors) # | GIVEN | self.conn.get.return_value.json.reset_mock() - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN | actual_processors = self.sys_inst.processors # | THEN | @@ -272,43 +436,43 @@ class SystemTestCase(base.TestCase): def test_processors_on_refresh(self): # | GIVEN | - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) # On refreshing the system instance... 
- with open('sushy/tests/unit/json_samples/system.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) - self.sys_inst.refresh() + with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) - # | WHEN & THEN | - self.assertIsNone(self.sys_inst._processors) + self.sys_inst.invalidate() + self.sys_inst.refresh(force=False) # | GIVEN | - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # | WHEN & THEN | self.assertIsInstance(self.sys_inst.processors, processor.ProcessorCollection) def _setUp_processor_summary(self): self.conn.get.return_value.json.reset_mock() - with open('sushy/tests/unit/json_samples/processor_collection.json', - 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + with open('sushy/tests/unit/json_samples/' + 'processor_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) # fetch processors for the first time self.sys_inst.processors successive_return_values = [] - with open('sushy/tests/unit/json_samples/processor.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) - with open('sushy/tests/unit/json_samples/processor2.json', 'r') as f: - successive_return_values.append(json.loads(f.read())) + file_names = ['sushy/tests/unit/json_samples/processor.json', + 'sushy/tests/unit/json_samples/processor2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) self.conn.get.return_value.json.side_effect = successive_return_values @@ -333,6 +497,179 @@ class SystemTestCase(base.TestCase): self.sys_inst.processors.summary) self.conn.get.return_value.json.assert_not_called() + 
def test_ethernet_interfaces(self): + self.conn.get.return_value.json.reset_mock() + eth_coll_return_value = None + eth_return_value = None + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces_collection.json') as f: + eth_coll_return_value = json.load(f) + with open('sushy/tests/unit/json_samples/' + 'ethernet_interfaces.json') as f: + eth_return_value = json.load(f) + + self.conn.get.return_value.json.side_effect = [eth_coll_return_value, + eth_return_value] + + actual_macs = self.sys_inst.ethernet_interfaces.summary + expected_macs = ( + {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED}) + self.assertEqual(expected_macs, actual_macs) + + def test_bios(self): + self.conn.get.return_value.json.reset_mock() + bios_return_value = None + with open('sushy/tests/unit/json_samples/bios.json') as f: + bios_return_value = json.load(f) + self.conn.get.return_value.json.side_effect = [bios_return_value] + + self.assertIsInstance(self.sys_inst.bios, bios.Bios) + self.assertEqual('BIOS Configuration Current Settings', + self.sys_inst.bios.name) + + def test_secure_boot(self): + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/secure_boot.json') as f: + self.conn.get.return_value.json.side_effect = [json.load(f)] + + self.assertIsInstance(self.sys_inst.secure_boot, + secure_boot.SecureBoot) + self.assertEqual('UEFI Secure Boot', self.sys_inst.secure_boot.name) + + def test_simple_storage_for_missing_attr(self): + self.sys_inst.json.pop('SimpleStorage') + with self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute SimpleStorage'): + self.sys_inst.simple_storage + + def test_simple_storage(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_simple_storage = self.sys_inst.simple_storage + # | THEN | + 
self.assertIsInstance(actual_simple_storage, + simple_storage.SimpleStorageCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_simple_storage, + self.sys_inst.simple_storage) + self.conn.get.return_value.json.assert_not_called() + + def test_simple_storage_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + + # On refreshing the system instance... + with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.sys_inst.invalidate() + self.sys_inst.refresh(force=False) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'simple_storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + + def test_storage_for_missing_attr(self): + self.sys_inst.json.pop('SimpleStorage') + with self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Storage'): + self.sys_inst.storage + + def test_storage(self): + # | GIVEN | + self.conn.get.return_value.json.reset_mock() + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN | + actual_storage = self.sys_inst.simple_storage + # | THEN | + self.assertIsInstance(actual_storage, + simple_storage.SimpleStorageCollection) + self.conn.get.return_value.json.assert_called_once_with() + + # reset mock + self.conn.get.return_value.json.reset_mock() + # | WHEN & THEN | + # tests for same 
object on invoking subsequently + self.assertIs(actual_storage, self.sys_inst.simple_storage) + self.conn.get.return_value.json.assert_not_called() + + def test_storage_on_refresh(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + + # On refreshing the system instance... + with open('sushy/tests/unit/json_samples/system.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + self.sys_inst.invalidate() + self.sys_inst.refresh(force=False) + + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'storage_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + # | WHEN & THEN | + self.assertIsInstance(self.sys_inst.simple_storage, + simple_storage.SimpleStorageCollection) + + def test_managers(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'manager.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_managers = self.sys_inst.managers + self.assertIsInstance(actual_managers[0], manager.Manager) + self.assertEqual( + '/redfish/v1/Managers/BMC', actual_managers[0].path) + + def test_chassis(self): + # | GIVEN | + with open('sushy/tests/unit/json_samples/' + 'chassis.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + + # | WHEN & THEN | + actual_chassis = self.sys_inst.chassis + self.assertIsInstance(actual_chassis[0], chassis.Chassis) + self.assertEqual( + '/redfish/v1/Chassis/1U', actual_chassis[0].path) + + def test_get_oem_extension(self): + # | WHEN | + contoso_system_extn_inst = self.sys_inst.get_oem_extension('Contoso') + # | THEN | + self.assertIsInstance(contoso_system_extn_inst, + fake.FakeOEMSystemExtension) + self.assertIs(self.sys_inst, 
contoso_system_extn_inst._parent_resource) + self.assertEqual('Contoso', contoso_system_extn_inst._vendor_id) + class SystemCollectionTestCase(base.TestCase): @@ -340,13 +677,16 @@ class SystemCollectionTestCase(base.TestCase): super(SystemCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('sushy/tests/unit/json_samples/' - 'system_collection.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) + 'system_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.sys_col = system.SystemCollection( self.conn, '/redfish/v1/Systems', redfish_version='1.0.2') def test__parse_attributes(self): - self.sys_col._parse_attributes() + self.sys_col._parse_attributes(self.json_doc) self.assertEqual('1.0.2', self.sys_col.redfish_version) self.assertEqual('Computer System Collection', self.sys_col.name) self.assertEqual(('/redfish/v1/Systems/437XR1138R2',), @@ -357,13 +697,13 @@ class SystemCollectionTestCase(base.TestCase): self.sys_col.get_member('/redfish/v1/Systems/437XR1138R2') mock_system.assert_called_once_with( self.sys_col._conn, '/redfish/v1/Systems/437XR1138R2', - redfish_version=self.sys_col.redfish_version) + self.sys_col.redfish_version, None) @mock.patch.object(system, 'System', autospec=True) def test_get_members(self, mock_system): members = self.sys_col.get_members() mock_system.assert_called_once_with( self.sys_col._conn, '/redfish/v1/Systems/437XR1138R2', - redfish_version=self.sys_col.redfish_version) + self.sys_col.redfish_version, None) self.assertIsInstance(members, list) self.assertEqual(1, len(members)) diff --git a/sushy/tests/unit/resources/taskservice/__init__.py b/sushy/tests/unit/resources/taskservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/taskservice/test_task.py 
b/sushy/tests/unit/resources/taskservice/test_task.py new file mode 100644 index 0000000000000000000000000000000000000000..ecf568c31543ab5ace51a7d0839d2978673163e0 --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_task.py @@ -0,0 +1,172 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from http import client as http_client +import json +from unittest import mock + +from sushy.resources import constants as res_cons +from sushy.resources.taskservice import task +from sushy.tests.unit import base + + +class TaskTestCase(base.TestCase): + + def setUp(self): + super(TaskTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + message_registry = mock.Mock() + message = mock.Mock() + message.message = "Property %1 is read only." 
+ message.number_of_args = 1 + message_registry.messages = {"PropertyNotWriteable": message} + + self.task = task.Task( + self.conn, '/redfish/v1/TaskService/Tasks/545', + redfish_version='1.4.3', + registries={'Base.1.0': message_registry}) + + def test__parse_attributes(self): + self.task._parse_attributes(self.json_doc) + self.assertEqual('545', self.task.identity) + self.assertEqual('Task 545', self.task.name) + self.assertEqual('Task description', self.task.description) + self.assertEqual('/taskmon/545', self.task.task_monitor) + self.assertEqual('2012-03-07T14:44+06:00', self.task.start_time) + self.assertEqual('2012-03-07T14:45+06:00', self.task.end_time) + self.assertEqual(100, self.task.percent_complete) + self.assertEqual(res_cons.TASK_STATE_COMPLETED, self.task.task_state) + self.assertEqual(res_cons.HEALTH_OK, self.task.task_status) + self.assertEqual(1, len(self.task.messages)) + self.assertEqual('Base.1.0.PropertyNotWriteable', + self.task.messages[0].message_id) + self.assertEqual('Property %1 is read only.', + self.task.messages[0].message) + self.assertEqual(res_cons.SEVERITY_WARNING, + self.task.messages[0].severity) + + def test_is_processing_true(self): + self.task.status_code = http_client.ACCEPTED + self.assertTrue(self.task.is_processing) + + def test_is_processing_false(self): + self.task.status_code = http_client.OK + self.assertFalse(self.task.is_processing) + + def test_parse_messages(self): + self.task.parse_messages() + self.assertEqual('Property SKU is read only.', + self.task.messages[0].message) + + +class TaskCollectionTestCase(base.TestCase): + + def setUp(self): + super(TaskCollectionTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'task_collection.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.task_col = task.TaskCollection( + self.conn, '/redfish/v1/TaskService/Tasks', + redfish_version='1.0.2') + + def 
test__parse_attributes(self): + self.task_col._parse_attributes(self.json_doc) + self.assertEqual('1.0.2', self.task_col.redfish_version) + self.assertEqual('Task Collection', self.task_col.name) + self.assertEqual(('/redfish/v1/TaskService/Tasks/545', + '/redfish/v1/TaskService/Tasks/546'), + self.task_col.members_identities) + + @mock.patch.object(task, 'Task', autospec=True) + def test_get_member(self, mock_task): + self.task_col.get_member( + '/redfish/v1/TaskService/Tasks/545') + mock_task.assert_called_once_with( + self.task_col._conn, + '/redfish/v1/TaskService/Tasks/545', + self.task_col.redfish_version, None) + + @mock.patch.object(task, 'Task', autospec=True) + def test_get_members(self, mock_task): + members = self.task_col.get_members() + calls = [ + mock.call(self.task_col._conn, + '/redfish/v1/TaskService/Tasks/545', + self.task_col.redfish_version, None), + mock.call(self.task_col._conn, + '/redfish/v1/TaskService/Tasks/546', + self.task_col.redfish_version, None), + ] + mock_task.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(2, len(members)) + + def _setUp_task_summary(self): + self.conn.get.return_value.json.reset_mock() + successive_return_values = [] + file_names = ['sushy/tests/unit/json_samples/task.json', + 'sushy/tests/unit/json_samples/task2.json'] + for file_name in file_names: + with open(file_name) as f: + successive_return_values.append(json.load(f)) + + self.conn.get.return_value.json.side_effect = successive_return_values + + def test_summary(self): + # | GIVEN | + self._setUp_task_summary() + # | WHEN | + actual_summary = self.task_col.summary + # | THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + actual_summary) + + # reset mock + self.conn.get.return_value.json.reset_mock() + + # | WHEN & THEN | + # tests for same object on invoking subsequently + self.assertIs(actual_summary, + self.task_col.summary) + self.conn.get.return_value.json.assert_not_called() + + def 
test_summary_on_refresh(self): + # | GIVEN | + self._setUp_task_summary() + # | WHEN & THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + self.task_col.summary) + + self.conn.get.return_value.json.side_effect = None + # On refreshing the task_col instance... + with open('sushy/tests/unit/json_samples/' + 'task_collection.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.task_col.invalidate() + self.task_col.refresh(force=False) + + # | GIVEN | + self._setUp_task_summary() + # | WHEN & THEN | + self.assertEqual({'545': 'completed', '546': 'pending'}, + self.task_col.summary) diff --git a/sushy/tests/unit/resources/taskservice/test_taskmonitor.py b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py new file mode 100644 index 0000000000000000000000000000000000000000..aa784d8f705d1b2a7d7a88968cb08f3306621df9 --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_taskmonitor.py @@ -0,0 +1,179 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from http import client as http_client +import json +from unittest import mock + +from sushy.resources import base as resource_base +from sushy.resources.taskservice import task +from sushy.resources.taskservice import taskmonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + self.task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=self.field_data + ) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_init_deprecation_warning(self, mock_log): + taskmonitor.TaskMonitor(self.conn, '/Task/545') + + mock_log.assert_called_once_with( + 'sushy.resources.taskservice.taskmonitor.TaskMonitor ' + 'is deprecated. 
Use sushy.taskmonitor.TaskMonitor.') + + def test_init_accepted_no_content(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 0, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + None) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data) + + self.assertEqual(http_client.ACCEPTED, + task_monitor._field_data._status_code) + self.assertEqual( + 0, task_monitor._field_data._headers['Content-Length']) + + def test_init_accepted_content(self): + self.assertIsNotNone(self.task_monitor._task) + + def test_init_no_field_data(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + task_monitor = taskmonitor.TaskMonitor(self.conn, '/Task/545') + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(task_monitor._task) + + def test_refresh_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 0} + self.conn.get.return_value.content = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNone(self.task_monitor._task) + + def test_refresh_content_no_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + self.task_monitor._task = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor._task) + + def test_refresh_content_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + self.task_monitor.refresh() + + 
self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor._task) + + def test_refresh_done(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 200 + + self.task_monitor.refresh() + + self.conn.get.assert_called_once_with(path='/Task/545') + self.assertIsNone(self.task_monitor._task) + + def test_task_monitor(self): + self.assertEqual('/Task/545', self.task_monitor.task_monitor) + + def test_is_processing(self): + self.assertTrue(self.task_monitor.is_processing) + + def test_retry_after(self): + self.assertEqual(20, self.task_monitor.retry_after) + + def test_cancellable(self): + self.assertTrue(self.task_monitor.cancellable) + + def test_not_cancellable_no_header(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data + ) + + self.assertFalse(task_monitor.cancellable) + + def test_not_cancellable(self): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'GET'}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data + ) + + self.assertFalse(task_monitor.cancellable) + + def test_task(self): + tm_task = self.task_monitor.task + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) diff --git a/sushy/tests/unit/resources/taskservice/test_taskservice.py b/sushy/tests/unit/resources/taskservice/test_taskservice.py new file mode 100644 index 0000000000000000000000000000000000000000..1c13d8ad501c82ab5acd3ce5b603e87386664a36 --- /dev/null +++ b/sushy/tests/unit/resources/taskservice/test_taskservice.py @@ -0,0 +1,54 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from unittest import mock + +from sushy.resources import constants as res_cons +from sushy.resources.taskservice import constants as ts_cons +from sushy.resources.taskservice import task +from sushy.resources.taskservice import taskservice +from sushy.tests.unit import base + + +class TaskServiceTestCase(base.TestCase): + + def setUp(self): + super(TaskServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/taskservice.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.tsk_serv = taskservice.TaskService( + self.conn, '/redfish/v1/TaskService/TaskService', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.tsk_serv._parse_attributes(self.json_doc) + self.assertEqual('TaskService', self.tsk_serv.identity) + self.assertTrue(self.tsk_serv.service_enabled) + self.assertTrue(self.tsk_serv.event_on_task_state_change) + self.assertEqual(res_cons.STATE_ENABLED, self.tsk_serv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.tsk_serv.status.health) + self.assertEqual(self.tsk_serv.overwrite_policy, + ts_cons.OVERWRITE_POLICY_MANUAL) + + @mock.patch.object(task, 'TaskCollection', autospec=True) + def test_tasks(self, task_collection_mock): + self.tsk_serv.tasks + task_collection_mock.assert_called_once_with( + self.conn, '/redfish/v1/TaskService/Tasks', + self.tsk_serv.redfish_version, + self.tsk_serv._registries) diff --git 
a/sushy/tests/unit/resources/test_base.py b/sushy/tests/unit/resources/test_base.py index 96a1db44e7ffe754835cc9746f7ee32231f53915..0ee4b936b0b587efd68391766a3bdf0f0f4241f5 100644 --- a/sushy/tests/unit/resources/test_base.py +++ b/sushy/tests/unit/resources/test_base.py @@ -14,51 +14,190 @@ # under the License. import copy - -import mock +from http import client as http_client +import io +import json +from unittest import mock +import zipfile from sushy import exceptions from sushy.resources import base as resource_base from sushy.tests.unit import base +BASE_RESOURCE_JSON = { + "@odata.type": "#FauxResource.v1_0_0.FauxResource", + "Id": "1111AAAA", + "Name": "Faux Resource", + "@odata.id": "/redfish/v1/FauxResource/1111AAAA", + "Oem": { + "Contoso": { + "@odata.type": "http://contoso.com/schemas/extensions.v1_2_1#contoso.AnvilTypes1", # noqa + "slogan": "Contoso never fail", + "disclaimer": "* Most of the time" + }, + "EID_412_ASB_123": { + "@odata.type": "http://AnotherStandardsBody/schemas.v1_0_1#styleInfoExt", # noqa + "Style": "Executive" + } + }, + "Links": { + "Oem": { + "Contoso": { + "@odata.type": "http://contoso.com/schemas/extensions.v1_2_1#contoso.AnvilTypes1", # noqa + "slogan": "Contoso never fail", + "disclaimer": "* Most of the time" + }, + "EID_420_ASB_345": { + "@odata.type": "http://AnotherStandardsBody/schemas.v1_0_1#styleInfoExt", # noqa + "Style": "Executive" + } + } + } + +} + + class BaseResource(resource_base.ResourceBase): - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass +class BaseResource2(resource_base.ResourceBase): + pass + + class ResourceBaseTestCase(base.TestCase): def setUp(self): super(ResourceBaseTestCase, self).setUp() self.conn = mock.Mock() + self.conn.get.return_value.json.return_value = ( + copy.deepcopy(BASE_RESOURCE_JSON)) self.base_resource = BaseResource(connector=self.conn, path='/Foo', redfish_version='1.0.2') + self.assertFalse(self.base_resource._is_stale) + self.base_resource2 = 
BaseResource2(connector=self.conn, path='/Foo', + redfish_version='1.0.2') # refresh() is called in the constructor self.conn.reset_mock() - def test_refresh(self): + def test_refresh_no_force(self): + self.base_resource.refresh(force=False) + self.conn.get.assert_not_called() + + def test_refresh_force(self): self.base_resource.refresh() self.conn.get.assert_called_once_with(path='/Foo') + def test_invalidate(self): + self.base_resource.invalidate() + self.conn.get.assert_not_called() + + self.base_resource.refresh(force=False) + self.conn.get.assert_called_once_with(path='/Foo') + + def test_invalidate_force_refresh(self): + self.base_resource.invalidate(force_refresh=True) + self.conn.get.assert_called_once_with(path='/Foo') + + def test_refresh_archive(self): + mock_response = mock.Mock( + headers={'content-type': 'application/zip'}) + with open('sushy/tests/unit/json_samples/TestRegistry.zip', 'rb') as f: + mock_response.content = f.read() + self.conn.get.return_value = mock_response + + resource = BaseResource(connector=self.conn, + path='/Foo', + redfish_version='1.0.2', + reader=resource_base. 
+ JsonArchiveReader('Test.2.0.json')) + + self.assertIsNotNone(resource._json) + self.assertEqual('Test.2.0.0', resource._json['Id']) + + @mock.patch.object(resource_base, 'LOG', autospec=True) + def test_refresh_archive_not_implemented(self, mock_log): + mock_response = mock.Mock( + headers={'content-type': 'application/gzip'}) + self.conn.get.return_value = mock_response + BaseResource(connector=self.conn, + path='/Foo', + redfish_version='1.0.2', + reader=resource_base.JsonArchiveReader('Test.2.0.json')) + mock_log.error.assert_called_once() + + @mock.patch.object(io, 'BytesIO', autospec=True) + def test_refresh_archive_badzip_error(self, mock_io): + mock_response = mock.Mock( + headers={'content-type': 'application/zip'}) + mock_io.side_effect = zipfile.BadZipfile('Something wrong') + self.conn.get.return_value = mock_response + + self.assertRaises(exceptions.SushyError, + BaseResource, connector=self.conn, + path='/Foo', + redfish_version='1.0.2', + reader=resource_base. + JsonArchiveReader('Test.2.0.json')) + + def test_init_default_reader(self): + resource_a = BaseResource(connector=self.conn) + resource_b = BaseResource(connector=self.conn) + + self.assertIsInstance(resource_a._reader, resource_base.JsonDataReader) + self.assertIsInstance(resource_b._reader, resource_base.JsonDataReader) + + self.assertIsNot(resource_a._reader, resource_b._reader) + + def test__parse_attributes(self): + expected_oem_vendors = ['Contoso', 'EID_412_ASB_123', + 'EID_420_ASB_345'] + actual_oem_vendors = sorted(self.base_resource2.oem_vendors) + self.assertEqual(expected_oem_vendors, actual_oem_vendors) + self.assertEqual('base_resource2', self.base_resource2.resource_name) + + def test_refresh_local(self): + resource = BaseResource(None, 'json_samples/message_registry.json', + reader=resource_base. 
+ JsonPackagedFileReader('sushy.tests.unit')) + self.assertIsNotNone(resource._json) + self.assertEqual('Test.1.1.1', resource._json['Id']) + + def test_refresh_public(self): + mock_connector = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + mock_connector.get.return_value.json.return_value = json.load(f) + resource = BaseResource(mock_connector, 'https://example.com/' + 'message_registry.json', + reader=resource_base.JsonPublicFileReader()) + mock_connector.get.assert_called_once_with('https://example.com/' + 'message_registry.json') + self.assertIsNotNone(resource._json) + self.assertEqual('Test.1.1.1', resource._json['Id']) + class TestResource(resource_base.ResourceBase): """A concrete Test Resource to test against""" - def __init__(self, connector, identity, redfish_version=None): - """Ctor of TestResouce + def __init__(self, connector, identity, redfish_version=None, + registries=None): + """Ctor of TestResource :param connector: A Connector instance :param identity: The id of the Resource :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. + :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. """ super(TestResource, self).__init__(connector, 'Fakes/%s' % identity, - redfish_version) + redfish_version, registries) self.identity = identity - def _parse_attributes(self): + def _parse_attributes(self, json_doc): pass @@ -69,15 +208,17 @@ class TestResourceCollection(resource_base.ResourceCollectionBase): def _resource_type(self): return TestResource - def __init__(self, connector, redfish_version=None): + def __init__(self, connector, redfish_version=None, registries=None): """Ctor of TestResourceCollection :param connector: A Connector instance :param redfish_version: The version of RedFish. Used to construct the object according to schema of the given version. 
+ :param registries: Dict of Redfish Message Registry objects to be + used in any resource that needs registries to parse messages. """ - super(TestResourceCollection, self).__init__(connector, 'Fakes', - redfish_version) + super(TestResourceCollection, self).__init__( + connector, 'Fakes', redfish_version, registries) class ResourceCollectionBaseTestCase(base.TestCase): @@ -86,7 +227,7 @@ class ResourceCollectionBaseTestCase(base.TestCase): super(ResourceCollectionBaseTestCase, self).setUp() self.conn = mock.MagicMock() self.test_resource_collection = TestResourceCollection( - self.conn, redfish_version='1.0.x') + self.conn, redfish_version='1.0.x', registries=None) self.conn.reset_mock() def test_get_member(self): @@ -106,16 +247,15 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.test_resource_collection.members_identities = ('1',) self.conn.get.side_effect = exceptions.ResourceNotFoundError( method='GET', url='http://foo.bar:8000/redfish/v1/Fakes/2', - response=mock.Mock(status_code=404)) + response=mock.MagicMock(status_code=http_client.NOT_FOUND)) # | WHEN & THEN | self.assertRaises(exceptions.ResourceNotFoundError, self.test_resource_collection.get_member, '2') self.conn.get.assert_called_once_with(path='Fakes/2') - def test_get_members(self): + def _validate_get_members_result(self, member_ids): # | GIVEN | # setting some valid member paths - member_ids = ('1', '2') self.test_resource_collection.members_identities = member_ids # | WHEN | result = self.test_resource_collection.get_members() @@ -125,12 +265,51 @@ class ResourceCollectionBaseTestCase(base.TestCase): self.assertIsInstance(val, TestResource) self.assertTrue(val.identity in member_ids) self.assertEqual('1.0.x', val.redfish_version) + self.assertFalse(val._is_stale) + + return result + + def test_get_members(self): + self._validate_get_members_result(('1', '2')) + + def test_get_members_on_refresh(self): + all_members = self._validate_get_members_result(('1', '2')) + + # Call resource 
invalidate + self.test_resource_collection.invalidate() + self.assertTrue(self.test_resource_collection._is_stale) + # Now invoke refresh action on resource. This can be viewed as + # "light refresh" which involves only the resource's fresh retrieval + # and not its nested resources (these are only marked as stale). + self.test_resource_collection.refresh(force=False) + # resource itself is fresh + self.assertFalse(self.test_resource_collection._is_stale) + # members are marked as stale + for m in all_members: + self.assertTrue(m._is_stale) + + self._validate_get_members_result(('1', '2')) + # members are also now freshly retrieved + for m in all_members: + self.assertFalse(m._is_stale) + + # Again invalidate and do a forced refresh on resource + self.test_resource_collection.invalidate(force_refresh=True) + # Now, even the members are also freshly retrieved. This can be viewed + # as "cascading refresh" which involves not only the resource's fresh + # retrieval but also its nested resources. 
+ for m in all_members: + self.assertFalse(m._is_stale) + + def test_get_members_caching(self): + result = self._validate_get_members_result(('1', '2')) + self.assertIs(result, self.test_resource_collection.get_members()) TEST_JSON = { 'String': 'a string', 'Integer': '42', - 'List': ['a string', 42], + 'MappedList': ['raw1', 'raw2', 'raw'], 'Nested': { 'String': 'another string', 'Integer': 0, @@ -138,12 +317,28 @@ TEST_JSON = { 'Field': 'field value' }, 'Mapped': 'raw' + }, + 'ListField': [ + { + 'String': 'a third string', + 'Integer': 1 + }, + { + 'String': 'a fourth string', + 'Integer': 2 + } + ], + 'Dictionary': { + 'key1': {'property_a': 'value1', 'property_b': 'value2'}, + 'key2': {'property_a': 'value3', 'property_b': 'value4'} } } MAPPING = { - 'raw': 'real' + 'raw': 'real', + 'raw1': 'real1', + 'raw2': 'real2' } @@ -155,10 +350,23 @@ class NestedTestField(resource_base.CompositeField): non_existing = resource_base.Field('NonExisting', default=3.14) +class TestListField(resource_base.ListField): + string = resource_base.Field('String', required=True) + integer = resource_base.Field('Integer', adapter=int) + + +class TestDictionaryField(resource_base.DictionaryField): + property_a = resource_base.Field('property_a') + property_b = resource_base.Field('property_b') + + class ComplexResource(resource_base.ResourceBase): string = resource_base.Field('String', required=True) integer = resource_base.Field('Integer', adapter=int) nested = NestedTestField('Nested') + mapped_list = resource_base.MappedListField('MappedList', MAPPING) + field_list = TestListField('ListField') + dictionary = TestDictionaryField('Dictionary') non_existing_nested = NestedTestField('NonExistingNested') non_existing_mapped = resource_base.MappedField('NonExistingMapped', MAPPING) @@ -181,42 +389,54 @@ class FieldTestCase(base.TestCase): self.assertEqual('field value', self.test_resource.nested.nested_field) self.assertEqual('real', self.test_resource.nested.mapped) 
self.assertEqual(3.14, self.test_resource.nested.non_existing) + self.assertEqual(['real1', 'real2', 'real'], + self.test_resource.mapped_list) + self.assertEqual('a third string', + self.test_resource.field_list[0].string) + self.assertEqual(2, self.test_resource.field_list[1].integer) + self.assertEqual(2, len(self.test_resource.dictionary)) + self.assertEqual('value1', + self.test_resource.dictionary['key1'].property_a) + self.assertEqual('value4', + self.test_resource.dictionary['key2'].property_b) self.assertIsNone(self.test_resource.non_existing_nested) self.assertIsNone(self.test_resource.non_existing_mapped) def test_missing_required(self): del self.json['String'] - self.assertRaisesRegex(exceptions.MissingAttributeError, - 'String', self.test_resource.refresh) + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'String', self.test_resource.refresh, force=True) def test_missing_nested_required(self): del self.json['Nested']['String'] - self.assertRaisesRegex(exceptions.MissingAttributeError, - 'Nested/String', self.test_resource.refresh) + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Nested/String', self.test_resource.refresh, force=True) def test_missing_nested_required2(self): del self.json['Nested']['Object']['Field'] self.assertRaisesRegex(exceptions.MissingAttributeError, 'Nested/Object/Field', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_malformed_int(self): self.json['Integer'] = 'banana' self.assertRaisesRegex( exceptions.MalformedAttributeError, 'attribute Integer is malformed.*invalid literal for int', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_malformed_nested_int(self): self.json['Nested']['Integer'] = 'banana' self.assertRaisesRegex( exceptions.MalformedAttributeError, 'attribute Nested/Integer is malformed.*invalid literal for int', - self.test_resource.refresh) + self.test_resource.refresh, force=True) def test_mapping_missing(self): 
self.json['Nested']['Mapped'] = 'banana' - self.test_resource.refresh() + self.test_resource.refresh(force=True) self.assertIsNone(self.test_resource.nested.mapped) @@ -236,3 +456,24 @@ class FieldTestCase(base.TestCase): # Regular attributes cannot be accessed via mapping self.assertRaisesRegex(KeyError, '_load', lambda: field['_load']) self.assertRaisesRegex(KeyError, '__init__', lambda: field['__init__']) + + +class PartialKeyResource(resource_base.ResourceBase): + string = resource_base.Field( + lambda key, **context: key.startswith('Str')) + integer = resource_base.Field( + lambda key, value, **context: key == 'Integer' and int(value) < 42) + + +class FieldPartialKeyTestCase(base.TestCase): + def setUp(self): + super(FieldPartialKeyTestCase, self).setUp() + self.conn = mock.Mock() + self.json = copy.deepcopy(TEST_JSON) + self.conn.get.return_value.json.return_value = self.json + self.test_resource = PartialKeyResource( + self.conn, redfish_version='1.0.x') + + def test_ok(self): + self.assertEqual('a string', self.test_resource.string) + self.assertIsNone(self.test_resource.integer) diff --git a/sushy/tests/unit/resources/test_settings.py b/sushy/tests/unit/resources/test_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..90474df9b1ff70cdb76b3353abbe8004e0a3331b --- /dev/null +++ b/sushy/tests/unit/resources/test_settings.py @@ -0,0 +1,111 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import json +from unittest import mock + +from sushy.resources import constants as res_cons +from sushy.resources.registry import message_registry +from sushy.resources import settings +from sushy.tests.unit import base + + +class SettingsFieldTestCase(base.TestCase): + + def setUp(self): + super(SettingsFieldTestCase, self).setUp() + with open('sushy/tests/unit/json_samples/settings.json') as f: + self.json = json.load(f) + + self.settings = settings.SettingsField() + + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/message_registry.json') as f: + conn.get.return_value.json.return_value = json.load(f) + registry = message_registry.MessageRegistry( + conn, '/redfish/v1/Registries/Test', + redfish_version='1.0.2') + self.registries = {'Test.1.0': registry} + + @mock.patch.object(settings, 'LOG', autospec=True) + def test__load(self, mock_LOG): + instance = self.settings._load(self.json, mock.Mock()) + + self.assertEqual('9234ac83b9700123cc32', + instance._etag) + self.assertEqual('2016-03-07T14:44:30-05:00', + instance.time) + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + instance._settings_object_idref.resource_uri) + self.assertEqual('Test.1.0.Failed', + instance.messages[0].message_id) + self.assertEqual('Settings %1 update failed due to invalid value', + instance.messages[0].message) + self.assertEqual(res_cons.SEVERITY_CRITICAL, + instance.messages[0].severity) + self.assertEqual('Fix the value and try again', + instance.messages[0].resolution) + self.assertEqual('arg1', + instance.messages[0].message_args[0]) + self.assertEqual('#/Attributes/ProcTurboMode', + instance.messages[0]._related_properties[0]) + self.assertEqual('/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + instance._settings_object_idref.resource_uri) + self.assertEqual([res_cons.APPLY_TIME_ON_RESET, + res_cons.APPLY_TIME_MAINT_RESET], + 
instance._supported_apply_times) + self.assertIsNone(instance.maintenance_window) + mock_LOG.warning.assert_called_once() + mock_LOG.reset_mock() + self.assertIsNone(instance.operation_apply_time_support) + mock_LOG.warning.assert_called_once() + + def test_commit(self): + conn = mock.Mock() + instance = self.settings._load(self.json, conn) + instance.commit(conn, {'Attributes': {'key': 'value'}}) + conn.patch.assert_called_once_with( + '/redfish/v1/Systems/437XR1138R2/BIOS/Settings', + data={'Attributes': {'key': 'value'}}) + + def test_get_status_failure(self): + instance = self.settings._load(self.json, mock.Mock()) + + status = instance.get_status(self.registries) + self.assertEqual(status.status, + settings.UPDATE_FAILURE) + self.assertEqual(status.messages[0].severity, + res_cons.SEVERITY_CRITICAL) + self.assertEqual(status.messages[0].message, + 'The property arg1 broke everything.') + + def test_get_status_success(self): + instance = self.settings._load(self.json, mock.Mock()) + instance.messages[0].message_id = 'Test.1.0.Success' + instance.messages[0].severity = res_cons.SEVERITY_OK + status = instance.get_status(self.registries) + self.assertEqual(status.status, + settings.UPDATE_SUCCESS) + self.assertEqual(status.messages[0].severity, res_cons.SEVERITY_OK) + self.assertEqual(status.messages[0].message, + 'Everything done successfully.') + + def test_get_status_noupdates(self): + instance = self.settings._load(self.json, mock.Mock()) + instance.time = None + status = instance.get_status(self.registries) + self.assertEqual(status.status, + settings.NO_UPDATES) + self.assertIsNone(status.messages) diff --git a/sushy/tests/unit/resources/test_task_monitor.py b/sushy/tests/unit/resources/test_task_monitor.py new file mode 100644 index 0000000000000000000000000000000000000000..ae81ec454d472691a83ea1a3fa90a61019e5689f --- /dev/null +++ b/sushy/tests/unit/resources/test_task_monitor.py @@ -0,0 +1,138 @@ +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from datetime import timedelta +from unittest import mock + +from dateutil import parser + +from sushy.resources import task_monitor +from sushy.resources.task_monitor import TaskMonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + self.data = {'fake': 'data'} + self.http_date = 'Fri, 31 Dec 1999 23:59:59 GMT' + self.seconds = 120 + self.datetime = parser.parse(self.http_date) + self.req_headers = {'X-Fake': 'header'} + self.res_headers1 = {'location': 'https://sample.com/foo/bar', + 'retry-after': self.http_date} + self.res_headers2 = {'location': 'https://sample.com/foo/bar', + 'retry-after': str(self.seconds)} + + @mock.patch.object(task_monitor.LOG, 'warning', autospec=True) + def test_init_deprecation_warning(self, mock_log): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + mock_log.assert_called_once_with( + 
'sushy.resources.task_monitor.TaskMonitor is deprecated. ' + 'Use sushy.taskmonitor.TaskMonitor') + + def test_task_in_progress(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertTrue(tm.in_progress) + + def test_task_not_in_progress(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 201 + self.conn.get.return_value.json.return_value = self.data.copy() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location'))\ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertFalse(tm.in_progress) + + def test_retry_after_http_date(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.json.return_value = {} + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertEqual(self.datetime, tm.retry_after) + + def test_retry_after_seconds(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers2.copy() + 
self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.json.return_value = {} + start = datetime.now() + timedelta(seconds=self.seconds) + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + end = datetime.now() + timedelta(seconds=self.seconds) + self.assertIsNotNone(tm) + self.assertTrue(start <= tm.retry_after <= end) + + def test_sleep_for(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = self.res_headers2.copy() + self.conn.get.return_value.json.return_value = {} + start = datetime.now() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + sleep_for = tm.sleep_for + elapsed = (datetime.now() - start).total_seconds() + self.assertTrue(self.seconds - elapsed <= sleep_for <= self.seconds) + + def test_response(self): + self.conn.post.return_value.status_code = 202 + self.conn.post.return_value.headers = self.res_headers1.copy() + self.conn.get.return_value.status_code = 201 + self.conn.get.return_value.json.return_value = self.data.copy() + res = self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.req_headers.copy()) + tm = TaskMonitor(self.conn, res.headers.get('location')) \ + .set_retry_after(res.headers.get('retry-after')) + self.assertIsNotNone(tm) + self.assertFalse(tm.in_progress) + response = tm.response + self.assertEqual(201, response.status_code) + self.assertEqual(self.data.copy(), response.json()) diff --git 
a/sushy/tests/unit/resources/updateservice/__init__.py b/sushy/tests/unit/resources/updateservice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sushy/tests/unit/resources/updateservice/test_softwareinventory.py b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py new file mode 100644 index 0000000000000000000000000000000000000000..b365fb4a26213b295dfc8d962f4c4562facb18b3 --- /dev/null +++ b/sushy/tests/unit/resources/updateservice/test_softwareinventory.py @@ -0,0 +1,130 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.updateservice import softwareinventory +from sushy.tests.unit import base + + +class SoftwareInventoryTestCase(base.TestCase): + + def setUp(self): + super(SoftwareInventoryTestCase, self).setUp() + conn = mock.Mock() + with open( + 'sushy/tests/unit/json_samples/softwareinventory.json') as f: + self.json_doc = json.load(f) + + conn.get.return_value.json.return_value = self.json_doc + + self.soft_inv = softwareinventory.SoftwareInventory( + conn, + '/redfish/v1/UpdateService/SoftwareInventory/1', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.soft_inv._parse_attributes(self.json_doc) + self.assertEqual('BMC', self.soft_inv.identity) + self.assertEqual( + '1.30.367a12-rev1', + self.soft_inv.lowest_supported_version) + self.assertEqual('Contoso', self.soft_inv.manufacturer) + self.assertEqual('Contoso BMC Firmware', self.soft_inv.name) + self.assertEqual('2017-08-22T12:00:00', self.soft_inv.release_date) + self.assertEqual( + res_cons.STATE_ENABLED, + self.soft_inv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.soft_inv.status.health) + self.assertEqual( + '1624A9DF-5E13-47FC-874A-DF3AFF143089', + self.soft_inv.software_id) + self.assertTrue(self.soft_inv.updateable) + self.assertEqual('1.45.455b66-rev4', self.soft_inv.version) + + def test__parse_attributes_return(self): + attributes = self.soft_inv._parse_attributes(self.json_doc) + + # Test that various types are returned correctly + self.assertEqual('BMC', attributes.get('identity')) + self.assertEqual({'health': res_cons.HEALTH_OK, + 'health_rollup': None, + 'state': res_cons.STATE_ENABLED}, + attributes.get('status')) + self.assertEqual(True, attributes.get('updateable')) + + def test__parse_attributes_missing_identity(self): + self.soft_inv.json.pop('Id') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute 
Id', + self.soft_inv._parse_attributes, self.json_doc) + + +class SoftwareInventoryCollectionTestCase(base.TestCase): + + def setUp(self): + super(SoftwareInventoryCollectionTestCase, self).setUp() + conn = mock.Mock() + with open('sushy/tests/unit/json_samples/' + 'firmwareinventory_collection.json') as f: + self.json_doc = json.load(f) + + conn.get.return_value.json.return_value = self.json_doc + + self.soft_inv_col = softwareinventory.SoftwareInventoryCollection( + conn, '/redfish/v1/UpdateService/FirmwareInventory', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.soft_inv_col._parse_attributes(self.json_doc) + self.assertEqual('1.3.0', self.soft_inv_col.redfish_version) + self.assertEqual( + 'Firmware Inventory Collection', + self.soft_inv_col.name) + + @mock.patch.object( + softwareinventory, 'SoftwareInventory', autospec=True) + def test_get_member(self, mock_softwareinventory): + path = ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Current-102303-19.0.12') + self.soft_inv_col.get_member(path) + mock_softwareinventory.assert_called_once_with( + self.soft_inv_col._conn, path, + self.soft_inv_col.redfish_version, None) + + @mock.patch.object( + softwareinventory, 'SoftwareInventory', autospec=True) + def test_get_members(self, mock_softwareinventory): + members = self.soft_inv_col.get_members() + calls = [ + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Current-101560-25.5.6.0009'), + self.soft_inv_col.redfish_version, None), + + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Installed-101560-25.5.6.0009'), + self.soft_inv_col.redfish_version, None), + + mock.call(self.soft_inv_col._conn, + ('/redfish/v1/UpdateService/FirmwareInventory/' + 'Previous-102302-18.8.9'), + self.soft_inv_col.redfish_version, None) + ] + mock_softwareinventory.assert_has_calls(calls) + self.assertIsInstance(members, list) + self.assertEqual(3, len(members)) diff --git 
a/sushy/tests/unit/resources/updateservice/test_updateservice.py b/sushy/tests/unit/resources/updateservice/test_updateservice.py new file mode 100644 index 0000000000000000000000000000000000000000..9d76522b872eda7fea2d9e3d6b998855983b4166 --- /dev/null +++ b/sushy/tests/unit/resources/updateservice/test_updateservice.py @@ -0,0 +1,217 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from unittest import mock + +from sushy import exceptions +from sushy.resources import constants as res_cons +from sushy.resources.updateservice import constants as ups_cons +from sushy.resources.updateservice import softwareinventory +from sushy.resources.updateservice import updateservice +from sushy import taskmonitor +from sushy.tests.unit import base + + +class UpdateServiceTestCase(base.TestCase): + + def setUp(self): + super(UpdateServiceTestCase, self).setUp() + self.conn = mock.Mock() + with open('sushy/tests/unit/json_samples/updateservice.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.upd_serv = updateservice.UpdateService( + self.conn, '/redfish/v1/UpdateService/UpdateService', + redfish_version='1.3.0') + + def test__parse_attributes(self): + self.upd_serv._parse_attributes(self.json_doc) + self.assertEqual('UpdateService', self.upd_serv.identity) + self.assertEqual('/FWUpdate', self.upd_serv.http_push_uri) + self.assertIn('/FWUpdate', self.upd_serv.http_push_uri_targets) + 
self.assertFalse(self.upd_serv.http_push_uri_targets_busy) + self.assertEqual('Update service', self.upd_serv.name) + self.assertTrue(self.upd_serv.service_enabled) + self.assertEqual(res_cons.STATE_ENABLED, self.upd_serv.status.state) + self.assertEqual(res_cons.HEALTH_OK, self.upd_serv.status.health) + self.assertEqual( + res_cons.HEALTH_OK, + self.upd_serv.status.health_rollup) + + def test__parse_attributes_missing_actions(self): + self.upd_serv.json.pop('Actions') + self.assertRaisesRegex( + exceptions.MissingAttributeError, 'attribute Actions', + self.upd_serv._parse_attributes, self.json_doc) + + def test_simple_update(self): + with open('sushy/tests/unit/json_samples/task_monitor.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted + + tm = self.upd_serv.simple_update( + image_uri='local.server/update.exe', + targets=['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor) + + self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_task_uri(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Taskmonitor/545'} + self.conn.post.return_value = task_submitted + + tm = self.upd_serv.simple_update( + image_uri='local.server/update.exe', + 
targets=['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + transfer_protocol=ups_cons.UPDATE_PROTOCOL_HTTPS) + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/redfish/v1/TaskService/Tasks/545', tm.task_monitor) + + self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': ['/redfish/v1/UpdateService/FirmwareInventory/BMC'], + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_missing_location(self): + with open('sushy/tests/unit/json_samples/task_monitor.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Allow': 'GET'} + self.conn.post.return_value = task_submitted + + self.assertRaises( + exceptions.MissingHeaderError, + self.upd_serv.simple_update, + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='HTTPS') + + def test_simple_update_backward_compatible_protocol(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted + + self.upd_serv.simple_update( + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='HTTPS') + self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'Targets': '/redfish/v1/UpdateService/Actions/SimpleUpdate', + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_without_target(self): + with open('sushy/tests/unit/json_samples/task.json') as f: + task_json = json.load(f) + 
task_submitted = mock.Mock() + task_submitted.json.return_value = task_json + task_submitted.status_code = 202 + task_submitted.headers = {'Content-Length': 42, + 'Location': '/Task/545'} + self.conn.post.return_value = task_submitted + self.upd_serv.simple_update( + image_uri='local.server/update.exe', + transfer_protocol='HTTPS') + self.upd_serv._conn.post.assert_called_once_with( + '/redfish/v1/UpdateService/Actions/SimpleUpdate', + data={ + 'ImageURI': 'local.server/update.exe', + 'TransferProtocol': 'HTTPS'}) + + def test_simple_update_bad_protocol(self): + self.assertRaises( + exceptions.InvalidParameterValueError, + self.upd_serv.simple_update, + image_uri='local.server/update.exe', + targets='/redfish/v1/UpdateService/Actions/SimpleUpdate', + transfer_protocol='ROYAL') + + @mock.patch.object(softwareinventory, 'SoftwareInventoryCollection', + autospec=True) + def test_software_inventory(self, software_inventory_collection_mock): + self.upd_serv.software_inventory + software_inventory_collection_mock.assert_called_once_with( + self.conn, '/redfish/v1/UpdateService/SoftwareInventory', + self.upd_serv.redfish_version, + self.upd_serv._registries) + + @mock.patch.object(softwareinventory, 'SoftwareInventoryCollection', + autospec=True) + def test_firmware_inventory(self, software_inventory_collection_mock): + self.upd_serv.firmware_inventory + software_inventory_collection_mock.assert_called_once_with( + self.conn, '/redfish/v1/UpdateService/FirmwareInventory', + self.upd_serv.redfish_version, + self.upd_serv._registries) + + +class UpdateServiceNoInvTestCase(base.TestCase): + + def setUp(self): + super(UpdateServiceNoInvTestCase, self).setUp() + self.conn = mock.Mock() + no_inv_json = 'sushy/tests/unit/json_samples/updateservice_no_inv.json' + with open(no_inv_json) as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.upd_serv = updateservice.UpdateService( + self.conn, 
'/redfish/v1/UpdateService/UpdateService', + redfish_version='1.3.0') + + def test_software_inventory_when_sw_inv_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'SoftwareInventory/@odata.id', + getattr, self.upd_serv, 'software_inventory') + + def test_firmware_inventory_when_fw_inv_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'FirmwareInventory/@odata.id', + getattr, self.upd_serv, 'firmware_inventory') diff --git a/sushy/tests/unit/test_auth.py b/sushy/tests/unit/test_auth.py new file mode 100644 index 0000000000000000000000000000000000000000..e477a72e76e2098bd86f1e2755d5518c1a5876c2 --- /dev/null +++ b/sushy/tests/unit/test_auth.py @@ -0,0 +1,373 @@ +# Copyright 2017 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from unittest import mock + +import requests + +from sushy import auth +from sushy import connector +from sushy import exceptions +from sushy import main +from sushy.tests.unit import base + + +class BasicAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', autospec=True) + def setUp(self, mock_connector, mock_root): + super(BasicAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.base_auth = auth.BasicAuth(self.username, + self.password) + self.conn = mock_connector.return_value + self.root = mock_root.return_value + + def test_init(self): + self.assertEqual(self.username, + self.base_auth._username) + self.assertEqual(self.password, + self.base_auth._password) + self.assertIsNone(self.base_auth._root_resource) + self.assertIsNone(self.base_auth._connector) + + def test_set_context(self): + self.base_auth.set_context(self.root, self.conn) + self.assertEqual(self.base_auth._root_resource, + self.root) + self.assertEqual(self.base_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.base_auth.authenticate) + + def test__do_authenticate(self): + self.base_auth.set_context(self.root, self.conn) + self.base_auth.authenticate() + self.conn.set_http_basic_auth.assert_called_once_with(self.username, + self.password) + + def test_can_refresh_session(self): + self.assertFalse(self.base_auth.can_refresh_session()) + + @mock.patch.object(auth.BasicAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.BasicAuth(self.username, self.password) as base_auth: + self.assertEqual(self.username, base_auth._username) + self.assertEqual(self.password, base_auth._password) + auth_close.assert_called_once_with(base_auth) + + +class SessionAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', 
autospec=True) + def setUp(self, mock_connector, mock_root): + super(SessionAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.sess_key = 'TestingKey' + self.sess_uri = ('https://testing:8000/redfish/v1/' + 'SessionService/Sessions/testing') + self.sess_auth = auth.SessionAuth(self.username, + self.password) + self.conn = mock_connector.return_value + self.conn._session = mock.Mock(spec=requests.Session) + self.conn._session.headers = {} + self.conn._session.auth = None + self.root = mock_root.return_value + + def test_init(self): + self.assertEqual(self.username, + self.sess_auth._username) + self.assertEqual(self.password, + self.sess_auth._password) + self.assertIsNone(self.sess_auth._root_resource) + self.assertIsNone(self.sess_auth._connector) + self.assertIsNone(self.sess_auth._session_key) + self.assertIsNone(self.sess_auth._session_resource_id) + + def test_get_session_key(self): + self.sess_auth._session_key = self.sess_key + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + + def test_get_session_resource_id(self): + self.sess_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + + def test_reset_session_attrs(self): + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.conn._session.headers = {'X-Auth-Token': 'meow'} + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.sess_auth.reset_session_attrs() + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + self.assertNotIn('X-Auth-Token', self.conn._session.headers) + + def test_set_context(self): + self.sess_auth.set_context(self.root, self.conn) + self.assertEqual(self.sess_auth._root_resource, + 
self.root) + self.assertEqual(self.sess_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.sess_auth.authenticate) + + def test__do_authenticate(self): + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.authenticate() + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_can_refresh_session(self): + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.authenticate() + + self.assertTrue(self.sess_auth.can_refresh_session()) + + def test_refresh(self): + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self._session = mock.Mock(spec=requests.Session) + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.refresh_session() + self.assertEqual(self.sess_uri, + self.sess_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_close_do_nothing(self): + self.sess_auth._session_key = None + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + 
self.conn.delete.assert_not_called() + + def test_close(self): + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + self.conn.delete.assert_called_once_with(self.sess_uri) + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + + @mock.patch.object(auth, 'LOG', autospec=True) + def test_close_fail(self, mock_LOG): + self.sess_auth._session_key = self.sess_key + self.sess_auth._session_resource_id = self.sess_uri + self.conn.delete.side_effect = ( + exceptions.ServerSideError( + 'DELETE', 'any_url', mock.MagicMock())) + + self.sess_auth.set_context(self.root, self.conn) + self.sess_auth.close() + + self.assertTrue(mock_LOG.warning.called) + self.assertIsNone(self.sess_auth.get_session_resource_id()) + self.assertIsNone(self.sess_auth.get_session_key()) + + @mock.patch.object(auth.SessionAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.SessionAuth(self.username, self.password) as session_auth: + self.assertEqual(self.username, session_auth._username) + self.assertEqual(self.password, session_auth._password) + auth_close.assert_called_once_with(session_auth) + + +class SessionOrBasicAuthTestCase(base.TestCase): + + @mock.patch.object(main, 'Sushy', autospec=True) + @mock.patch.object(connector, 'Connector', autospec=True) + def setUp(self, mock_connector, mock_root): + super(SessionOrBasicAuthTestCase, self).setUp() + self.username = 'TestUsername' + self.password = 'TestP@$$W0RD' + self.sess_key = 'TestingKey' + self.sess_uri = ('https://testing:8000/redfish/v1/' + 'SessionService/Sessions/testing') + self.conn = mock_connector.return_value + self.conn._session = mock.Mock(spec=requests.Session) + self.conn._session.headers = {} + self.conn._session.auth = None + self.root = mock_root.return_value + + self.sess_basic_auth = 
auth.SessionOrBasicAuth(self.username, + self.password) + + def test_init(self): + self.assertEqual(self.username, + self.sess_basic_auth._username) + self.assertEqual(self.password, + self.sess_basic_auth._password) + self.assertIsNone(self.sess_basic_auth._root_resource) + self.assertIsNone(self.sess_basic_auth._connector) + self.assertIsNone(self.sess_basic_auth._session_key) + self.assertIsNone(self.sess_basic_auth._session_resource_id) + + def test_get_session_key(self): + self.sess_basic_auth._session_key = self.sess_key + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + + def test_get_session_resource_id(self): + self.sess_basic_auth._session_resource_id = self.sess_uri + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + + def test_reset_session_attrs(self): + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth._session_key = self.sess_key + self.sess_basic_auth._session_resource_id = self.sess_uri + self.conn._session.auth = 'meow' + self.conn._session.headers = {'X-Auth-Token': 'meow'} + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.sess_basic_auth.reset_session_attrs() + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.assertNotIn('X-Auth-Token', self.conn._session.headers) + self.assertIsNone(self.conn._session.auth) + + def test_set_context(self): + self.sess_basic_auth.set_context(self.root, self.conn) + self.assertEqual(self.sess_basic_auth._root_resource, + self.root) + self.assertEqual(self.sess_basic_auth._connector, + self.conn) + + def test__do_authenticate_no_context(self): + self.assertRaises(RuntimeError, + self.sess_basic_auth.authenticate) + + def test__do_authenticate(self): + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + 
self.assertIsNone(self.sess_basic_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test__do_authenticate_for_basic_auth(self): + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.side_effect = exceptions.SushyError + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.conn.set_http_basic_auth.assert_called_once_with( + self.username, self.password) + + def test_can_refresh_session(self): + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.authenticate() + + self.assertTrue(self.sess_basic_auth.can_refresh_session()) + + def test_refresh_no_previous_session(self): + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.refresh_session() + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + 
self.conn.set_http_session_auth.assert_not_called() + self.conn.set_http_basic_auth.assert_not_called() + + def test_refresh_previous_session_exists(self): + self.sess_basic_auth._session_key = 'ThisisFirstKey' + test_url = ('https://testing:8000/redfish/v1/SessionService' + '/Sessions/testingfirst') + self.sess_basic_auth._session_resource_id = test_url + mock_sess_serv = mock.Mock() + mock_sess_serv.create_session.return_value = (self.sess_key, + self.sess_uri) + self.root.get_session_service.return_value = mock_sess_serv + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.refresh_session() + self.assertEqual(self.sess_uri, + self.sess_basic_auth.get_session_resource_id()) + self.assertEqual(self.sess_key, + self.sess_basic_auth.get_session_key()) + self.conn.set_http_session_auth.assert_called_once_with(self.sess_key) + + def test_close_do_nothing(self): + self.conn.delete.assert_not_called() + + def test_close(self): + self.sess_basic_auth._session_key = self.sess_key + self.sess_basic_auth._session_resource_id = self.sess_uri + self.sess_basic_auth.set_context(self.root, self.conn) + self.sess_basic_auth.close() + self.conn.delete.assert_called_once_with(self.sess_uri) + self.assertIsNone(self.sess_basic_auth.get_session_resource_id()) + self.assertIsNone(self.sess_basic_auth.get_session_key()) + + @mock.patch.object(auth.SessionOrBasicAuth, 'close', autospec=True) + def test_context_manager(self, auth_close): + with auth.SessionOrBasicAuth( + self.username, self.password) as session_or_base_auth: + self.assertEqual(self.username, session_or_base_auth._username) + self.assertEqual(self.password, session_or_base_auth._password) + auth_close.assert_called_once_with(session_or_base_auth) diff --git a/sushy/tests/unit/test_connector.py b/sushy/tests/unit/test_connector.py index 53b04ec0095033272114cfdfef348749dcb03a40..40d3164eeb17591b81b53020fabbcb93886fdf82 100644 --- a/sushy/tests/unit/test_connector.py +++ 
b/sushy/tests/unit/test_connector.py @@ -13,11 +13,13 @@ # License for the specific language governing permissions and limitations # under the License. +from http import client as http_client import json +from unittest import mock -import mock import requests +from sushy import auth as sushy_auth from sushy import connector from sushy import exceptions from sushy.tests.unit import base @@ -25,120 +27,395 @@ from sushy.tests.unit import base class ConnectorMethodsTestCase(base.TestCase): - def setUp(self): + @mock.patch.object(sushy_auth, 'SessionOrBasicAuth', autospec=True) + def setUp(self, mock_auth): + mock_auth.get_session_key.return_value = None super(ConnectorMethodsTestCase, self).setUp() self.conn = connector.Connector( - 'http://foo.bar:1234', username='user', - password='pass', verify=True) + 'http://foo.bar:1234', verify=True) + self.conn._auth = mock_auth self.data = {'fake': 'data'} self.headers = {'X-Fake': 'header'} + def test_init_with_credentials(self): + conn = connector.Connector('http://foo.bar:1234', + username='admin', + password='password') + self.assertEqual(conn._session.auth, ('admin', 'password')) + + def test_init_with_callback(self): + def response_callback(response): + return + + conn = connector.Connector('http://foo.bar:1234', + username='admin', + password='password', + response_callback=response_callback) + self.assertIs(conn._response_callback, response_callback) + @mock.patch.object(connector.Connector, '_op', autospec=True) def test_get(self, mock__op): - self.conn.get(path='fake/path', data=self.data, headers=self.headers) + self.conn.get(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) + mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_get_blocking(self, mock__op): + self.conn.get(path='fake/path', data=self.data.copy(), + 
headers=self.headers.copy(), blocking=True) mock__op.assert_called_once_with(mock.ANY, 'GET', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers, + blocking=True, timeout=60) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_post(self, mock__op): - self.conn.post(path='fake/path', data=self.data, headers=self.headers) + self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_post_blocking(self, mock__op): + self.conn.post(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True, timeout=120) + mock__op.assert_called_once_with(mock.ANY, 'POST', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=120) @mock.patch.object(connector.Connector, '_op', autospec=True) def test_patch(self, mock__op): - self.conn.patch(path='fake/path', data=self.data, headers=self.headers) + self.conn.patch(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', - self.data, self.headers) + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_patch_blocking(self, mock__op): + self.conn.patch(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'PATCH', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_put(self, mock__op): + self.conn.put(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) + mock__op.assert_called_once_with(mock.ANY, 'PUT', 
'fake/path', + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_put_blocking(self, mock__op): + self.conn.put(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'PUT', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_delete(self, mock__op): + self.conn.delete(path='fake/path', data=self.data.copy(), + headers=self.headers.copy()) + mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', + data=self.data, headers=self.headers, + blocking=False, timeout=60) + + @mock.patch.object(connector.Connector, '_op', autospec=True) + def test_delete_blocking(self, mock__op): + self.conn.delete(path='fake/path', data=self.data.copy(), + headers=self.headers.copy(), blocking=True) + mock__op.assert_called_once_with(mock.ANY, 'DELETE', 'fake/path', + data=self.data, headers=self.headers, + blocking=True, timeout=60) + + def test_set_auth(self): + mock_auth = mock.MagicMock() + self.conn.set_auth(mock_auth) + self.assertEqual(mock_auth, self.conn._auth) + + def test_set_http_basic_auth(self): + self.conn.set_http_basic_auth('foo', 'secret') + self.assertEqual(('foo', 'secret'), self.conn._session.auth) + + def test_set_http_session_auth(self): + self.conn.set_http_session_auth('hash-token') + self.assertTrue('X-Auth-Token' in self.conn._session.headers) + self.assertEqual( + 'hash-token', self.conn._session.headers['X-Auth-Token']) + + def test_close(self): + session = mock.Mock(spec=requests.Session) + self.conn._session = session + self.conn.close() + session.close.assert_called_once_with() class ConnectorOpTestCase(base.TestCase): - def setUp(self): + @mock.patch.object(sushy_auth, 'SessionOrBasicAuth', autospec=True) + def setUp(self, mock_auth): + 
mock_auth.get_session_key.return_value = None + mock_auth._session_key = None + self.auth = mock_auth super(ConnectorOpTestCase, self).setUp() self.conn = connector.Connector( - 'http://foo.bar:1234', username='user', - password='pass', verify=True) + 'http://foo.bar:1234', verify=True) + self.conn._auth = mock_auth self.data = {'fake': 'data'} self.headers = {'X-Fake': 'header'} self.session = mock.Mock(spec=requests.Session) self.conn._session = self.session self.request = self.session.request - self.request.return_value.status_code = 200 + self.request.return_value.status_code = http_client.OK def test_ok_get(self): - expected_headers = self.headers.copy() + self.conn._op('GET', path='fake/path', headers=self.headers) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234/fake/path', + headers=self.headers, json=None) + + def test_response_callback(self): + mock_response_callback = mock.MagicMock() + self.conn._response_callback = mock_response_callback self.conn._op('GET', path='fake/path', headers=self.headers) + self.assertEqual(1, mock_response_callback.call_count) + + def test_ok_get_url_redirect_false(self): + self.conn._op('GET', path='fake/path', headers=self.headers, + allow_redirects=False) self.request.assert_called_once_with( 'GET', 'http://foo.bar:1234/fake/path', - data=None, headers=expected_headers) + headers=self.headers, json=None, allow_redirects=False) def test_ok_post(self): - expected_headers = self.headers.copy() - expected_headers['Content-Type'] = 'application/json' + self.conn._op('POST', path='fake/path', data=self.data.copy(), + headers=self.headers) + self.request.assert_called_once_with( + 'POST', 'http://foo.bar:1234/fake/path', + json=self.data, headers=self.headers) - self.conn._op('POST', path='fake/path', data=self.data, + def test_ok_put(self): + self.conn._op('PUT', path='fake/path', data=self.data.copy(), headers=self.headers) + self.request.assert_called_once_with( + 'PUT', 'http://foo.bar:1234/fake/path', + 
json=self.data, headers=self.headers) + + def test_ok_delete(self): + expected_headers = self.headers.copy() + expected_headers['OData-Version'] = '4.0' + self.conn._op('DELETE', path='fake/path', headers=self.headers.copy()) + self.request.assert_called_once_with( + 'DELETE', 'http://foo.bar:1234/fake/path', + headers=expected_headers, json=None) + + def test_ok_post_with_session(self): + self.conn._session.headers = {} + self.conn._session.headers['X-Auth-Token'] = 'asdf1234' + expected_headers = self.headers.copy() + expected_headers['OData-Version'] = '4.0' + expected_headers['Content-Type'] = 'application/json' + self.conn._op('POST', path='fake/path', headers=self.headers, + data=self.data) self.request.assert_called_once_with( 'POST', 'http://foo.bar:1234/fake/path', - data=json.dumps(self.data), headers=expected_headers) + json=self.data, headers=expected_headers) + self.assertEqual(self.conn._session.headers, + {'X-Auth-Token': 'asdf1234'}) + + def test_odata_version_header_redfish(self): + path = '/redfish/v1/path' + headers = dict(self.headers) + expected_headers = dict(self.headers) + expected_headers['OData-Version'] = '4.0' + self.request.reset_mock() + self.conn._op('GET', path=path, headers=headers) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + + def test_odata_version_header_redfish_no_headers(self): + path = '/redfish/v1/bar' + expected_headers = {'OData-Version': '4.0'} + self.conn._op('GET', path=path) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + + def test_odata_version_header_redfish_existing_header(self): + path = '/redfish/v1/foo' + headers = {'OData-Version': '3.0'} + expected_headers = dict(headers) + self.conn._op('GET', path=path, headers=headers) + self.request.assert_called_once_with( + 'GET', 'http://foo.bar:1234' + path, + headers=expected_headers, json=None) + + def 
test_timed_out_session_unable_to_create_session(self): + self.conn._auth.can_refresh_session.return_value = False + self.conn._session = self.session + self.request = self.session.request + self.request.return_value.status_code = http_client.FORBIDDEN + self.request.return_value.json.side_effect = ValueError('no json') + with self.assertRaisesRegex(exceptions.AccessError, + 'unknown error') as ae: + self.conn._op('POST', path='fake/path', data=self.data, + headers=self.headers) + exc = ae.exception + self.assertEqual(http_client.FORBIDDEN, exc.status_code) + + def test_timed_out_session_re_established(self): + self.auth._session_key = 'asdf1234' + self.auth.get_session_key.return_value = 'asdf1234' + self.conn._auth = self.auth + self.session = mock.Mock(spec=requests.Session) + self.conn._session = self.session + self.request = self.session.request + first_response = mock.MagicMock() + first_response.status_code = http_client.FORBIDDEN + second_response = mock.MagicMock() + second_response.status_code = http_client.OK + second_response.json = {'Test': 'Testing'} + self.request.side_effect = [first_response, second_response] + response = self.conn._op('POST', path='fake/path', data=self.data, + headers=self.headers) + self.auth.refresh_session.assert_called_with() + self.auth.can_refresh_session.assert_called_with() + self.assertEqual(response.json, second_response.json) def test_connection_error(self): self.request.side_effect = requests.exceptions.ConnectionError self.assertRaises(exceptions.ConnectionError, self.conn._op, 'GET') def test_unknown_http_error(self): - self.request.return_value.status_code = 409 + self.request.return_value.status_code = http_client.CONFLICT self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.HTTPError, 'unknown error') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(409, exc.status_code) + self.assertEqual(http_client.CONFLICT, exc.status_code) 
self.assertIsNone(exc.body) self.assertIsNone(exc.detail) def test_known_http_error(self): - self.request.return_value.status_code = 400 - with open('sushy/tests/unit/json_samples/error.json', 'r') as f: + self.request.return_value.status_code = http_client.BAD_REQUEST + with open('sushy/tests/unit/json_samples/error.json') as f: self.request.return_value.json.return_value = json.load(f) with self.assertRaisesRegex(exceptions.BadRequestError, - 'A general error has occurred') as cm: + 'body submitted was malformed JSON') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(400, exc.status_code) + self.assertEqual(http_client.BAD_REQUEST, exc.status_code) self.assertIsNotNone(exc.body) - self.assertIn('A general error has occurred', exc.detail) + self.assertIn('body submitted was malformed JSON', exc.detail) - def test_not_found_error(self): - self.request.return_value.status_code = 404 + def test_known_http_error_nonlist_ext_info(self): + self.request.return_value.status_code =\ + http_client.UNSUPPORTED_MEDIA_TYPE + with open('sushy/tests/unit/json_samples/' + 'error_single_ext_info.json') as f: + self.request.return_value.json.return_value = json.load(f) + + with self.assertRaisesRegex(exceptions.HTTPError, + 'See Resolution for information') as cm: + self.conn._op('POST', 'http://foo.bar') + exc = cm.exception + self.assertEqual(http_client.UNSUPPORTED_MEDIA_TYPE, exc.status_code) + self.assertIsNotNone(exc.body) + self.assertIn('See Resolution for information', exc.detail) + + @mock.patch('time.sleep', autospec=True) + def test_not_found_error(self, mock_sleep): + self.request.return_value.status_code = http_client.NOT_FOUND self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.ResourceNotFoundError, 'Resource http://foo.bar not found') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(404, exc.status_code) + 
self.assertEqual(http_client.NOT_FOUND, exc.status_code) + self.assertFalse(mock_sleep.called) + self.assertEqual(1, self.request.call_count) - def test_server_error(self): - self.request.return_value.status_code = 500 + @mock.patch('time.sleep', autospec=True) + def test_server_error(self, mock_sleep): + self.request.return_value.status_code = ( + http_client.INTERNAL_SERVER_ERROR) self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.ServerSideError, 'unknown error') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(500, exc.status_code) + self.assertEqual(http_client.INTERNAL_SERVER_ERROR, exc.status_code) + self.assertEqual(5, mock_sleep.call_count) + self.assertEqual(6, self.request.call_count) def test_access_error(self): - self.request.return_value.status_code = 403 + self.conn._auth.can_refresh_session.return_value = False + + self.request.return_value.status_code = http_client.FORBIDDEN self.request.return_value.json.side_effect = ValueError('no json') with self.assertRaisesRegex(exceptions.AccessError, 'unknown error') as cm: self.conn._op('GET', 'http://foo.bar') exc = cm.exception - self.assertEqual(403, exc.status_code) + self.assertEqual(http_client.FORBIDDEN, exc.status_code) + + @mock.patch.object(connector.LOG, 'debug', autospec=True) + def test_access_error_service_session(self, mock_log): + self.conn._auth.can_refresh_session.return_value = False + + self.request.return_value.status_code = http_client.FORBIDDEN + self.request.return_value.json.side_effect = ValueError('no json') + + with self.assertRaisesRegex(exceptions.AccessError, + 'unknown error') as cm: + self.conn._op('GET', 'http://redfish/v1/SessionService') + exc = cm.exception + mock_log.assert_called_with( + 'HTTP GET of SessionService failed %s, ' + 'this is expected prior to authentication', 'HTTP GET ' + 'http://redfish/v1/SessionService returned code ' + '%s. 
unknown error Extended information: ' + 'none' % http_client.FORBIDDEN) + self.assertEqual(http_client.FORBIDDEN, exc.status_code) + + def test_blocking_no_location_header(self): + self.request.return_value.status_code = http_client.ACCEPTED + self.request.return_value.headers = {'retry-after': 5} + with self.assertRaisesRegex(exceptions.ConnectionError, + 'status 202, but no Location header'): + self.conn._op('POST', 'http://foo.bar', blocking=True) + + @mock.patch('sushy.connector.time.sleep', autospec=True) + def test_blocking_task_fails(self, mock_sleep): + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} + response2 = mock.MagicMock(spec=requests.Response) + response2.status_code = http_client.BAD_REQUEST + message = 'Unable to create Volume with given parameters' + response2.json.return_value = { + 'error': { + 'message': message + } + } + self.request.side_effect = [response1, response1, response2] + with self.assertRaisesRegex(exceptions.BadRequestError, message): + self.conn._op('POST', 'http://foo.bar', blocking=True) diff --git a/sushy/tests/unit/test_main.py b/sushy/tests/unit/test_main.py index 6f300becdaac55ea02e70a0d229e4cc9a6eb5454..0b4769a3c0a837d73b401c866d89d74edf9dc530 100644 --- a/sushy/tests/unit/test_main.py +++ b/sushy/tests/unit/test_main.py @@ -14,65 +14,449 @@ # under the License. 
import json +from unittest import mock -import mock - +from sushy import auth from sushy import connector +from sushy import exceptions from sushy import main +from sushy.resources.chassis import chassis +from sushy.resources.compositionservice import compositionservice +from sushy.resources.fabric import fabric from sushy.resources.manager import manager +from sushy.resources.registry import message_registry_file +from sushy.resources.sessionservice import session +from sushy.resources.sessionservice import sessionservice from sushy.resources.system import system +from sushy.resources.updateservice import updateservice +from sushy import taskmonitor from sushy.tests.unit import base class MainTestCase(base.TestCase): + @mock.patch.object(auth, 'SessionOrBasicAuth', autospec=True) @mock.patch.object(connector, 'Connector', autospec=True) - def setUp(self, mock_connector): + @mock.patch.object(sessionservice, 'SessionService', autospec=True) + def setUp(self, mock_session_service, mock_connector, mock_auth): super(MainTestCase, self).setUp() self.conn = mock.Mock() + self.sess_serv = mock.Mock() + self.sess_serv.create_session.return_value = (None, None) + mock_session_service.return_value = self.sess_serv mock_connector.return_value = self.conn - with open('sushy/tests/unit/json_samples/root.json', 'r') as f: - self.conn.get.return_value.json.return_value = json.loads(f.read()) - self.root = main.Sushy( - 'http://foo.bar:1234', username='foo', password='bar', - verify=True) + with open('sushy/tests/unit/json_samples/root.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + self.root = main.Sushy('http://foo.bar:1234', + verify=True, auth=mock_auth) mock_connector.assert_called_once_with( - 'http://foo.bar:1234', 'foo', 'bar', True) + 'http://foo.bar:1234', verify=True) def test__parse_attributes(self): - self.root._parse_attributes() + self.root._parse_attributes(self.json_doc) 
self.assertEqual('RootService', self.root.identity) self.assertEqual('Root Service', self.root.name) self.assertEqual('1.0.2', self.root.redfish_version) self.assertEqual('92384634-2938-2342-8820-489239905423', self.root.uuid) + self.assertEqual('Product', self.root.product) + self.assertTrue(self.root.protocol_features_supported.excerpt_query) + self.assertFalse(self.root.protocol_features_supported.expand_query) + self.assertTrue(self.root.protocol_features_supported.filter_query) + self.assertTrue( + self.root.protocol_features_supported.only_member_query) + self.assertFalse(self.root.protocol_features_supported.select_query) self.assertEqual('/redfish/v1/Systems', self.root._systems_path) self.assertEqual('/redfish/v1/Managers', self.root._managers_path) + self.assertEqual('/redfish/v1/Chassis', self.root._chassis_path) + self.assertEqual('/redfish/v1/Fabrics', self.root._fabrics_path) + self.assertEqual('/redfish/v1/SessionService', + self.root._session_service_path) + self.assertEqual('/redfish/v1/CompositionService', + self.root._composition_service_path) + + @mock.patch.object(connector, 'Connector', autospec=True) + def test__init_throws_exception(self, mock_Connector): + self.assertRaises( + ValueError, main.Sushy, 'http://foo.bar:1234', + 'foo', 'bar', auth=mock.MagicMock()) + + @mock.patch.object(connector, 'Connector', autospec=True) + def test_custom_connector(self, mock_Sushy_Connector): + connector_mock = mock.MagicMock() + with open('sushy/tests/unit/json_samples/root.json') as f: + connector_mock.get.return_value.json.return_value = ( + json.load(f)) + main.Sushy('http://foo.bar:1234', 'foo', 'bar', + connector=connector_mock) + self.assertTrue(connector_mock.post.called) + self.assertTrue(connector_mock.get.called) + self.assertFalse(mock_Sushy_Connector.called) @mock.patch.object(system, 'SystemCollection', autospec=True) - def test_get_system_collection(self, mock_system_collection): + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + 
def test_get_system_collection( + self, mock_lazy_registries, mock_system_collection): + self.root._standard_message_registries_path = None self.root.get_system_collection() mock_system_collection.assert_called_once_with( self.root._conn, '/redfish/v1/Systems', - redfish_version=self.root.redfish_version) + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) @mock.patch.object(system, 'System', autospec=True) - def test_get_system(self, mock_system): + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_system(self, mock_lazy_registries, mock_system): + self.root._standard_message_registries_path = None self.root.get_system('fake-system-id') mock_system.assert_called_once_with( self.root._conn, 'fake-system-id', - redfish_version=self.root.redfish_version) + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries) + + @mock.patch.object(system, 'SystemCollection', autospec=True) + @mock.patch.object(system, 'System', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_system_default_ok( + self, mock_lazy_registries, mock_system, mock_system_collection): + self.root._standard_message_registries_path = None + mock_system.path = 'fake-system-id' + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [mock_system] + self.root.get_system() + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Systems', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + mock_system.assert_called_once_with( + self.root._conn, 'fake-system-id', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries) + + @mock.patch.object(system, 'SystemCollection', autospec=True) + @mock.patch.object(system, 'System', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_system_default_failure( + self, mock_lazy_registries, mock_system, 
mock_system_collection): + self.root._standard_message_registries_path = None + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [] + self.assertRaises(exceptions.UnknownDefaultError, self.root.get_system) + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Systems', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + + @mock.patch.object(chassis, 'Chassis', autospec=True) + def test_get_chassis(self, mock_chassis): + self.root.get_chassis('fake-chassis-id') + mock_chassis.assert_called_once_with( + self.root._conn, 'fake-chassis-id', + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + @mock.patch.object(chassis, 'Chassis', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_chassis_default_ok( + self, mock_lazy_registries, mock_chassis, mock_chassis_collection): + self.root._standard_message_registries_path = None + mock_chassis.path = 'fake-chassis-id' + mock_members = mock_chassis_collection.return_value.get_members + mock_members.return_value = [mock_chassis] + self.root.get_chassis() + mock_chassis_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + mock_chassis.assert_called_once_with( + self.root._conn, 'fake-chassis-id', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + @mock.patch.object(chassis, 'Chassis', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_chassis_default_failure( + self, mock_lazy_registries, mock_chassis, mock_chassis_collection): + self.root._standard_message_registries_path = None + mock_members = mock_chassis_collection.return_value.get_members + 
mock_members.return_value = [] + self.assertRaises( + exceptions.UnknownDefaultError, self.root.get_chassis) + mock_chassis_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + + @mock.patch.object(chassis, 'ChassisCollection', autospec=True) + def test_get_chassis_collection(self, chassis_collection_mock): + self.root.get_chassis_collection() + chassis_collection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Chassis', + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(fabric, 'Fabric', autospec=True) + def test_get_fabric(self, mock_fabric): + self.root.get_fabric('fake-fabric-id') + mock_fabric.assert_called_once_with( + self.root._conn, 'fake-fabric-id', + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(fabric, 'FabricCollection', autospec=True) + def test_get_fabric_collection(self, fabric_collection_mock): + self.root.get_fabric_collection() + fabric_collection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Fabrics', + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(manager, 'ManagerCollection', autospec=True) def test_get_manager_collection(self, ManagerCollection_mock): self.root.get_manager_collection() ManagerCollection_mock.assert_called_once_with( self.root._conn, '/redfish/v1/Managers', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.lazy_registries) @mock.patch.object(manager, 'Manager', autospec=True) def test_get_manager(self, Manager_mock): self.root.get_manager('fake-manager-id') Manager_mock.assert_called_once_with( self.root._conn, 'fake-manager-id', - redfish_version=self.root.redfish_version) + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(manager, 'ManagerCollection', autospec=True) + @mock.patch.object(manager, 'Manager', autospec=True) + 
@mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_manager_default_ok( + self, mock_lazy_registries, mock_manager, mock_manager_collection): + self.root._standard_message_registries_path = None + mock_manager.path = 'fake-manager-id' + mock_members = mock_manager_collection.return_value.get_members + mock_members.return_value = [mock_manager] + self.root.get_manager() + mock_manager_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Managers', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + mock_manager.assert_called_once_with( + self.root._conn, 'fake-manager-id', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries) + + @mock.patch.object(manager, 'ManagerCollection', autospec=True) + @mock.patch.object(manager, 'Manager', autospec=True) + @mock.patch('sushy.Sushy.lazy_registries', autospec=True) + def test_get_manager_default_failure( + self, mock_lazy_registries, mock_manager, mock_system_collection): + self.root._standard_message_registries_path = None + mock_members = mock_system_collection.return_value.get_members + mock_members.return_value = [] + self.assertRaises( + exceptions.UnknownDefaultError, self.root.get_manager) + mock_system_collection.assert_called_once_with( + self.root._conn, '/redfish/v1/Managers', + redfish_version=self.root.redfish_version, + registries=mock_lazy_registries + ) + + @mock.patch.object(sessionservice, 'SessionService', autospec=True) + def test_get_sessionservice(self, mock_sess_serv): + self.root.get_session_service() + mock_sess_serv.assert_called_once_with( + self.root._conn, '/redfish/v1/SessionService', + self.root.redfish_version) + + @mock.patch.object(session, 'Session', autospec=True) + def test_get_session(self, mock_sess): + self.root.get_session('asdf') + mock_sess.assert_called_once_with( + self.root._conn, 'asdf', + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(updateservice, 
'UpdateService', autospec=True) + def test_get_update_service(self, mock_upd_serv): + self.root.get_update_service() + mock_upd_serv.assert_called_once_with( + self.root._conn, '/redfish/v1/UpdateService', + self.root.redfish_version, self.root.lazy_registries) + + @mock.patch.object(message_registry_file, + 'MessageRegistryFileCollection', + autospec=True) + def test__get_registry_collection( + self, MessageRegistryFileCollection_mock): + self.root._get_registry_collection() + MessageRegistryFileCollection_mock.assert_called_once_with( + self.root._conn, '/redfish/v1/Registries', + self.root.redfish_version) + + @mock.patch.object( + compositionservice, 'CompositionService', autospec=True) + def test_get_composition_service(self, mock_comp_ser): + self.root.get_composition_service() + mock_comp_ser.assert_called_once_with( + self.root._conn, '/redfish/v1/CompositionService', + self.root.redfish_version, self.root.lazy_registries) + + def test__get_standard_message_registry_collection(self): + registries = self.root._get_standard_message_registry_collection() + + self.assertEqual(5, len(registries)) + self.assertIn('Base.1.3.0', {r.identity for r in registries}) + + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test__get_message_registries(self, mock_col, mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + + mock_msg_reg2 = mock.Mock() + mock_msg_reg2.registry_prefix = 'RegistryB' + mock_msg_reg2.registry_version = '1.0.0' + mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' + mock_msg_reg_file.registry = 'RegistryB.1.0' + mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 + mock_col.return_value.get_members.return_value = [mock_msg_reg_file] + + 
registries = self.root.registries + self.assertEqual({'RegistryA.2.0': mock_msg_reg1, + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2}, registries) + + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test__get_message_registries_caching(self, mock_col, mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + + mock_msg_reg2 = mock.Mock() + mock_msg_reg2.registry_prefix = 'RegistryB' + mock_msg_reg2.registry_version = '1.0.0' + mock_msg_reg_file = mock.Mock() + mock_msg_reg_file.identity = 'Messages' + mock_msg_reg_file.registry = 'RegistryB.1.0' + mock_msg_reg_file.get_message_registry.return_value = mock_msg_reg2 + mock_col.return_value.get_members.return_value = [mock_msg_reg_file] + + registries = self.root.registries + + self.assertEqual(1, mock_col.call_count) + self.assertEqual(1, mock_st_col.call_count) + + cached_registries = self.root.registries + + self.assertEqual(1, mock_col.call_count) + self.assertEqual(1, mock_st_col.call_count) + + expected = { + 'RegistryA.2.0': mock_msg_reg1, + 'RegistryB.1.0': mock_msg_reg2, + 'Messages': mock_msg_reg2 + } + + self.assertEqual(expected, registries) + self.assertEqual(cached_registries, registries) + + @mock.patch('sushy.Sushy._get_standard_message_registry_collection', + autospec=True) + @mock.patch('sushy.Sushy._get_registry_collection', autospec=True) + def test_registries_provided_empty(self, mock_col, mock_st_col): + mock_msg_reg1 = mock.Mock() + mock_msg_reg1.registry_prefix = 'RegistryA' + mock_msg_reg1.registry_version = '2.0.0' + mock_msg_reg1.language = 'en' + mock_st_col.return_value = [mock_msg_reg1] + mock_col.return_value = None + + registries = self.root.registries + self.assertEqual({'RegistryA.2.0': mock_msg_reg1}, 
registries) + + @mock.patch('sushy.Sushy.registries', autospec=True) + def test_lazy_registries(self, mock_registries): + registries = self.root.lazy_registries + self.assertEqual(0, mock_registries.__getitem__.call_count) + registries[1] + self.assertEqual(1, mock_registries.__getitem__.call_count) + + def test_get_sessions_path(self): + expected = '/redfish/v1/SessionService/Sessions' + self.assertEqual(expected, self.root.get_sessions_path()) + + @mock.patch.object(taskmonitor, 'TaskMonitor', autospec=True) + def test_get_task_monitor(self, mock_task_mon): + self.root.get_task_monitor('/TaskService/Task/123') + mock_task_mon.assert_called_once_with( + self.root._conn, '/TaskService/Task/123', + self.root.redfish_version, self.root.lazy_registries) + + +class BareMinimumMainTestCase(base.TestCase): + + def setUp(self): + super(BareMinimumMainTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/' + 'bare_minimum_root.json') as f: + self.conn.get.return_value.json.return_value = json.load(f) + self.root = main.Sushy('http://foo.bar:1234', verify=True, + auth=mock.MagicMock(), connector=self.conn) + + def test_get_system_collection_when_systems_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Systems/@odata.id', self.root.get_system_collection) + + def test_get_manager_collection_when_managers_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Managers/@odata.id', self.root.get_manager_collection) + + def test_get_chassis_collection_when_chassis_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Chassis/@odata.id', self.root.get_chassis_collection) + + def test_get_fabric_collection_when_fabrics_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Fabrics/@odata.id', self.root.get_fabric_collection) + + def test_get_session_service_when_sessionservice_attr_absent(self): + 
self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'SessionService/@odata.id', self.root.get_session_service) + + def test_get_update_service_when_updateservice_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'UpdateService/@odata.id', self.root.get_update_service) + + def test_get_composition_service_when_compositionservice_attr_absent(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'CompositionService/@odata.id', self.root.get_composition_service) + + def test__get_registry_collection_when_registries_attr_absent(self): + self.assertIsNone(self.root._get_registry_collection()) + + def test_get_sessions_path_fail(self): + self.assertRaisesRegex( + exceptions.MissingAttributeError, + 'Links/Sessions/@data.id', self.root.get_sessions_path) diff --git a/sushy/tests/unit/test_taskmonitor.py b/sushy/tests/unit/test_taskmonitor.py new file mode 100644 index 0000000000000000000000000000000000000000..80d897d7c090cbe9b45b72c26c2a6b35f26d2d18 --- /dev/null +++ b/sushy/tests/unit/test_taskmonitor.py @@ -0,0 +1,359 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from http import client as http_client +import json +from unittest import mock + +import requests + +from sushy import exceptions +from sushy.resources import base as resource_base +from sushy.resources.taskservice import task +from sushy import taskmonitor +from sushy.tests.unit import base + + +class TaskMonitorTestCase(base.TestCase): + + def setUp(self): + super(TaskMonitorTestCase, self).setUp() + self.conn = mock.Mock() + + with open('sushy/tests/unit/json_samples/task.json') as f: + self.json_doc = json.load(f) + + self.conn.get.return_value.json.return_value = self.json_doc + + self.field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + self.response = mock.Mock() + self.response.status_code = http_client.ACCEPTED + self.response.headers = {'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'} + self.response.content = json.dumps(self.json_doc).encode('utf-8') + self.response.json.return_value = self.json_doc + + self.task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=self.response + ) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_init_field_data(self, mock_log): + field_data = resource_base.FieldData( + http_client.ACCEPTED, + {'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'}, + self.json_doc) + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + field_data=field_data) + + self.assertEqual(field_data.status_code, + task_monitor.response.status_code) + self.assertEqual(field_data.headers, + task_monitor.response.headers) + self.assertEqual(field_data.json_doc, + task_monitor.response.json()) + mock_log.assert_called_once_with( + 'TaskMonitor field_data is deprecated in TaskMonitor. 
' + 'Use response.') + + def test_init_accepted_no_content(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = {'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'DELETE'} + response.content = None + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response) + + self.assertIsNone(task_monitor.task) + + def test_init_accepted_content(self): + self.assertIsNotNone(self.task_monitor.task) + + def test_init_no_response(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + task_monitor = taskmonitor.TaskMonitor(self.conn, '/Task/545') + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(task_monitor.task) + + def test_refresh_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 0} + self.conn.get.return_value.content = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNone(self.task_monitor.task) + + def test_refresh_content_no_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + self.task_monitor._task = None + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + self.assertIsNotNone(self.task_monitor.task) + + def test_refresh_content_task(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {'Content-Length': 42} + + self.task_monitor.refresh() + + self.conn.get.assert_called_with(path='/Task/545') + self.assertEqual(1, self.conn.get.call_count) + 
self.assertIsNotNone(self.task_monitor.task) + + def test_refresh_done(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 200 + + self.task_monitor.refresh() + + self.conn.get.assert_called_once_with(path='/Task/545') + self.assertIsNone(self.task_monitor.task) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_task_monitor(self, mock_log): + self.assertEqual('/Task/545', self.task_monitor.task_monitor) + mock_log.assert_called_once_with( + 'task_monitor is deprecated in TaskMonitor. Use task_monitor_uri.') + + def test_task_monitor_uri(self): + self.assertEqual('/Task/545', self.task_monitor.task_monitor_uri) + + def test_is_processing(self): + self.assertTrue(self.task_monitor.is_processing) + + def test_check_is_processing_not_processing(self): + response = mock.Mock() + response.status_code = http_client.OK + response.headers = {} + response.content = None + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response) + + self.assertEqual(False, task_monitor.check_is_processing) + + def test_check_is_processing_refreshing(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + self.conn.get.return_value.headers = {} + self.conn.get.return_value.content = None + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545') + + self.assertEqual(True, task_monitor.check_is_processing) + + @mock.patch.object(taskmonitor.LOG, 'warning', autospec=True) + def test_retry_after(self, mock_log): + self.assertEqual(20, self.task_monitor.retry_after) + mock_log.assert_called_once_with('TaskMonitor retry_after is ' + 'deprecated, use sleep_for.') + + def test_cancellable(self): + self.assertTrue(self.task_monitor.cancellable) + + def test_sleep_for_retry_after_empty(self): + self.task_monitor._response.headers["Retry-After"] = None + self.assertEqual(1, self.task_monitor.sleep_for) + + def test_sleep_for_retry_after_digit(self): + self.assertEqual(20, 
self.task_monitor.sleep_for) + + def test_sleep_for_retry_after_date_past(self): + self.task_monitor._response.headers["Retry-After"] =\ + 'Fri, 31 Dec 1999 23:59:59 GMT' + self.assertEqual(0, self.task_monitor.sleep_for) + + def test_not_cancellable_no_header(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = { + 'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20} + response.json.return_value = self.json_doc + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response + ) + + self.assertFalse(task_monitor.cancellable) + + def test_not_cancellable(self): + response = mock.Mock() + response.status_code = http_client.ACCEPTED + response.headers = { + 'Content-Length': 42, + 'Location': '/Task/545', + 'Retry-After': 20, + 'Allow': 'GET'} + response.json.return_value = self.json_doc + + task_monitor = taskmonitor.TaskMonitor( + self.conn, '/Task/545', + response=response + ) + + self.assertFalse(task_monitor.cancellable) + + def test_task(self): + tm_task = self.task_monitor.task + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) + + def test_get_task(self): + tm_task = self.task_monitor.get_task() + + self.assertIsInstance(tm_task, task.Task) + self.assertEqual('545', tm_task.identity) + + @mock.patch('time.sleep', autospec=True) + def test_wait(self, mock_time): + self.conn.reset_mock() + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} + + response2 = mock.MagicMock(spec=requests.Response) + response2.status_code = http_client.OK + response2.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response2.json.return_value = {'Id': 3, 'Name': 'Test'} + + 
self.conn.get.side_effect = [response1, response2] + self.task_monitor.wait(60) + + self.assertFalse(self.task_monitor.is_processing) + self.assertEqual(response2, self.task_monitor.response) + + @mock.patch('time.sleep', autospec=True) + def test_wait_timeout(self, mock_time): + self.conn.reset_mock() + response1 = mock.MagicMock(spec=requests.Response) + response1.status_code = http_client.ACCEPTED + response1.headers = { + 'Retry-After': 5, + 'Location': '/redfish/v1/taskmon/1', + 'Content-Length': 10 + } + response1.json.return_value = {'Id': 3, 'Name': 'Test'} + + self.conn.get.side_effect = [response1, response1] + + self.assertRaises(exceptions.ConnectionError, + self.task_monitor.wait, -10) + + def test_from_response_no_content(self): + self.conn.reset_mock() + self.conn.get.return_value.status_code = 202 + response = mock.Mock() + response.content = None + response.headers = {'Location': '/Task/545'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.from_response( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) + + def test_from_response_odata_id(self): + response = mock.Mock() + response.content = "something" + response.json.return_value = {'Id': '545', 'Name': 'test', + '@odata.id': '545'} + response.headers = {'Location': '/TaskMonitor/'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.from_response( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/TaskMonitor/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) + + def test_from_response_location_header_missing(self): + response = mock.Mock() + response.content = "something" + 
response.json.return_value = {'Id': '545', 'Name': 'test'} + response.headers = {} + response.status_code = http_client.ACCEPTED + + self.assertRaises(exceptions.MissingHeaderError, + taskmonitor.TaskMonitor.from_response, + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + def test_from_response(self): + response = mock.Mock() + response.content = "something" + response.json.return_value = {'Id': '545', 'Name': 'test'} + response.headers = {'Location': '/Task/545'} + response.status_code = http_client.ACCEPTED + + tm = taskmonitor.TaskMonitor.from_response( + self.conn, response, + '/redfish/v1/UpdateService/Actions/SimpleUpdate') + + self.assertIsInstance(tm, taskmonitor.TaskMonitor) + self.assertEqual('/Task/545', tm.task_monitor_uri) + self.assertIsNotNone(tm.task) + self.assertEqual('545', tm.task.identity) diff --git a/sushy/tests/unit/test_utils.py b/sushy/tests/unit/test_utils.py index f53731a37ded989eac3da57ef048aad08fd62f89..83aafb90b35b4d3685beb0cf8eea85010a81c840 100644 --- a/sushy/tests/unit/test_utils.py +++ b/sushy/tests/unit/test_utils.py @@ -13,9 +13,13 @@ # License for the specific language governing permissions and limitations # under the License. 
+import json +from unittest import mock -import mock +from sushy import exceptions +from sushy.resources import base as resource_base +from sushy.resources.system import system from sushy.tests.unit import base from sushy import utils @@ -35,3 +39,208 @@ class UtilsTestCase(base.TestCase): expected = ('/redfish/v1/Systems/FOO', '/redfish/v1/Systems/BAR') self.assertEqual(expected, utils.get_members_identities(members)) self.assertEqual(1, log_mock.call_count) + + def test_int_or_none(self): + self.assertEqual(1, utils.int_or_none('1')) + self.assertIsNone(None, utils.int_or_none(None)) + + def test_bool_or_none_none(self): + self.assertIsNone(utils.bool_or_none(None)) + + def test_bool_or_none_bool(self): + self.assertEqual(True, utils.bool_or_none(True)) + + def setUp(self): + super(UtilsTestCase, self).setUp() + self.conn = mock.MagicMock() + with open('sushy/tests/unit/json_samples/system.json') as f: + system_json = json.load(f) + self.conn.get.return_value.json.return_value = system_json + + self.sys_inst = system.System(self.conn, + '/redfish/v1/Systems/437XR1138R2', + redfish_version='1.0.2') + + def test_get_sub_resource_path_by(self): + subresource_path = 'EthernetInterfaces' + expected_result = '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces' + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path) + self.assertEqual(expected_result, value) + + def test_get_sub_resource_path_by_list(self): + subresource_path = ['EthernetInterfaces'] + expected_result = '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces' + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path) + self.assertEqual(expected_result, value) + + def test_get_sub_resource_path_by_collection(self): + subresource_path = ["Links", "ManagedBy"] + expected_result = ['/redfish/v1/Managers/BMC'] + value = utils.get_sub_resource_path_by(self.sys_inst, + subresource_path, + is_collection=True) + self.assertEqual(expected_result, value) + + def 
test_get_sub_resource_path_by_fails(self): + subresource_path = ['Links', 'Chassis'] + expected_result = 'attribute Links/Chassis/@odata.id is missing' + self.assertRaisesRegex( + exceptions.MissingAttributeError, + expected_result, + utils.get_sub_resource_path_by, + self.sys_inst, subresource_path) + + def test_get_sub_resource_path_by_fails_with_empty_path(self): + self.assertRaisesRegex( + ValueError, + '"subresource_name" cannot be empty', + utils.get_sub_resource_path_by, + self.sys_inst, []) + + def test_get_sub_resource_path_by_fails_with_empty_string(self): + self.assertRaisesRegex( + ValueError, + '"subresource_name" cannot be empty', + utils.get_sub_resource_path_by, + self.sys_inst, '') + + def test_max_safe(self): + self.assertEqual(10, utils.max_safe([1, 3, 2, 8, 5, 10, 6])) + self.assertEqual(821, utils.max_safe([15, 300, 270, None, 821, None])) + self.assertEqual(0, utils.max_safe([])) + self.assertIsNone(utils.max_safe([], default=None)) + + def test_camelcase_to_underscore_joined(self): + input_vs_expected = [ + ('GarbageCollection', 'garbage_collection'), + ('DD', 'dd'), + ('rr', 'rr'), + ('AABbbC', 'aa_bbb_c'), + ('AABbbCCCDd', 'aa_bbb_ccc_dd'), + ('Manager', 'manager'), + ('EthernetInterfaceCollection', 'ethernet_interface_collection'), + (' ', ' '), + ] + for inp, exp in input_vs_expected: + self.assertEqual(exp, utils.camelcase_to_underscore_joined(inp)) + + def test_camelcase_to_underscore_joined_fails_with_empty_string(self): + self.assertRaisesRegex( + ValueError, + '"camelcase_str" cannot be empty', + utils.camelcase_to_underscore_joined, '') + + +class NestedResource(resource_base.ResourceBase): + + def _parse_attributes(self, json_doc): + pass + + +class BaseResource(resource_base.ResourceBase): + + def _parse_attributes(self, json_doc): + pass + + def _do_some_crunch_work_to_get_a(self): + return 'a' + + @utils.cache_it + def get_a(self): + return self._do_some_crunch_work_to_get_a() + + def _do_some_crunch_work_to_get_b(self): + 
return 'b' + + @utils.cache_it + def get_b(self): + return self._do_some_crunch_work_to_get_b() + + @property + @utils.cache_it + def nested_resource(self): + return NestedResource( + self._conn, "path/to/nested_resource", + redfish_version=self.redfish_version) + + @property + @utils.cache_it + def few_nested_resources(self): + return [NestedResource(self._conn, "/nested_res1", + redfish_version=self.redfish_version), + NestedResource(self._conn, "/nested_res2", + redfish_version=self.redfish_version)] + + +class CacheTestCase(base.TestCase): + + def setUp(self): + super(CacheTestCase, self).setUp() + self.conn = mock.Mock() + self.res = BaseResource(connector=self.conn, path='/Foo', + redfish_version='1.0.2') + + def test_cache_nested_resource_retrieval(self): + nested_res = self.res.nested_resource + few_nested_res = self.res.few_nested_resources + + self.assertIsInstance(nested_res, NestedResource) + self.assertIs(nested_res, self.res.nested_resource) + self.assertIsInstance(few_nested_res, list) + for n_res in few_nested_res: + self.assertIsInstance(n_res, NestedResource) + self.assertIs(few_nested_res, self.res.few_nested_resources) + + self.res.invalidate() + self.res.refresh(force=False) + + self.assertIsNotNone(self.res._cache_nested_resource) + self.assertTrue(self.res._cache_nested_resource._is_stale) + self.assertIsNotNone(self.res._cache_few_nested_resources) + for n_res in self.res._cache_few_nested_resources: + self.assertTrue(n_res._is_stale) + + self.assertIsInstance(self.res.nested_resource, NestedResource) + self.assertFalse(self.res._cache_nested_resource._is_stale) + self.assertIsInstance(self.res.few_nested_resources, list) + for n_res in self.res._cache_few_nested_resources: + self.assertFalse(n_res._is_stale) + + def test_cache_non_resource_retrieval(self): + with mock.patch.object( + self.res, '_do_some_crunch_work_to_get_a', + wraps=self.res._do_some_crunch_work_to_get_a, + autospec=True) as do_work_to_get_a_spy: + result = 
self.res.get_a() + self.assertTrue(do_work_to_get_a_spy.called) + + do_work_to_get_a_spy.reset_mock() + # verify subsequent invocation + self.assertEqual(result, self.res.get_a()) + self.assertFalse(do_work_to_get_a_spy.called) + + def test_cache_clear_only_selected_attr(self): + self.res.nested_resource + self.res.get_a() + self.res.get_b() + + utils.cache_clear(self.res, False, only_these=['get_a']) + + # cache cleared (set to None) + self.assertIsNone(self.res._cache_get_a) + # cache retained + self.assertEqual('b', self.res._cache_get_b) + self.assertFalse(self.res._cache_nested_resource._is_stale) + + def test_cache_clear_failure(self): + self.assertRaises( + TypeError, utils.cache_clear, self.res, False, only_these=10) + + def test_sanitize(self): + orig = {'UserName': 'admin', 'Password': 'pwd', + 'nested': {'answer': 42, 'password': 'secret'}} + expected = {'UserName': 'admin', 'Password': '***', + 'nested': {'answer': 42, 'password': '***'}} + self.assertEqual(expected, utils.sanitize(orig)) diff --git a/sushy/utils.py b/sushy/utils.py index f1e243cf5df0210664a0b028d9df64006baad40c..958e9a1b7e9c65eb9cf487c0a606dd558879de5a 100644 --- a/sushy/utils.py +++ b/sushy/utils.py @@ -13,10 +13,17 @@ # License for the specific language governing permissions and limitations # under the License. 
+import collections
+import functools
 import logging
+import threading
+
+from sushy import exceptions

 LOG = logging.getLogger(__name__)

+CACHE_ATTR_NAMES_VAR_NAME = '_cache_attr_names'
+

 def revert_dictionary(dictionary):
     """Given a dictionary revert it's mapping

@@ -45,3 +52,303 @@ def get_members_identities(members):
         members_list.append(path.rstrip('/'))

     return tuple(members_list)
+
+
+def int_or_none(x):
+    """Given a value x, cast it as an int or return None
+
+    :param x: The value to transform and return
+    :returns: Either None or x cast to an int
+
+    """
+    if x is None:
+        return None
+    return int(x)
+
+
+def bool_or_none(x):
+    """Given a value x this method returns either a bool or None
+
+    :param x: The value to transform and return
+    :returns: Either None or x cast to a bool
+
+    """
+    if x is None:
+        return None
+    return bool(x)
+
+
+def get_sub_resource_path_by(resource, subresource_name, is_collection=False):
+    """Helper function to find the subresource path
+
+    :param resource: ResourceBase instance on which the name
+        is queried.
+    :param subresource_name: name of the resource field to
+        fetch the '@odata.id' from.
+    :param is_collection: if `True`, expect a list of resources to
+        fetch the '@odata.id' from.
+    :returns: Resource path (if `is_collection` is `False`) or
+        a list of resource paths (if `is_collection` is `True`).
+ """ + if not subresource_name: + raise ValueError('"subresource_name" cannot be empty') + + if not isinstance(subresource_name, list): + subresource_name = [subresource_name] + + body = resource.json + for path_item in subresource_name: + body = body.get(path_item, {}) + + if not body: + raise exceptions.MissingAttributeError( + attribute='/'.join(subresource_name), resource=resource.path) + + elements = [] + + try: + if is_collection: + for element in body: + elements.append(element['@odata.id']) + return elements + + return body['@odata.id'] + + except (TypeError, KeyError): + attribute = '/'.join(subresource_name) + if is_collection: + attribute += '[%s]' % len(elements) + attribute += '/@odata.id' + raise exceptions.MissingAttributeError( + attribute=attribute, resource=resource.path) + + +def max_safe(iterable, default=0): + """Helper wrapper over builtin max() function. + + This function is just a wrapper over builtin max() w/o ``key`` argument. + The ``default`` argument specifies an object to return if the provided + ``iterable`` is empty. Also it filters out the None type values. + + :param iterable: an iterable + :param default: 0 by default + """ + + try: + return max(x for x in iterable if x is not None) + except ValueError: + # TypeError is not caught here as that should be thrown. + return default + + +def setdefaultattr(obj, name, default): + """Python's ``dict.setdefault`` applied on Python objects. + + If name is an attribute with obj, return its value. If not, set name + attribute with a value of default and return default. + + :param obj: a python object + :param name: name of attribute + :param default: default value to be set + """ + + try: + return getattr(obj, name) + except AttributeError: + setattr(obj, name, default) + return default + + +def cache_it(res_accessor_method): + """Utility decorator to cache the return value of the decorated method. + + This decorator is to be used with any Sushy resource class method. 
+    This will internally create an attribute on the resource namely
+    ``_cache_<name_of_the_method>``. This is referred to as the "caching
+    attribute". This attribute will eventually hold the resultant value from
+    the method invocation (when the method first gets called) and for every
+    subsequent call to that method this cached value will get returned. It
+    expects the decorated method to contain its own logic of evaluation.
+
+    This also assigns a variable named ``_cache_attr_names`` on the resource.
+    This variable maintains a collection of all the existing
+    "caching attribute" names.
+
+    To invalidate or clear the cache use :py:func:`~cache_clear`.
+    Usage:
+
+    .. code-block:: python
+
+      class SomeResource(base.ResourceBase):
+          ...
+          @cache_it
+          def get_summary(self):
+              # do some calculation and return the result
+              # and this result will be cached.
+              return result
+          ...
+          def _do_refresh(self, force):
+              cache_clear(self, force)
+
+    If the returned value is a Sushy resource instance or a sequence whose
+    element is of type Sushy resource it handles the case of calling the
+    ``refresh()`` method of that resource. This is done to avoid unnecessary
+    recreation of a new resource instance which already got created in the
+    first place, in contrast to fresh retrieval of the resource json data.
+    Again, the ``force`` argument is deliberately set to False to do only the
+    "light refresh" of the resource (only the fresh retrieval of resource)
+    instead of doing the complete exhaustive "cascading refresh" (resource
+    with all its nested subresources recursively).
+
+    .. code-block:: python
+
+      class SomeResource(base.ResourceBase):
+          ...
+          @property
+          @cache_it
+          def nested_resource(self):
+              return NestedResource(
+                  self._conn, "Path/to/NestedResource",
+                  redfish_version=self.redfish_version)
+          ...
+          def _do_refresh(self, force):
+              # selective attribute clearing
+              cache_clear(self, force, only_these=['nested_resource'])
+
+    Do note that this is not thread safe.
So guard your code to protect it
+    from any kind of concurrency issues while using this decorator.
+
+    :param res_accessor_method: the resource accessor decorated method.
+
+    """
+    cache_attr_name = '_cache_' + res_accessor_method.__name__
+
+    @functools.wraps(res_accessor_method)
+    def func_wrapper(res_selfie):
+
+        cache_attr_val = getattr(res_selfie, cache_attr_name, None)
+        if cache_attr_val is None:
+
+            cache_attr_val = res_accessor_method(res_selfie)
+            setattr(res_selfie, cache_attr_name, cache_attr_val)
+
+            # Note(deray): Each resource instance maintains a collection of
+            # all the cache attribute names in a private attribute.
+            cache_attr_names = setdefaultattr(
+                res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set())
+            cache_attr_names.add(cache_attr_name)
+
+        from sushy.resources import base
+
+        if isinstance(cache_attr_val, base.ResourceBase):
+            cache_attr_val.refresh(force=False)
+        elif isinstance(cache_attr_val, collections.abc.Sequence):
+            for elem in cache_attr_val:
+                if isinstance(elem, base.ResourceBase):
+                    elem.refresh(force=False)
+
+        return cache_attr_val
+
+    return func_wrapper
+
+
+def cache_clear(res_selfie, force_refresh, only_these=None):
+    """Clear some or all cached values of the resource.
+
+    If the cache variable refers to a resource instance then the
+    ``invalidate()`` method is called on that. Otherwise it is set to None.
+    Should there be a need to force refresh the resource and its sub-resources,
+    "cascading refresh", ``force_refresh`` is to be set to True.
+
+    This is the complementary method of the ``cache_it`` decorator.
+
+    :param res_selfie: the resource instance.
+    :param force_refresh: force_refresh argument of ``invalidate()`` method.
+    :param only_these: expects a sequence of specific method names
+        for which the cached value(s) need to be cleared only. When None, all
+        the cached values are cleared.
+ """ + cache_attr_names = setdefaultattr( + res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set()) + if only_these is not None: + if not isinstance(only_these, collections.abc.Sequence): + raise TypeError("'only_these' must be a sequence.") + + cache_attr_names = cache_attr_names.intersection( + '_cache_' + attr for attr in only_these) + + for cache_attr_name in cache_attr_names: + cache_attr_val = getattr(res_selfie, cache_attr_name) + + from sushy.resources import base + + if isinstance(cache_attr_val, base.ResourceBase): + cache_attr_val.invalidate(force_refresh) + elif isinstance(cache_attr_val, collections.abc.Sequence): + for elem in cache_attr_val: + if isinstance(elem, base.ResourceBase): + elem.invalidate(force_refresh) + else: + setattr(res_selfie, cache_attr_name, None) + break + else: + setattr(res_selfie, cache_attr_name, None) + + +def camelcase_to_underscore_joined(camelcase_str): + """Convert camelCase string to underscore_joined string + + :param camelcase_str: The camelCase string + :returns: the equivalent underscore_joined string + """ + if not camelcase_str: + raise ValueError('"camelcase_str" cannot be empty') + + r = camelcase_str[0].lower() + for i, letter in enumerate(camelcase_str[1:], 1): + if letter.isupper(): + try: + if (camelcase_str[i - 1].islower() + or camelcase_str[i + 1].islower()): + r += '_' + except IndexError: + pass + + r += letter.lower() + + return r + + +def synchronized(wrapped): + """Simple synchronization decorator. + + Decorating a method like so: + + .. code-block:: python + + @synchronized + def foo(self, *args): + ... + + ensures that only one thread will execute the foo method at a time. 
+ """ + lock = threading.RLock() + + @functools.wraps(wrapped) + def wrapper(*args, **kwargs): + with lock: + return wrapped(*args, **kwargs) + + return wrapper + + +_REMOVE = frozenset(['password', 'x-auth-token']) + + +def sanitize(item): + """Remove passwords from the item.""" + if isinstance(item, dict): + return {key: ('***' if key.lower() in _REMOVE else sanitize(value)) + for key, value in item.items()} + else: + return item diff --git a/test-requirements.txt b/test-requirements.txt index f796a230dcc393a0f2599f2c75444ecf7945ff19..16c5d484c29a0fc10235d9a1330bd446fd47ae2f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,16 +2,7 @@ # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. -hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0 - +# unit tests coverage!=4.4,>=4.0 # Apache-2.0 -python-subunit>=0.0.18 # Apache-2.0/BSD -sphinx>=1.6.2 # BSD -openstackdocstheme>=1.16.0 # Apache-2.0 -oslotest>=1.10.0 # Apache-2.0 -testrepository>=0.0.18 # Apache-2.0/BSD -testscenarios>=0.4 # Apache-2.0/BSD -testtools>=1.4.0 # MIT - -# releasenotes -reno!=2.3.1,>=1.8.0 # Apache-2.0 +oslotest>=3.2.0 # Apache-2.0 +stestr>=2.0.0 # Apache-2.0 diff --git a/tools/tox_install.sh b/tools/tox_install.sh deleted file mode 100755 index 456aadc2ad4908cbfa0a426c5541afefe413e74c..0000000000000000000000000000000000000000 --- a/tools/tox_install.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash - -# Client constraint file contains this client version pin that is in conflict -# with installing the client from source. We should replace the version pin in -# the constraints file before applying it for from-source installation. - -ZUUL_CLONER=/usr/zuul-env/bin/zuul-cloner -BRANCH_NAME=master -CLIENT_NAME=sushy -requirements_installed=$(echo "import openstack_requirements" | python 2>/dev/null ; echo $?) 
- -set -e - -CONSTRAINTS_FILE=$1 -shift - -install_cmd="pip install" -mydir=$(mktemp -dt "$CLIENT_NAME-tox_install-XXXXXXX") -trap "rm -rf $mydir" EXIT -localfile=$mydir/upper-constraints.txt -if [[ $CONSTRAINTS_FILE != http* ]]; then - CONSTRAINTS_FILE=file://$CONSTRAINTS_FILE -fi -curl $CONSTRAINTS_FILE -k -o $localfile -install_cmd="$install_cmd -c$localfile" - -if [ $requirements_installed -eq 0 ]; then - echo "ALREADY INSTALLED" > /tmp/tox_install.txt - echo "Requirements already installed; using existing package" -elif [ -x "$ZUUL_CLONER" ]; then - echo "ZUUL CLONER" > /tmp/tox_install.txt - pushd $mydir - $ZUUL_CLONER --cache-dir \ - /opt/git \ - --branch $BRANCH_NAME \ - git://git.openstack.org \ - openstack/requirements - cd openstack/requirements - $install_cmd -e . - popd -else - echo "PIP HARDCODE" > /tmp/tox_install.txt - if [ -z "$REQUIREMENTS_PIP_LOCATION" ]; then - REQUIREMENTS_PIP_LOCATION="git+https://git.openstack.org/openstack/requirements@$BRANCH_NAME#egg=requirements" - fi - $install_cmd -U -e ${REQUIREMENTS_PIP_LOCATION} -fi - -# This is the main purpose of the script: Allow local installation of -# the current repo. It is listed in constraints file and thus any -# install will be constrained and we need to unconstrain it. -edit-constraints $localfile -- $CLIENT_NAME "-e file://$PWD#egg=$CLIENT_NAME" - -$install_cmd -U $* -exit $? 
diff --git a/tox.ini b/tox.ini index 95889731de63d263628e4c5bc8ef761eaaf09b63..34b07b1777d4ccc80ec5e44c2863bb1e1d2abccf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,47 +1,90 @@ [tox] -minversion = 2.0 -envlist = py36,py35,py27,pypy,pep8 +minversion = 3.9.0 +envlist = py3,pep8 skipsdist = True +ignore_basepython_conflict=true [testenv] +basepython = python3 usedevelop = True setenv = VIRTUAL_ENV={envdir} PYTHONWARNINGS=default::DeprecationWarning -install_command = - {toxinidir}/tools/tox_install.sh {env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt?h=stable/pike} {opts} {packages} -deps = -r{toxinidir}/test-requirements.txt -commands = python setup.py test --slowest --testr-args='{posargs}' +deps = + -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt +commands = stestr run --slowest {posargs} [testenv:pep8] +deps= + hacking>=3.1.0,<4.0.0 # Apache-2.0 + flake8-import-order>=0.17.1 # LGPLv3 + pycodestyle>=2.0.0,<2.7.0 # MIT commands = flake8 {posargs} [testenv:venv] commands = {posargs} [testenv:cover] +setenv = + {[testenv]setenv} + PYTHON=coverage run --parallel-mode # After running this target, visit sushy/cover/index.html # in your browser, to see a nicer presentation report with annotated # HTML listings detailing missed lines. 
commands = coverage erase - python setup.py test --coverage --testr-args='{posargs}' - coverage report --omit=*test* - coverage html --omit=*test* + stestr run {posargs} + coverage combine + coverage report + coverage html + coverage xml -o cover/coverage.xml [testenv:docs] -commands = python setup.py build_sphinx +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} + -r{toxinidir}/requirements.txt + -r{toxinidir}/doc/requirements.txt +commands = sphinx-build -W -b html doc/source doc/build/html + +[testenv:pdf-docs] +usedevelop = False +whitelist_externals = make +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} + -r{toxinidir}/doc/requirements.txt +commands = sphinx-build -b latex doc/source doc/build/pdf + make -C doc/build/pdf [testenv:releasenotes] +usedevelop = False +deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/wallaby} + -r{toxinidir}/doc/requirements.txt commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] -commands = oslo_debug_helper {posargs} +commands = oslo_debug_helper -t sushy/tests {posargs} [flake8] -# E123, E125 skipped as they are invalid PEP-8. - show-source = True -ignore = E123,E125 +# E123, E125 skipped as they are invalid PEP-8. +# E741 ambiguous variable name. +# W503 Line break occurred before a binary operator. Conflicts with W504. +ignore = E123,E125,E741,W503 +# [H106] Don't put vim configuration in source files. +# [H203] Use assertIs(Not)None to check for None. +# [H204] Use assert(Not)Equal to check for equality. +# [H205] Use assert(Greater|Less)(Equal) for comparison. +# [H210] Require 'autospec', 'spec', or 'spec_set' in mock.patch/mock.patch.object calls +# [H904] Delay string interpolations at logging calls. 
+enable-extensions=H106,H203,H204,H205,H210,H904 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build +import-order-style = pep8 +application-import-names = sushy +filename = *.py + +[testenv:lower-constraints] +deps = + -c{toxinidir}/lower-constraints.txt + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml new file mode 100644 index 0000000000000000000000000000000000000000..debeca9420fad98c3e2fd9b76c4dede0f485e32c --- /dev/null +++ b/zuul.d/project.yaml @@ -0,0 +1,15 @@ +- project: + templates: + - check-requirements + - openstack-cover-jobs + - openstack-python3-wallaby-jobs + - publish-openstack-docs-pti + - release-notes-jobs-python3 + check: + jobs: + - sushy-tempest-partition-bios-redfish-pxe + - sushy-tempest-partition-uefi-redfish-vmedia + gate: + jobs: + - sushy-tempest-partition-bios-redfish-pxe + - sushy-tempest-partition-uefi-redfish-vmedia diff --git a/zuul.d/sushy-jobs.yaml b/zuul.d/sushy-jobs.yaml new file mode 100644 index 0000000000000000000000000000000000000000..96da6fbcf6d5e15c787146c42a92437d26de3090 --- /dev/null +++ b/zuul.d/sushy-jobs.yaml @@ -0,0 +1,21 @@ +- job: + name: sushy-tempest-partition-bios-redfish-pxe + parent: ironic-tempest-partition-bios-redfish-pxe + irrelevant-files: + - ^.*\.rst$ + - ^doc/.*$ + - ^test-requirements.txt$ + - ^sushy/tests/.*$ + required-projects: + - openstack/sushy + +- job: + name: sushy-tempest-partition-uefi-redfish-vmedia + parent: ironic-tempest-partition-uefi-redfish-vmedia + irrelevant-files: + - ^.*\.rst$ + - ^doc/.*$ + - ^test-requirements.txt$ + - ^sushy/tests/.*$ + required-projects: + - openstack/sushy